gt stringclasses 1 value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.server;
import com.facebook.presto.TaskSource;
import com.facebook.presto.UnpartitionedPagePartitionFunction;
import com.facebook.presto.execution.ExecutionFailureInfo;
import com.facebook.presto.execution.LocationFactory;
import com.facebook.presto.execution.QueryId;
import com.facebook.presto.execution.QueryInfo;
import com.facebook.presto.execution.QueryManager;
import com.facebook.presto.execution.QueryState;
import com.facebook.presto.execution.QueryStats;
import com.facebook.presto.execution.StageId;
import com.facebook.presto.execution.StageInfo;
import com.facebook.presto.execution.StageState;
import com.facebook.presto.execution.StageStats;
import com.facebook.presto.execution.TaskId;
import com.facebook.presto.execution.TaskInfo;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.type.Type;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import io.airlift.units.Duration;
import javax.annotation.concurrent.ThreadSafe;
import javax.inject.Inject;
import java.net.URI;
import java.util.List;
import java.util.Locale;
import java.util.NoSuchElementException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
import static com.facebook.presto.OutputBuffers.INITIAL_EMPTY_OUTPUT_BUFFERS;
import static com.facebook.presto.spi.type.TimeZoneKey.UTC_KEY;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.google.common.collect.Iterables.filter;
import static com.google.common.collect.Iterables.transform;
@ThreadSafe
public class MockQueryManager
        implements QueryManager
{
    /** Single-column VARCHAR schema produced by every mock query. */
    public static final List<Type> TYPES = ImmutableList.<Type>of(VARCHAR);

    private final MockTaskManager mockTaskManager;
    private final LocationFactory locationFactory;

    // Monotonically increasing counter used to mint unique query ids.
    private final AtomicInteger nextQueryId = new AtomicInteger();
    private final ConcurrentMap<QueryId, SimpleQuery> queries = new ConcurrentHashMap<>();

    @Inject
    public MockQueryManager(MockTaskManager mockTaskManager, LocationFactory locationFactory)
    {
        Preconditions.checkNotNull(mockTaskManager, "mockTaskManager is null");
        Preconditions.checkNotNull(locationFactory, "locationFactory is null");
        this.mockTaskManager = mockTaskManager;
        this.locationFactory = locationFactory;
    }

    @Override
    public List<QueryInfo> getAllQueryInfo()
    {
        // Queries whose info cannot be computed (e.g. the backing task has
        // gone away and getQueryInfo() throws) are dropped from the listing.
        return ImmutableList.copyOf(filter(transform(queries.values(), new Function<SimpleQuery, QueryInfo>()
        {
            @Override
            public QueryInfo apply(SimpleQuery queryWorker)
            {
                try {
                    return queryWorker.getQueryInfo();
                }
                catch (RuntimeException ignored) {
                    return null;
                }
            }
        }), Predicates.notNull()));
    }

    @Override
    public Duration waitForStateChange(QueryId queryId, QueryState currentState, Duration maxWait)
            throws InterruptedException
    {
        // Mock implementation never blocks; pretend the full wait elapsed.
        return maxWait;
    }

    @Override
    public QueryInfo getQueryInfo(QueryId queryId)
    {
        Preconditions.checkNotNull(queryId, "queryId is null");
        SimpleQuery query = queries.get(queryId);
        if (query == null) {
            throw new NoSuchElementException();
        }
        return query.getQueryInfo();
    }

    @Override
    public QueryInfo createQuery(ConnectorSession session, String query)
    {
        // BUG FIX: session was previously not validated, unlike every other
        // argument in this class, and is dereferenced by updateTask below.
        Preconditions.checkNotNull(session, "session is null");
        Preconditions.checkNotNull(query, "query is null");

        // Each mock query is backed by exactly one output task.
        TaskId outputTaskId = new TaskId(String.valueOf(nextQueryId.getAndIncrement()), "0", "0");

        mockTaskManager.updateTask(session,
                outputTaskId,
                null,
                ImmutableList.<TaskSource>of(),
                INITIAL_EMPTY_OUTPUT_BUFFERS.withBuffer("out", new UnpartitionedPagePartitionFunction()).withNoMoreBufferIds());

        SimpleQuery simpleQuery = new SimpleQuery(outputTaskId, locationFactory.createQueryLocation(outputTaskId.getQueryId()), mockTaskManager, locationFactory);
        queries.put(outputTaskId.getQueryId(), simpleQuery);
        return simpleQuery.getQueryInfo();
    }

    @Override
    public void cancelQuery(QueryId queryId)
    {
        queries.remove(queryId);
    }

    @Override
    public void cancelStage(StageId stageId)
    {
        // mock queries don't have stages
    }

    /**
     * View of a mock query: derives a QueryInfo on demand from the current
     * state of its single backing task.
     */
    private static class SimpleQuery
    {
        private final TaskId outputTaskId;
        private final URI self;
        private final MockTaskManager mockTaskManager;
        private final LocationFactory locationFactory;

        private SimpleQuery(TaskId outputTaskId, URI self, MockTaskManager mockTaskManager, LocationFactory locationFactory)
        {
            this.outputTaskId = outputTaskId;
            this.self = self;
            this.mockTaskManager = mockTaskManager;
            this.locationFactory = locationFactory;
        }

        private QueryInfo getQueryInfo()
        {
            TaskInfo outputTask = mockTaskManager.getTaskInfo(outputTaskId);

            // Map the task state onto the coarser query state.
            QueryState state;
            switch (outputTask.getState()) {
                case PLANNED:
                case RUNNING:
                    state = QueryState.RUNNING;
                    break;
                case FINISHED:
                    state = QueryState.FINISHED;
                    break;
                case CANCELED:
                    state = QueryState.CANCELED;
                    break;
                case FAILED:
                    state = QueryState.FAILED;
                    break;
                default:
                    throw new IllegalStateException("Unknown task state " + outputTask.getState());
            }

            return new QueryInfo(outputTaskId.getQueryId(),
                    new ConnectorSession("user", "test", "test_catalog", "test_schema", UTC_KEY, Locale.ENGLISH, null, null),
                    state,
                    self,
                    ImmutableList.of("out"),
                    "query",
                    new QueryStats(),
                    new StageInfo(outputTaskId.getStageId(),
                            StageState.FINISHED,
                            locationFactory.createStageLocation(outputTaskId.getStageId()),
                            null,
                            TYPES,
                            new StageStats(),
                            ImmutableList.of(outputTask),
                            ImmutableList.<StageInfo>of(),
                            ImmutableList.<ExecutionFailureInfo>of()),
                    null,
                    null,
                    null);
        }
    }
}
| |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.apis.os;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.app.Activity;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
/**
* Wrapper activity demonstrating the use of the new
* {@link SensorEvent#values rotation vector sensor}
* ({@link Sensor#TYPE_ROTATION_VECTOR TYPE_ROTATION_VECTOR}).
*
* @see Sensor
* @see SensorEvent
* @see SensorManager
*
*/
/**
 * Wrapper activity demonstrating the use of the
 * {@link SensorEvent#values rotation vector sensor}
 * ({@link Sensor#TYPE_ROTATION_VECTOR TYPE_ROTATION_VECTOR}): an OpenGL
 * cube is rendered with its orientation driven by the device rotation.
 *
 * @see Sensor
 * @see SensorEvent
 * @see SensorManager
 */
public class RotationVectorDemo extends Activity {
    private GLSurfaceView mGLSurfaceView;
    private SensorManager mSensorManager;
    private MyRenderer mRenderer;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Get an instance of the SensorManager
        mSensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
        // Create our Preview view and set it as the content of our Activity
        mRenderer = new MyRenderer();
        mGLSurfaceView = new GLSurfaceView(this);
        mGLSurfaceView.setRenderer(mRenderer);
        setContentView(mGLSurfaceView);
    }

    @Override
    protected void onResume() {
        // Ideally a game should implement onResume() and onPause()
        // to take appropriate action when the activity loses focus
        super.onResume();
        mRenderer.start();
        mGLSurfaceView.onResume();
    }

    @Override
    protected void onPause() {
        // Ideally a game should implement onResume() and onPause()
        // to take appropriate action when the activity loses focus
        super.onPause();
        mRenderer.stop();
        mGLSurfaceView.onPause();
    }

    /** Renders a colored cube whose orientation follows the rotation-vector sensor. */
    class MyRenderer implements GLSurfaceView.Renderer, SensorEventListener {
        private Cube mCube;
        private Sensor mRotationVectorSensor;
        private final float[] mRotationMatrix = new float[16];

        public MyRenderer() {
            // find the rotation-vector sensor
            mRotationVectorSensor = mSensorManager.getDefaultSensor(
                    Sensor.TYPE_ROTATION_VECTOR);
            mCube = new Cube();
            // Initialize the rotation matrix to identity so the cube renders
            // sensibly before the first sensor event arrives.
            // BUG FIX: the identity diagonal of a 4x4 matrix lives at indices
            // 0, 5, 10 and 15; the previous 0/4/8/12 set a single row of the
            // matrix to 1, which is degenerate (not an identity).
            mRotationMatrix[ 0] = 1;
            mRotationMatrix[ 5] = 1;
            mRotationMatrix[10] = 1;
            mRotationMatrix[15] = 1;
        }

        public void start() {
            // enable our sensor when the activity is resumed, ask for
            // 10 ms updates.
            mSensorManager.registerListener(this, mRotationVectorSensor, 10000);
        }

        public void stop() {
            // make sure to turn our sensor off when the activity is paused
            mSensorManager.unregisterListener(this);
        }

        public void onSensorChanged(SensorEvent event) {
            // we received a sensor event. it is a good practice to check
            // that we received the proper event
            if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
                // convert the rotation-vector to a 4x4 matrix. the matrix
                // is interpreted by Open GL as the inverse of the
                // rotation-vector, which is what we want.
                SensorManager.getRotationMatrixFromVector(
                        mRotationMatrix, event.values);
            }
        }

        public void onDrawFrame(GL10 gl) {
            // clear screen
            gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
            // set-up modelview matrix
            gl.glMatrixMode(GL10.GL_MODELVIEW);
            gl.glLoadIdentity();
            gl.glTranslatef(0, 0, -3.0f);
            gl.glMultMatrixf(mRotationMatrix, 0);
            // draw our object
            gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
            gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
            mCube.draw(gl);
        }

        public void onSurfaceChanged(GL10 gl, int width, int height) {
            // set view-port
            gl.glViewport(0, 0, width, height);
            // set projection matrix
            float ratio = (float) width / height;
            gl.glMatrixMode(GL10.GL_PROJECTION);
            gl.glLoadIdentity();
            gl.glFrustumf(-ratio, ratio, -1, 1, 1, 10);
        }

        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            // dither is enabled by default, we don't need it
            gl.glDisable(GL10.GL_DITHER);
            // clear screen in white
            gl.glClearColor(1, 1, 1, 1);
        }

        /** Unit cube with per-vertex colors, drawn as 12 triangles (36 indices). */
        class Cube {
            // initialize our cube
            private FloatBuffer mVertexBuffer;
            private FloatBuffer mColorBuffer;
            private ByteBuffer mIndexBuffer;

            public Cube() {
                final float vertices[] = {
                        -1, -1, -1, 1, -1, -1,
                        1, 1, -1, -1, 1, -1,
                        -1, -1, 1, 1, -1, 1,
                        1, 1, 1, -1, 1, 1,
                };
                final float colors[] = {
                        0, 0, 0, 1, 1, 0, 0, 1,
                        1, 1, 0, 1, 0, 1, 0, 1,
                        0, 0, 1, 1, 1, 0, 1, 1,
                        1, 1, 1, 1, 0, 1, 1, 1,
                };
                final byte indices[] = {
                        0, 4, 5, 0, 5, 1,
                        1, 5, 6, 1, 6, 2,
                        2, 6, 7, 2, 7, 3,
                        3, 7, 4, 3, 4, 0,
                        4, 7, 6, 4, 6, 5,
                        3, 0, 1, 3, 1, 2
                };
                // Buffers handed to OpenGL must be direct and in native byte order.
                ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length * 4);
                vbb.order(ByteOrder.nativeOrder());
                mVertexBuffer = vbb.asFloatBuffer();
                mVertexBuffer.put(vertices);
                mVertexBuffer.position(0);
                ByteBuffer cbb = ByteBuffer.allocateDirect(colors.length * 4);
                cbb.order(ByteOrder.nativeOrder());
                mColorBuffer = cbb.asFloatBuffer();
                mColorBuffer.put(colors);
                mColorBuffer.position(0);
                mIndexBuffer = ByteBuffer.allocateDirect(indices.length);
                mIndexBuffer.put(indices);
                mIndexBuffer.position(0);
            }

            public void draw(GL10 gl) {
                gl.glEnable(GL10.GL_CULL_FACE);
                gl.glFrontFace(GL10.GL_CW);
                gl.glShadeModel(GL10.GL_SMOOTH);
                gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffer);
                gl.glColorPointer(4, GL10.GL_FLOAT, 0, mColorBuffer);
                gl.glDrawElements(GL10.GL_TRIANGLES, 36, GL10.GL_UNSIGNED_BYTE, mIndexBuffer);
            }
        }

        public void onAccuracyChanged(Sensor sensor, int accuracy) {
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.monitoring.console;
import java.net.InetAddress;
import java.sql.Connection;
import java.sql.Statement;
import java.io.IOException;
import java.text.Format;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Properties;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;
import javax.naming.Context;
import javax.naming.InitialContext;
import org.apache.geronimo.console.util.PortletManager;
import org.apache.geronimo.crypto.EncryptionManager;
import org.apache.geronimo.monitoring.ejb.MasterRemoteControlRemote;
import org.apache.geronimo.monitoring.console.data.Node;
import org.apache.geronimo.system.jmx.RealMBeanServerReference;
/**
 * Connector to a remote monitoring (mrc) server, reachable either through
 * the MasterRemoteControl EJB or through a JMX (RMI / in-VM) connection.
 */
public class MRCConnector {
// Cached canonical ObjectName of the agent-car-jmx MasterRemoteControlJMX
// gbean; static, so it is resolved once and shared by all instances.
private static String PATH = null;
// JMX connection, used when the protocol is not "EJB".
private MBeanServerConnection mbServerConn;
// EJB remote interface, used when the protocol is "EJB".
private MasterRemoteControlRemote mrc = null;
// Wire protocol: "EJB" (default) or "JMX".
private String protocol = "EJB";
// Remote JMX connector; null for in-VM or EJB connections.
private JMXConnector connector = null;
// Package-private no-arg constructor; leaves the connector unconnected.
MRCConnector() {
}
// Convenience constructor: pulls all connection parameters from a Node.
public MRCConnector(Node node) throws Exception {
this(node.getHost(), node.getUserName(), node.getPassword(), node.getPort(), node.getProtocol());
}
/**
 * Opens a connection to an mrc-server over EJB or over JMX.
 *
 * @param ip       IP address or host name of the mrc-server to connect to
 * @param userName user name for the connection to the host
 * @param password encrypted password for the connection to the host
 * @param port     port the mrc-server listens on
 * @param protocol "EJB" or "JMX"
 * @throws Exception if the connection to the mrc-server fails
 */
public MRCConnector(String ip, String userName, String password, int port,
        String protocol) throws Exception {
    // passwords are stored encrypted; decrypt before authenticating
    password = (String) EncryptionManager.decrypt(password);
    this.protocol = protocol;
    if (isEjbProtocol()) {
        Properties props = new Properties();
        props.setProperty(Context.INITIAL_CONTEXT_FACTORY,
                "org.apache.openejb.client.RemoteInitialContextFactory");
        props.setProperty(Context.PROVIDER_URL, "ejbd://" + ip + ":" + port);
        props.setProperty(Context.SECURITY_PRINCIPAL, userName);
        props.setProperty(Context.SECURITY_CREDENTIALS, password);
        props.setProperty("openejb.authentication.realmName",
                "geronimo-admin");
        Context ic = new InitialContext(props);
        mrc = (MasterRemoteControlRemote) ic.lookup("ejb/mgmt/MRCRemote");
    } else {
        InetAddress host = InetAddress.getLocalHost(); // may throw UnknownHostException
        if (ip.equals("localhost") || ip.equals(host.getHostAddress())
                || ip.equals(host.getHostName())
                || ip.equals("127.0.0.1")) {
            // local server: talk to the in-VM MBean server directly
            mbServerConn = ((RealMBeanServerReference) PortletManager.getKernel()
                    .getGBean("MBeanServerReference")).getMBeanServer();
        } else {
            JMXServiceURL serviceURL = new JMXServiceURL(
                    "service:jmx:rmi:///jndi/rmi://" + ip + ":" + port
                            + "/JMXConnector");
            Hashtable<String, Object> env = new Hashtable<String, Object>();
            String[] credentials = new String[2];
            credentials[0] = userName;
            credentials[1] = password;
            env.put(JMXConnector.CREDENTIALS, credentials);
            connector = JMXConnectorFactory.connect(serviceURL, env);
            mbServerConn = connector.getMBeanServerConnection();
        }
        // lazily resolve (and cache) the mbean name of the agent-car-jmx plugin
        if (PATH == null) {
            Set<ObjectName> mbeanNames = mbServerConn.queryNames(
                    new ObjectName("*:name=MasterRemoteControlJMX,*"), null);
            for (ObjectName mbean : mbeanNames) {
                String mbeanName = mbean.getCanonicalName();
                if (mbeanName.contains("agent-car-jmx")
                        && mbeanName.contains("MasterRemoteControlJMX")
                        && mbeanName.contains("GBean")) {
                    PATH = mbeanName;
                    break;
                }
            }
            // there must be a PATH to agent-car-jmx plugin
            if (PATH == null) {
                throw new Exception("[ERROR] Required mbean not found: agent-car-jmx");
            }
        }
    }
}
/**
 * Releases the remote JMX connector, if one was opened. Errors during
 * close are deliberately ignored: this is best-effort cleanup and there
 * is nothing useful a caller could do about a failed close.
 */
public void dispose() {
    try {
        if ("JMX".equals(this.protocol) && null != this.connector) {
            connector.close();
            connector = null;
        }
    } catch (IOException ignored) {
        // best-effort close; intentionally swallowed
    }
}
// True when this connector talks to the server over EJB rather than JMX.
private boolean isEjbProtocol() {
    return protocol != null && protocol.equals("EJB");
}
/**
 * @return the snapshot duration (milliseconds) currently configured on the
 *         server side
 * @throws Exception if the connection to the MRC-Server fails
 */
public Long getSnapshotDuration() throws Exception {
    if (!isEjbProtocol()) {
        return (Long) mbServerConn.invoke(new ObjectName(PATH),
                "getSnapshotDuration", new Object[] {}, new String[] {});
    }
    return mrc.getSnapshotDuration();
}
/**
 * @return map from mbean name to the list of statistic attribute names
 *         collected for it; the universal "snapshot_date"/"snapshot_time"
 *         entries are stripped from every list. Empty on failure.
 * @throws Exception if the connection to the MRC-Server fails
 */
@SuppressWarnings("unchecked")
public HashMap<String, ArrayList<String>> getDataNameList()
        throws Exception {
    HashMap<String, ArrayList<String>> dataNameList = new HashMap<String, ArrayList<String>>();
    try {
        if (isEjbProtocol()) {
            dataNameList = mrc.getAllSnapshotStatAttributes();
        } else {
            dataNameList = (HashMap<String, ArrayList<String>>) mbServerConn
                    .invoke(new ObjectName(PATH),
                            "getAllSnapshotStatAttributes",
                            new Object[] {}, new String[] {});
        }
    } catch (Exception e) {
        // best-effort: log and fall through, returning the (empty) map
        e.printStackTrace();
    }
    // Strip out snapshot_date and snapshot_time, we know these exist
    for (ArrayList<String> attributes : dataNameList.values()) {
        attributes.remove("snapshot_date");
        attributes.remove("snapshot_time");
    }
    return dataNameList;
}
/**
 * Fetches {@code snapCount} snapshots from the server, then pads and
 * gap-fills the list so callers always receive {@code snapCount} evenly
 * spaced entries (padded entries carry zeroed statistics and
 * back-computed timestamps).
 *
 * @param snapCount -
 * Number of snapshots to request from the server
 * @param skipCount -
 * Every nth snapshot. A value of 1 will be every 1. A value of 2
 * will be every other.
 * @return - Returns an ArrayList of Map objects, oldest first; each map is
 * keyed by mbean name (plus the special "times" entry).
 * @throws Exception -
 * If the connection to the MRC-Server fails
 */
@SuppressWarnings("unchecked")
public ArrayList<HashMap<String, HashMap<String, Object>>> getSnapshots(
int snapCount, int skipCount) throws Exception {
ArrayList<HashMap<String, HashMap<String, Object>>> snapshotList = null;
if (isEjbProtocol()) {
snapshotList = mrc.fetchSnapshotData(snapCount, skipCount);
} else {
// JMX path: invoke fetchSnapshotData on the cached agent-car-jmx mbean
snapshotList = (ArrayList<HashMap<String, HashMap<String, Object>>>) mbServerConn
.invoke(new ObjectName(PATH), "fetchSnapshotData",
new Object[] { snapCount, skipCount },
new String[] { "java.lang.Integer",
"java.lang.Integer" });
}
// Check if snapshotList is empty
if (snapshotList.size() == 0) {
return snapshotList;
}
/*
* If there are not enough snapshots available to fill the requested
* number, insert some with values of 0 and the proper times.
*/
while (snapshotList.size() < snapCount) {
// Temporary, always is first element (oldest)
HashMap<String, HashMap<String, Object>> mapTimeFix = snapshotList
.get(0);
// Temporary map, used to generate blank data to be added to
// the
// list at position 0
HashMap<String, HashMap<String, Object>> tempMap = new HashMap<String, HashMap<String, Object>>();
// Temporary submap, used to store 0 elements to be added to
// the
// tempmap
HashMap<String, Object> subMap = new HashMap<String, Object>();
// Calculate appropriate time, add it to the submap, then
// add
// that to the tempMap
subMap.put("snapshot_time", ((Long) mapTimeFix.get("times").get(
"snapshot_time") - (getSnapshotDuration() * skipCount)));
Format formatter = null;
formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
Date date = new Date((Long) subMap.get("snapshot_time"));
subMap.put("snapshot_date", formatter.format(date));
// Add the submap back to the tempmap
tempMap.put("times", new HashMap<String, Object>(subMap));
// Clear out the subMap for use again
subMap.clear();
// Run through the mbeans
// Run through the mbeans
for (Iterator<String> it = mapTimeFix.keySet().iterator(); it
.hasNext();) {
// get the mbean name
String mbeanName = it.next();
HashMap<String, Object> stats = null;
// Verify that it's not times
if (mbeanName.equals(new String("times"))) {
} else {
stats = mapTimeFix.get(mbeanName);
// Run through the stats elements for the particular
// mbean
for (Iterator<String> itt = stats.keySet().iterator(); itt
.hasNext();) {
String key = itt.next();
// Place faux data into the submap
subMap.put(key, new Long(0));
}
// Add the submap to the tempmap, and clear it
tempMap.put(mbeanName, new HashMap<String, Object>(subMap));
}
}
snapshotList.add(0, new HashMap<String, HashMap<String, Object>>(
tempMap));
}
/*
* This is where we will be inserting data to fill 'gaps' in the
* snapshots The initial for-loop will travel from the most recent
* snapshot to the oldest, checking that the snapshot_time along the way
* all align with what they should be
*/
// NOTE(review): the loop below mutates snapshotList (add at i, remove at
// 0) while indexing it, and re-seeds mapTimeFix/mapTimeFix2 each pass;
// the exact statement order is load-bearing — left byte-for-byte as-is.
for (int i = snapshotList.size() - 1; i > 0; i--) {
if (i > 0) {
HashMap<String, HashMap<String, Object>> mapTimeFix = snapshotList
.get(i);
HashMap<String, HashMap<String, Object>> mapTimeFix2 = snapshotList
.get(i - 1);
// here is where we will in missing data
while (((((Long) mapTimeFix.get("times").get("snapshot_time") / 1000) / 60)
- (((Long) mapTimeFix2.get("times")
.get("snapshot_time") / 1000) / 60) > (((getSnapshotDuration() / 1000) / 60) * skipCount))) {
HashMap<String, HashMap<String, Object>> tempMap = new HashMap<String, HashMap<String, Object>>();
HashMap<String, Object> subMap = new HashMap<String, Object>();
for (Iterator<String> it = mapTimeFix.keySet().iterator(); it
.hasNext();) {
// get the mbean name
String mbeanName = it.next();
HashMap<String, Object> stats = null;
// Verify that it's not times
if (!mbeanName.equals("times")) {
stats = mapTimeFix.get(mbeanName);
// Run through the stats elements for the
// particular
// mbean
for (Iterator<String> itt = stats.keySet()
.iterator(); itt.hasNext();) {
String key = itt.next();
// Place faux data into the submap
subMap.put(key, new Long(0));
}
// Add the submap to the tempmap, and clear it
tempMap.put(mbeanName, new HashMap<String, Object>(
subMap));
subMap.clear();
}
}
subMap.put("snapshot_time", new Long((Long) mapTimeFix.get(
"times").get("snapshot_time")
- (getSnapshotDuration() * skipCount)));
Format formatter = null;
formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
Date date = new Date((Long) subMap.get("snapshot_time"));
subMap.put("snapshot_date", formatter.format(date));
tempMap.put("times", new HashMap<String, Object>(subMap));
subMap.clear();
snapshotList.add(i,
new HashMap<String, HashMap<String, Object>>(
tempMap));
snapshotList.remove(0);
mapTimeFix = tempMap;
mapTimeFix2 = snapshotList.get(i - 1);
}
}
}
return snapshotList;
}
/**
 * Fetches the time series of one statistic for one mbean as a map of
 * snapshot time (ms) to value, then pads/gap-fills it to {@code snapCount}
 * evenly spaced points (inserted points have value 0).
 *
 * @param mbeanName   canonical name of the mbean to query
 * @param statsName   name of the statistic attribute
 * @param snapCount   number of snapshots to request from the server
 * @param skipCount   take every nth snapshot
 * @param showArchive whether to include archived snapshots
 * @return map of snapshot time to statistic value
 * @throws Exception if the connection to the MRC-Server fails
 */
@SuppressWarnings("unchecked")
public TreeMap<Long, Long> getSpecificStatistics(String mbeanName,
String statsName, int snapCount, int skipCount, boolean showArchive)
throws Exception {
TreeMap<Long, Long> snapshotList = null;
if (isEjbProtocol()) {
snapshotList = mrc.getSpecificStatistics(mbeanName, statsName,
snapCount, skipCount, showArchive);
} else {
// JMX path: invoke getSpecificStatistics on the cached mbean
snapshotList = (TreeMap<Long, Long>) mbServerConn.invoke(
new ObjectName(PATH), "getSpecificStatistics",
new Object[] { mbeanName, statsName, snapCount, skipCount,
showArchive }, new String[] { "java.lang.String",
"java.lang.String", "java.lang.Integer",
"java.lang.Integer", "java.lang.Boolean" });
}
// Check if snapshotList is empty
if (snapshotList.size() == 0) {
return snapshotList;
}
/*
* If there are not enough snapshots available to fill the requested
* number, insert some with values of 0 and the proper times.
*/
while (snapshotList.size() < snapCount) {
// Temporary, always is first element (oldest)
Long timeFix = snapshotList.firstKey();
// Calculate appropriate time, add it to the submap, then
// add
// that to the tempMap
snapshotList.put((timeFix - (getSnapshotDuration() * skipCount)),
new Long(0));
}
/*
* This is where we will be inserting data to fill 'gaps' in the
* snapshots The initial for-loop will travel from the most recent
* snapshot to the oldest, checking that the snapshot_time along the way
* all align with what they should be
*/
// NOTE(review): the inner while mutates both snapshotList and tempArray
// and decrements the outer loop index i; the exact order of these
// statements is load-bearing — left byte-for-byte as-is.
Set tempSet = snapshotList.keySet();
ArrayList<Long> tempArray = new ArrayList(tempSet);
for (int i = tempArray.size() - 1; i > 0; i--) {
Long tempLong1 = tempArray.get(i);
Long tempLong2 = tempArray.get(i - 1);
// here is where we will in missing data
while ((((tempLong1 / 1000) / 60) - ((tempLong2 / 1000) / 60) > (((getSnapshotDuration() / 1000) / 60) * skipCount))
&& i > 0) {
tempLong1 = tempLong1 - (getSnapshotDuration() * skipCount);
snapshotList.remove(tempArray.get(0));
snapshotList.put(tempLong1, new Long(0));
tempArray.remove(0);
i--;
}
}
return snapshotList;
}
/**
 * Fetches the single most recent snapshot.
 *
 * @return the latest snapshot, or null when none are available
 * @throws Exception if the connection to the MRC-Server fails
 */
@SuppressWarnings("unchecked")
public HashMap<String, HashMap<String, Object>> getLatestSnapshots()
        throws Exception {
    int snapCount = 1;
    int skipCount = 1;
    ArrayList<HashMap<String, HashMap<String, Object>>> snapshots;
    if (isEjbProtocol()) {
        snapshots = mrc.fetchSnapshotData(snapCount, skipCount);
    } else {
        snapshots = (ArrayList<HashMap<String, HashMap<String, Object>>>) mbServerConn
                .invoke(new ObjectName(PATH), "fetchSnapshotData",
                        new Object[] { snapCount, skipCount },
                        new String[] { "java.lang.Integer",
                                "java.lang.Integer" });
    }
    if (snapshots.isEmpty()) {
        return null;
    }
    return snapshots.get(0);
}
/**
 * @return whether the server-side snapshot thread was stopped successfully
 * @throws Exception if the connection to the MRC-Server fails
 */
public boolean stopSnapshotThread() throws Exception {
    if (!isEjbProtocol()) {
        return (Boolean) mbServerConn.invoke(new ObjectName(PATH),
                "stopSnapshot", new Object[] {}, new String[] {});
    }
    return mrc.stopSnapshot();
}
/**
 * @param time snapshot interval to use
 * @return whether the server-side snapshot thread was started successfully
 * @throws Exception if the connection to the MRC-Server fails
 */
public boolean startSnapshotThread(long time) throws Exception {
    if (!isEjbProtocol()) {
        return (Boolean) mbServerConn.invoke(new ObjectName(PATH),
                "startSnapshot", new Object[] { time },
                new String[] { "java.lang.Long" });
    }
    return mrc.startSnapshot(time);
}
/**
 * @return 1 when the server-side snapshot thread is running, otherwise 0
 *         (including when the server cannot be reached)
 */
public int isSnapshotRunning() {
    if (isEjbProtocol()) {
        try {
            // previously boxed via an Integer local; plain int suffices
            return mrc.isSnapshotRunning() ? 1 : 0;
        } catch (Exception e) {
            // unreachable server is reported as "not running"
            return 0;
        }
    }
    try {
        return (Integer) mbServerConn.invoke(new ObjectName(PATH),
                "SnapshotStatus", new Object[] {}, new String[] {});
    } catch (Exception e) {
        e.printStackTrace();
        return 0;
    }
}
/**
 * @return the names of every mbean known to the server
 * @throws Exception if the connection to the MRC-Server fails
 */
@SuppressWarnings("unchecked")
public Set<String> getAllMbeanNames() throws Exception {
    if (!isEjbProtocol()) {
        return (Set<String>) mbServerConn.invoke(new ObjectName(PATH),
                "getAllMBeanNames", new Object[] {}, new String[] {});
    }
    return mrc.getAllMBeanNames();
}
/**
 * @return names of the mbeans that provide statistics
 * @throws Exception if the connection to the MRC-Server fails
 */
@SuppressWarnings("unchecked")
public Set<String> getStatisticsProviderBeanNames() throws Exception {
    if (!isEjbProtocol()) {
        return (Set<String>) mbServerConn.invoke(new ObjectName(PATH),
                "getStatisticsProviderMBeanNames", new Object[] {},
                new String[] {});
    }
    return mrc.getStatisticsProviderMBeanNames();
}
/**
 * @return map from mbean name to the full list of its snapshot statistic
 *         attribute names (unfiltered)
 * @throws Exception if the connection to the MRC-Server fails
 */
@SuppressWarnings("unchecked")
public HashMap<String, ArrayList<String>> getAllSnapshotStatAttributes()
        throws Exception {
    if (!isEjbProtocol()) {
        return (HashMap<String, ArrayList<String>>) mbServerConn.invoke(
                new ObjectName(PATH), "getAllSnapshotStatAttributes",
                new Object[] {}, new String[] {});
    }
    return mrc.getAllSnapshotStatAttributes();
}
/**
 * @return canonical names of the mbeans currently being tracked
 * @throws Exception if the connection to the MRC-Server fails
 */
@SuppressWarnings("unchecked")
public Set<String> getTrackedBeans() throws Exception {
    if (!isEjbProtocol()) {
        return (Set<String>) mbServerConn.invoke(new ObjectName(PATH),
                "getTrackedMBeans", new Object[] {}, new String[] {});
    }
    return mrc.getTrackedMBeans();
}
/**
 * @param mBean canonical mbean name
 * @return sorted set of the statistic attribute names available on the
 *         given mbean; empty when the mbean is unknown (previously this
 *         threw a NullPointerException)
 * @throws Exception if the connection to the MRC-Server fails
 */
public Set<String> getStatAttributesOnMBean(String mBean) throws Exception {
    ArrayList<String> attributes = getAllSnapshotStatAttributes().get(mBean);
    Set<String> result = new TreeSet<String>();
    if (attributes != null) {
        result.addAll(attributes);
    }
    return result;
}
/**
 * @return sorted set of short ("pretty") names for every tracked mbean
 * @throws Exception if the connection to the MRC-Server fails
 */
public Set<String> getTrackedBeansPretty() throws Exception {
    Set<String> prettyBeans = new TreeSet<String>();
    for (String bean : getTrackedBeans()) {
        // take the value of the "name=" key, then drop any "<group>/" prefix
        String name = bean.split("name=")[1].split(",")[0];
        String[] parts = name.split("/");
        prettyBeans.add(parts.length > 1 ? parts[1] : name);
    }
    return prettyBeans;
}
/**
 * @return map from pretty name to full canonical name for every tracked mbean
 * @throws Exception if the connection to the MRC-Server fails
 */
public TreeMap<String, String> getTrackedBeansMap() throws Exception {
    TreeMap<String, String> beanMap = new TreeMap<String, String>();
    for (String mbeanName : getTrackedBeans()) {
        // take the value of the "name=" key, then drop any "<group>/" prefix
        String name = mbeanName.split("name=")[1].split(",")[0];
        String[] parts = name.split("/");
        beanMap.put(parts.length > 1 ? parts[1] : name, mbeanName);
    }
    return beanMap;
}
/**
 * @return sorted set of short ("pretty") names for every
 *         statistics-providing mbean
 * @throws Exception if the connection to the MRC-Server fails
 */
public Set<String> getStatisticsProviderBeanNamesPretty() throws Exception {
    Set<String> prettyBeans = new TreeSet<String>();
    for (String bean : getStatisticsProviderBeanNames()) {
        // take the value of the "name=" key, then drop any "<group>/" prefix
        String name = bean.split("name=")[1].split(",")[0];
        String[] parts = name.split("/");
        prettyBeans.add(parts.length > 1 ? parts[1] : name);
    }
    return prettyBeans;
}
/**
 * @return map from pretty name to full canonical name for every
 *         statistics-providing mbean
 * @throws Exception if the connection to the MRC-Server fails
 */
public TreeMap<String, String> getStatisticsProviderBeanNamesMap()
        throws Exception {
    TreeMap<String, String> beanMap = new TreeMap<String, String>();
    for (String mbeanName : getStatisticsProviderBeanNames()) {
        // take the value of the "name=" key, then drop any "<group>/" prefix
        String name = mbeanName.split("name=")[1].split(",")[0];
        String[] parts = name.split("/");
        beanMap.put(parts.length > 1 ? parts[1] : name, mbeanName);
    }
    return beanMap;
}
/**
 * @return sorted set of pretty names of statistics-providing mbeans that
 *         are not already being tracked
 * @throws Exception if the connection to the MRC-Server fails
 */
public Set<String> getFreeStatisticsProviderBeanNamesPretty()
        throws Exception {
    Set<String> usedBeans = getTrackedBeansPretty();
    Set<String> freeBeans = new TreeSet<String>();
    for (String mbeanName : getStatisticsProviderBeanNamesPretty()) {
        if (!usedBeans.contains(mbeanName)) {
            freeBeans.add(mbeanName);
        }
    }
    return freeBeans;
}
/**
 * Returns the FULL object names of statistics-provider MBeans that are not
 * currently tracked for snapshots. Tracking state is compared via the
 * pretty names, matching {@code getTrackedBeansPretty()}.
 *
 * @return sorted set of full object names of untracked provider MBeans
 * @throws Exception if either bean set cannot be retrieved
 */
@SuppressWarnings("unchecked")
public Set<String> getFreeStatisticsProviderBeanNames() throws Exception {
    Set<String> availableBeans = getStatisticsProviderBeanNames();
    Set<String> usedBeans = getTrackedBeansPretty();
    Set<String> freeBeans = new TreeSet<String>();
    for (String mbeanName : availableBeans) {
        // Derive the pretty name so it can be compared against the tracked
        // (pretty) names; the full name is what gets returned.
        String[] temparray1 = mbeanName.split("name=");
        String[] temparray2 = temparray1[1].split(",");
        String[] temparray3 = temparray2[0].split("/");
        String mbeanNamePretty;
        if (temparray3.length > 1) {
            mbeanNamePretty = temparray3[1];
        } else {
            mbeanNamePretty = temparray2[0];
        }
        if (!usedBeans.contains(mbeanNamePretty)) {
            freeBeans.add(mbeanName);
        }
    }
    return freeBeans;
}
/**
 * Builds a map (pretty name -> full object name) of statistics-provider
 * MBeans that are not currently tracked for snapshots.
 *
 * @return map of untracked provider MBeans keyed by pretty name
 * @throws Exception if either bean set cannot be retrieved
 */
@SuppressWarnings("unchecked")
public TreeMap<String, String> getFreeStatisticsProviderBeanNamesMap()
        throws Exception {
    Set<String> availableBeans = getStatisticsProviderBeanNames();
    Set<String> usedBeans = getTrackedBeansPretty();
    TreeMap<String, String> beanMap = new TreeMap<String, String>();
    for (String mbeanName : availableBeans) {
        // Derive the pretty name for the tracked-set membership test and
        // for use as the map key.
        String[] temparray1 = mbeanName.split("name=");
        String[] temparray2 = temparray1[1].split(",");
        String[] temparray3 = temparray2[0].split("/");
        String mbeanNamePretty;
        if (temparray3.length > 1) {
            mbeanNamePretty = temparray3[1];
        } else {
            mbeanNamePretty = temparray2[0];
        }
        if (!usedBeans.contains(mbeanNamePretty)) {
            beanMap.put(mbeanNamePretty, mbeanName);
        }
    }
    return beanMap;
}
/**
 * Stops snapshot tracking for the given MBean, either through the remote
 * EJB interface or through a direct JMX invocation on the snapshot MBean.
 *
 * @param mbeanName full object name of the MBean to stop tracking
 * @return always {@code true}; failures surface as thrown exceptions
 * @throws Exception if the remote invocation fails
 */
public boolean stopTrackingMbean(String mbeanName) throws Exception {
    // No unchecked operations here, so no @SuppressWarnings needed.
    if (isEjbProtocol()) {
        mrc.removeMBeanForSnapshot(mbeanName);
    } else {
        mbServerConn.invoke(new ObjectName(PATH), "removeMBeanForSnapshot",
                new Object[] { mbeanName },
                new String[] { "java.lang.String" });
    }
    return true;
}
/**
 * Starts snapshot tracking for the given MBean, either through the remote
 * EJB interface or through a direct JMX invocation on the snapshot MBean.
 *
 * @param mbeanName full object name of the MBean to start tracking
 * @return always {@code true}; failures surface as thrown exceptions
 * @throws Exception if the remote invocation fails
 */
public boolean startTrackingMbean(String mbeanName) throws Exception {
    // No unchecked operations here, so no @SuppressWarnings needed.
    if (isEjbProtocol()) {
        mrc.addMBeanForSnapshot(mbeanName);
    } else {
        mbServerConn.invoke(new ObjectName(PATH), "addMBeanForSnapshot",
                new Object[] { mbeanName },
                new String[] { "java.lang.String" });
    }
    return true;
}
/**
 * Fetches the statistics map for one MBean from the remote side.
 *
 * @param MBean full object name of the MBean whose stats are requested
 * @return map of statistic name to value as returned by the server
 * @throws Exception if the remote invocation fails
 */
@SuppressWarnings("unchecked")
public HashMap<String, Long> getStats(String MBean) throws Exception {
    // EJB protocol: delegate straight to the remote EJB interface.
    if (isEjbProtocol()) {
        return mrc.getStats(MBean);
    }
    // Otherwise invoke through the MBean server connection; the remote
    // operation returns the stats map, hence the unchecked cast.
    Object stats = mbServerConn.invoke(new ObjectName(PATH), "getStats",
            new Object[] { MBean }, new String[] { "java.lang.String" });
    return (HashMap<String, Long>) stats;
}
/**
 * Sets the snapshot duration on the server.
 * NOTE(review): the unit of {@code duration} is defined by the server-side
 * implementation — confirm (likely milliseconds).
 *
 * @param duration new snapshot duration value
 */
public void setSnapshotDuration(long duration) {
    if (isEjbProtocol()) {
        // Long.valueOf replaces the deprecated new Long(duration) ctor.
        mrc.setSnapshotDuration(Long.valueOf(duration));
    } else {
        try {
            mbServerConn.invoke(new ObjectName(PATH),
                    "setSnapshotDuration",
                    new Object[] { Long.valueOf(duration) },
                    new String[] { "java.lang.Long" });
        } catch (Exception e) {
            // Best-effort: remote failures are printed and otherwise ignored.
            e.printStackTrace();
        }
    }
}
/**
 * Reads the snapshot retention setting from the server.
 *
 * @return the retention value, or 0 when the non-EJB remote call fails
 */
public int getSnapshotRetention() {
    if (isEjbProtocol()) {
        // EJB side returns the value as a String; parse it here.
        return Integer.parseInt(mrc.getSnapshotRetention());
    } else {
        try {
            return (Integer) mbServerConn.invoke(new ObjectName(PATH),
                    "getSnapshotRetention", new Object[] {},
                    new String[] {});
        } catch (Exception e) {
            // Best-effort: print and fall through to the 0 default below.
            e.printStackTrace();
        }
    }
    return 0;
}
/**
 * Sets the snapshot retention value on the server.
 *
 * @param duration new retention value (autoboxed to java.lang.Integer for
 *                 the JMX invocation)
 */
public void setSnapshotRetention(int duration) {
    if (isEjbProtocol()) {
        mrc.setSnapshotRetention(duration);
    } else {
        try {
            mbServerConn.invoke(new ObjectName(PATH),
                    "setSnapshotRetention", new Object[] { duration },
                    new String[] { "java.lang.Integer" });
        } catch (Exception e) {
            // Best-effort: remote failures are printed and otherwise ignored.
            e.printStackTrace();
        }
    }
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner.optimizations;
import com.facebook.presto.Session;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.planner.DependencyExtractor;
import com.facebook.presto.sql.planner.DeterminismEvaluator;
import com.facebook.presto.sql.planner.EffectivePredicateExtractor;
import com.facebook.presto.sql.planner.EqualityInference;
import com.facebook.presto.sql.planner.ExpressionInterpreter;
import com.facebook.presto.sql.planner.ExpressionSymbolInliner;
import com.facebook.presto.sql.planner.LiteralInterpreter;
import com.facebook.presto.sql.planner.NoOpSymbolResolver;
import com.facebook.presto.sql.planner.PlanNodeIdAllocator;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.SymbolAllocator;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.AssignUniqueId;
import com.facebook.presto.sql.planner.plan.Assignments;
import com.facebook.presto.sql.planner.plan.ExchangeNode;
import com.facebook.presto.sql.planner.plan.FilterNode;
import com.facebook.presto.sql.planner.plan.GroupIdNode;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.MarkDistinctNode;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.ProjectNode;
import com.facebook.presto.sql.planner.plan.SampleNode;
import com.facebook.presto.sql.planner.plan.SemiJoinNode;
import com.facebook.presto.sql.planner.plan.SimplePlanRewriter;
import com.facebook.presto.sql.planner.plan.SortNode;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.sql.planner.plan.UnionNode;
import com.facebook.presto.sql.planner.plan.UnnestNode;
import com.facebook.presto.sql.tree.BooleanLiteral;
import com.facebook.presto.sql.tree.ComparisonExpression;
import com.facebook.presto.sql.tree.ComparisonExpressionType;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.ExpressionTreeRewriter;
import com.facebook.presto.sql.tree.LongLiteral;
import com.facebook.presto.sql.tree.NullLiteral;
import com.facebook.presto.sql.tree.SymbolReference;
import com.facebook.presto.util.maps.IdentityLinkedHashMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import io.airlift.log.Logger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static com.facebook.presto.sql.ExpressionUtils.combineConjuncts;
import static com.facebook.presto.sql.ExpressionUtils.expressionOrNullSymbols;
import static com.facebook.presto.sql.ExpressionUtils.extractConjuncts;
import static com.facebook.presto.sql.ExpressionUtils.stripNonDeterministicConjuncts;
import static com.facebook.presto.sql.analyzer.ExpressionAnalyzer.getExpressionTypes;
import static com.facebook.presto.sql.planner.DeterminismEvaluator.isDeterministic;
import static com.facebook.presto.sql.planner.EqualityInference.createEqualityInference;
import static com.facebook.presto.sql.planner.plan.JoinNode.Type.FULL;
import static com.facebook.presto.sql.planner.plan.JoinNode.Type.INNER;
import static com.facebook.presto.sql.planner.plan.JoinNode.Type.LEFT;
import static com.facebook.presto.sql.planner.plan.JoinNode.Type.RIGHT;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Predicates.equalTo;
import static com.google.common.base.Predicates.in;
import static com.google.common.base.Predicates.not;
import static com.google.common.collect.Iterables.filter;
import static java.util.Collections.emptyList;
import static java.util.Objects.requireNonNull;
public class PredicatePushDown
implements PlanOptimizer
{
// Logger for this optimizer (visible usage, if any, is elsewhere in the class).
private static final Logger log = Logger.get(PredicatePushDown.class);
// Metadata used by the rewriter for expression analysis and simplification.
private final Metadata metadata;
// Parser handed to the rewriter's ExpressionEquivalence checks.
private final SqlParser sqlParser;
/**
 * Creates the predicate-pushdown optimizer.
 *
 * @param metadata metadata for expression analysis; must not be null
 * @param sqlParser parser used for expression-equivalence checks; must not be null
 */
public PredicatePushDown(Metadata metadata, SqlParser sqlParser)
{
    this.metadata = requireNonNull(metadata, "metadata is null");
    this.sqlParser = requireNonNull(sqlParser, "sqlParser is null");
}
/**
 * Entry point: rewrites the plan bottom-up, pushing predicates as close to
 * the data sources as possible. The rewrite starts with TRUE (no inherited
 * predicate) at the root.
 *
 * @param plan plan to optimize
 * @param session session for expression evaluation
 * @param types symbol types of the plan
 * @param symbolAllocator allocator for any fresh symbols the rewrite needs
 * @param idAllocator allocator for new plan-node ids
 * @return the rewritten plan
 */
@Override
public PlanNode optimize(PlanNode plan, Session session, Map<Symbol, Type> types, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator)
{
    requireNonNull(plan, "plan is null");
    requireNonNull(session, "session is null");
    requireNonNull(types, "types is null");
    // Was missing: symbolAllocator is dereferenced by the Rewriter, so check
    // it here for consistency with the other parameters.
    requireNonNull(symbolAllocator, "symbolAllocator is null");
    requireNonNull(idAllocator, "idAllocator is null");
    return SimplePlanRewriter.rewriteWith(new Rewriter(symbolAllocator, idAllocator, metadata, sqlParser, session, types), plan, BooleanLiteral.TRUE_LITERAL);
}
private static class Rewriter
extends SimplePlanRewriter<Expression>
{
// Allocates fresh symbols for join-key expressions promoted to projections.
private final SymbolAllocator symbolAllocator;
// Allocates ids for the FilterNode/ProjectNode/JoinNode instances created here.
private final PlanNodeIdAllocator idAllocator;
private final Metadata metadata;
private final SqlParser sqlParser;
private final Session session;
private final Map<Symbol, Type> types;
// Semantic (not textual) predicate comparison, used to detect whether a
// join predicate actually changed during rewriting.
private final ExpressionEquivalence expressionEquivalence;
/**
 * Builds the rewriter that carries the inherited predicate (the
 * RewriteContext payload) down the plan tree.
 */
private Rewriter(
        SymbolAllocator symbolAllocator,
        PlanNodeIdAllocator idAllocator,
        Metadata metadata,
        SqlParser sqlParser,
        Session session,
        Map<Symbol, Type> types)
{
    this.symbolAllocator = requireNonNull(symbolAllocator, "symbolAllocator is null");
    this.idAllocator = requireNonNull(idAllocator, "idAllocator is null");
    this.metadata = requireNonNull(metadata, "metadata is null");
    this.sqlParser = requireNonNull(sqlParser, "sqlParser is null");
    this.session = requireNonNull(session, "session is null");
    this.types = requireNonNull(types, "types is null");
    this.expressionEquivalence = new ExpressionEquivalence(metadata, sqlParser);
}
/**
 * Generic fallback for node types without a specialized visit method:
 * children receive TRUE (nothing is pushed further down), and whatever
 * predicate was inherited is materialized as a filter right here.
 */
@Override
public PlanNode visitPlan(PlanNode node, RewriteContext<Expression> context)
{
    PlanNode rewritten = context.defaultRewrite(node, BooleanLiteral.TRUE_LITERAL);
    Expression inherited = context.get();
    if (inherited.equals(BooleanLiteral.TRUE_LITERAL)) {
        // Nothing left to apply; return the rewritten subtree as-is.
        return rewritten;
    }
    // Drop in a FilterNode b/c we cannot push our predicate down any further
    return new FilterNode(idAllocator.getNextId(), rewritten, inherited);
}
/**
 * Pushes the inherited predicate through an exchange by translating it from
 * the exchange's output symbols into each source's input symbols, then
 * rewriting every source with its translated predicate.
 */
@Override
public PlanNode visitExchange(ExchangeNode node, RewriteContext<Expression> context)
{
    boolean anySourceChanged = false;
    ImmutableList.Builder<PlanNode> rewrittenSources = ImmutableList.builder();
    for (int sourceIndex = 0; sourceIndex < node.getSources().size(); sourceIndex++) {
        // Build the output-symbol -> input-symbol mapping for this source.
        Map<Symbol, SymbolReference> outputsToInputs = new HashMap<>();
        for (int column = 0; column < node.getInputs().get(sourceIndex).size(); column++) {
            outputsToInputs.put(
                    node.getOutputSymbols().get(column),
                    node.getInputs().get(sourceIndex).get(column).toSymbolReference());
        }
        Expression translatedPredicate = ExpressionTreeRewriter.rewriteWith(new ExpressionSymbolInliner(outputsToInputs), context.get());
        PlanNode originalSource = node.getSources().get(sourceIndex);
        PlanNode rewrittenSource = context.rewrite(originalSource, translatedPredicate);
        anySourceChanged |= (rewrittenSource != originalSource);
        rewrittenSources.add(rewrittenSource);
    }
    if (!anySourceChanged) {
        return node;
    }
    return new ExchangeNode(
            node.getId(),
            node.getType(),
            node.getScope(),
            node.getPartitioningScheme(),
            rewrittenSources.build(),
            node.getInputs());
}
/**
 * Pushes deterministic conjuncts of the inherited predicate below a
 * projection (inlining each projected symbol's defining expression), while
 * conjuncts touching any non-deterministically assigned symbol stay in a
 * filter above the projection.
 */
@Override
public PlanNode visitProject(ProjectNode node, RewriteContext<Expression> context)
{
    // Symbols whose assigned expressions are deterministic.
    Set<Symbol> deterministicSymbols = node.getAssignments().entrySet().stream()
            .filter(entry -> DeterminismEvaluator.isDeterministic(entry.getValue()))
            .map(Map.Entry::getKey)
            .collect(Collectors.toSet());
    // A conjunct is pushable only if every symbol it references has a
    // deterministic assignment.
    Predicate<Expression> deterministic = conjunct -> DependencyExtractor.extractUnique(conjunct).stream()
            .allMatch(deterministicSymbols::contains);
    // Partition: true -> pushable, false -> kept above the projection.
    Map<Boolean, List<Expression>> conjuncts = extractConjuncts(context.get()).stream().collect(Collectors.partitioningBy(deterministic));
    // Push down conjuncts from the inherited predicate that don't depend on non-deterministic assignments
    PlanNode rewrittenNode = context.defaultRewrite(node,
            ExpressionTreeRewriter.rewriteWith(new ExpressionSymbolInliner(node.getAssignments().getMap()), combineConjuncts(conjuncts.get(true))));
    // All non-deterministic conjuncts, if any, will be in the filter node.
    if (!conjuncts.get(false).isEmpty()) {
        rewrittenNode = new FilterNode(idAllocator.getNextId(), rewrittenNode, combineConjuncts(conjuncts.get(false)));
    }
    return rewrittenNode;
}
/**
 * Pushes conjuncts that only reference common grouping columns (columns
 * present in every grouping set) below a GroupIdNode; everything else stays
 * in a filter above it.
 */
@Override
public PlanNode visitGroupId(GroupIdNode node, RewriteContext<Expression> context)
{
    // The group-id column is synthesized by this node, so no inherited
    // predicate may reference it.
    checkState(!DependencyExtractor.extractUnique(context.get()).contains(node.getGroupIdSymbol()), "groupId symbol cannot be referenced in predicate");
    // Output-symbol -> input-symbol mapping restricted to the common
    // grouping columns.
    Map<Symbol, SymbolReference> commonGroupingSymbolMapping = node.getGroupingSetMappings().entrySet().stream()
            .filter(entry -> node.getCommonGroupingColumns().contains(entry.getKey()))
            .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().toSymbolReference()));
    // A conjunct is pushable only if every symbol it references is a common
    // grouping column.
    Predicate<Expression> pushdownEligiblePredicate = conjunct -> DependencyExtractor.extractUnique(conjunct).stream()
            .allMatch(commonGroupingSymbolMapping.keySet()::contains);
    // Partition: true -> pushable, false -> kept above this node.
    Map<Boolean, List<Expression>> conjuncts = extractConjuncts(context.get()).stream().collect(Collectors.partitioningBy(pushdownEligiblePredicate));
    // Push down conjuncts from the inherited predicate that apply to common grouping symbols
    PlanNode rewrittenNode = context.defaultRewrite(node,
            ExpressionTreeRewriter.rewriteWith(new ExpressionSymbolInliner(commonGroupingSymbolMapping), combineConjuncts(conjuncts.get(true))));
    // All other conjuncts, if any, will be in the filter node.
    if (!conjuncts.get(false).isEmpty()) {
        rewrittenNode = new FilterNode(idAllocator.getNextId(), rewrittenNode, combineConjuncts(conjuncts.get(false)));
    }
    return rewrittenNode;
}
/**
 * MarkDistinct only adds a marker column, so the inherited predicate passes
 * straight through — it just must not reference the marker itself.
 */
@Override
public PlanNode visitMarkDistinct(MarkDistinctNode node, RewriteContext<Expression> context)
{
    Set<Symbol> referencedSymbols = DependencyExtractor.extractUnique(context.get());
    checkState(!referencedSymbols.contains(node.getMarkerSymbol()), "predicate depends on marker symbol");
    return context.defaultRewrite(node, context.get());
}
/**
 * Sorting does not add or remove rows, so the inherited predicate can be
 * pushed through unchanged.
 */
@Override
public PlanNode visitSort(SortNode node, RewriteContext<Expression> context)
{
    return context.defaultRewrite(node, context.get());
}
/**
 * Pushes the inherited predicate into every union branch, re-expressed in
 * that branch's own symbols via the union's symbol mapping.
 */
@Override
public PlanNode visitUnion(UnionNode node, RewriteContext<Expression> context)
{
    boolean anySourceChanged = false;
    ImmutableList.Builder<PlanNode> rewrittenSources = ImmutableList.builder();
    for (int sourceIndex = 0; sourceIndex < node.getSources().size(); sourceIndex++) {
        // Translate the predicate into this branch's symbol space.
        Expression translatedPredicate = ExpressionTreeRewriter.rewriteWith(new ExpressionSymbolInliner(node.sourceSymbolMap(sourceIndex)), context.get());
        PlanNode originalSource = node.getSources().get(sourceIndex);
        PlanNode rewrittenSource = context.rewrite(originalSource, translatedPredicate);
        anySourceChanged |= (rewrittenSource != originalSource);
        rewrittenSources.add(rewrittenSource);
    }
    if (anySourceChanged) {
        return new UnionNode(node.getId(), rewrittenSources.build(), node.getSymbolMapping(), node.getOutputSymbols());
    }
    return node;
}
/**
 * Folds this filter's predicate into the inherited one and recurses into
 * the source; the FilterNode itself disappears (visitPlan re-adds a filter
 * for whatever cannot be pushed down).
 */
@Deprecated
@Override
public PlanNode visitFilter(FilterNode node, RewriteContext<Expression> context)
{
    return context.rewrite(node.getSource(), combineConjuncts(node.getPredicate(), context.get()));
}
/**
 * The core of predicate pushdown through joins. Splits the inherited
 * predicate, the two sides' effective predicates, and the join predicate
 * into: a piece pushed into the left child, a piece pushed into the right
 * child, a new join predicate, and a residual filter applied above the
 * join. How the split happens depends on the (possibly normalized) join type.
 */
@Override
public PlanNode visitJoin(JoinNode node, RewriteContext<Expression> context)
{
    Expression inheritedPredicate = context.get();
    // See if we can rewrite outer joins in terms of a plain inner join
    node = tryNormalizeToOuterToInnerJoin(node, inheritedPredicate);
    // Predicates guaranteed to hold on each side's output rows.
    Expression leftEffectivePredicate = EffectivePredicateExtractor.extract(node.getLeft(), symbolAllocator.getTypes());
    Expression rightEffectivePredicate = EffectivePredicateExtractor.extract(node.getRight(), symbolAllocator.getTypes());
    Expression joinPredicate = extractJoinPredicate(node);
    Expression leftPredicate;
    Expression rightPredicate;
    Expression postJoinPredicate;
    Expression newJoinPredicate;
    switch (node.getType()) {
        case INNER:
            InnerJoinPushDownResult innerJoinPushDownResult = processInnerJoin(inheritedPredicate,
                    leftEffectivePredicate,
                    rightEffectivePredicate,
                    joinPredicate,
                    node.getLeft().getOutputSymbols());
            leftPredicate = innerJoinPushDownResult.getLeftPredicate();
            rightPredicate = innerJoinPushDownResult.getRightPredicate();
            postJoinPredicate = innerJoinPushDownResult.getPostJoinPredicate();
            newJoinPredicate = innerJoinPushDownResult.getJoinPredicate();
            break;
        case LEFT:
            // Left is the outer side; pushdown into the inner (right) side
            // is restricted to preserve outer-join null semantics.
            OuterJoinPushDownResult leftOuterJoinPushDownResult = processLimitedOuterJoin(inheritedPredicate,
                    leftEffectivePredicate,
                    rightEffectivePredicate,
                    joinPredicate,
                    node.getLeft().getOutputSymbols());
            leftPredicate = leftOuterJoinPushDownResult.getOuterJoinPredicate();
            rightPredicate = leftOuterJoinPushDownResult.getInnerJoinPredicate();
            postJoinPredicate = leftOuterJoinPushDownResult.getPostJoinPredicate();
            newJoinPredicate = leftOuterJoinPushDownResult.getJoinPredicate();
            break;
        case RIGHT:
            // Mirror image of LEFT: right is the outer side, so the
            // effective predicates and outputs are swapped going in, and the
            // outer/inner results are swapped coming out.
            OuterJoinPushDownResult rightOuterJoinPushDownResult = processLimitedOuterJoin(inheritedPredicate,
                    rightEffectivePredicate,
                    leftEffectivePredicate,
                    joinPredicate,
                    node.getRight().getOutputSymbols());
            leftPredicate = rightOuterJoinPushDownResult.getInnerJoinPredicate();
            rightPredicate = rightOuterJoinPushDownResult.getOuterJoinPredicate();
            postJoinPredicate = rightOuterJoinPushDownResult.getPostJoinPredicate();
            newJoinPredicate = rightOuterJoinPushDownResult.getJoinPredicate();
            break;
        case FULL:
            // Nothing can be pushed through a full outer join; keep the
            // whole inherited predicate above it.
            leftPredicate = BooleanLiteral.TRUE_LITERAL;
            rightPredicate = BooleanLiteral.TRUE_LITERAL;
            postJoinPredicate = inheritedPredicate;
            newJoinPredicate = joinPredicate;
            break;
        default:
            throw new UnsupportedOperationException("Unsupported join type: " + node.getType());
    }
    newJoinPredicate = simplifyExpression(newJoinPredicate);
    // TODO: find a better way to directly optimize FALSE LITERAL in join predicate
    if (newJoinPredicate.equals(BooleanLiteral.FALSE_LITERAL)) {
        // Replace a constant-false join predicate with an always-false
        // comparison (0 = 1) that downstream machinery can handle.
        newJoinPredicate = new ComparisonExpression(ComparisonExpressionType.EQUAL, new LongLiteral("0"), new LongLiteral("1"));
    }
    PlanNode leftSource = context.rewrite(node.getLeft(), leftPredicate);
    PlanNode rightSource = context.rewrite(node.getRight(), rightPredicate);
    PlanNode output = node;
    // Rebuild the join only when a child changed, the join predicate is
    // semantically different, or the join had no equi-criteria to begin with.
    if (leftSource != node.getLeft() ||
            rightSource != node.getRight() ||
            !expressionEquivalence.areExpressionsEquivalent(session, newJoinPredicate, joinPredicate, types) ||
            node.getCriteria().isEmpty()) {
        // Create identity projections for all existing symbols
        Assignments.Builder leftProjections = Assignments.builder();
        leftProjections.putAll(node.getLeft()
                .getOutputSymbols().stream()
                .collect(Collectors.toMap(key -> key, Symbol::toSymbolReference)));
        Assignments.Builder rightProjections = Assignments.builder();
        rightProjections.putAll(node.getRight()
                .getOutputSymbols().stream()
                .collect(Collectors.toMap(key -> key, Symbol::toSymbolReference)));
        // Create new projections for the new join clauses
        ImmutableList.Builder<JoinNode.EquiJoinClause> joinConditionBuilder = ImmutableList.builder();
        ImmutableList.Builder<Expression> joinFilterBuilder = ImmutableList.builder();
        for (Expression conjunct : extractConjuncts(newJoinPredicate)) {
            if (joinEqualityExpression(node.getLeft().getOutputSymbols()).test(conjunct)) {
                // An equality between one left-side and one right-side
                // expression becomes an equi-join clause; each side may need
                // a projected symbol if it is not already a plain symbol.
                ComparisonExpression equality = (ComparisonExpression) conjunct;
                boolean alignedComparison = Iterables.all(DependencyExtractor.extractUnique(equality.getLeft()), in(node.getLeft().getOutputSymbols()));
                Expression leftExpression = (alignedComparison) ? equality.getLeft() : equality.getRight();
                Expression rightExpression = (alignedComparison) ? equality.getRight() : equality.getLeft();
                Symbol leftSymbol = symbolForExpression(leftExpression);
                if (!node.getLeft().getOutputSymbols().contains(leftSymbol)) {
                    leftProjections.put(leftSymbol, leftExpression);
                }
                Symbol rightSymbol = symbolForExpression(rightExpression);
                if (!node.getRight().getOutputSymbols().contains(rightSymbol)) {
                    rightProjections.put(rightSymbol, rightExpression);
                }
                joinConditionBuilder.add(new JoinNode.EquiJoinClause(leftSymbol, rightSymbol));
            }
            else {
                // Non-equality conjuncts become part of the join filter.
                joinFilterBuilder.add(conjunct);
            }
        }
        Optional<Expression> newJoinFilter = Optional.of(combineConjuncts(joinFilterBuilder.build()));
        if (newJoinFilter.get() == BooleanLiteral.TRUE_LITERAL) {
            newJoinFilter = Optional.empty();
        }
        leftSource = new ProjectNode(idAllocator.getNextId(), leftSource, leftProjections.build());
        rightSource = new ProjectNode(idAllocator.getNextId(), rightSource, rightProjections.build());
        output = createJoinNodeWithExpectedOutputs(
                node.getOutputSymbols(), idAllocator,
                node.getType(),
                leftSource,
                rightSource,
                newJoinFilter,
                joinConditionBuilder.build(),
                node.getLeftHashSymbol(),
                node.getRightHashSymbol());
    }
    // Whatever could not be pushed anywhere stays as a filter above the join.
    if (!postJoinPredicate.equals(BooleanLiteral.TRUE_LITERAL)) {
        output = new FilterNode(idAllocator.getNextId(), output, postJoinPredicate);
    }
    return output;
}
/**
 * Resolves an expression to a symbol: a bare symbol reference already is
 * one; anything more complex gets a fresh symbol of the inferred type.
 */
private Symbol symbolForExpression(Expression expression)
{
    return (expression instanceof SymbolReference)
            ? Symbol.from(expression)
            : symbolAllocator.newSymbol(expression, extractType(expression));
}
/**
 * Builds a join node whose outputs match {@code expectedOutputs}. For a
 * cross join (no conditions, no filter) the JoinNode emits all symbols from
 * both sides, so a pruning projection is added when that differs from what
 * was expected; otherwise the expected outputs are passed to the JoinNode
 * directly.
 */
private static PlanNode createJoinNodeWithExpectedOutputs(
        List<Symbol> expectedOutputs,
        PlanNodeIdAllocator idAllocator,
        JoinNode.Type type,
        PlanNode left,
        PlanNode right,
        Optional<Expression> filter,
        List<JoinNode.EquiJoinClause> conditions,
        Optional<Symbol> leftHashSymbol,
        Optional<Symbol> rightHashSymbol)
{
    // TODO: this should be removed once join nodes with output column pruning is supported for cross join
    if (conditions.isEmpty() && !filter.isPresent()) {
        PlanNode output = new JoinNode(
                idAllocator.getNextId(),
                type,
                left,
                right,
                conditions,
                ImmutableList.<Symbol>builder()
                        .addAll(left.getOutputSymbols())
                        .addAll(right.getOutputSymbols())
                        .build(),
                filter,
                leftHashSymbol,
                rightHashSymbol);
        if (!output.getOutputSymbols().equals(expectedOutputs)) {
            // Introduce a projection to constrain the outputs to what was originally expected
            // Some nodes are sensitive to what's produced (e.g., DistinctLimit node)
            output = new ProjectNode(
                    idAllocator.getNextId(),
                    output,
                    Assignments.identity(expectedOutputs));
        }
        return output;
    }
    else {
        return new JoinNode(idAllocator.getNextId(), type, left, right, conditions, expectedOutputs, filter, leftHashSymbol, rightHashSymbol);
    }
}
/**
 * Splits predicates for a LEFT/RIGHT outer join (caller orients the sides so
 * "outer" is the null-preserving side). Produces four pieces: conjuncts
 * pushed into the outer side, conjuncts pushed into the inner side, the
 * remaining join predicate, and a residual post-join filter. Pushing into
 * the inner side is only safe for predicates derivable via the outer side's
 * equalities, so that outer-join null-extension semantics are preserved.
 */
private static OuterJoinPushDownResult processLimitedOuterJoin(Expression inheritedPredicate, Expression outerEffectivePredicate, Expression innerEffectivePredicate, Expression joinPredicate, Collection<Symbol> outerSymbols)
{
    checkArgument(Iterables.all(DependencyExtractor.extractUnique(outerEffectivePredicate), in(outerSymbols)), "outerEffectivePredicate must only contain symbols from outerSymbols");
    checkArgument(Iterables.all(DependencyExtractor.extractUnique(innerEffectivePredicate), not(in(outerSymbols))), "innerEffectivePredicate must not contain symbols from outerSymbols");
    ImmutableList.Builder<Expression> outerPushdownConjuncts = ImmutableList.builder();
    ImmutableList.Builder<Expression> innerPushdownConjuncts = ImmutableList.builder();
    ImmutableList.Builder<Expression> postJoinConjuncts = ImmutableList.builder();
    ImmutableList.Builder<Expression> joinConjuncts = ImmutableList.builder();
    // Strip out non-deterministic conjuncts
    // (non-deterministic inherited conjuncts must stay above the join;
    // non-deterministic join conjuncts stay in the join predicate)
    postJoinConjuncts.addAll(filter(extractConjuncts(inheritedPredicate), not(DeterminismEvaluator::isDeterministic)));
    inheritedPredicate = stripNonDeterministicConjuncts(inheritedPredicate);
    outerEffectivePredicate = stripNonDeterministicConjuncts(outerEffectivePredicate);
    innerEffectivePredicate = stripNonDeterministicConjuncts(innerEffectivePredicate);
    joinConjuncts.addAll(filter(extractConjuncts(joinPredicate), not(DeterminismEvaluator::isDeterministic)));
    joinPredicate = stripNonDeterministicConjuncts(joinPredicate);
    // Generate equality inferences
    EqualityInference inheritedInference = createEqualityInference(inheritedPredicate);
    EqualityInference outerInference = createEqualityInference(inheritedPredicate, outerEffectivePredicate);
    // Partition inherited equalities by whether they mention only outer symbols.
    EqualityInference.EqualityPartition equalityPartition = inheritedInference.generateEqualitiesPartitionedBy(in(outerSymbols));
    Expression outerOnlyInheritedEqualities = combineConjuncts(equalityPartition.getScopeEqualities());
    EqualityInference potentialNullSymbolInference = createEqualityInference(outerOnlyInheritedEqualities, outerEffectivePredicate, innerEffectivePredicate, joinPredicate);
    // See if we can push inherited predicates down
    for (Expression conjunct : EqualityInference.nonInferrableConjuncts(inheritedPredicate)) {
        Expression outerRewritten = outerInference.rewriteExpression(conjunct, in(outerSymbols));
        if (outerRewritten != null) {
            outerPushdownConjuncts.add(outerRewritten);
            // A conjunct can only be pushed down into an inner side if it can be rewritten in terms of the outer side
            Expression innerRewritten = potentialNullSymbolInference.rewriteExpression(outerRewritten, not(in(outerSymbols)));
            if (innerRewritten != null) {
                innerPushdownConjuncts.add(innerRewritten);
            }
        }
        else {
            postJoinConjuncts.add(conjunct);
        }
    }
    // Add the equalities from the inferences back in
    outerPushdownConjuncts.addAll(equalityPartition.getScopeEqualities());
    postJoinConjuncts.addAll(equalityPartition.getScopeComplementEqualities());
    postJoinConjuncts.addAll(equalityPartition.getScopeStraddlingEqualities());
    // See if we can push down any outer effective predicates to the inner side
    for (Expression conjunct : EqualityInference.nonInferrableConjuncts(outerEffectivePredicate)) {
        Expression rewritten = potentialNullSymbolInference.rewriteExpression(conjunct, not(in(outerSymbols)));
        if (rewritten != null) {
            innerPushdownConjuncts.add(rewritten);
        }
    }
    // See if we can push down join predicates to the inner side
    for (Expression conjunct : EqualityInference.nonInferrableConjuncts(joinPredicate)) {
        Expression innerRewritten = potentialNullSymbolInference.rewriteExpression(conjunct, not(in(outerSymbols)));
        if (innerRewritten != null) {
            innerPushdownConjuncts.add(innerRewritten);
        }
        else {
            joinConjuncts.add(conjunct);
        }
    }
    // Push outer and join equalities into the inner side. For example:
    // SELECT * FROM nation LEFT OUTER JOIN region ON nation.regionkey = region.regionkey and nation.name = region.name WHERE nation.name = 'blah'
    EqualityInference potentialNullSymbolInferenceWithoutInnerInferred = createEqualityInference(outerOnlyInheritedEqualities, outerEffectivePredicate, joinPredicate);
    innerPushdownConjuncts.addAll(potentialNullSymbolInferenceWithoutInnerInferred.generateEqualitiesPartitionedBy(not(in(outerSymbols))).getScopeEqualities());
    // TODO: we can further improve simplifying the equalities by considering other relationships from the outer side
    EqualityInference.EqualityPartition joinEqualityPartition = createEqualityInference(joinPredicate).generateEqualitiesPartitionedBy(not(in(outerSymbols)));
    innerPushdownConjuncts.addAll(joinEqualityPartition.getScopeEqualities());
    joinConjuncts.addAll(joinEqualityPartition.getScopeComplementEqualities())
            .addAll(joinEqualityPartition.getScopeStraddlingEqualities());
    return new OuterJoinPushDownResult(combineConjuncts(outerPushdownConjuncts.build()),
            combineConjuncts(innerPushdownConjuncts.build()),
            combineConjuncts(joinConjuncts.build()),
            combineConjuncts(postJoinConjuncts.build()));
}
/**
 * Immutable holder for the four predicate pieces produced by
 * {@code processLimitedOuterJoin}: the conjuncts pushed to the outer side,
 * the conjuncts pushed to the inner side, the remaining join predicate,
 * and the residual post-join filter.
 */
private static class OuterJoinPushDownResult
{
    private final Expression outerJoinPredicate;
    private final Expression innerJoinPredicate;
    private final Expression joinPredicate;
    private final Expression postJoinPredicate;
    private OuterJoinPushDownResult(Expression outerJoinPredicate, Expression innerJoinPredicate, Expression joinPredicate, Expression postJoinPredicate)
    {
        this.outerJoinPredicate = outerJoinPredicate;
        this.innerJoinPredicate = innerJoinPredicate;
        this.joinPredicate = joinPredicate;
        this.postJoinPredicate = postJoinPredicate;
    }
    private Expression getOuterJoinPredicate()
    {
        return outerJoinPredicate;
    }
    private Expression getInnerJoinPredicate()
    {
        return innerJoinPredicate;
    }
    // Was "public" — made private for consistency with the sibling accessors;
    // the class itself is private, so only the enclosing class can call it.
    private Expression getJoinPredicate()
    {
        return joinPredicate;
    }
    private Expression getPostJoinPredicate()
    {
        return postJoinPredicate;
    }
}
/**
 * Splits predicates for an INNER join. Using equality inference over the
 * inherited predicate, both sides' effective predicates, and the join
 * predicate, each conjunct is rewritten (when possible) in terms of only
 * left symbols and/or only right symbols and pushed to the matching side;
 * conjuncts pushable to neither side remain in the join predicate. The
 * post-join predicate of the result is always TRUE for inner joins.
 */
private static InnerJoinPushDownResult processInnerJoin(Expression inheritedPredicate, Expression leftEffectivePredicate, Expression rightEffectivePredicate, Expression joinPredicate, Collection<Symbol> leftSymbols)
{
    checkArgument(Iterables.all(DependencyExtractor.extractUnique(leftEffectivePredicate), in(leftSymbols)), "leftEffectivePredicate must only contain symbols from leftSymbols");
    checkArgument(Iterables.all(DependencyExtractor.extractUnique(rightEffectivePredicate), not(in(leftSymbols))), "rightEffectivePredicate must not contain symbols from leftSymbols");
    ImmutableList.Builder<Expression> leftPushDownConjuncts = ImmutableList.builder();
    ImmutableList.Builder<Expression> rightPushDownConjuncts = ImmutableList.builder();
    ImmutableList.Builder<Expression> joinConjuncts = ImmutableList.builder();
    // Strip out non-deterministic conjuncts
    // (they must not be duplicated or relocated, so they stay in the join)
    joinConjuncts.addAll(filter(extractConjuncts(inheritedPredicate), not(DeterminismEvaluator::isDeterministic)));
    inheritedPredicate = stripNonDeterministicConjuncts(inheritedPredicate);
    joinConjuncts.addAll(filter(extractConjuncts(joinPredicate), not(DeterminismEvaluator::isDeterministic)));
    joinPredicate = stripNonDeterministicConjuncts(joinPredicate);
    leftEffectivePredicate = stripNonDeterministicConjuncts(leftEffectivePredicate);
    rightEffectivePredicate = stripNonDeterministicConjuncts(rightEffectivePredicate);
    // Generate equality inferences
    // (the "without X inferred" variants avoid re-deriving equalities a side
    // already guarantees when adding equalities back in below)
    EqualityInference allInference = createEqualityInference(inheritedPredicate, leftEffectivePredicate, rightEffectivePredicate, joinPredicate);
    EqualityInference allInferenceWithoutLeftInferred = createEqualityInference(inheritedPredicate, rightEffectivePredicate, joinPredicate);
    EqualityInference allInferenceWithoutRightInferred = createEqualityInference(inheritedPredicate, leftEffectivePredicate, joinPredicate);
    // Sort through conjuncts in inheritedPredicate that were not used for inference
    for (Expression conjunct : EqualityInference.nonInferrableConjuncts(inheritedPredicate)) {
        Expression leftRewrittenConjunct = allInference.rewriteExpression(conjunct, in(leftSymbols));
        if (leftRewrittenConjunct != null) {
            leftPushDownConjuncts.add(leftRewrittenConjunct);
        }
        Expression rightRewrittenConjunct = allInference.rewriteExpression(conjunct, not(in(leftSymbols)));
        if (rightRewrittenConjunct != null) {
            rightPushDownConjuncts.add(rightRewrittenConjunct);
        }
        // Drop predicate after join only if unable to push down to either side
        if (leftRewrittenConjunct == null && rightRewrittenConjunct == null) {
            joinConjuncts.add(conjunct);
        }
    }
    // See if we can push the right effective predicate to the left side
    for (Expression conjunct : EqualityInference.nonInferrableConjuncts(rightEffectivePredicate)) {
        Expression rewritten = allInference.rewriteExpression(conjunct, in(leftSymbols));
        if (rewritten != null) {
            leftPushDownConjuncts.add(rewritten);
        }
    }
    // See if we can push the left effective predicate to the right side
    for (Expression conjunct : EqualityInference.nonInferrableConjuncts(leftEffectivePredicate)) {
        Expression rewritten = allInference.rewriteExpression(conjunct, not(in(leftSymbols)));
        if (rewritten != null) {
            rightPushDownConjuncts.add(rewritten);
        }
    }
    // See if we can push any parts of the join predicates to either side
    for (Expression conjunct : EqualityInference.nonInferrableConjuncts(joinPredicate)) {
        Expression leftRewritten = allInference.rewriteExpression(conjunct, in(leftSymbols));
        if (leftRewritten != null) {
            leftPushDownConjuncts.add(leftRewritten);
        }
        Expression rightRewritten = allInference.rewriteExpression(conjunct, not(in(leftSymbols)));
        if (rightRewritten != null) {
            rightPushDownConjuncts.add(rightRewritten);
        }
        if (leftRewritten == null && rightRewritten == null) {
            joinConjuncts.add(conjunct);
        }
    }
    // Add equalities from the inference back in
    leftPushDownConjuncts.addAll(allInferenceWithoutLeftInferred.generateEqualitiesPartitionedBy(in(leftSymbols)).getScopeEqualities());
    rightPushDownConjuncts.addAll(allInferenceWithoutRightInferred.generateEqualitiesPartitionedBy(not(in(leftSymbols))).getScopeEqualities());
    joinConjuncts.addAll(allInference.generateEqualitiesPartitionedBy(in(leftSymbols)).getScopeStraddlingEqualities()); // scope straddling equalities get dropped in as part of the join predicate
    return new InnerJoinPushDownResult(combineConjuncts(leftPushDownConjuncts.build()), combineConjuncts(rightPushDownConjuncts.build()), combineConjuncts(joinConjuncts.build()), BooleanLiteral.TRUE_LITERAL);
}
/**
 * Immutable result of splitting an inner join's combined predicates into the
 * conjuncts that can be pushed below each input, the conjuncts that must be
 * evaluated as part of the join itself, and any predicate left to apply after
 * the join (always TRUE_LITERAL for inner joins — see the caller).
 */
private static class InnerJoinPushDownResult
{
    private final Expression leftPredicate;     // conjuncts pushed down to the left input
    private final Expression rightPredicate;    // conjuncts pushed down to the right input
    private final Expression joinPredicate;     // conjuncts evaluated as the join condition
    private final Expression postJoinPredicate; // conjuncts applied above the join
    private InnerJoinPushDownResult(Expression leftPredicate, Expression rightPredicate, Expression joinPredicate, Expression postJoinPredicate)
    {
        this.leftPredicate = leftPredicate;
        this.rightPredicate = rightPredicate;
        this.joinPredicate = joinPredicate;
        this.postJoinPredicate = postJoinPredicate;
    }
    private Expression getLeftPredicate()
    {
        return leftPredicate;
    }
    private Expression getRightPredicate()
    {
        return rightPredicate;
    }
    private Expression getJoinPredicate()
    {
        return joinPredicate;
    }
    private Expression getPostJoinPredicate()
    {
        return postJoinPredicate;
    }
}
/**
 * Builds the complete join predicate of a join node by AND-ing together the
 * equality conjuncts implied by the equi-join criteria and the optional
 * non-equi filter expression.
 */
private static Expression extractJoinPredicate(JoinNode joinNode)
{
    ImmutableList.Builder<Expression> conjuncts = ImmutableList.builder();
    joinNode.getCriteria().stream()
            .map(clause -> equalsExpression(clause.getLeft(), clause.getRight()))
            .forEach(conjuncts::add);
    joinNode.getFilter().ifPresent(conjuncts::add);
    return combineConjuncts(conjuncts.build());
}
// Builds an "a = b" comparison between two symbol references.
private static Expression equalsExpression(Symbol left, Symbol right)
{
    Expression leftReference = left.toSymbolReference();
    Expression rightReference = right.toSymbolReference();
    return new ComparisonExpression(ComparisonExpressionType.EQUAL, leftReference, rightReference);
}
// Resolves the type of the given expression by running expression analysis
// over the current symbol types. Parameters have already been replaced at
// this point, so an empty parameter list is passed.
private Type extractType(Expression expression)
{
    IdentityLinkedHashMap<Expression, Type> expressionTypes =
            getExpressionTypes(session, metadata, sqlParser, symbolAllocator.getTypes(), expression, emptyList());
    return expressionTypes.get(expression);
}
/**
 * Attempts to turn an outer join into an (at least partially) inner join when the
 * inherited predicate filters out the null rows that the outer side would produce.
 * A FULL join may be downgraded to LEFT, RIGHT or INNER depending on which sides
 * are convertible; LEFT/RIGHT joins may be downgraded to INNER. Returns the node
 * unchanged when no conversion is possible.
 */
private JoinNode tryNormalizeToOuterToInnerJoin(JoinNode node, Expression inheritedPredicate)
{
    JoinNode.Type joinType = node.getType();
    checkArgument(EnumSet.of(INNER, RIGHT, LEFT, FULL).contains(joinType), "Unsupported join type: %s", joinType);
    switch (joinType) {
        case INNER:
            // Already an inner join; nothing to normalize
            return node;
        case FULL: {
            boolean leftConvertible = canConvertOuterToInner(node.getLeft().getOutputSymbols(), inheritedPredicate);
            boolean rightConvertible = canConvertOuterToInner(node.getRight().getOutputSymbols(), inheritedPredicate);
            if (!leftConvertible && !rightConvertible) {
                return node;
            }
            if (leftConvertible && rightConvertible) {
                return withJoinType(node, INNER);
            }
            // Exactly one side is convertible: drop the outer semantics on that side only
            return withJoinType(node, leftConvertible ? LEFT : RIGHT);
        }
        case LEFT:
            if (!canConvertOuterToInner(node.getRight().getOutputSymbols(), inheritedPredicate)) {
                return node;
            }
            return withJoinType(node, INNER);
        default:
            // RIGHT (the only remaining type permitted by the checkArgument above)
            if (!canConvertOuterToInner(node.getLeft().getOutputSymbols(), inheritedPredicate)) {
                return node;
            }
            return withJoinType(node, INNER);
    }
}

// Copies the join node verbatim, replacing only its join type.
private static JoinNode withJoinType(JoinNode node, JoinNode.Type type)
{
    return new JoinNode(node.getId(), type, node.getLeft(), node.getRight(), node.getCriteria(), node.getOutputSymbols(), node.getFilter(), node.getLeftHashSymbol(), node.getRightHashSymbol());
}
/**
 * Returns true if the inherited predicate is guaranteed to reject the all-null
 * row produced for the inner side of an outer join, which means the outer join
 * behaves identically to an inner join under that predicate.
 */
private boolean canConvertOuterToInner(List<Symbol> innerSymbolsForOuterJoin, Expression inheritedPredicate)
{
    Set<Symbol> innerSymbols = ImmutableSet.copyOf(innerSymbolsForOuterJoin);
    for (Expression conjunct : extractConjuncts(inheritedPredicate)) {
        if (!DeterminismEvaluator.isDeterministic(conjunct)) {
            // Ignore a conjunct for this test if we can not deterministically get responses from it
            continue;
        }
        Object response = nullInputEvaluator(innerSymbols, conjunct);
        boolean rejectsNullRow = response == null
                || response instanceof NullLiteral
                || Boolean.FALSE.equals(response);
        if (rejectsNullRow) {
            // A single conjunct returning FALSE or NULL for all-NULL inner-side inputs
            // removes every effect of the outer join, making it equivalent to an inner join.
            return true;
        }
    }
    return false;
}
// Temporary implementation for joins because the SimplifyExpressions optimizers can not run properly on join clauses
/**
 * Constant-folds and simplifies the given expression without resolving any symbols,
 * then converts the (possibly constant) result back into an Expression of the
 * original expression's type.
 */
private Expression simplifyExpression(Expression expression)
{
    // Parameters have already been replaced upstream, hence the empty parameter list
    IdentityLinkedHashMap<Expression, Type> expressionTypes = getExpressionTypes(
            session,
            metadata,
            sqlParser,
            symbolAllocator.getTypes(),
            expression,
            emptyList() /* parameters have already been replaced */);
    ExpressionInterpreter optimizer = ExpressionInterpreter.expressionOptimizer(expression, metadata, session, expressionTypes);
    // NoOpSymbolResolver leaves symbols unresolved, so only constant sub-expressions fold
    return LiteralInterpreter.toExpression(optimizer.optimize(NoOpSymbolResolver.INSTANCE), expressionTypes.get(expression));
}
/**
 * Evaluates an expression's response to binding the specified input symbols to NULL
 *
 * @param nullSymbols symbols to treat as NULL during evaluation
 * @param expression the expression to partially evaluate
 * @return the optimizer's result: a constant value, a NullLiteral, or a residual
 *         expression when the outcome cannot be determined
 */
private Object nullInputEvaluator(final Collection<Symbol> nullSymbols, Expression expression)
{
    IdentityLinkedHashMap<Expression, Type> expressionTypes = getExpressionTypes(
            session,
            metadata,
            sqlParser,
            symbolAllocator.getTypes(),
            expression,
            emptyList() /* parameters have already been replaced */);
    // Symbols in nullSymbols resolve to null; all others remain symbolic references
    return ExpressionInterpreter.expressionOptimizer(expression, metadata, session, expressionTypes)
            .optimize(symbol -> nullSymbols.contains(symbol) ? null : symbol.toSymbolReference());
}
/**
 * Returns a predicate matching deterministic equality comparisons whose two
 * operands reference symbols entirely on opposite sides of the join — i.e.
 * expressions usable as equi-join clauses.
 */
private static Predicate<Expression> joinEqualityExpression(final Collection<Symbol> leftSymbols)
{
    return expression -> {
        // At this point in time, our join predicates need to be deterministic
        if (!isDeterministic(expression) || !(expression instanceof ComparisonExpression)) {
            return false;
        }
        ComparisonExpression comparison = (ComparisonExpression) expression;
        if (comparison.getType() != ComparisonExpressionType.EQUAL) {
            return false;
        }
        Set<Symbol> firstOperandSymbols = DependencyExtractor.extractUnique(comparison.getLeft());
        Set<Symbol> secondOperandSymbols = DependencyExtractor.extractUnique(comparison.getRight());
        if (firstOperandSymbols.isEmpty() || secondOperandSymbols.isEmpty()) {
            // A constant on either side is not a join clause
            return false;
        }
        // Accept either operand ordering, as long as each side references exactly one input
        boolean aligned = Iterables.all(firstOperandSymbols, in(leftSymbols)) && Iterables.all(secondOperandSymbols, not(in(leftSymbols)));
        boolean flipped = Iterables.all(secondOperandSymbols, in(leftSymbols)) && Iterables.all(firstOperandSymbols, not(in(leftSymbols)));
        return aligned || flipped;
    };
}
/**
 * Pushes predicates through a semi join. Conjuncts that only reference the
 * source's output symbols are pushed into the source; predicates on the join
 * symbol are also mirrored into the filtering source (guarded with an
 * IS NULL disjunct so NULL rows in the filtering source are preserved).
 * Anything that references the semi-join output symbol stays above the join.
 */
@Override
public PlanNode visitSemiJoin(SemiJoinNode node, RewriteContext<Expression> context)
{
    Expression inheritedPredicate = context.get();
    Expression sourceEffectivePredicate = EffectivePredicateExtractor.extract(node.getSource(), symbolAllocator.getTypes());
    List<Expression> sourceConjuncts = new ArrayList<>();
    List<Expression> filteringSourceConjuncts = new ArrayList<>();
    List<Expression> postJoinConjuncts = new ArrayList<>();
    // TODO: see if there are predicates that can be inferred from the semi join output
    // Push inherited and source predicates to filtering source via a contrived join predicate (but needs to avoid touching NULL values in the filtering source)
    Expression joinPredicate = equalsExpression(node.getSourceJoinSymbol(), node.getFilteringSourceJoinSymbol());
    EqualityInference joinInference = createEqualityInference(inheritedPredicate, sourceEffectivePredicate, joinPredicate);
    for (Expression conjunct : Iterables.concat(EqualityInference.nonInferrableConjuncts(inheritedPredicate), EqualityInference.nonInferrableConjuncts(sourceEffectivePredicate))) {
        // Rewrite each conjunct purely in terms of the filtering source join symbol, if possible
        Expression rewrittenConjunct = joinInference.rewriteExpression(conjunct, equalTo(node.getFilteringSourceJoinSymbol()));
        if (rewrittenConjunct != null && DeterminismEvaluator.isDeterministic(rewrittenConjunct)) {
            // Alter conjunct to include an OR filteringSourceJoinSymbol IS NULL disjunct
            Expression rewrittenConjunctOrNull = expressionOrNullSymbols(Predicate.isEqual(node.getFilteringSourceJoinSymbol())).apply(rewrittenConjunct);
            filteringSourceConjuncts.add(rewrittenConjunctOrNull);
        }
    }
    // Equalities scoped to the filtering source join symbol get the same IS NULL guard
    EqualityInference.EqualityPartition joinInferenceEqualityPartition = joinInference.generateEqualitiesPartitionedBy(equalTo(node.getFilteringSourceJoinSymbol()));
    filteringSourceConjuncts.addAll(joinInferenceEqualityPartition.getScopeEqualities().stream()
            .map(expressionOrNullSymbols(Predicate.isEqual(node.getFilteringSourceJoinSymbol())))
            .collect(Collectors.toList()));
    // Push inheritedPredicates down to the source if they don't involve the semi join output
    EqualityInference inheritedInference = createEqualityInference(inheritedPredicate);
    for (Expression conjunct : EqualityInference.nonInferrableConjuncts(inheritedPredicate)) {
        Expression rewrittenConjunct = inheritedInference.rewriteExpression(conjunct, in(node.getSource().getOutputSymbols()));
        // Since each source row is reflected exactly once in the output, ok to push non-deterministic predicates down
        if (rewrittenConjunct != null) {
            sourceConjuncts.add(rewrittenConjunct);
        }
        else {
            postJoinConjuncts.add(conjunct);
        }
    }
    // Add the inherited equality predicates back in
    EqualityInference.EqualityPartition equalityPartition = inheritedInference.generateEqualitiesPartitionedBy(in(node.getSource().getOutputSymbols()));
    sourceConjuncts.addAll(equalityPartition.getScopeEqualities());
    postJoinConjuncts.addAll(equalityPartition.getScopeComplementEqualities());
    postJoinConjuncts.addAll(equalityPartition.getScopeStraddlingEqualities());
    PlanNode rewrittenSource = context.rewrite(node.getSource(), combineConjuncts(sourceConjuncts));
    PlanNode rewrittenFilteringSource = context.rewrite(node.getFilteringSource(), combineConjuncts(filteringSourceConjuncts));
    // Only rebuild the node when a child actually changed (identity check keeps the plan stable)
    PlanNode output = node;
    if (rewrittenSource != node.getSource() || rewrittenFilteringSource != node.getFilteringSource()) {
        output = new SemiJoinNode(node.getId(), rewrittenSource, rewrittenFilteringSource, node.getSourceJoinSymbol(), node.getFilteringSourceJoinSymbol(), node.getSemiJoinOutput(), node.getSourceHashSymbol(), node.getFilteringSourceHashSymbol());
    }
    if (!postJoinConjuncts.isEmpty()) {
        output = new FilterNode(idAllocator.getNextId(), output, combineConjuncts(postJoinConjuncts));
    }
    return output;
}
/**
 * Pushes predicates through an aggregation. Only deterministic conjuncts that
 * can be rewritten purely in terms of the grouping keys may move below the
 * aggregation; everything else remains in a filter above it.
 */
@Override
public PlanNode visitAggregation(AggregationNode node, RewriteContext<Expression> context)
{
    if (node.getGroupingKeys().isEmpty()) {
        // cannot push predicates down through aggregations without any grouping columns
        return visitPlan(node, context);
    }
    Expression inheritedPredicate = context.get();
    EqualityInference equalityInference = createEqualityInference(inheritedPredicate);
    List<Expression> pushdownConjuncts = new ArrayList<>();
    List<Expression> postAggregationConjuncts = new ArrayList<>();
    // Strip out non-deterministic conjuncts
    // (pushing them below the aggregation could change how often they are evaluated)
    postAggregationConjuncts.addAll(ImmutableList.copyOf(filter(extractConjuncts(inheritedPredicate), not(DeterminismEvaluator::isDeterministic))));
    inheritedPredicate = stripNonDeterministicConjuncts(inheritedPredicate);
    // Sort non-equality predicates by those that can be pushed down and those that cannot
    for (Expression conjunct : EqualityInference.nonInferrableConjuncts(inheritedPredicate)) {
        Expression rewrittenConjunct = equalityInference.rewriteExpression(conjunct, in(node.getGroupingKeys()));
        if (rewrittenConjunct != null) {
            pushdownConjuncts.add(rewrittenConjunct);
        }
        else {
            postAggregationConjuncts.add(conjunct);
        }
    }
    // Add the equality predicates back in
    EqualityInference.EqualityPartition equalityPartition = equalityInference.generateEqualitiesPartitionedBy(in(node.getGroupingKeys()));
    pushdownConjuncts.addAll(equalityPartition.getScopeEqualities());
    postAggregationConjuncts.addAll(equalityPartition.getScopeComplementEqualities());
    postAggregationConjuncts.addAll(equalityPartition.getScopeStraddlingEqualities());
    PlanNode rewrittenSource = context.rewrite(node.getSource(), combineConjuncts(pushdownConjuncts));
    // Only rebuild the node when the source actually changed
    PlanNode output = node;
    if (rewrittenSource != node.getSource()) {
        output = new AggregationNode(node.getId(),
                rewrittenSource,
                node.getAggregations(),
                node.getFunctions(),
                node.getMasks(),
                node.getGroupingSets(),
                node.getStep(),
                node.getHashSymbol(),
                node.getGroupIdSymbol());
    }
    if (!postAggregationConjuncts.isEmpty()) {
        output = new FilterNode(idAllocator.getNextId(), output, combineConjuncts(postAggregationConjuncts));
    }
    return output;
}
/**
 * Pushes predicates through an unnest. Only deterministic conjuncts expressible
 * purely over the replicated (non-unnested) symbols can move below the node;
 * predicates on unnested columns stay in a filter above it.
 */
@Override
public PlanNode visitUnnest(UnnestNode node, RewriteContext<Expression> context)
{
    Expression inheritedPredicate = context.get();
    EqualityInference equalityInference = createEqualityInference(inheritedPredicate);
    List<Expression> pushdownConjuncts = new ArrayList<>();
    List<Expression> postUnnestConjuncts = new ArrayList<>();
    // Strip out non-deterministic conjuncts
    // (unnest replicates rows, so pushing them down could change evaluation counts)
    postUnnestConjuncts.addAll(ImmutableList.copyOf(filter(extractConjuncts(inheritedPredicate), not(DeterminismEvaluator::isDeterministic))));
    inheritedPredicate = stripNonDeterministicConjuncts(inheritedPredicate);
    // Sort non-equality predicates by those that can be pushed down and those that cannot
    for (Expression conjunct : EqualityInference.nonInferrableConjuncts(inheritedPredicate)) {
        Expression rewrittenConjunct = equalityInference.rewriteExpression(conjunct, in(node.getReplicateSymbols()));
        if (rewrittenConjunct != null) {
            pushdownConjuncts.add(rewrittenConjunct);
        }
        else {
            postUnnestConjuncts.add(conjunct);
        }
    }
    // Add the equality predicates back in
    EqualityInference.EqualityPartition equalityPartition = equalityInference.generateEqualitiesPartitionedBy(in(node.getReplicateSymbols()));
    pushdownConjuncts.addAll(equalityPartition.getScopeEqualities());
    postUnnestConjuncts.addAll(equalityPartition.getScopeComplementEqualities());
    postUnnestConjuncts.addAll(equalityPartition.getScopeStraddlingEqualities());
    PlanNode rewrittenSource = context.rewrite(node.getSource(), combineConjuncts(pushdownConjuncts));
    // Only rebuild the node when the source actually changed
    PlanNode output = node;
    if (rewrittenSource != node.getSource()) {
        output = new UnnestNode(node.getId(), rewrittenSource, node.getReplicateSymbols(), node.getUnnestSymbols(), node.getOrdinalitySymbol());
    }
    if (!postUnnestConjuncts.isEmpty()) {
        output = new FilterNode(idAllocator.getNextId(), output, combineConjuncts(postUnnestConjuncts));
    }
    return output;
}
/**
 * Predicates push straight through a sample node unchanged: the node's
 * children are rewritten with the same inherited predicate.
 */
@Override
public PlanNode visitSample(SampleNode node, RewriteContext<Expression> context)
{
    return context.defaultRewrite(node, context.get());
}
/**
 * Terminal case of the pushdown: any predicate remaining at a table scan is
 * simplified and, unless it folds to TRUE, kept as an explicit filter on top
 * of the scan.
 */
@Override
public PlanNode visitTableScan(TableScanNode node, RewriteContext<Expression> context)
{
    Expression remainingPredicate = simplifyExpression(context.get());
    if (BooleanLiteral.TRUE_LITERAL.equals(remainingPredicate)) {
        // Nothing left to filter
        return node;
    }
    return new FilterNode(idAllocator.getNextId(), node, remainingPredicate);
}
/**
 * Pushes predicates through an AssignUniqueId node. This is only valid when
 * the predicate does not reference the generated id column, which is enforced
 * with a checkState.
 */
@Override
public PlanNode visitAssignUniqueId(AssignUniqueId node, RewriteContext<Expression> context)
{
    Expression inheritedPredicate = context.get();
    Set<Symbol> predicateSymbols = DependencyExtractor.extractUnique(inheritedPredicate);
    checkState(!predicateSymbols.contains(node.getIdColumn()), "UniqueId in predicate is not yet supported");
    return context.defaultRewrite(node, inheritedPredicate);
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.dag.api;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.tez.common.TezCommonUtils;
import org.apache.tez.dag.api.VertexGroup.GroupInfo;
import org.apache.tez.dag.api.TaskLocationHint;
import org.apache.tez.runtime.api.LogicalIOProcessor;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
/**
 * Defines a vertex in the DAG. It represents the application logic that
 * processes and transforms the input data to create the output data. The
 * vertex represents the template from which tasks are created to execute
 * the application in parallel across a distributed execution environment.
 */
@Public
public class Vertex {
  // Unique name of this vertex within the DAG
  private final String vertexName;
  // User logic executed by every task of this vertex
  private final ProcessorDescriptor processorDescriptor;
  // Number of tasks; -1 means the value is determined at runtime
  private int parallelism;
  // Optional per-task placement hints; null until setLocationHint is called
  private VertexLocationHint locationHint;
  // cpu/memory allocated to each task; may be null only for the "incomplete" constructors
  private Resource taskResource;
  // Local files/archives shipped to each task, keyed by file name
  private final Map<String, LocalResource> taskLocalResources = new HashMap<String, LocalResource>();
  // Environment variables for the tasks of this vertex
  private Map<String, String> taskEnvironment = new HashMap<String, String>();
  // External inputs (data sources) attached directly to this vertex
  private final List<RootInputLeafOutput<InputDescriptor, InputInitializerDescriptor>> additionalInputs
      = new ArrayList<RootInputLeafOutput<InputDescriptor, InputInitializerDescriptor>>();
  // External outputs (data sinks) attached directly to this vertex
  private final List<RootInputLeafOutput<OutputDescriptor, OutputCommitterDescriptor>> additionalOutputs
      = new ArrayList<RootInputLeafOutput<OutputDescriptor, OutputCommitterDescriptor>>();
  // Optional plugin that can reconfigure the vertex at runtime
  private VertexManagerPluginDescriptor vertexManagerPlugin;
  // Vertices and edges wired up by the DAG when addEdge is called; kept in parallel
  // (inputVertices.get(i) is connected via inputEdges.get(i))
  private final List<Vertex> inputVertices = new ArrayList<Vertex>();
  private final List<Vertex> outputVertices = new ArrayList<Vertex>();
  private final List<Edge> inputEdges = new ArrayList<Edge>();
  private final List<Edge> outputEdges = new ArrayList<Edge>();
  // Vertex-group inputs keyed by group name; populated via addGroupInput
  private final Map<String, GroupInfo> groupInputs = Maps.newHashMap();
  // Descriptors backing additionalInputs/additionalOutputs, in registration order
  private final List<DataSourceDescriptor> dataSources = Lists.newLinkedList();
  private final List<DataSinkDescriptor> dataSinks = Lists.newLinkedList();
  // JVM launch opts for tasks of this vertex; empty string means "use configuration default"
  private String taskLaunchCmdOpts = "";
  // Package-private constructor used by create(name, processor, parallelism, resource);
  // a null resource is rejected (allowIncomplete == false)
  @InterfaceAudience.Private
  Vertex(String vertexName,
      ProcessorDescriptor processorDescriptor,
      int parallelism,
      Resource taskResource) {
    this(vertexName, processorDescriptor, parallelism, taskResource, false);
  }
  // Constructor for the factory methods that leave the task resource to be
  // picked up from configuration (allowIncomplete == true)
  private Vertex(String vertexName, ProcessorDescriptor processorDescriptor, int parallelism) {
    this(vertexName, processorDescriptor, parallelism, null, true);
  }
  // Constructor for the factory method that also leaves parallelism to be inferred
  private Vertex(String vertexName, ProcessorDescriptor processorDescriptor) {
    this(vertexName, processorDescriptor, -1);
  }
  /**
   * Base constructor: validates parallelism and, unless the caller explicitly
   * allows an incomplete specification, the task resource.
   */
  private Vertex(String vertexName,
      ProcessorDescriptor processorDescriptor,
      int parallelism,
      Resource taskResource,
      boolean allowIncomplete) {
    this.vertexName = vertexName;
    this.processorDescriptor = processorDescriptor;
    this.parallelism = parallelism;
    this.taskResource = taskResource;
    if (parallelism < -1) {
      throw new IllegalArgumentException(
          "Parallelism should be -1 if determined by the AM"
          + ", otherwise should be >= 0");
    }
    if (!allowIncomplete && taskResource == null) {
      throw new IllegalArgumentException("Resource cannot be null");
    }
  }
  /**
   * Create a new vertex with the given name.
   *
   * @param vertexName
   *          Name of the vertex
   * @param processorDescriptor
   *          Description of the processor that is executed in every task of
   *          this vertex
   * @param parallelism
   *          Number of tasks in this vertex. Set to -1 if this is going to be
   *          decided at runtime. Parallelism may change at runtime due to graph
   *          reconfigurations.
   * @param taskResource
   *          Physical resources like memory/cpu that are used by each task of
   *          this vertex.
   * @return a new Vertex with the given parameters
   */
  public static Vertex create(String vertexName,
      ProcessorDescriptor processorDescriptor,
      int parallelism,
      Resource taskResource) {
    return new Vertex(vertexName, processorDescriptor, parallelism, taskResource);
  }
  /**
   * Create a new vertex with the given name. <br>
   * The vertex task resource will be picked from configuration <br>
   * The vertex parallelism will be inferred. If it cannot be inferred then an
   * error will be reported. This constructor may be used for vertices that have
   * data sources, or connected via 1-1 edges or have runtime parallelism
   * estimation via data source initializers or vertex managers. Calling this
   * constructor is equivalent to calling
   * {@link Vertex#Vertex(String, ProcessorDescriptor, int)} with the
   * parallelism set to -1.
   *
   * @param vertexName
   *          Name of the vertex
   * @param processorDescriptor
   *          Description of the processor that is executed in every task of
   *          this vertex
   * @return a new Vertex with the given parameters
   */
  public static Vertex create(String vertexName, ProcessorDescriptor processorDescriptor) {
    return new Vertex(vertexName, processorDescriptor);
  }
  /**
   * Create a new vertex with the given name and parallelism. <br>
   * The vertex task resource will be picked from configuration
   * {@link TezConfiguration#TEZ_TASK_RESOURCE_MEMORY_MB} &
   * {@link TezConfiguration#TEZ_TASK_RESOURCE_CPU_VCORES} Applications that
   * want more control over their task resource specification may create their
   * own logic to determine task resources and use
   * {@link Vertex#Vertex(String, ProcessorDescriptor, int, Resource)} to create
   * the Vertex.
   *
   * @param vertexName
   *          Name of the vertex
   * @param processorDescriptor
   *          Description of the processor that is executed in every task of
   *          this vertex
   * @param parallelism
   *          Number of tasks in this vertex. Set to -1 if this is going to be
   *          decided at runtime. Parallelism may change at runtime due to graph
   *          reconfigurations.
   * @return a new Vertex with the given parameters
   */
  public static Vertex create(String vertexName, ProcessorDescriptor processorDescriptor,
      int parallelism) {
    return new Vertex(vertexName, processorDescriptor, parallelism);
  }
  /**
   * Get the vertex name
   * @return vertex name
   */
  public String getName() {
    return vertexName;
  }
  /**
   * Get the vertex task processor descriptor
   * @return process descriptor
   */
  public ProcessorDescriptor getProcessorDescriptor() {
    return this.processorDescriptor;
  }
  /**
   * Get the specified number of tasks specified to run in this vertex. It may
   * be -1 if the parallelism is defined at runtime. Parallelism may change at
   * runtime
   * @return vertex parallelism
   */
  public int getParallelism() {
    return parallelism;
  }
  /**
   * Set the number of tasks for this vertex. Package-private: only the
   * framework may update parallelism after construction.
   * @param parallelism Parallelism for this vertex
   */
  void setParallelism(int parallelism) {
    this.parallelism = parallelism;
  }
  /**
   * Get the resources for the vertex
   * @return the physical resources like cpu/memory of each vertex task
   */
  public Resource getTaskResource() {
    return taskResource;
  }
  /**
   * Specify location hints for the tasks of this vertex. Hints must be specified
   * for all tasks as defined by the parallelism
   * @param locationHint list of locations for each task in the vertex
   * @return this Vertex
   */
  public Vertex setLocationHint(VertexLocationHint locationHint) {
    List<TaskLocationHint> locations = locationHint.getTaskLocationHints();
    if (locations == null) {
      // No per-task hints supplied; nothing to record
      return this;
    }
    Preconditions.checkArgument((locations.size() == parallelism),
        "Locations array length must match the parallelism set for the vertex");
    this.locationHint = locationHint;
    return this;
  }
  // used internally to create parallelism location resource file
  VertexLocationHint getLocationHint() {
    return locationHint;
  }
  /**
   * Set the files etc that must be provided to the tasks of this vertex
   * @param localFiles
   *          files that must be available locally for each task. These files
   *          may be regular files, archives etc. as specified by the value
   *          elements of the map. May be null, in which case this is a no-op.
   * @return this Vertex
   */
  public Vertex addTaskLocalFiles(Map<String, LocalResource> localFiles) {
    if (localFiles != null) {
      // Merges into the existing map; conflicting duplicate names are handled by the utility
      TezCommonUtils.addAdditionalLocalResources(localFiles, taskLocalResources, "Vertex " + getName());
    }
    return this;
  }
  /**
   * Get the files etc that must be provided by the tasks of this vertex
   * @return local files of the vertex. Key is the file name.
   */
  public Map<String, LocalResource> getTaskLocalFiles() {
    return taskLocalResources;
  }
  /**
   * Set the Key-Value pairs of environment variables for tasks of this vertex.
   * Entries are added to (and may overwrite) any environment previously set on
   * this vertex. This method should be used if different vertices need
   * different env. Else, set environment for all vertices via
   * TezConfiguration#TEZ_TASK_LAUNCH_ENV
   * @param environment environment variables to add; must not be null
   * @return this Vertex
   */
  public Vertex setTaskEnvironment(Map<String, String> environment) {
    Preconditions.checkArgument(environment != null);
    this.taskEnvironment.putAll(environment);
    return this;
  }
  /**
   * Get the environment variables of the tasks
   * @return environment variable map
   */
  public Map<String, String> getTaskEnvironment() {
    return taskEnvironment;
  }
  /**
   * Set the command opts for tasks of this vertex. This method should be used
   * when different vertices have different opts. Else, set the launch opts for
   * all vertices via TezConfiguration#TEZ_TASK_LAUNCH_CMD_OPTS
   * @param cmdOpts launch command options for the tasks of this vertex
   * @return this Vertex
   */
  public Vertex setTaskLaunchCmdOpts(String cmdOpts){
    this.taskLaunchCmdOpts = cmdOpts;
    return this;
  }
  /**
   * Specifies an external data source for a Vertex. This is meant to be used
   * when a Vertex reads Input directly from an external source </p>
   *
   * For vertices which read data generated by another vertex - use the
   * {@link DAG addEdge} method.
   *
   * If a vertex needs to use data generated by another vertex in the DAG and
   * also from an external source, a combination of this API and the DAG.addEdge
   * API can be used. </p>
   *
   * Note: If more than one RootInput exists on a vertex, which generates events
   * which need to be routed, or generates information to set parallelism, a
   * custom vertex manager should be setup to handle this. Not using a custom
   * vertex manager for such a scenario will lead to a runtime failure.
   *
   * @param inputName
   *          the name of the input. This will be used when accessing the input
   *          in the {@link LogicalIOProcessor}
   * @param dataSourceDescriptor
   *          the {@link DataSourceDescriptor} for this input.
   * @return this Vertex
   */
  public Vertex addDataSource(String inputName, DataSourceDescriptor dataSourceDescriptor) {
    additionalInputs
        .add(new RootInputLeafOutput<InputDescriptor, InputInitializerDescriptor>(
            inputName, dataSourceDescriptor.getInputDescriptor(),
            dataSourceDescriptor.getInputInitializerDescriptor()));
    this.dataSources.add(dataSourceDescriptor);
    return this;
  }
  /**
   * Specifies an external data sink for a Vertex. This is meant to be used when
   * a Vertex writes Output directly to an external destination. </p>
   *
   * If an output of the vertex is meant to be consumed by another Vertex in the
   * DAG - use the {@link DAG addEdge} method.
   *
   * If a vertex needs generate data to an external source as well as for
   * another Vertex in the DAG, a combination of this API and the DAG.addEdge
   * API can be used.
   *
   * @param outputName
   *          the name of the output. This will be used when accessing the
   *          output in the {@link LogicalIOProcessor}
   * @param dataSinkDescriptor
   *          the {@link DataSinkDescriptor} for this output
   * @return this Vertex
   */
  public Vertex addDataSink(String outputName, DataSinkDescriptor dataSinkDescriptor) {
    additionalOutputs
        .add(new RootInputLeafOutput<OutputDescriptor, OutputCommitterDescriptor>(
            outputName, dataSinkDescriptor.getOutputDescriptor(),
            dataSinkDescriptor.getOutputCommitterDescriptor()));
    this.dataSinks.add(dataSinkDescriptor);
    return this;
  }
  // Package-private variant used by the framework to attach a pre-built sink
  // without registering a DataSinkDescriptor
  Vertex addAdditionalDataSink(RootInputLeafOutput<OutputDescriptor, OutputCommitterDescriptor> output) {
    additionalOutputs.add(output);
    return this;
  }
  /**
   * Specifies a {@link VertexManagerPlugin} for the vertex. This plugin can be
   * used to modify the parallelism or reconfigure the vertex at runtime using
   * user defined code embedded in the plugin
   *
   * @param vertexManagerPluginDescriptor descriptor of the plugin to use
   * @return this Vertex
   */
  public Vertex setVertexManagerPlugin(
      VertexManagerPluginDescriptor vertexManagerPluginDescriptor) {
    this.vertexManagerPlugin = vertexManagerPluginDescriptor;
    return this;
  }
  /**
   * Get the launch command opts for tasks in this vertex
   * @return launch command opts
   */
  public String getTaskLaunchCmdOpts(){
    return taskLaunchCmdOpts;
  }
  @Override
  public String toString() {
    return "[" + vertexName + " : " + processorDescriptor.getClassName() + "]";
  }
  // Package-private accessors used by the framework during DAG construction
  VertexManagerPluginDescriptor getVertexManagerPlugin() {
    return vertexManagerPlugin;
  }
  Map<String, GroupInfo> getGroupInputs() {
    return groupInputs;
  }
  // Registers a vertex-group input; a duplicate group name is a programming error
  void addGroupInput(String groupName, GroupInfo groupInputInfo) {
    if (groupInputs.put(groupName, groupInputInfo) != null) {
      throw new IllegalStateException(
          "Vertex: " + getName() +
          " already has group input with name:" + groupName);
    }
  }
  // Called by the DAG when an incoming edge is added; keeps vertex and edge lists in sync
  void addInputVertex(Vertex inputVertex, Edge edge) {
    inputVertices.add(inputVertex);
    inputEdges.add(edge);
  }
  // Called by the DAG when an outgoing edge is added; keeps vertex and edge lists in sync
  void addOutputVertex(Vertex outputVertex, Edge edge) {
    outputVertices.add(outputVertex);
    outputEdges.add(edge);
  }
  /**
   * Get the input vertices for this vertex
   * @return unmodifiable List of input vertices
   */
  public List<Vertex> getInputVertices() {
    return Collections.unmodifiableList(inputVertices);
  }
  /**
   * Get the output vertices for this vertex
   * @return unmodifiable List of output vertices
   */
  public List<Vertex> getOutputVertices() {
    return Collections.unmodifiableList(outputVertices);
  }
  /**
   * Set the cpu/memory etc resources used by tasks of this vertex.
   * Package-private: used by the framework to fill in resources picked from
   * configuration.
   * @param resource {@link Resource} for the tasks of this vertex
   */
  void setTaskResource(Resource resource) {
    this.taskResource = resource;
  }
  @Private
  public List<DataSourceDescriptor> getDataSources() {
    return dataSources;
  }
  @Private
  public List<DataSinkDescriptor> getDataSinks() {
    return dataSinks;
  }
  List<Edge> getInputEdges() {
    return inputEdges;
  }
  List<Edge> getOutputEdges() {
    return outputEdges;
  }
  List<RootInputLeafOutput<InputDescriptor, InputInitializerDescriptor>> getInputs() {
    return additionalInputs;
  }
  List<RootInputLeafOutput<OutputDescriptor, OutputCommitterDescriptor>> getOutputs() {
    return additionalOutputs;
  }
}
| |
/*
* Copyright 2015 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package im.vector.fragments;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import org.matrix.androidsdk.adapters.MessagesAdapter;
import org.matrix.androidsdk.data.EventTimeline;
import org.matrix.androidsdk.data.RoomState;
import org.matrix.androidsdk.rest.model.Event;
import java.util.ArrayList;
import java.util.List;
import im.vector.R;
import im.vector.activity.VectorBaseSearchActivity;
import im.vector.activity.VectorRoomActivity;
import im.vector.adapters.VectorSearchMessagesListAdapter;
public class VectorSearchMessagesListFragment extends VectorMessageListFragment {
// parameters
// Pattern queued while another search is in flight — presumably applied when the
// current one finishes; TODO confirm against the search-trigger code
protected String mPendingPattern;
// Pattern of the search currently in progress; reset to null in onPause
protected String mSearchingPattern;
// Listeners notified of search results — NOTE(review): population/notification
// sites are outside this view; verify in the parent class
protected ArrayList<OnSearchResultListener> mSearchListeners = new ArrayList<OnSearchResultListener>();
// Spinner shown while older results are being loaded; looked up from the host
// activity's layout in onAttach, may legitimately be null
protected View mProgressView = null;
/**
 * Static factory: builds the fragment and packs its configuration into the
 * arguments bundle so it survives recreation.
 *
 * @param matrixId the session Id.
 * @param roomId the room Id; when null no room extra is set — presumably a
 *               cross-room search (createMessagesAdapter passes (null == mRoom)
 *               to the adapter); confirm against the adapter's contract
 * @param layoutResId the used layout.
 * @return a new fragment instance with its arguments populated
 */
public static VectorSearchMessagesListFragment newInstance(String matrixId, String roomId, int layoutResId) {
    VectorSearchMessagesListFragment frag = new VectorSearchMessagesListFragment();
    Bundle args = new Bundle();
    args.putInt(ARG_LAYOUT_ID, layoutResId);
    args.putString(ARG_MATRIX_ID, matrixId);
    if (null != roomId) {
        args.putString(ARG_ROOM_ID, roomId);
    }
    frag.setArguments(args);
    return frag;
}
@Override
public MessagesAdapter createMessagesAdapter() {
return new VectorSearchMessagesListAdapter(mSession, getActivity(), (null == mRoom), getMXMediasCache());
}
@Override
public void onPause() {
super.onPause();
if (mSession.isAlive()) {
cancelSearch();
if (mIsMediaSearch) {
mSession.cancelSearchMediaName();
} else {
mSession.cancelSearchMessageText();
}
mSearchingPattern = null;
}
}
@Override
public void onResume() {
super.onResume();
if (mSession.isAlive()) {
if (getActivity() instanceof VectorBaseSearchActivity.IVectorSearchActivity) {
((VectorBaseSearchActivity.IVectorSearchActivity) getActivity()).refreshSearch();
}
}
}
/**
* Called when a fragment is first attached to its activity.
* {@link #onCreate(Bundle)} will be called after this.
*
* @param aHostActivity parent activity
*/
@Override
public void onAttach(Activity aHostActivity) {
super.onAttach(aHostActivity);
mProgressView = getActivity().findViewById(R.id.search_load_oldest_progress);
}
/**
* The user scrolls the list.
* Apply an expected behaviour
* @param event the scroll event
*/
@Override
public void onListTouch(MotionEvent event) {
}
/**
* return true to display all the events.
* else the unknown events will be hidden.
*/
@Override
public boolean isDisplayAllEvents() {
return true;
}
/**
* Display a global spinner or any UI item to warn the user that there are some pending actions.
*/
@Override
public void showLoadingBackProgress() {
if (null != mProgressView) {
mProgressView.setVisibility(View.VISIBLE);
}
}
/**
* Dismiss any global spinner.
*/
@Override
public void hideLoadingBackProgress() {
if (null != mProgressView) {
mProgressView.setVisibility(View.GONE);
}
}
/**
* Scroll the fragment to the bottom
*/
@Override
public void scrollToBottom() {
if (0 != mAdapter.getCount()) {
mMessageListView.setSelection(mAdapter.getCount() - 1);
}
}
/**
* Tell if the search is allowed for a dedicated pattern
* @param pattern the searched pattern.
* @return true if the search is allowed.
*/
protected boolean allowSearch(String pattern) {
// ConsoleMessageListFragment displays the list of unfiltered messages when there is no pattern
// in the search case, clear the list and hide it
return !TextUtils.isEmpty(pattern);
}
/**
* Update the searched pattern.
* @param pattern the pattern to find out. null to disable the search mode
*/
@Override
public void searchPattern(final String pattern, final OnSearchResultListener onSearchResultListener) {
// add the listener to list to warn when the search is done.
if (null != onSearchResultListener) {
mSearchListeners.add(onSearchResultListener);
}
// wait that the fragment is displayed
if (null == mMessageListView) {
mPendingPattern = pattern;
return;
}
// please wait
if (TextUtils.equals(mSearchingPattern, pattern)) {
mSearchListeners.add(onSearchResultListener);
return;
}
if (!allowSearch(pattern)) {
mPattern = null;
mMessageListView.setVisibility(View.GONE);
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
for (OnSearchResultListener listener : mSearchListeners) {
try {
listener.onSearchSucceed(0);
} catch (Exception e) {
}
}
mSearchListeners.clear();
mSearchingPattern = null;
}
});
} else {
// start a search
mAdapter.clear();
mSearchingPattern = pattern;
if (mAdapter instanceof VectorSearchMessagesListAdapter) {
((VectorSearchMessagesListAdapter) mAdapter).setTextToHighlight(pattern);
}
super.searchPattern(pattern, mIsMediaSearch, new OnSearchResultListener() {
@Override
public void onSearchSucceed(int nbrMessages) {
// the pattern has been updated while search
if (!TextUtils.equals(pattern, mSearchingPattern)) {
mAdapter.clear();
mMessageListView.setVisibility(View.GONE);
} else {
mIsInitialSyncing = false;
mMessageListView.setOnScrollListener(mScrollListener);
mMessageListView.setAdapter(mAdapter);
// scroll to the bottom
scrollToBottom();
mMessageListView.setVisibility(View.VISIBLE);
for (OnSearchResultListener listener : mSearchListeners) {
try {
listener.onSearchSucceed(nbrMessages);
} catch (Exception e) {
}
}
mSearchListeners.clear();
mSearchingPattern = null;
}
}
@Override
public void onSearchFailed() {
mMessageListView.setVisibility(View.GONE);
// clear the results list if teh search fails
mAdapter.clear();
for (OnSearchResultListener listener : mSearchListeners) {
try {
listener.onSearchFailed();
} catch (Exception e) {
}
}
mSearchListeners.clear();
mSearchingPattern = null;
}
});
}
}
@Override
public boolean onRowLongClick(int position) {
onContentClick(position);
return true;
}
@Override
public void onContentClick(int position) {
Event event = mAdapter.getItem(position).getEvent();
Intent intent = new Intent(getActivity(), VectorRoomActivity.class);
intent.putExtra(VectorRoomActivity.EXTRA_MATRIX_ID, mSession.getMyUserId());
intent.putExtra(VectorRoomActivity.EXTRA_ROOM_ID, event.roomId);
intent.putExtra(VectorRoomActivity.EXTRA_EVENT_ID, event.eventId);
getActivity().startActivity(intent);
}
/**
* Called when a long click is performed on the message content
* @param position the cell position
* @return true if managed
*/
@Override
public boolean onContentLongClick(int position) {
return false;
}
//==============================================================================================================
// rooms events management : ignore any update on the adapter while searching
//==============================================================================================================
@Override
public void onEvent(final Event event, final EventTimeline.Direction direction, final RoomState roomState) {
}
@Override
public void onLiveEventsChunkProcessed() {
}
@Override
public void onReceiptEvent(List<String> senderIds){
}
}
| |
/**
* Copyright 2007-2015, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.k3po.driver.internal.netty.channel;
import static org.jboss.netty.channel.Channels.close;
import static org.jboss.netty.channel.Channels.future;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelDownstreamHandler;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelUpstreamHandler;
import org.kaazing.k3po.driver.internal.netty.bootstrap.channel.AbstractChannel;
/**
 * Static helpers that fire extended channel events (shutdown-input/output,
 * flush, abort, advise) up or down a Netty 3 {@link ChannelPipeline}.
 * Mirrors the style of {@code org.jboss.netty.channel.Channels}.
 */
public final class Channels {
    /**
     * Sends a {@code "shutdownInput"} event to the
     * {@link ChannelUpstreamHandler} which is placed in the closest upstream
     * from the handler associated with the specified
     * {@link ChannelHandlerContext}.
     */
    public static void fireInputShutdown(ChannelHandlerContext ctx) {
        ctx.getPipeline().sendUpstream(
                new UpstreamShutdownInputEvent(ctx.getChannel()));
    }

    /**
     * Sends a {@code "shutdownInput"} event to the first
     * {@link ChannelUpstreamHandler} in the {@link ChannelPipeline} of
     * the specified {@link Channel}.
     */
    public static void fireInputShutdown(Channel channel) {
        channel.getPipeline().sendUpstream(
                new UpstreamShutdownInputEvent(channel));
    }

    /**
     * Sends a {@code "flushed"} event to the first
     * {@link ChannelUpstreamHandler} in the {@link ChannelPipeline} of
     * the specified {@link Channel}.
     */
    public static void fireFlushed(Channel channel) {
        channel.getPipeline().sendUpstream(
                new UpstreamFlushEvent(channel));
    }

    /**
     * Sends a {@code "shutdownInput"} request to the last
     * {@link ChannelDownstreamHandler} in the {@link ChannelPipeline} of
     * the specified {@link Channel}.
     *
     * @param channel the channel whose input is to be shut down
     *
     * @return the {@link ChannelFuture} which will be notified when the
     *         shutdownInput operation is done
     */
    public static ChannelFuture shutdownInput(Channel channel) {
        ChannelFuture future = future(channel);
        channel.getPipeline().sendDownstream(
                new DownstreamShutdownInputEvent(channel, future));
        return future;
    }

    /**
     * Sends a {@code "shutdownInput"} request to the
     * {@link ChannelDownstreamHandler} which is placed in the closest
     * downstream from the handler associated with the specified
     * {@link ChannelHandlerContext}.
     *
     * @param ctx the context
     * @param future the future which will be notified when the shutdownInput
     *        operation is done
     */
    public static void shutdownInput(ChannelHandlerContext ctx, ChannelFuture future) {
        ctx.sendDownstream(
                new DownstreamShutdownInputEvent(ctx.getChannel(), future));
    }

    /**
     * Sends a {@code "shutdownOutput"} event to the
     * {@link ChannelUpstreamHandler} which is placed in the closest upstream
     * from the handler associated with the specified
     * {@link ChannelHandlerContext}.
     */
    public static void fireOutputShutdown(ChannelHandlerContext ctx) {
        ctx.getPipeline().sendUpstream(
                new UpstreamShutdownOutputEvent(ctx.getChannel()));
    }

    /**
     * Sends a {@code "shutdownOutput"} event to the first
     * {@link ChannelUpstreamHandler} in the {@link ChannelPipeline} of
     * the specified {@link Channel}.
     */
    public static void fireOutputShutdown(Channel channel) {
        channel.getPipeline().sendUpstream(
                new UpstreamShutdownOutputEvent(channel));
    }

    /**
     * Sends a {@code "shutdownOutput"} request to the last
     * {@link ChannelDownstreamHandler} in the {@link ChannelPipeline} of
     * the specified {@link Channel}.
     *
     * @param channel the channel whose output is to be shut down
     *
     * @return the {@link ChannelFuture} which will be notified when the
     *         shutdownOutput operation is done
     */
    public static ChannelFuture shutdownOutput(Channel channel) {
        ChannelFuture future = future(channel);
        channel.getPipeline().sendDownstream(
                new DownstreamShutdownOutputEvent(channel, future));
        return future;
    }

    /**
     * Sends a {@code "shutdownOutput"} request to the
     * {@link ChannelDownstreamHandler} which is placed in the closest
     * downstream from the handler associated with the specified
     * {@link ChannelHandlerContext}.
     *
     * @param ctx the context
     * @param future the future which will be notified when the shutdownOutput
     *        operation is done
     */
    public static void shutdownOutput(ChannelHandlerContext ctx, ChannelFuture future) {
        ctx.sendDownstream(
                new DownstreamShutdownOutputEvent(ctx.getChannel(), future));
    }

    /**
     * Sends a {@code "flush"} request to the last
     * {@link ChannelDownstreamHandler} in the {@link ChannelPipeline} of
     * the specified {@link Channel}.
     *
     * @param channel the channel to flush
     *
     * @return the {@link ChannelFuture} which will be notified when the
     *         flush operation is done
     */
    public static ChannelFuture flush(Channel channel) {
        ChannelFuture future = future(channel);
        channel.getPipeline().sendDownstream(
                new DownstreamFlushEvent(channel, future));
        return future;
    }

    /**
     * Sends a {@code "flush"} request to the
     * {@link ChannelDownstreamHandler} which is placed in the closest
     * downstream from the handler associated with the specified
     * {@link ChannelHandlerContext}.
     *
     * @param ctx the context
     * @param future the future which will be notified when the flush
     *        operation is done
     */
    public static void flush(ChannelHandlerContext ctx, ChannelFuture future) {
        ctx.sendDownstream(
                new DownstreamFlushEvent(ctx.getChannel(), future));
    }

    /**
     * Sends a write-abort request down the pipeline of the specified channel.
     *
     * @return the {@link ChannelFuture} notified when the abort is done
     */
    public static ChannelFuture abortOutput(Channel channel) {
        ChannelFuture future = future(channel);
        channel.getPipeline().sendDownstream(
                new DownstreamWriteAbortEvent(channel, future));
        return future;
    }

    /** Sends a write-abort request downstream from the given handler context. */
    public static void abortOutput(ChannelHandlerContext ctx, ChannelFuture future) {
        ctx.sendDownstream(new DownstreamWriteAbortEvent(ctx.getChannel(), future));
    }

    /** Fires a write-abort event up the pipeline of the specified channel. */
    public static void fireOutputAborted(Channel channel) {
        channel.getPipeline().sendUpstream(new UpstreamWriteAbortEvent(channel));
    }

    /**
     * Sends a read-abort request down the pipeline of the specified channel.
     *
     * @return the {@link ChannelFuture} notified when the abort is done
     */
    public static ChannelFuture abortInput(Channel channel) {
        ChannelFuture future = future(channel);
        channel.getPipeline().sendDownstream(
                new DownstreamReadAbortEvent(channel, future));
        return future;
    }

    /** Sends a read-abort request downstream from the given handler context. */
    public static void abortInput(ChannelHandlerContext ctx, ChannelFuture future) {
        ctx.sendDownstream(new DownstreamReadAbortEvent(ctx.getChannel(), future));
    }

    /** Fires a read-abort event up the pipeline of the specified channel. */
    public static void fireInputAborted(Channel channel) {
        channel.getPipeline().sendUpstream(new UpstreamReadAbortEvent(channel));
    }

    /**
     * Sends a write-advise request (carrying an opaque {@code value}) down the
     * pipeline of the specified channel.
     *
     * @return the {@link ChannelFuture} notified when the advise is done
     */
    public static ChannelFuture adviseOutput(Channel channel, Object value) {
        ChannelFuture future = future(channel);
        channel.getPipeline().sendDownstream(
                new DownstreamWriteAdviseEvent(channel, future, value));
        return future;
    }

    /** Sends a write-advise request downstream from the given handler context. */
    public static void adviseOutput(ChannelHandlerContext ctx, ChannelFuture future, Object value) {
        ctx.sendDownstream(new DownstreamWriteAdviseEvent(ctx.getChannel(), future, value));
    }

    /** Fires a write-advise event up the pipeline of the specified channel. */
    public static void fireOutputAdvised(Channel channel, Object value) {
        channel.getPipeline().sendUpstream(new UpstreamWriteAdviseEvent(channel, value));
    }

    /**
     * Sends a read-advise request (carrying an opaque {@code value}) down the
     * pipeline of the specified channel.
     *
     * @return the {@link ChannelFuture} notified when the advise is done
     */
    public static ChannelFuture adviseInput(Channel channel, Object value) {
        ChannelFuture future = future(channel);
        channel.getPipeline().sendDownstream(
                new DownstreamReadAdviseEvent(channel, future, value));
        return future;
    }

    /** Sends a read-advise request downstream from the given handler context. */
    public static void adviseInput(ChannelHandlerContext ctx, ChannelFuture future, Object value) {
        ctx.sendDownstream(new DownstreamReadAdviseEvent(ctx.getChannel(), future, value));
    }

    /** Fires a read-advise event up the pipeline of the specified channel. */
    public static void fireInputAdvised(Channel channel, Object value) {
        channel.getPipeline().sendUpstream(new UpstreamReadAdviseEvent(channel, value));
    }

    /**
     * Half-closes the output if the channel supports it ({@link AbstractChannel}),
     * otherwise falls back to a full close.
     */
    public static ChannelFuture shutdownOutputOrClose(Channel channel) {
        if (channel instanceof AbstractChannel) {
            return shutdownOutput(channel);
        }
        else {
            return close(channel);
        }
    }

    /**
     * Half-closes the output if the context's channel supports it
     * ({@link AbstractChannel}), otherwise falls back to a full close.
     */
    public static void shutdownOutputOrClose(ChannelHandlerContext ctx, ChannelFuture future) {
        if (ctx.getChannel() instanceof AbstractChannel) {
            shutdownOutput(ctx, future);
        }
        else {
            close(ctx, future);
        }
    }

    /**
     * Aborts the output if the channel supports it ({@link AbstractChannel}),
     * otherwise falls back to a full close.
     */
    public static ChannelFuture abortOutputOrClose(Channel channel) {
        if (channel instanceof AbstractChannel) {
            return abortOutput(channel);
        }
        else {
            return close(channel);
        }
    }

    /**
     * Aborts the output if the context's channel supports it
     * ({@link AbstractChannel}), otherwise falls back to a full close.
     */
    public static void abortOutputOrClose(ChannelHandlerContext ctx, ChannelFuture future) {
        if (ctx.getChannel() instanceof AbstractChannel) {
            abortOutput(ctx, future);
        }
        else {
            close(ctx, future);
        }
    }

    /**
     * Aborts the input if the context's channel supports it
     * ({@link AbstractChannel}); otherwise the operation is a no-op and the
     * future is completed successfully.
     */
    public static void abortInputOrSuccess(ChannelHandlerContext ctx, ChannelFuture future) {
        if (ctx.getChannel() instanceof AbstractChannel) {
            abortInput(ctx, future);
        }
        else {
            future.setSuccess();
        }
    }

    private Channels() {
        // no instances
    }
}
| |
package org.openntf.domino.demoApp;
/*
<!--
Copyright 2015 Paul Withers
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License
-->
*/
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Logger;
import org.openntf.domino.demoApp.components.HeaderComponent;
import org.openntf.domino.demoApp.components.TargetSelector.Target;
import org.openntf.domino.demoApp.pages.DatabaseView;
import org.openntf.domino.demoApp.pages.DocumentView;
import org.openntf.domino.demoApp.pages.DominoElseView;
import org.openntf.domino.demoApp.pages.ErrorView;
import org.openntf.domino.demoApp.pages.SessionView;
import org.openntf.domino.demoApp.pages.ViewView;
import org.openntf.domino.demoApp.pages.XotsView;
import org.openntf.domino.demoApp.utils.FactoryUtils;
import org.vaadin.sliderpanel.SliderPanel;
import org.vaadin.sliderpanel.SliderPanelBuilder;
import org.vaadin.sliderpanel.SliderPanelStyles;
import org.vaadin.sliderpanel.client.SliderMode;
import org.vaadin.sliderpanel.client.SliderTabPosition;
import com.vaadin.annotations.Theme;
import com.vaadin.annotations.VaadinServletConfiguration;
import com.vaadin.annotations.Viewport;
import com.vaadin.annotations.Widgetset;
import com.vaadin.navigator.Navigator;
import com.vaadin.navigator.View;
import com.vaadin.server.FontAwesome;
import com.vaadin.server.Responsive;
import com.vaadin.server.VaadinRequest;
import com.vaadin.server.VaadinServlet;
import com.vaadin.shared.ui.label.ContentMode;
import com.vaadin.ui.Button;
import com.vaadin.ui.Button.ClickEvent;
import com.vaadin.ui.Button.ClickListener;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.Label;
import com.vaadin.ui.MenuBar;
import com.vaadin.ui.MenuBar.MenuItem;
import com.vaadin.ui.Notification;
import com.vaadin.ui.Notification.Type;
import com.vaadin.ui.UI;
import com.vaadin.ui.VerticalLayout;
import com.vaadin.ui.themes.ValoTheme;
@Viewport("user-scalable=no,initial-scale=1.0")
@SuppressWarnings("serial")
@Theme("OdaDemoTheme")
@Widgetset("org.openntf.domino.demoApp.widgetset.Oda_demoappWidgetset")
/**
 * Main Vaadin UI for the OpenNTF Domino API demo application.
 * Builds a header + navigable body, a right-hand information slider and a
 * bottom configuration-settings slider, and registers the demo views.
 */
public class DemoUI extends UI {
    private static final Logger logger = Logger.getLogger(DemoUI.class.getName());
    // Navigator driving view changes inside the body layout
    private Navigator uiNavigator;
    // Top header bar with the menu
    private HeaderComponent header;
    // Main content area the Navigator renders views into
    private VerticalLayout body;
    // Right-hand "INFORMATION" slide-out panel
    private SliderPanel rightSlider;
    // Whether the demo database has been set up; navigation is blocked until then
    private boolean setup;
    // Which target(s) the demo runs against
    private Target appTarget;
    // Label in the bottom slider showing the current factory/config settings
    private Label configDetails;
    // Per-key document counters — presumably keyed by task/thread name; TODO confirm against callers
    private ConcurrentHashMap<String, Integer> createdDocs;
    private ConcurrentHashMap<String, Integer> updatedDocs;

    /** Servlet bootstrapping this UI. */
    @VaadinServletConfiguration(ui = DemoUI.class, productionMode = false, heartbeatInterval = 300)
    public static class UIServlet extends VaadinServlet {
    }

    public Navigator getUiNavigator() {
        return uiNavigator;
    }

    public void setUiNavigator(Navigator uiNavigator) {
        this.uiNavigator = uiNavigator;
    }

    /**
     * Builds the whole UI: outer layout, bottom config slider, header, body,
     * right slider, and the Navigator with all demo views.
     */
    @Override
    protected void init(VaadinRequest request) {
        Responsive.makeResponsive(this);
        setAppTarget(Target.BOTH);
        addStyleName(ValoTheme.UI_WITH_MENU);
        getPage().setTitle("OpenNTF Domino API Demo App");
        setStyleName("main-screen");
        // Component that allows bottom SliderPanel
        final VerticalLayout outerLayout = new VerticalLayout();
        outerLayout.setSizeFull();
        outerLayout.setMargin(false);
        outerLayout.setSpacing(false);
        // Bottom Slider Panel: refresh button + current configuration dump
        VerticalLayout cfgSettings = new VerticalLayout();
        cfgSettings.setStyleName("config-settings");
        Button button1 = new Button("Refresh");
        button1.setIcon(FontAwesome.REFRESH);
        button1.addStyleName(ValoTheme.BUTTON_ICON_ONLY);
        button1.addStyleName(ValoTheme.BUTTON_QUIET);
        button1.addStyleName("right");
        button1.addClickListener(new ClickListener() {
            private static final long serialVersionUID = 1L;

            @Override
            public void buttonClick(ClickEvent event) {
                // re-dump the settings so the label reflects the latest state
                getConfigDetails().setValue(FactoryUtils.dumpConfigSettings());
            }
        });
        setConfigDetails(new Label(FactoryUtils.dumpConfigSettings(), ContentMode.HTML));
        cfgSettings.addComponents(button1, getConfigDetails());
        SliderPanel cfgSlider = new SliderPanelBuilder(cfgSettings).caption("CONFIGURATION SETTINGS")
                .mode(SliderMode.BOTTOM).style(SliderPanelStyles.COLOR_GRAY).tabPosition(SliderTabPosition.MIDDLE)
                .build();
        // contentLayout component contains Header and Body
        final VerticalLayout contentLayout = new VerticalLayout();
        contentLayout.setSpacing(false);
        contentLayout.setSizeFull();
        // Add header to contentLayout
        setHeader(new HeaderComponent(this));
        getHeader().setHeight("70px");
        contentLayout.addComponent(getHeader());
        // Add body to content layout; it takes all remaining vertical space
        setBody(new VerticalLayout());
        getBody().setStyleName("body-layout");
        getBody().setHeight(100, Unit.PERCENTAGE);
        contentLayout.addComponent(getBody());
        contentLayout.setExpandRatio(getBody(), 1);
        // Component allowing Right Slider Panel
        final HorizontalLayout innerLayout = new HorizontalLayout();
        innerLayout.setSizeFull();
        innerLayout.addComponent(contentLayout);
        innerLayout.setExpandRatio(contentLayout, 1);
        setRightSlider(new SliderPanelBuilder(new VerticalLayout()).caption("INFORMATION").mode(SliderMode.RIGHT)
                .style(SliderPanelStyles.COLOR_WHITE).tabPosition(SliderTabPosition.MIDDLE).fixedContentSize(700)
                .build());
        innerLayout.addComponent(getRightSlider());
        // Add Navigator and menu items
        setUiNavigator(new Navigator(this, getBody()));
        getUiNavigator().setErrorView(ErrorView.class);
        addNewMenuItem(SessionView.VIEW_NAME, SessionView.VIEW_LABEL, new SessionView());
        addNewMenuItem(DatabaseView.VIEW_NAME, DatabaseView.VIEW_LABEL, new DatabaseView());
        addNewMenuItem(ViewView.VIEW_NAME, ViewView.VIEW_LABEL, new ViewView());
        addNewMenuItem(DocumentView.VIEW_NAME, DocumentView.VIEW_LABEL, new DocumentView());
        addNewMenuItem(DominoElseView.VIEW_NAME, DominoElseView.VIEW_LABEL, new DominoElseView());
        addNewMenuItem(XotsView.VIEW_NAME, XotsView.VIEW_LABEL, new XotsView());
        // Add inner layout to outer layout
        outerLayout.addComponent(innerLayout);
        outerLayout.setExpandRatio(innerLayout, 1);
        outerLayout.addComponent(cfgSlider);
        // Add outer layout to DemoUI
        setContent(outerLayout);
    }

    /**
     * Registers a view with the Navigator and adds a menu entry for it.
     * Navigation is refused (with an error notification) until {@link #isSetup()}.
     *
     * @param viewName the navigator view name
     * @param viewLabel the menu label
     * @param viewObj the view instance
     */
    public void addNewMenuItem(final String viewName, final String viewLabel, final View viewObj) {
        getUiNavigator().addView(viewName, viewObj);
        getHeader().getMenubar().addItem(viewLabel, new MenuBar.Command() {
            @Override
            public void menuSelected(MenuItem selectedItem) {
                if (isSetup()) {
                    // move the "highlight" style to the selected menu entry
                    for (MenuItem itm : getHeader().getMenubar().getItems()) {
                        if ("highlight".equals(itm.getStyleName())) {
                            itm.setStyleName("");
                        }
                    }
                    selectedItem.setStyleName("highlight");
                    DemoUI.getCurrent().getNavigator().navigateTo(viewName);
                } else {
                    addMessage("", "You must set up the database before navigating the application",
                            Type.ERROR_MESSAGE);
                }
            }
        });
    }

    /** Shows a Vaadin notification with the given title, message and type. */
    public void addMessage(String title, String msg, Type notificationType) {
        Notification.show(title, msg, notificationType);
    }

    /** @return the current UI instance cast to {@code DemoUI} */
    public static DemoUI get() {
        return (DemoUI) UI.getCurrent();
    }

    public HeaderComponent getHeader() {
        return header;
    }

    public void setHeader(HeaderComponent header) {
        this.header = header;
    }

    public VerticalLayout getBody() {
        return body;
    }

    public void setBody(VerticalLayout body) {
        this.body = body;
    }

    public SliderPanel getRightSlider() {
        return rightSlider;
    }

    public void setRightSlider(SliderPanel rightSlider) {
        this.rightSlider = rightSlider;
    }

    public boolean isSetup() {
        return setup;
    }

    public void setSetup(boolean setup) {
        this.setup = setup;
    }

    public Target getAppTarget() {
        return appTarget;
    }

    public void setAppTarget(Target appTarget) {
        this.appTarget = appTarget;
    }

    public Label getConfigDetails() {
        return configDetails;
    }

    public void setConfigDetails(Label configDetails) {
        this.configDetails = configDetails;
    }

    public ConcurrentHashMap<String, Integer> getCreatedDocs() {
        return createdDocs;
    }

    public void setCreatedDocs(ConcurrentHashMap<String, Integer> createdDocs) {
        this.createdDocs = createdDocs;
    }

    public ConcurrentHashMap<String, Integer> getUpdatedDocs() {
        return updatedDocs;
    }

    public void setUpdatedDocs(ConcurrentHashMap<String, Integer> updatedDocs) {
        this.updatedDocs = updatedDocs;
    }
}
| |
package org.red5.server.classloading;
import java.io.File;
import java.io.FilenameFilter;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;
/**
 * Class loader for the media server: collects the server's lib jars, conf
 * directory, server jar, and platform-matching plugin jars into its URL path.
 */
public class ServerClassLoader extends URLClassLoader {
    // Normalized "os-arch" token (e.g. "linux-x86_64") used to pick native plugin jars
    private static String PLATFORM;

    static {
        // Normalize JVM / OS / CPU-architecture names into the single PLATFORM token.
        String jvmName = System.getProperty("java.vm.name", "").toLowerCase();
        String osName = System.getProperty("os.name", "").toLowerCase();
        String osArch = System.getProperty("os.arch", "").toLowerCase();
        String abiType = System.getProperty("sun.arch.abi", "").toLowerCase();
        String libPath = System.getProperty("sun.boot.library.path", "").toLowerCase();
        if (jvmName.startsWith("dalvik") && osName.startsWith("linux")) {
            osName = "android";
        } else if (jvmName.startsWith("robovm") && osName.startsWith("darwin")) {
            osName = "ios";
            osArch = "arm";
        } else if (osName.startsWith("mac os x") || osName.startsWith("darwin")) {
            osName = "macosx";
        } else {
            // keep only the first word, e.g. "windows 10" -> "windows"
            int spaceIndex = osName.indexOf(' ');
            if (spaceIndex > 0) {
                osName = osName.substring(0, spaceIndex);
            }
        }
        if (osArch.equals("i386") || osArch.equals("i486") || osArch.equals("i586") || osArch.equals("i686")) {
            osArch = "x86";
        } else if (osArch.equals("amd64") || osArch.equals("x86-64") || osArch.equals("x64")) {
            osArch = "x86_64";
        } else if (osArch.startsWith("aarch64") || osArch.startsWith("armv8") || osArch.startsWith("arm64")) {
            osArch = "arm64";
        } else if ((osArch.startsWith("arm")) && ((abiType.equals("gnueabihf")) || (libPath.contains("openjdk-armhf")))) {
            osArch = "armhf";
        } else if (osArch.startsWith("arm")) {
            osArch = "arm";
        }
        PLATFORM = osName + "-" + osArch;
    }

    /**
     * Filters jar files
     */
    public final static class JarFileFilter implements FilenameFilter {
        /**
         * Check whether file matches filter rules
         *
         * @param dir
         *            Directory
         * @param name
         *            File name
         * @return true If file does match filter rules, false otherwise
         */
        public boolean accept(File dir, String name) {
            return name.endsWith(".jar");
        }
    }

    public ServerClassLoader(java.lang.ClassLoader parent) {
        super(getJars(), parent);
    }

    /**
     * Collects the URLs (lib jars, conf dir, server jar, plugins dir and
     * platform-matching plugin jars) that make up the server classpath.
     *
     * @return the classpath URLs; never null
     */
    public static URL[] getJars() {
        List<URL> urlList = new ArrayList<>();
        JarFileFilter jarFileFilter = new JarFileFilter();
        String home = System.getProperty("red5.root");
        if (home == null || ".".equals(home)) {
            // if home is still null look it up via this classes loader
            String classLocation = ServerClassLoader.class.getProtectionDomain().getCodeSource().getLocation().toString();
            // snip off anything beyond the last slash
            home = classLocation.substring(0, classLocation.lastIndexOf('/'));
            if (home.startsWith("file:")) {
                home = home.substring("file:".length());
            }
        }
        // Add jars in the lib directory.
        // BUGFIX: listFiles() returns null when the directory does not exist,
        // which previously caused an NPE in the for-loop below.
        String libPath = home + File.separator + "lib";
        File libDir = new File(libPath);
        File[] libFiles = libDir.listFiles(jarFileFilter);
        if (libFiles != null) {
            for (File lib : libFiles) {
                try {
                    urlList.add(lib.toURI().toURL());
                } catch (MalformedURLException e) {
                    e.printStackTrace();
                }
            }
        }
        // get config dir
        String conf = home + File.separator + "conf";
        try {
            URL confUrl = new File(conf).toURI().toURL();
            if (!urlList.contains(confUrl)) {
                urlList.add(confUrl);
            }
        } catch (MalformedURLException e) {
            e.printStackTrace();
        }
        // add the server jar itself
        try {
            String serverJar = home + File.separator + "ant-media-server.jar";
            URL serverJarURL = new File(serverJar).toURI().toURL();
            urlList.add(serverJarURL);
        } catch (MalformedURLException e) {
            e.printStackTrace();
        }
        // plugins directory (may not exist; listFiles below then returns null)
        String pluginsPath = home + File.separator + "plugins";
        File pluginsDir = new File(pluginsPath);
        // add the plugin directory to the path so that configs
        // will be resolved and not have to be copied to conf
        try {
            URL pluginsUrl = pluginsDir.toURI().toURL();
            if (!urlList.contains(pluginsUrl)) {
                urlList.add(pluginsUrl);
            }
        } catch (MalformedURLException e) {
            e.printStackTrace();
        }
        // get all the plugin jars
        File[] pluginsFiles = pluginsDir.listFiles(jarFileFilter);
        // this can be null if the dir doesnt exist; loadPlugins handles that
        loadPlugins(pluginsFiles, urlList, PLATFORM);
        URL[] urls = urlList.toArray(new URL[0]);
        System.out.println("Selected libraries: (" + urls.length + " items)");
        return urls;
    }

    /**
     * Adds plugin jar URLs to the list. Jars whose names end with a known
     * platform suffix (x86, x86_64, arm64, armhf, ppc64le, arm — optionally
     * with a "-gpl" suffix) are only added when the name contains the current
     * platform token; all other jars are added unconditionally.
     *
     * @param pluginsFiles plugin jar files, may be null (directory missing)
     * @param urlList list the accepted URLs are appended to
     * @param platform platform token, e.g. "linux-x86_64"
     */
    public static void loadPlugins(File[] pluginsFiles, List<URL> urlList, String platform) {
        if (pluginsFiles != null) {
            for (File plugin : pluginsFiles) {
                try {
                    String parseUrl = parseUrl(plugin.toURI().toURL());
                    if (parseUrl.endsWith("x86") || parseUrl.endsWith("x86-gpl") ||
                            parseUrl.endsWith("x86_64") || parseUrl.endsWith("x86_64-gpl") ||
                            parseUrl.endsWith("arm64") || parseUrl.endsWith("arm64-gpl") ||
                            parseUrl.endsWith("armhf") || parseUrl.endsWith("armhf-gpl") ||
                            parseUrl.endsWith("ppc64le") || parseUrl.endsWith("ppc64le-gpl") ||
                            parseUrl.endsWith("arm") || parseUrl.endsWith("arm-gpl")) {
                        // platform-specific jar: only load the one matching this host
                        if (parseUrl.contains(platform)) {
                            urlList.add(plugin.toURI().toURL());
                        }
                    } else {
                        urlList.add(plugin.toURI().toURL());
                    }
                } catch (MalformedURLException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Parses url and returns the jar filename stripped of the ending .jar
     *
     * @param url jar URL (expected to end with ".jar")
     * @return lower-cased file name without the ".jar" extension
     */
    private static String parseUrl(URL url) {
        String external = url.toExternalForm().toLowerCase();
        // get everything after the last slash
        String[] parts = external.split("/");
        // last part
        String libName = parts[parts.length - 1];
        // strip .jar
        libName = libName.substring(0, libName.length() - 4);
        return libName;
    }
}
| |
/*
* Copyright 2012 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.bugpatterns;
import static com.google.errorprone.BugPattern.Category.JDK;
import static com.google.errorprone.BugPattern.MaturityLevel.MATURE;
import static com.google.errorprone.BugPattern.SeverityLevel.ERROR;
import static com.google.errorprone.matchers.Matchers.methodSelect;
import static com.google.errorprone.matchers.Matchers.staticMethod;
import static com.sun.source.tree.Tree.Kind.*;
import static javax.lang.model.element.Modifier.STATIC;
import com.google.errorprone.BugPattern;
import com.google.errorprone.VisitorState;
import com.google.errorprone.bugpatterns.BugChecker.AssignmentTreeMatcher;
import com.google.errorprone.bugpatterns.BugChecker.VariableTreeMatcher;
import com.google.errorprone.fixes.Fix;
import com.google.errorprone.fixes.SuggestedFix;
import com.google.errorprone.matchers.Description;
import com.google.errorprone.util.ASTHelpers;
import com.google.errorprone.util.EditDistance;
import com.sun.source.tree.*;
import com.sun.source.util.TreePath;
import com.sun.tools.javac.code.Flags;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Type;
import com.sun.tools.javac.tree.JCTree;
import com.sun.tools.javac.tree.JCTree.JCVariableDecl;
import com.sun.tools.javac.tree.JCTree.*;
/**
* TODO(eaftan): Consider cases where the parent is not a statement or there is no parent?
*
* @author eaftan@google.com (Eddie Aftandilian)
* @author scottjohnson@google.com (Scott Johnson)
*/
@BugPattern(name = "SelfAssignment",
summary = "Variable assigned to itself",
explanation = "The left-hand side and right-hand side of this assignment are the same. " +
"It has no effect.\n\n" +
"This also handles assignments in which the right-hand side is a call to " +
"Preconditions.checkNotNull(), which returns the variable that was checked for " +
"non-nullity. If you just intended to check that the variable is non-null, please " +
"don't assign the result to the checked variable; just call Preconditions.checkNotNull() " +
"as a bare statement.",
category = JDK, severity = ERROR, maturity = MATURE)
public class SelfAssignment extends BugChecker
implements AssignmentTreeMatcher, VariableTreeMatcher {
/**
 * Flags {@code x = x} assignments, also seeing through a wrapping
 * {@code Preconditions.checkNotNull(x)} on the right-hand side.
 */
@Override
public Description matchAssignment(AssignmentTree tree, VisitorState state) {
    // Unwrap a possible Preconditions.checkNotNull(...) around the RHS first.
    ExpressionTree rhs = stripCheckNotNull(tree.getExpression(), state);
    return ASTHelpers.sameVariable(tree.getVariable(), rhs)
            ? describeForAssignment(tree, state)
            : Description.NO_MATCH;
}
/**
 * Matches a static field whose initializer selects the same field from the
 * enclosing class, e.g. {@code static final int X = Foo.X;} inside {@code Foo}.
 */
@Override
public Description matchVariable(VariableTree tree, VisitorState state) {
    // Unwrap a possible Preconditions.checkNotNull(...) around the initializer.
    ExpressionTree initializer = stripCheckNotNull(tree.getInitializer(), state);
    Tree parent = state.getPath().getParentPath().getLeaf();
    // must be a static class variable with member select initializer
    // (null check first: the short-circuit order here guards the getKind() calls)
    if (initializer == null || initializer.getKind() != MEMBER_SELECT || parent.getKind() != CLASS
            || !tree.getModifiers().getFlags().contains(STATIC)) {
        return Description.NO_MATCH;
    }
    MemberSelectTree rhs = (MemberSelectTree) initializer;
    // Compare the RHS qualifier's class symbol with the enclosing class symbol,
    // and the selected member name with the declared variable's name.
    Symbol rhsClass = ASTHelpers.getSymbol(rhs.getExpression());
    Symbol lhsClass = ASTHelpers.getSymbol(parent);
    if (rhsClass != null && lhsClass != null
            && rhsClass.equals(lhsClass) && rhs.getIdentifier().contentEquals(tree.getName())) {
        return describeForVarDecl(tree, state);
    }
    return Description.NO_MATCH;
}
/**
* If the given expression is a call to checkNotNull(x), returns x.
* Otherwise, returns the given expression.
*
* TODO(eaftan): Also match calls to Java 7's Objects.requireNonNull() method.
*/
private ExpressionTree stripCheckNotNull(ExpressionTree expression, VisitorState state) {
if (expression != null && expression.getKind() == METHOD_INVOCATION && methodSelect(
staticMethod("com.google.common.base.Preconditions", "checkNotNull"))
.matches((MethodInvocationTree) expression, state)) {
return ((MethodInvocationTree) expression).getArguments().get(0);
}
return expression;
}
public Description describeForVarDecl(VariableTree tree, VisitorState state) {
String varDeclStr = tree.toString();
int equalsIndex = varDeclStr.indexOf('=');
if (equalsIndex < 0) {
throw new IllegalStateException("Expected variable declaration to have an initializer: "
+ tree.toString());
}
varDeclStr = varDeclStr.substring(0, equalsIndex - 1) + ";";
// Delete the initializer but still declare the variable.
return describeMatch(tree, new SuggestedFix().replace(tree, varDeclStr));
}
/**
* We expect that the lhs is a field and the rhs is an identifier, specifically
* a parameter to the method. We base our suggested fixes on this expectation.
*
* Case 1: If lhs is a field and rhs is an identifier, find a method parameter
* of the same type and similar name and suggest it as the rhs. (Guess that they
* have misspelled the identifier.)
*
* Case 2: If lhs is a field and rhs is not an identifier, find a method parameter
* of the same type and similar name and suggest it as the rhs.
*
* Case 3: If lhs is not a field and rhs is an identifier, find a class field
* of the same type and similar name and suggest it as the lhs.
*
* Case 4: Otherwise suggest deleting the assignment.
*/
public Description describeForAssignment(AssignmentTree assignmentTree, VisitorState state) {
// the statement that is the parent of the self-assignment expression
Tree parent = state.getPath().getParentPath().getLeaf();
// default fix is to delete assignment
Fix fix = new SuggestedFix().delete(parent);
ExpressionTree lhs = assignmentTree.getVariable();
ExpressionTree rhs = assignmentTree.getExpression();
// if this is a method invocation, they must be calling checkNotNull()
if (assignmentTree.getExpression().getKind() == METHOD_INVOCATION) {
// change the default fix to be "checkNotNull(x)" instead of "x = checkNotNull(x)"
fix = new SuggestedFix().replace(assignmentTree, rhs.toString());
// new rhs is first argument to checkNotNull()
rhs = stripCheckNotNull(rhs, state);
}
if (lhs.getKind() == MEMBER_SELECT) {
// find a method parameter of the same type and similar name and suggest it
// as the rhs
// rhs should be either identifier or field access
assert(rhs.getKind() == IDENTIFIER || rhs.getKind() == MEMBER_SELECT);
// get current name of rhs
String rhsName = null;
if (rhs.getKind() == IDENTIFIER) {
rhsName = ((JCIdent) rhs).name.toString();
} else if (rhs.getKind() == MEMBER_SELECT) {
rhsName = ((JCFieldAccess) rhs).name.toString();
}
// find method parameters of the same type
Type type = ((JCFieldAccess) lhs).type;
TreePath path = state.getPath();
while (path != null && path.getLeaf().getKind() != METHOD) {
path = path.getParentPath();
}
JCMethodDecl method = (JCMethodDecl) path.getLeaf();
int minEditDistance = Integer.MAX_VALUE;
String replacement = null;
for (JCVariableDecl var : method.params) {
if (var.type == type) {
int editDistance = EditDistance.getEditDistance(rhsName, var.name.toString());
if (editDistance < minEditDistance) {
// pick one with minimum edit distance
minEditDistance = editDistance;
replacement = var.name.toString();
}
}
}
if (replacement != null) {
// suggest replacing rhs with the parameter
fix = new SuggestedFix().replace(rhs, replacement);
}
} else if (rhs.getKind() == IDENTIFIER) {
// find a field of the same type and similar name and suggest it as the lhs
// lhs should be identifier
assert(lhs.getKind() == IDENTIFIER);
// get current name of lhs
String lhsName = ((JCIdent) rhs).name.toString();
// find class instance fields of the same type
Type type = ((JCIdent) lhs).type;
TreePath path = state.getPath();
while (path != null && !(path.getLeaf() instanceof JCClassDecl)) {
path = path.getParentPath();
}
if (path == null) {
throw new IllegalStateException("Expected to find an enclosing class declaration");
}
JCClassDecl klass = (JCClassDecl) path.getLeaf();
int minEditDistance = Integer.MAX_VALUE;
String replacement = null;
for (JCTree member : klass.getMembers()) {
if (member.getKind() == VARIABLE) {
JCVariableDecl var = (JCVariableDecl) member;
if (!Flags.isStatic(var.sym) && var.type == type) {
int editDistance = EditDistance.getEditDistance(lhsName, var.name.toString());
if (editDistance < minEditDistance) {
// pick one with minimum edit distance
minEditDistance = editDistance;
replacement = var.name.toString();
}
}
}
}
if (replacement != null) {
// suggest replacing lhs with the field
fix = new SuggestedFix().replace(lhs, "this." + replacement);
}
}
return describeMatch(assignmentTree, fix);
}
}
| |
package com.whatdoyouwanttodo.config;
import java.util.LinkedList;
import android.app.Activity;
import android.content.Intent;
import android.content.res.Resources;
import android.os.Bundle;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.app.ActionBarActivity;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.whatdoyouwanttodo.ChessboardFragment;
import com.whatdoyouwanttodo.ChessboardLayout;
import com.whatdoyouwanttodo.R;
import com.whatdoyouwanttodo.application.Cell;
import com.whatdoyouwanttodo.application.Chessboard;
import com.whatdoyouwanttodo.application.ChessboardApplication;
import com.whatdoyouwanttodo.db.CellCursor;
import com.whatdoyouwanttodo.db.ChessboardCursor;
import com.whatdoyouwanttodo.db.ChessboardDbUtility;
import com.whatdoyouwanttodo.ui.QuestionDialog;
import com.whatdoyouwanttodo.utils.ActivityUtils;
import com.whatdoyouwanttodo.utils.ImageLoader;
/**
* Attivita' che permette di configurare una tabella di celle
*/
/**
 * Activity that lets the user configure a grid ("chessboard") of cells: create
 * and delete cells, swap their positions, open per-cell configuration, and
 * persist the whole layout to the database.
 */
public class GridConfigActivity extends ActionBarActivity implements
        GridConfigFragment.OnChessboardChangeListener,
        ChessboardLayout.OnSecondaryCellEventListener,
        ChessboardLayout.OnCellEventListener {
    public static final String CHESSBOARD_ID = "com.whatdoyouwanttodo.config.GridConfigActivity.CHESSBOARD_ID";

    // change in swap/normal mode
    private TextView previewText;
    private LinearLayout previewContainer;

    // for swap mode
    private boolean swapMode;
    private boolean previousCell; // true after the first cell of a swap pair was chosen
    private int previousRow;
    private int previousColumn;
    private Button swapModeOff;

    // cells and chessboard to modify
    private FlexibleCellGrid flexGrid; // handle cells
    private Chessboard chessboard;

    // for packing feature: last known column count, used to detect a resize
    private int lastWidth;

    // true after cell config activity has changed a cell
    private boolean cellChanged = false;
    // true when the grid has unsaved modifications
    private boolean gridDirtyFlag = false;

    /** Builds the intent used to start this activity for the given chessboard id. */
    public static Intent getStartIntent(Activity caller, long chessboardId) {
        Intent intent = new Intent(caller, GridConfigActivity.class);
        intent.putExtra(GridConfigActivity.CHESSBOARD_ID, chessboardId);
        return intent;
    }

    public static long NO_GRID_ID = -1;
    private static long currentGridId = NO_GRID_ID;

    /**
     * Returns the id of the grid currently being edited and resets the stored
     * value to NO_GRID_ID (a one-shot read used by other activities).
     */
    public static long getCurrentGrid() {
        long gridId = currentGridId;
        currentGridId = NO_GRID_ID;
        return gridId;
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // get chessboard number
        Intent intent = getIntent();
        long id = intent.getLongExtra(CHESSBOARD_ID, 0);
        if (ChessboardApplication.DEBUG_MODE_IN_OUT_ACTIVITY) {
            Log.d(getClass().getName(), "input: " + id);
        }
        // read from database
        readChessboardWithCells(id);
        setContentView(R.layout.activity_grid_config);
        if (savedInstanceState == null) {
            // first creation: install both the config fragment and the preview
            FragmentManager manager = getSupportFragmentManager();
            FragmentTransaction transaction = manager.beginTransaction();
            GridConfigFragment gridConfigFragment = GridConfigFragment
                    .newGridConfigFragment(chessboard);
            transaction.add(R.id.config_container, gridConfigFragment);
            Cell[] configCells = flexGrid.getConfigCells();
            ChessboardFragment chessboardFragment = ChessboardFragment
                    .newChessboardFragmentWithConfigButtons(chessboard, configCells, true, true);
            transaction.add(R.id.chessboard_container, chessboardFragment);
            transaction.commit();
        }
        previewText = (TextView) findViewById(R.id.grid_config_preview_text);
        previewContainer = (LinearLayout) findViewById(R.id.grid_config_preview_container);
        swapMode = false;
        previousCell = false;
        swapModeOff = (Button) findViewById(R.id.btn_mode_swap_off);
        swapModeOff.setVisibility(View.INVISIBLE);
        swapModeOff.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View view) {
                // leave swap mode
                changeChessboard(chessboard, false);
            }
        });
        currentGridId = chessboard.getId();
        gridDirtyFlag = false;
    }

    @Override
    public void onResume() {
        super.onResume();
        if (cellChanged == true) {
            // if resume after CellGridConfig is called refresh chessboard
            cellChanged = false;
            changeChessboard(chessboard, swapMode);
        }
        if (AllGridViewActivity.haveReturnParams()) {
            long id = AllGridViewActivity.getReturnParams().getId();
            if (id != chessboard.getId()) {
                // change chessboard
                readChessboardWithCells(id);
                changeChessboard(chessboard, false);
                // replace chessboard config
                GridConfigFragment gridConfigFragment = GridConfigFragment
                        .newGridConfigFragment(chessboard);
                FragmentManager manager = getSupportFragmentManager();
                FragmentTransaction transaction = manager.beginTransaction();
                transaction.replace(R.id.config_container, gridConfigFragment);
                transaction.commit();
                currentGridId = chessboard.getId();
                gridDirtyFlag = false;
            }
        }
    }

    /**
     * Primary cell tap. In swap mode the first tap selects a cell and the second
     * swaps it with the first; in normal mode a tap on an existing cell opens the
     * single-cell configuration activity.
     */
    @Override
    public void onCellEvent(Chessboard chessboard, int row, int column, long param) {
        if (swapMode == true) {
            if (previousCell == false) {
                // remember the first cell of the pair
                previousRow = row;
                previousColumn = column;
                previousCell = true;
                Resources res = getResources();
                previewText.setText(res.getString(R.string.activity_grid_config_swap1));
            } else {
                // change cells
                if (previousRow == row && previousColumn == column) {
                    // do nothing
                } else {
                    // swap cells
                    flexGrid.swapCells(previousRow, previousColumn, row, column);
                }
                previousCell = false;
                previewText.setText(getResources().getString(
                        R.string.activity_grid_config_swap_mode));
                changeChessboard(chessboard, swapMode);
            }
        } else {
            // if cell not exist do nothing
            if (flexGrid.existCell(row, column) == true) {
                // open single cell configuration
                Intent intent = CellGridConfigActivity.getStartIntent(this, row, column, flexGrid);
                this.cellChanged = true;
                startActivity(intent);
                // normally hidden behind the new activity
                // changeChessboard(chessboard, swapMode);
            }
        }
    }

    /**
     * Secondary cell gesture: a short touch toggles cell existence (create or
     * delete), a long touch enters swap mode.
     */
    @Override
    public void onSecondaryCellEvent(boolean longTouch, Chessboard chessboard, int row,
            int column, long param) {
        if (longTouch == false) {
            if (flexGrid.existCell(row, column) == false) {
                // create cell
                flexGrid.createCellIfNotExist(this, row, column);
            } else {
                // delete cell
                flexGrid.deleteCell(row, column);
            }
            changeChessboard(chessboard, swapMode);
        } else {
            changeChessboard(chessboard, true);
        }
    }

    @Override
    public void onChessboardChange(Chessboard chessboard) {
        changeChessboard(chessboard, swapMode);
    }

    /**
     * Loads the chessboard and its cells from the database into
     * {@link #chessboard} and {@link #flexGrid}.
     *
     * @throws IllegalStateException if no chessboard exists for the given id
     *         (previously this fell through to a NullPointerException)
     */
    private void readChessboardWithCells(long id) {
        ChessboardDbUtility dbu = new ChessboardDbUtility(this);
        dbu.openReadable();
        chessboard = null;
        ChessboardCursor cursorCb = dbu.getCursorOnChessboard(id);
        if (cursorCb != null) {
            while (cursorCb.moveToNext()) {
                chessboard = cursorCb.getChessboard();
            }
            cursorCb.close();
        }
        if (chessboard == null) {
            dbu.close();
            throw new IllegalStateException("No chessboard found for id " + id);
        }
        lastWidth = chessboard.getColumnCount();
        LinkedList<Cell> cells = new LinkedList<Cell>();
        CellCursor cursor = dbu.getCursorOnCell(id);
        if (cursor != null) {
            while (cursor.moveToNext()) {
                cells.add(cursor.getCell());
            }
            cursor.close();
        }
        flexGrid = new FlexibleCellGrid(cells, chessboard.getId());
        dbu.close();
    }

    /**
     * Applies a (possibly new) chessboard and swap-mode state to the UI: updates
     * the preview header, repacks cells if the column count changed, and replaces
     * the preview fragment. Marks the grid dirty.
     */
    private void changeChessboard(Chessboard chessboard, boolean swapMode) {
        gridDirtyFlag = true;
        // set swap mode (only update the header widgets on a state transition)
        if (swapMode == true) {
            if (this.swapMode == false) {
                swapModeOff.setVisibility(View.VISIBLE);
                previewText.setText(getResources().getString(
                        R.string.activity_grid_config_swap_mode));
                previewText
                        .setTextColor(getResources().getColor(R.color.black));
                previewContainer.setBackgroundColor(getResources().getColor(
                        R.color.grid_preview_color_swap_mode));
                this.swapMode = true;
            }
        } else {
            if (this.swapMode == true) {
                swapModeOff.setVisibility(View.INVISIBLE);
                previewText.setText(getResources().getString(
                        R.string.activity_grid_config_preview));
                previewText
                        .setTextColor(getResources().getColor(R.color.white));
                previewContainer.setBackgroundColor(getResources().getColor(
                        R.color.grid_preview_color_normal));
                previousCell = false;
                this.swapMode = false;
            }
        }
        // pack cell when size changed
        if (lastWidth != chessboard.getColumnCount()) {
            lastWidth = chessboard.getColumnCount();
            flexGrid.packCells(chessboard.getColumnCount());
        }
        // update chessboard
        ImageLoader.getInstance().cleanPictures();
        Cell[] configCells = flexGrid.getConfigCells();
        FragmentTransaction transaction = getSupportFragmentManager()
                .beginTransaction();
        ChessboardFragment chessboardFragment = ChessboardFragment
                .newChessboardFragmentWithConfigButtons(chessboard, configCells, true, true);
        transaction.replace(R.id.chessboard_container, chessboardFragment);
        transaction.commit();
        if (ChessboardApplication.DEBUG_HEAP_LOG) {
            ActivityUtils.logHeap();
        }
        this.chessboard = chessboard;
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.grid_config, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int id = item.getItemId();
        if (id == R.id.action_grid_save) {
            saveConfig();
            finish();
            return true;
        } else if (id == R.id.action_overview) {
            if (gridDirtyFlag == true) {
                // unsaved changes: ask before leaving for the overview
                Resources res = getResources();
                QuestionDialog dialog = new QuestionDialog(
                        res.getString(R.string.activity_grid_config_exit_dialog_message),
                        res.getString(R.string.activity_grid_config_exit_dialog_yes),
                        res.getString(R.string.activity_grid_config_exit_dialog_no),
                        new QuestionDialog.QuestionDialogListener() {
                            @Override
                            public void onPositiveClick() {
                                saveConfig();
                                Intent intent = AllGridViewActivity.getStartIntentForOverview(
                                        GridConfigActivity.this, chessboard.getId());
                                startActivity(intent);
                            }
                            @Override
                            public void onNegativeClick() {
                                Intent intent = AllGridViewActivity.getStartIntentForOverview(
                                        GridConfigActivity.this, chessboard.getId());
                                startActivity(intent);
                            }
                        });
                dialog.show(this);
            } else {
                Intent intent = AllGridViewActivity.getStartIntentForOverview(
                        GridConfigActivity.this, chessboard.getId());
                startActivity(intent);
            }
            return true;
        } else if (id == R.id.action_dismiss_chessboard) {
            finish();
            return true;
            /* TODO: for unlikely future use
            // first grid is not removable
            if(chessboard.getId() == 1) {
                Resources res = getResources();
                String title = res.getString(R.string.activity_grid_config_delete_root_grid);
                String message = res.getString(R.string.activity_grid_config_delete_root_grid_message);
                String confirm = res.getString(R.string.activity_grid_config_delete_root_grid_confirm);
                MessageDialog.showMessage(this, title, message, confirm);
                return true;
            }
            // grid with cells is not removable
            if (flexGrid.isEmpty() == false) {
                Resources res = getResources();
                String title = res.getString(R.string.activity_grid_config_delete_grid);
                String message = res.getString(R.string.activity_grid_config_delete_grid_message);
                String confirm = res.getString(R.string.activity_grid_config_delete_grid_confirm);
                MessageDialog.showMessage(this, title, message, confirm);
            } else {
                // remove grid after confirm
                Resources res = getResources();
                String title = res.getString(R.string.activity_grid_config_delete_grid_action);
                String message = res.getString(R.string.activity_grid_config_delete_grid_message_action);
                String confirm = res.getString(R.string.activity_grid_config_delete_grid_confirm_action);
                QuestionDialog dialog = new QuestionDialog(
                        title, message, confirm,
                        new QuestionDialog.QuestionDialogListener() {
                            @Override
                            public void onPositiveClick() {
                                ChessboardDbUtility dbu = new ChessboardDbUtility(
                                        GridConfigActivity.this);
                                dbu.openWritable();
                                // delete chessboard
                                dbu.deleteChessboard(chessboard.getId());
                                // remove all cells
                                flexGrid.writeOnDatabase(dbu);
                                List<Cell> toUnlink = new LinkedList<Cell>();
                                CellCursor cc = dbu.getCursorOnEveryCell();
                                while (cc.moveToNext()) {
                                    Cell cell = cc.getCell();
                                    if (cell.getActivityType() == Cell.ACTIVITY_TYPE_OPEN_CHESSBOARD
                                            && cell.getActivityParam() == chessboard
                                                    .getId()) {
                                        toUnlink.add(cell);
                                    }
                                }
                                cc.close();
                                Iterator<Cell> cIt = toUnlink.iterator();
                                while (cIt.hasNext()) {
                                    Cell cell = cIt.next();
                                    dbu.updateCell(cell.getId(), cell.getChessboard(),
                                            cell.getName(), cell.getRow(),
                                            cell.getColumn(), cell.getBackgroundColor(),
                                            cell.getBorderWidth(), cell.getBorderColor(),
                                            cell.getText(), cell.getTextWidth(),
                                            cell.getTextColor(), cell.getImagePath(),
                                            cell.getAudioPath(), Cell.ACTIVITY_TYPE_NONE, 0);
                                }
                                dbu.close();
                                finish();
                            }
                            @Override
                            public void onNegativeClick() {
                                // do nothing
                            }
                        });
                dialog.show(this);
            }
            return true;
            */
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        // Handle the back button
        if (keyCode == KeyEvent.KEYCODE_BACK) {
            // Ask the user if they want to save configuration
            Resources res = getResources();
            QuestionDialog dialog = new QuestionDialog(res.getString(
                    R.string.activity_grid_config_exit_dialog_message),
                    res.getString(R.string.activity_grid_config_exit_dialog_yes),
                    res.getString(R.string.activity_grid_config_exit_dialog_no),
                    new QuestionDialog.QuestionDialogListener() {
                        @Override
                        public void onPositiveClick() {
                            saveConfig();
                            finish();
                        }
                        @Override
                        public void onNegativeClick() {
                            finish();
                        }
                    });
            dialog.show(this);
            return true;
        } else {
            return super.onKeyDown(keyCode, event);
        }
    }

    /** Persists the chessboard row and all of its cells to the database. */
    private void saveConfig() {
        // debug code
        // dbu.close();
        // dbu.printAllDatabaseForDebug();
        // dbu.openWritable();
        ChessboardDbUtility dbu = new ChessboardDbUtility(this);
        dbu.openWritable();
        dbu.updateChessboard(chessboard.getId(), chessboard.getParentId(),
                chessboard.getName(), chessboard.getRowCount(),
                chessboard.getColumnCount(), chessboard.getBackgroundColor(),
                chessboard.getBorderWidth());
        // update cells
        flexGrid.writeOnDatabase(dbu);
        dbu.close();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        if (ChessboardApplication.DEBUG_MODE_IN_OUT_ACTIVITY) {
            Log.d(getClass().getName(), "output: " + flexGrid.toString());
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.indexing.common.task.batch.parallel;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import org.apache.druid.data.input.InputSplit;
import org.apache.druid.indexer.TaskState;
import org.apache.druid.indexer.TaskStatus;
import org.apache.druid.indexing.common.TaskToolbox;
import org.apache.druid.indexing.common.actions.TaskActionClient;
import org.apache.druid.indexing.common.task.NoopTask;
import org.apache.druid.java.util.common.StringUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
public class ParallelIndexPhaseRunnerTest extends AbstractParallelIndexSupervisorTaskTest
{
  private File inputDir;

  public ParallelIndexPhaseRunnerTest()
  {
    super(DEFAULT_TRANSIENT_TASK_FAILURE_RATE, DEFAULT_TRANSIENT_API_FAILURE_RATE);
  }

  @Before
  public void setup() throws IOException
  {
    inputDir = temporaryFolder.newFolder("data");
    // Primary data files picked up by the index tasks.
    for (int fileIndex = 0; fileIndex < 5; fileIndex++) {
      final File dataFile = new File(inputDir, "test_" + fileIndex);
      try (final Writer writer =
               Files.newBufferedWriter(dataFile.toPath(), StandardCharsets.UTF_8)) {
        writer.write(StringUtils.format("2017-12-%d,%d th test file\n", 24 + fileIndex, fileIndex));
        writer.write(StringUtils.format("2017-12-%d,%d th test file\n", 25 + fileIndex, fileIndex));
      }
    }
    // Extra files whose names should be excluded by the input filter.
    for (int fileIndex = 0; fileIndex < 5; fileIndex++) {
      final File filteredFile = new File(inputDir, "filtered_" + fileIndex);
      try (final Writer writer =
               Files.newBufferedWriter(filteredFile.toPath(), StandardCharsets.UTF_8)) {
        writer.write(StringUtils.format("2017-12-%d,%d th test file\n", 25 + fileIndex, fileIndex));
      }
    }
    getObjectMapper().registerSubtypes(new NamedType(ReportingNoopTask.class, "reporting_noop"));
  }

  @After
  public void tearDown()
  {
    temporaryFolder.delete();
  }

  @Test
  public void testLargeEstimatedNumSplits() throws Exception
  {
    // Estimate larger than the actual number of sub tasks.
    Assert.assertEquals(TaskState.SUCCESS, createRunner(10, 12).run());
  }

  @Test
  public void testSmallEstimatedNumSplits() throws Exception
  {
    // Estimate smaller than the actual number of sub tasks.
    Assert.assertEquals(TaskState.SUCCESS, createRunner(10, 8).run());
  }

  /** Builds a phase runner wired to a fresh noop task and toolbox. */
  private TestPhaseRunner createRunner(int actualNumSubTasks, int estimatedNumSubTasks)
  {
    final NoopTask task = NoopTask.create();
    final TaskActionClient actionClient = createActionClient(task);
    final TaskToolbox toolbox = createTaskToolbox(task, actionClient);
    return new TestPhaseRunner(
        toolbox,
        "supervisorTaskId",
        "groupId",
        AbstractParallelIndexSupervisorTaskTest.DEFAULT_TUNING_CONFIG_FOR_PARALLEL_INDEXING,
        actualNumSubTasks,
        estimatedNumSubTasks
    );
  }

  /** Phase runner producing a configurable number of noop sub task specs. */
  private static class TestPhaseRunner extends ParallelIndexPhaseRunner<ReportingNoopTask, EmptySubTaskReport>
  {
    private final int actualNumSubTasks;
    private final int estimatedNumSubTasks;

    TestPhaseRunner(
        TaskToolbox toolbox,
        String supervisorTaskId,
        String groupId,
        ParallelIndexTuningConfig tuningConfig,
        int actualNumSubTasks,
        int estimatedNumSubTasks
    )
    {
      super(
          toolbox,
          supervisorTaskId,
          groupId,
          supervisorTaskId,
          tuningConfig,
          Collections.emptyMap()
      );
      this.actualNumSubTasks = actualNumSubTasks;
      this.estimatedNumSubTasks = estimatedNumSubTasks;
    }

    @Override
    Iterator<SubTaskSpec<ReportingNoopTask>> subTaskSpecIterator()
    {
      return new Iterator<SubTaskSpec<ReportingNoopTask>>()
      {
        private int produced = 0;

        @Override
        public boolean hasNext()
        {
          return produced < actualNumSubTasks;
        }

        @Override
        public SubTaskSpec<ReportingNoopTask> next()
        {
          final String specId = "specId_" + produced++;
          return new TestSubTaskSpec(
              specId,
              getGroupId(),
              getTaskId(),
              getContext(),
              new InputSplit<>(new Object()),
              TestPhaseRunner.this
          );
        }
      };
    }

    @Override
    int estimateTotalNumSubTasks()
    {
      return estimatedNumSubTasks;
    }

    @Override
    public String getName()
    {
      return "TestPhaseRunner";
    }
  }

  /** Spec that spawns {@link ReportingNoopTask}s reporting back to the runner. */
  private static class TestSubTaskSpec extends SubTaskSpec<ReportingNoopTask>
  {
    private final TestPhaseRunner parentRunner;

    private TestSubTaskSpec(
        String id,
        String groupId,
        String supervisorTaskId,
        Map<String, Object> context,
        InputSplit inputSplit,
        TestPhaseRunner parentRunner
    )
    {
      super(id, groupId, supervisorTaskId, context, inputSplit);
      this.parentRunner = parentRunner;
    }

    @Override
    public ReportingNoopTask newSubTask(int numAttempts)
    {
      return new ReportingNoopTask(getGroupId(), parentRunner);
    }
  }

  /** Minimal report carrying only the reporting task's id. */
  private static class EmptySubTaskReport implements SubTaskReport
  {
    private final String taskId;

    private EmptySubTaskReport(String taskId)
    {
      this.taskId = taskId;
    }

    @Override
    public String getTaskId()
    {
      return taskId;
    }
  }

  /** Noop task that reports an empty sub task report after it completes. */
  private static class ReportingNoopTask extends NoopTask
  {
    private final TestPhaseRunner parentRunner;

    private ReportingNoopTask(String groupId, TestPhaseRunner parentRunner)
    {
      super(
          null,
          groupId,
          null,
          10,
          0,
          null,
          null,
          Collections.singletonMap(AbstractParallelIndexSupervisorTaskTest.DISABLE_TASK_INJECT_CONTEXT_KEY, true)
      );
      this.parentRunner = parentRunner;
    }

    @Override
    public TaskStatus run(TaskToolbox toolbox) throws Exception
    {
      final TaskStatus result = super.run(toolbox);
      parentRunner.collectReport(new EmptySubTaskReport(getId()));
      return result;
    }
  }
}
| |
/*
* Copyright (c) 2015, tibbitts
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package org.puyallupfamilyhistorycenter.service.utils;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.simpleemail.AmazonSimpleEmailService;
import com.amazonaws.services.simpleemail.AmazonSimpleEmailServiceClient;
import com.amazonaws.services.simpleemail.model.Body;
import com.amazonaws.services.simpleemail.model.Content;
import com.amazonaws.services.simpleemail.model.Destination;
import com.amazonaws.services.simpleemail.model.Message;
import com.amazonaws.services.simpleemail.model.RawMessage;
import com.amazonaws.services.simpleemail.model.SendEmailRequest;
import com.amazonaws.services.simpleemail.model.SendRawEmailRequest;
import com.google.common.base.Verify;
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.time.format.FormatStyle;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import javax.activation.DataHandler;
import javax.activation.DataSource;
import javax.activation.FileDataSource;
import javax.mail.Address;
import javax.mail.Session;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeBodyPart;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;
import org.apache.log4j.Logger;
import org.puyallupfamilyhistorycenter.service.ApplicationProperties;
import org.puyallupfamilyhistorycenter.service.models.PersonTemple;
/**
*
* @author tibbitts
*/
public class EmailUtils {
private static final Logger logger = Logger.getLogger(EmailUtils.class);
// Shared SES client, configured once at class-load time for the whole process.
private static final AmazonSimpleEmailService ses;
// NOTE(review): the whitelist is loaded here but not consulted by the methods
// visible in this file — confirm where (or whether) it is enforced.
private static final Set<String> emailWhitelist;
static {
    ClientConfiguration config = new ClientConfiguration();
    //config.set
    // Credentials come from application properties; region is fixed to us-west-2.
    ses = new AmazonSimpleEmailServiceClient(new BasicAWSCredentials(ApplicationProperties.getEmailAWSAccessKey(), ApplicationProperties.getEmailAWSSecretKey()), config);
    ses.setRegion(Region.getRegion(Regions.US_WEST_2));
    emailWhitelist = ApplicationProperties.getEmailWhitelist();
}
/**
 * Sends a referral email to a family history consultant about a patron visit.
 * The patron's name is used as both the full and the short name in the body.
 */
public static void sendReferralEmail(String contactName, String contactEmail, String patronName, String patronEmail, String patronPhone, String patronWard, List<String> interests, String numAdults, String numChildren) {
    final String subject = "Family history consultant referral for " + patronName;
    final String emailBody = buildReferralEmailBody(
            contactName, patronName, patronName, patronEmail, patronPhone,
            patronWard, LocalDate.now(), interests, numAdults, numChildren);
    final String[] ccList = new String[] { "normanse@gmail.com" };
    sendEmail(contactName, contactEmail, ccList, subject, emailBody);
}
/**
 * Sends the end-of-visit email with the captured images attached.
 * NOTE(review): the second argument to buildFinalEmailBody is null here —
 * confirm that the body builder tolerates it.
 */
public static void sendFinalEmail(String userName, String userEmail, Collection<String> imageFiles, Iterable<String> attachmentUrls) {
    final String subject = ApplicationProperties.getEmailSubject();
    final String emailBody = buildFinalEmailBody(userName, null, attachmentUrls);
    sendEmailWithAttachments(userName, userEmail, new String[] {}, subject, emailBody, imageFiles);
    logger.info("Sent final email to " + userEmail);
}
/**
 * Sends a plain HTML email through SES from the fixed admin address to the
 * named recipient, CC'ing everyone in ccList.
 */
protected static void sendEmail(String recipientName, String recipientEmail, String[] ccList, String subjectString, String bodyString) {
    final Body body = new Body();
    body.setHtml(new Content(bodyString));
    final Message message = new Message(new Content(subjectString), body);
    // SES expects RFC-822 style "display name" addressing for the recipient.
    final String toAddress = "\"" + recipientName + "\" <" + recipientEmail + ">";
    final SendEmailRequest request = new SendEmailRequest();
    request.setMessage(message);
    request.setSource("admin@puyallupfamilyhistorycenter.org");
    request.setDestination(new Destination(Arrays.asList(toAddress)).withCcAddresses(Arrays.asList(ccList)));
    ses.sendEmail(request);
}
/**
 * Builds a multipart MIME message (HTML body plus inline image attachments
 * referenced by Content-ID) and sends it through SES as a raw email.
 *
 * The MIME structure assembled here is: message -> "related" multipart ->
 * [wrap bodypart -> "alternative" multipart -> html bodypart] + one bodypart
 * per attachment. The assembly order below is what produces that nesting.
 *
 * NOTE(review): recipientName and ccList are currently unused (see the TODO
 * below) — confirm whether CC support is still wanted.
 *
 * @throws IllegalStateException wrapping any failure, including a null
 *         recipientEmail (rejected up front via Verify)
 */
protected static void sendEmailWithAttachments(String recipientName, String recipientEmail, String[] ccList, String subjectString, String bodyString, Collection<String> attachments) {
    try {
        Verify.verifyNotNull(recipientEmail, "Null email address");
        Session session = Session.getDefaultInstance(new Properties());
        MimeMessage message = new MimeMessage(session);
        message.setSubject(subjectString, "UTF-8");
        message.setFrom(new InternetAddress("admin@puyallupfamilyhistorycenter.org"));
        message.setReplyTo(new Address[]{new InternetAddress("admin@puyallupfamilyhistorycenter.org")});
        message.setRecipients(javax.mail.Message.RecipientType.TO, InternetAddress.parse(recipientEmail));
        //TODO: Handle CC?
        // Cover wrap
        MimeBodyPart wrap = new MimeBodyPart();
        // Alternative TEXT/HTML content
        MimeMultipart cover = new MimeMultipart("alternative");
        MimeBodyPart html = new MimeBodyPart();
        cover.addBodyPart(html);
        wrap.setContent(cover);
        MimeMultipart content = new MimeMultipart("related");
        message.setContent(content);
        content.addBodyPart(wrap);
        // This is just for testing HTML embedding of different type of attachments.
        StringBuilder sb = new StringBuilder();
        if (attachments != null) {
            for (String attachmentFileName : attachments) {
                // Each attachment gets a unique Content-ID so the HTML body can
                // reference it inline via cid: URLs.
                String id = UUID.randomUUID().toString();
                sb.append("<img src=\"cid:");
                sb.append(id);
                sb.append("\" alt=\"ATTACHMENT\"/>\n");
                MimeBodyPart attachment = new MimeBodyPart();
                DataSource fds = new FileDataSource(attachmentFileName);
                attachment.setDataHandler(new DataHandler(fds));
                attachment.setHeader("Content-ID", "<" + id + ">");
                attachment.setFileName(fds.getName());
                content.addBodyPart(attachment);
            }
        }
        html.setContent("<html><body><img style=\"width:100%\" src=\"http://www.puyallupfamilyhistorycenter.org/uploads/4/8/2/9/4829765/1433113473.png?\" alt=\"The Puyllup Family History Center\" />" + bodyString + "</body></html>", "text/html");
        // Send the email.
        // Serialize the full MIME message and hand it to SES as raw bytes.
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        message.writeTo(outputStream);
        RawMessage rawMessage = new RawMessage(ByteBuffer.wrap(outputStream.toByteArray()));
        SendRawEmailRequest rawEmailRequest = new SendRawEmailRequest(rawMessage);
        ses.sendRawEmail(rawEmailRequest);
    } catch (Exception ex) {
        throw new IllegalStateException("Failed to send email to " + recipientEmail, ex);
    }
}
/** Formats visit dates in the long localized style (e.g. "June 1, 2015"). */
protected static final DateTimeFormatter dateFormat = DateTimeFormatter.ofLocalizedDate(FormatStyle.LONG);
/** Fallback interest used when the patron did not select any. */
protected static final List<String> defaultInterest = Arrays.asList("family history");
/**
 * Builds the HTML body of the referral email sent to a ward contact after a
 * patron's visit, listing the patron's interests and contact details.
 *
 * Fixes over the original: the contact details are now emitted as a
 * well-formed definition list (the original appended a stray "</dl>" after
 * each entry and never opened the list), and the "Puyllup" typo in the
 * banner alt text is corrected.
 *
 * @param interests may be null or empty; defaults to {@link #defaultInterest}
 * @return the complete HTML document as a string
 */
static String buildReferralEmailBody(String contactName, String patronFullName, String patronShortName, String patronEmail, String patronPhone, String patronWard, LocalDate visitDate, List<String> interests, String numAdults, String numChildren) {
    // Deterministic cache-busting token derived from the patron email.
    // NOTE(review): getBytes() uses the platform charset — consider UTF-8.
    String uuid = UUID.nameUUIDFromBytes(patronEmail.getBytes()).toString();
    StringBuilder builder = new StringBuilder("<html><head></head><body>");
    builder.append("<img style=\"width:100%\" src=\"http://www.puyallupfamilyhistorycenter.org/uploads/4/8/2/9/4829765/1433113473.png?").append(uuid).append("\" alt=\"The Puyallup Family History Center\" />");
    builder.append("<h3>Dear ").append(contactName).append(",</h3>");
    builder.append("<p><strong>").append(patronFullName).append("</strong> from the ").append(patronWard)
            .append(" visited the Discovery room at the Puyallup Stake Family History Center on ")
            .append(dateFormat.format(visitDate)).append(" with a party of ")
            .append(numAdults).append(" adults and ")
            .append(numChildren).append(" children.</p>");
    if (interests == null || interests.isEmpty()) {
        interests = defaultInterest;
    }
    builder.append("<p>").append(patronShortName).append(" expressed interest in learning more about");
    for (int i = 0; i < interests.size(); i++) {
        // Natural-language list: commas between items, "and" before the last.
        if (i == interests.size() - 1 && interests.size() > 1) {
            builder.append(" and");
        } else if (i != 0 && interests.size() > 2) {
            builder.append(",");
        }
        builder.append(" <strong>").append(interests.get(i)).append("</strong>");
    }
    builder.append(".</p>");
    builder.append("<p>Please schedule a time for them to meet with a family history consultant ")
            .append("so they can learn more about how they can be involved in family history work.</p>");
    // Contact details as a single well-formed <dl> list.
    builder.append("<dl>");
    builder.append("<dt>Email:</dt><dd><a href=\"mailto:").append(patronEmail).append("\">").append(patronEmail).append("</a></dd>");
    builder.append("<dt>Phone:</dt><dd><a href=\"tel:").append(patronPhone).append("\">").append(patronPhone).append("</a></dd>");
    builder.append("</dl>");
    builder.append("<p>Thank you for your assistance; we appreciate it.</p>");
    builder.append("<p>The staff at the Puyallup Stake Family History Center</p>");
    builder.append("</body></html>");
    return builder.toString();
}
/**
 * Builds the HTML body of the follow-up email: configurable salutation,
 * body, an optional list of temple-work prospects, a signature, and inline
 * links/thumbnails for each attachment URL.
 *
 * Newlines in the configured text blocks are rendered as paragraph breaks;
 * "${personName}" in the salutation is substituted with the given name.
 *
 * @param personName     name substituted into the salutation template
 * @param prospects      prospects to list; may be null or empty (no list emitted)
 * @param attachmentUrls attachment image URLs; may be null (previously NPE'd)
 * @return the HTML fragment as a string
 */
protected static String buildFinalEmailBody(String personName, Iterable<PersonTemple> prospects, Iterable<String> attachmentUrls) {
    StringBuilder builder = new StringBuilder();
    //TODO: Make this configurable
    builder.append("<p>")
            .append(ApplicationProperties.getEmailSalutation().replaceAll("\n", "</p><p>").replaceAll("\\$\\{personName\\}", personName))
            .append("</p>")
            .append("<p>")
            .append(ApplicationProperties.getEmailBody().replaceAll("\n", "</p><p>"))
            .append("</p>");
    if (prospects != null) {
        boolean firstProspect = true;
        for (PersonTemple prospect : prospects) {
            if (firstProspect) {
                // Emit the explanation and open the list lazily so nothing
                // is rendered when the prospects iterable is empty.
                builder.append("<p>")
                        .append(ApplicationProperties.getEmailProspectsExplanation().replaceAll("\n", "</p><p>"))
                        .append("</p><ul>");
                firstProspect = false;
            }
            builder.append("<li><a href='https://familysearch.org/tree/#view=ancestor&person=")
                    .append(prospect.id)
                    .append("'>")
                    .append(prospect.name)
                    .append("</a></li>");
        }
        if (!firstProspect) {
            builder.append("</ul>");
        }
    }
    builder.append("<p>")
            .append(ApplicationProperties.getEmailSignature().replaceAll("\n", "</p><p>"))
            .append("</p>");
    // Null-safe, unlike the original (prospects was checked but this wasn't);
    // plain Iterable.forEach replaces the roundabout spliterator() call.
    if (attachmentUrls != null) {
        attachmentUrls.forEach(url ->
                builder.append("<a href='").append(url).append("'><img style='padding: 5px; max-width: 200px' alt='Attachment' src='").append(url).append("' /></a>"));
    }
    return builder.toString();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pinterest.secor.common;
import com.google.api.client.repackaged.com.google.common.base.Strings;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang.StringUtils;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.TimeZone;
/**
* One-stop shop for Secor configuration options.
*
* @author Pawel Garbacki (pawel@pinterest.com)
*/
public class SecorConfig {
    private final PropertiesConfiguration mProperties;

    // Per-thread lazily-initialized configuration. The configuration file is
    // named by the "config" system property; all other system properties are
    // then overlaid on top of the file-based settings.
    private static final ThreadLocal<SecorConfig> mSecorConfig = new ThreadLocal<SecorConfig>() {
        @Override
        protected SecorConfig initialValue() {
            // Load the default configuration file first
            Properties systemProperties = System.getProperties();
            String configProperty = systemProperties.getProperty("config");
            PropertiesConfiguration properties;
            try {
                properties = new PropertiesConfiguration(configProperty);
            } catch (ConfigurationException e) {
                // Preserve the cause so the original parse/IO failure is not lost.
                throw new RuntimeException("Error loading configuration from " + configProperty, e);
            }
            // System properties take precedence over file-based settings.
            for (final Map.Entry<Object, Object> entry : systemProperties.entrySet()) {
                properties.setProperty(entry.getKey().toString(), entry.getValue());
            }
            return new SecorConfig(properties);
        }
    };

    /**
     * Returns the thread-local configuration instance, loading it on first use.
     *
     * @throws ConfigurationException kept for API compatibility; load failures
     *         are actually surfaced as RuntimeException from initialValue()
     */
    public static SecorConfig load() throws ConfigurationException {
        return mSecorConfig.get();
    }

    /**
     * Exposed for testability
     *
     * @param properties the backing configuration
     */
    public SecorConfig(PropertiesConfiguration properties) {
        mProperties = properties;
    }

    // ---------------------------------------------------------------------
    // Kafka broker / consumer settings
    // ---------------------------------------------------------------------

    public String getKafkaSeedBrokerHost() {
        return getString("kafka.seed.broker.host");
    }

    public int getKafkaSeedBrokerPort() {
        return getInt("kafka.seed.broker.port");
    }

    public String getKafkaZookeeperPath() {
        return getString("kafka.zookeeper.path");
    }

    /** Comma-joined list of ZooKeeper quorum hosts. */
    public String getZookeeperQuorum() {
        return StringUtils.join(getStringArray("zookeeper.quorum"), ',');
    }

    public int getConsumerTimeoutMs() {
        return getInt("kafka.consumer.timeout.ms");
    }

    public String getPartitionAssignmentStrategy() {
        return getString("kafka.partition.assignment.strategy");
    }

    public String getRebalanceMaxRetries() {
        return getString("kafka.rebalance.max.retries");
    }

    public String getRebalanceBackoffMs() {
        return getString("kafka.rebalance.backoff.ms");
    }

    public String getFetchMessageMaxBytes() {
        return getString("kafka.fetch.message.max.bytes");
    }

    public String getSocketReceiveBufferBytes() {
        return getString("kafka.socket.receive.buffer.bytes");
    }

    public String getFetchMinBytes() {
        return getString("kafka.fetch.min.bytes");
    }

    public String getFetchWaitMaxMs() {
        return getString("kafka.fetch.wait.max.ms");
    }

    public String getDualCommitEnabled() {
        return getString("kafka.dual.commit.enabled");
    }

    public String getOffsetsStorage() {
        return getString("kafka.offsets.storage");
    }

    // ---------------------------------------------------------------------
    // Core Secor settings
    // ---------------------------------------------------------------------

    public int getGeneration() {
        return getInt("secor.generation");
    }

    public int getConsumerThreads() {
        return getInt("secor.consumer.threads");
    }

    public long getMaxFileSizeBytes() {
        return getLong("secor.max.file.size.bytes");
    }

    public long getMaxFileAgeSeconds() {
        return getLong("secor.max.file.age.seconds");
    }

    public boolean getFileAgeYoungest() {
        return getBoolean("secor.file.age.youngest");
    }

    public long getOffsetsPerPartition() {
        return getLong("secor.offsets.per.partition");
    }

    public int getMessagesPerSecond() {
        return getInt("secor.messages.per.second");
    }

    public String getS3FileSystem() {
        return getString("secor.s3.filesystem");
    }

    /** Whether each topic gets its own Swift container. */
    public boolean getSeparateContainersForTopics() {
        // Boolean.parseBoolean is an exact, locale-independent replacement for
        // the original toLowerCase().equals("true").
        return Boolean.parseBoolean(getString("secor.swift.containers.for.each.topic"));
    }

    public String getSwiftContainer() {
        return getString("secor.swift.container");
    }

    public String getSwiftPath() {
        return getString("secor.swift.path");
    }

    public String getS3Bucket() {
        return getString("secor.s3.bucket");
    }

    public String getS3Path() {
        return getString("secor.s3.path");
    }

    public String getS3AlternativePath() {
        return getString("secor.s3.alternative.path");
    }

    public String getS3AlterPathDate() {
        return getString("secor.s3.alter.path.date");
    }

    /** Fully-qualified upload prefix, e.g. "s3://bucket/path". */
    public String getS3Prefix() {
        return getS3FileSystem() + "://" + getS3Bucket() + "/" + getS3Path();
    }

    public String getLocalPath() {
        return getString("secor.local.path");
    }

    public String getKafkaTopicFilter() {
        return getString("secor.kafka.topic_filter");
    }

    public String getKafkaTopicBlacklist() {
        return getString("secor.kafka.topic_blacklist");
    }

    public String getKafkaTopicUploadAtMinuteMarkFilter() {
        return getString("secor.kafka.upload_at_minute_mark.topic_filter");
    }

    public int getUploadMinuteMark() {
        return getInt("secor.upload.minute_mark");
    }

    public String getKafkaGroup() {
        return getString("secor.kafka.group");
    }

    public int getZookeeperSessionTimeoutMs() {
        return getInt("zookeeper.session.timeout.ms");
    }

    public int getZookeeperSyncTimeMs() {
        return getInt("zookeeper.sync.time.ms");
    }

    public String getMessageParserClass() {
        return getString("secor.message.parser.class");
    }

    public String getUploaderClass() {
        return getString("secor.upload.class", "com.pinterest.secor.uploader.Uploader");
    }

    public String getUploadManagerClass() {
        return getString("secor.upload.manager.class");
    }

    public String getMessageTransformerClass() {
        return getString("secor.message.transformer.class");
    }

    public int getTopicPartitionForgetSeconds() {
        return getInt("secor.topic_partition.forget.seconds");
    }

    public int getLocalLogDeleteAgeHours() {
        return getInt("secor.local.log.delete.age.hours");
    }

    public String getFileExtension() {
        return getString("secor.file.extension");
    }

    public int getOstrichPort() {
        return getInt("ostrich.port");
    }

    public String getCloudService() {
        return getString("cloud.service");
    }

    // ---------------------------------------------------------------------
    // AWS settings
    // ---------------------------------------------------------------------

    public String getAwsAccessKey() {
        return getString("aws.access.key");
    }

    public String getAwsSecretKey() {
        return getString("aws.secret.key");
    }

    public String getAwsEndpoint() {
        return getString("aws.endpoint");
    }

    public String getAwsRole() {
        return getString("aws.role");
    }

    public boolean getAwsProxyEnabled() {
        return getBoolean("aws.proxy.isEnabled");
    }

    public String getAwsProxyHttpHost() {
        return getString("aws.proxy.http.host");
    }

    public int getAwsProxyHttpPort() {
        return getInt("aws.proxy.http.port");
    }

    public String getAwsRegion() {
        return getString("aws.region");
    }

    public String getAwsSseType() {
        return getString("aws.sse.type");
    }

    public String getAwsSseKmsKey() {
        return getString("aws.sse.kms.key");
    }

    public String getAwsSseCustomerKey() {
        return getString("aws.sse.customer.key");
    }

    // ---------------------------------------------------------------------
    // OpenStack Swift settings
    // ---------------------------------------------------------------------

    public String getSwiftTenant() {
        return getString("swift.tenant");
    }

    public String getSwiftUsername() {
        return getString("swift.username");
    }

    public String getSwiftPassword() {
        return getString("swift.password");
    }

    public String getSwiftAuthUrl() {
        return getString("swift.auth.url");
    }

    public String getSwiftPublic() {
        return getString("swift.public");
    }

    public String getSwiftPort() {
        return getString("swift.port");
    }

    public String getSwiftGetAuth() {
        return getString("swift.use.get.auth");
    }

    public String getSwiftApiKey() {
        return getString("swift.api.key");
    }

    // ---------------------------------------------------------------------
    // Monitoring / misc settings
    // ---------------------------------------------------------------------

    public String getQuboleApiToken() {
        return getString("qubole.api.token");
    }

    public String getTsdbHostport() {
        return getString("tsdb.hostport");
    }

    public String getStatsDHostPort() {
        return getString("statsd.hostport");
    }

    public String getMonitoringBlacklistTopics() {
        return getString("monitoring.blacklist.topics");
    }

    public String getMonitoringPrefix() {
        return getString("monitoring.prefix");
    }

    public String getMessageTimestampName() {
        return getString("message.timestamp.name");
    }

    public String getMessageTimestampNameSeparator() {
        return getString("message.timestamp.name.separator");
    }

    public int getMessageTimestampId() {
        return getInt("message.timestamp.id");
    }

    public String getMessageTimestampType() {
        return getString("message.timestamp.type");
    }

    public String getMessageTimestampInputPattern() {
        return getString("message.timestamp.input.pattern");
    }

    public boolean isMessageTimestampRequired() {
        return mProperties.getBoolean("message.timestamp.required");
    }

    public int getFinalizerLookbackPeriods() {
        return getInt("secor.finalizer.lookback.periods", 10);
    }

    public String getHivePrefix() {
        return getString("secor.hive.prefix");
    }

    /** Returns the configured Hive table name for a topic, or null if unset. */
    public String getHiveTableName(String topic) {
        String key = "secor.hive.table.name." + topic;
        return mProperties.getString(key, null);
    }

    public boolean getQuboleEnabled() {
        return getBoolean("secor.enable.qubole");
    }

    public long getQuboleTimeoutMs() {
        return getLong("secor.qubole.timeout.ms");
    }

    public String getCompressionCodec() {
        return getString("secor.compression.codec");
    }

    public int getMaxMessageSizeBytes() {
        return getInt("secor.max.message.size.bytes");
    }

    public String getFileReaderWriterFactory() {
        return getString("secor.file.reader.writer.factory");
    }

    public String getPerfTestTopicPrefix() {
        return getString("secor.kafka.perf_topic_prefix");
    }

    public String getZookeeperPath() {
        return getString("secor.zookeeper.path");
    }

    // ---------------------------------------------------------------------
    // Google Cloud Storage settings
    // ---------------------------------------------------------------------

    public String getGsCredentialsPath() {
        return getString("secor.gs.credentials.path");
    }

    public String getGsBucket() {
        return getString("secor.gs.bucket");
    }

    public String getGsPath() {
        return getString("secor.gs.path");
    }

    public int getGsConnectTimeoutInMs() {
        return getInt("secor.gs.connect.timeout.ms", 3 * 60000);
    }

    public int getGsReadTimeoutInMs() {
        return getInt("secor.gs.read.timeout.ms", 3 * 60000);
    }

    public boolean getGsDirectUpload() {
        return getBoolean("secor.gs.upload.direct");
    }

    public int getFinalizerDelaySeconds() {
        return getInt("partitioner.finalizer.delay.seconds");
    }

    public boolean getS3MD5HashPrefix() {
        return getBoolean("secor.s3.prefix.md5hash");
    }

    // ---------------------------------------------------------------------
    // Azure settings
    // ---------------------------------------------------------------------

    public String getAzureEndpointsProtocol() {
        return getString("secor.azure.endpoints.protocol");
    }

    public String getAzureAccountName() {
        return getString("secor.azure.account.name");
    }

    public String getAzureAccountKey() {
        return getString("secor.azure.account.key");
    }

    public String getAzureContainer() {
        return getString("secor.azure.container.name");
    }

    public String getAzurePath() {
        return getString("secor.azure.path");
    }

    /**
     * Maps topic name to protobuf message class, from keys of the form
     * "secor.protobuf.message.class.&lt;topic&gt;".
     */
    public Map<String, String> getProtobufMessageClassPerTopic() {
        String prefix = "secor.protobuf.message.class";
        Iterator<String> keys = mProperties.getKeys(prefix);
        Map<String, String> protobufClasses = new HashMap<String, String>();
        while (keys.hasNext()) {
            String key = keys.next();
            String className = mProperties.getString(key);
            // Strip "<prefix>." to leave just the topic name as the map key.
            protobufClasses.put(key.substring(prefix.length() + 1), className);
        }
        return protobufClasses;
    }

    /** Parser time zone; defaults to UTC when unset or empty. */
    public TimeZone getTimeZone() {
        String timezone = getString("secor.parser.timezone");
        // Plain null/empty check instead of the repackaged Guava Strings class.
        return (timezone == null || timezone.isEmpty())
                ? TimeZone.getTimeZone("UTC") : TimeZone.getTimeZone(timezone);
    }

    // ---------------------------------------------------------------------
    // Typed accessors over the backing PropertiesConfiguration
    // ---------------------------------------------------------------------

    public boolean getBoolean(String name, boolean defaultValue) {
        return mProperties.getBoolean(name, defaultValue);
    }

    public boolean getBoolean(String name) {
        return mProperties.getBoolean(name);
    }

    /** Fails fast with a descriptive message when a required option is missing. */
    public void checkProperty(String name) {
        if (!mProperties.containsKey(name)) {
            throw new RuntimeException("Failed to find required configuration option '" +
                                       name + "'.");
        }
    }

    public String getString(String name) {
        checkProperty(name);
        return mProperties.getString(name);
    }

    public String getString(String name, String defaultValue) {
        return mProperties.getString(name, defaultValue);
    }

    public int getInt(String name) {
        checkProperty(name);
        return mProperties.getInt(name);
    }

    public int getInt(String name, int defaultValue) {
        return mProperties.getInt(name, defaultValue);
    }

    // NOTE(review): unlike getString/getInt this does not call checkProperty,
    // so a missing key surfaces as the underlying library's exception instead.
    public long getLong(String name) {
        return mProperties.getLong(name);
    }

    public String[] getStringArray(String name) {
        return mProperties.getStringArray(name);
    }
}
| |
package io.dropwizard.testing.common;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.json.JacksonJsonProvider;
import io.dropwizard.jackson.Jackson;
import io.dropwizard.jersey.validation.Validators;
import io.dropwizard.logging.BootstrapLogging;
import io.dropwizard.testing.junit5.ResourceExtension;
import org.glassfish.jersey.client.ClientConfig;
import org.glassfish.jersey.servlet.ServletProperties;
import org.glassfish.jersey.test.DeploymentContext;
import org.glassfish.jersey.test.JerseyTest;
import org.glassfish.jersey.test.ServletDeploymentContext;
import org.glassfish.jersey.test.inmemory.InMemoryTestContainerFactory;
import org.glassfish.jersey.test.spi.TestContainerFactory;
import javax.annotation.Nullable;
import javax.validation.Validator;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.WebTarget;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
import static java.util.Objects.requireNonNull;
public class Resource {
static {
BootstrapLogging.bootstrap();
}
/**
* A {@link Resource} builder which enables configuration of a Jersey testing environment.
*/
@SuppressWarnings("unchecked")
public static class Builder<B extends Builder<B>> {
private final Set<Object> singletons = new HashSet<>();
private final Set<Class<?>> providers = new HashSet<>();
private final Map<String, Object> properties = new HashMap<>();
private ObjectMapper mapper = Jackson.newObjectMapper();
private Validator validator = Validators.newValidator();
private Consumer<ClientConfig> clientConfigurator = c -> {
};
private TestContainerFactory testContainerFactory = new InMemoryTestContainerFactory();
private boolean registerDefaultExceptionMappers = true;
public B setMapper(ObjectMapper mapper) {
this.mapper = mapper;
return (B) this;
}
public B setValidator(Validator validator) {
this.validator = validator;
return (B) this;
}
public B setClientConfigurator(Consumer<ClientConfig> clientConfigurator) {
this.clientConfigurator = clientConfigurator;
return (B) this;
}
public B addResource(Object resource) {
singletons.add(resource);
return (B) this;
}
public B addProvider(Class<?> klass) {
providers.add(klass);
return (B) this;
}
public B addProvider(Object provider) {
singletons.add(provider);
return (B) this;
}
public B addProperty(String property, Object value) {
properties.put(property, value);
return (B) this;
}
public B setTestContainerFactory(TestContainerFactory factory) {
this.testContainerFactory = factory;
return (B) this;
}
public B setRegisterDefaultExceptionMappers(boolean value) {
registerDefaultExceptionMappers = value;
return (B) this;
}
/**
* Builds a {@link Resource} with a configured Jersey testing environment.
*
* @return a new {@link Resource}
*/
protected Resource buildResource() {
return new Resource(new ResourceTestJerseyConfiguration(
singletons, providers, properties, mapper, validator,
clientConfigurator, testContainerFactory, registerDefaultExceptionMappers));
}
}
/**
* Creates a new Jersey testing environment builder for {@link ResourceExtension}
*
* @return a new {@link ResourceExtension.Builder}
*/
public static ResourceExtension.Builder builder() {
return new ResourceExtension.Builder();
}
private ResourceTestJerseyConfiguration configuration;
@Nullable
private JerseyTest test;
private Resource(ResourceTestJerseyConfiguration configuration) {
this.configuration = configuration;
}
public Validator getValidator() {
return configuration.validator;
}
public ObjectMapper getObjectMapper() {
return configuration.mapper;
}
public Consumer<ClientConfig> getClientConfigurator() {
return configuration.clientConfigurator;
}
/**
* Creates a web target to be sent to the resource under testing.
*
* @param path relative path (from tested application base URI) this web target should point to.
* @return the created JAX-RS web target.
*/
public WebTarget target(String path) {
return getJerseyTest().target(path);
}
/**
* Returns the pre-configured {@link Client} for this test. For sending
* requests prefer {@link #target(String)}
*
* @return the {@link JerseyTest} configured {@link Client}
*/
public Client client() {
return getJerseyTest().client();
}
/**
* Returns the underlying {@link JerseyTest}. For sending requests prefer
* {@link #target(String)}.
*
* @return the underlying {@link JerseyTest}
*/
public JerseyTest getJerseyTest() {
return requireNonNull(test);
}
public void before() throws Throwable {
DropwizardTestResourceConfig.CONFIGURATION_REGISTRY.put(configuration.getId(), configuration);
test = new JerseyTest() {
@Override
protected TestContainerFactory getTestContainerFactory() {
return configuration.testContainerFactory;
}
@Override
protected DeploymentContext configureDeployment() {
return ServletDeploymentContext.builder(new DropwizardTestResourceConfig(configuration))
.initParam(ServletProperties.JAXRS_APPLICATION_CLASS,
DropwizardTestResourceConfig.class.getName())
.initParam(DropwizardTestResourceConfig.CONFIGURATION_ID, configuration.getId())
.build();
}
@Override
protected void configureClient(ClientConfig clientConfig) {
final JacksonJsonProvider jsonProvider = new JacksonJsonProvider();
jsonProvider.setMapper(configuration.mapper);
configuration.clientConfigurator.accept(clientConfig);
clientConfig.register(jsonProvider);
}
};
test.setUp();
}
public void after() throws Throwable {
DropwizardTestResourceConfig.CONFIGURATION_REGISTRY.remove(configuration.getId());
requireNonNull(test).tearDown();
}
}
| |
/*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.program;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.hisp.dhis.audit.AuditAttribute;
import org.hisp.dhis.audit.AuditScope;
import org.hisp.dhis.audit.Auditable;
import org.hisp.dhis.common.BaseIdentifiableObject;
import org.hisp.dhis.common.DxfNamespaces;
import org.hisp.dhis.common.SoftDeletableObject;
import org.hisp.dhis.event.EventStatus;
import org.hisp.dhis.message.MessageConversation;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.relationship.RelationshipItem;
import org.hisp.dhis.trackedentity.TrackedEntityInstance;
import org.hisp.dhis.trackedentitycomment.TrackedEntityComment;
import org.locationtech.jts.geom.Geometry;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlElementWrapper;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlRootElement;
/**
* @author Abyot Asalefew
*/
@Auditable( scope = AuditScope.TRACKER )
@JacksonXmlRootElement( localName = "programInstance", namespace = DxfNamespaces.DXF_2_0 )
public class ProgramInstance
extends SoftDeletableObject
{
// Client-reported creation/update timestamps (kept separate from the
// server-side created/lastUpdated inherited fields; see setAutoFields()).
private Date createdAtClient;
private Date lastUpdatedAtClient;
// Enrollment lifecycle state; new instances start out ACTIVE.
private ProgramStatus status = ProgramStatus.ACTIVE;
@AuditAttribute
private OrganisationUnit organisationUnit;
// Date of the underlying incident, date of enrollment, and end of enrollment.
private Date incidentDate;
private Date enrollmentDate;
private Date endDate;
// Snapshots of the users who created / last updated this enrollment.
private UserInfoSnapshot createdByUserInfo;
private UserInfoSnapshot lastUpdatedByUserInfo;
// The tracked entity being enrolled and the program it is enrolled into.
@AuditAttribute
private TrackedEntityInstance entityInstance;
@AuditAttribute
private Program program;
// Events (stage instances) belonging to this enrollment. NOTE: HashSet —
// iteration order is unspecified (relevant to getProgramStageInstanceByStage).
private Set<ProgramStageInstance> programStageInstances = new HashSet<>();
private Set<RelationshipItem> relationshipItems = new HashSet<>();
private List<MessageConversation> messageConversations = new ArrayList<>();
// Whether this enrollment is flagged for follow-up.
private Boolean followup = false;
private List<TrackedEntityComment> comments = new ArrayList<>();
// User names recorded for completion and storage of this enrollment.
private String completedBy;
private Geometry geometry;
private String storedBy;
// -------------------------------------------------------------------------
// Constructors
// -------------------------------------------------------------------------
// No-arg constructor (presumably required by persistence/serialization
// frameworks — confirm against mapping configuration).
public ProgramInstance()
{
}
// Creates an enrollment with its dates, tracked entity instance and program.
public ProgramInstance( Date enrollmentDate, Date incidentDate, TrackedEntityInstance entityInstance,
Program program )
{
this.enrollmentDate = enrollmentDate;
this.incidentDate = incidentDate;
this.entityInstance = entityInstance;
this.program = program;
}
// Creates an enrollment owned by the given organisation unit.
public ProgramInstance( Program program, TrackedEntityInstance entityInstance, OrganisationUnit organisationUnit )
{
this.program = program;
this.entityInstance = entityInstance;
this.organisationUnit = organisationUnit;
}
@Override
public void setAutoFields()
{
    super.setAutoFields();

    // Client-side timestamps fall back to the server-side values: the client
    // creation time is only filled in once, the update time on every call.
    createdAtClient = createdAtClient == null ? created : createdAtClient;
    lastUpdatedAtClient = lastUpdated;
}
// -------------------------------------------------------------------------
// Logic
// -------------------------------------------------------------------------
/**
 * Wires up the bi-directional associations between this program instance
 * and the given entity instance and program.
 *
 * @param entityInstance the entity instance to enroll.
 * @param program the program to enroll the entity instance to.
 */
public void enrollTrackedEntityInstance( TrackedEntityInstance entityInstance, Program program )
{
    setProgram( program );
    setEntityInstance( entityInstance );

    // Maintain the inverse side of the association as well.
    entityInstance.getProgramInstances().add( this );
}
/**
 * Indicates whether this enrollment has reached the COMPLETED status.
 */
public boolean isCompleted()
{
    return ProgramStatus.COMPLETED == status;
}
/**
 * Returns the stage-th element (1-based) encountered while iterating this
 * enrollment's stage instances, or null when stage is out of range.
 *
 * NOTE(review): programStageInstances is a HashSet, so iteration order is
 * unspecified — the element returned for a given stage number is not
 * guaranteed to be stable across calls or JVM runs. Confirm that callers
 * do not depend on a fixed ordering.
 *
 * @param stage 1-based position in iteration order.
 */
public ProgramStageInstance getProgramStageInstanceByStage( int stage )
{
int count = 1;
for ( ProgramStageInstance programInstanceStage : programStageInstances )
{
if ( count == stage )
{
return programInstanceStage;
}
count++;
}
return null;
}
/**
 * Returns the stage instance considered "active" for this enrollment:
 * first preference is a non-completed, non-skipped instance whose stage is
 * flagged open-after-enrollment; otherwise any non-completed, non-skipped
 * instance; null when no candidate exists.
 */
public ProgramStageInstance getActiveProgramStageInstance()
{
    for ( ProgramStageInstance programStageInstance : programStageInstances )
    {
        if ( programStageInstance.getProgramStage().getOpenAfterEnrollment()
            && isActiveCandidate( programStageInstance ) )
        {
            return programStageInstance;
        }
    }

    for ( ProgramStageInstance programStageInstance : programStageInstances )
    {
        if ( isActiveCandidate( programStageInstance ) )
        {
            return programStageInstance;
        }
    }

    return null;
}

/**
 * A stage instance is an "active" candidate when it is not completed and
 * its status is present and not SKIPPED. Extracted to remove the predicate
 * duplicated across the two selection loops above.
 */
private static boolean isActiveCandidate( ProgramStageInstance programStageInstance )
{
    return !programStageInstance.isCompleted()
        && programStageInstance.getStatus() != null
        && programStageInstance.getStatus() != EventStatus.SKIPPED;
}
/**
 * Indicates whether this enrollment has a non-deleted stage instance in
 * ACTIVE status for the given program stage (matched by UID,
 * case-insensitively).
 */
public boolean hasActiveProgramStageInstance( ProgramStage programStage )
{
    final String stageUid = programStage.getUid();

    return programStageInstances.stream()
        .filter( instance -> !instance.isDeleted() )
        .filter( instance -> instance.getProgramStage().getUid().equalsIgnoreCase( stageUid ) )
        .anyMatch( instance -> instance.getStatus() == EventStatus.ACTIVE );
}
// -------------------------------------------------------------------------
// equals and hashCode
// -------------------------------------------------------------------------
@Override
public int hashCode()
{
    final int prime = 31;
    int result = super.hashCode();

    // Same fields, same order and same null handling as the classic
    // hand-rolled expression, so hash values are unchanged.
    for ( Object field : new Object[] { incidentDate, enrollmentDate, entityInstance, program } )
    {
        result = prime * result + (field == null ? 0 : field.hashCode());
    }

    return result;
}
/**
 * Equality is based on the superclass identity plus incident date,
 * enrollment date, tracked entity instance and program. Note the
 * isAssignableFrom type check is kept exactly as before (it accepts
 * subclasses of the runtime class).
 */
@Override
public boolean equals( Object object )
{
    if ( this == object )
    {
        return true;
    }

    if ( object == null )
    {
        return false;
    }

    if ( !getClass().isAssignableFrom( object.getClass() ) )
    {
        return false;
    }

    final ProgramInstance other = (ProgramInstance) object;

    return nullSafeEquals( incidentDate, other.incidentDate )
        && nullSafeEquals( enrollmentDate, other.enrollmentDate )
        && nullSafeEquals( entityInstance, other.entityInstance )
        && nullSafeEquals( program, other.program );
}

/**
 * Null-tolerant equality: two nulls are equal; null never equals non-null.
 */
private static boolean nullSafeEquals( Object a, Object b )
{
    return a == null ? b == null : a.equals( b );
}
// -------------------------------------------------------------------------
// Getters and setters
// -------------------------------------------------------------------------
@JsonProperty
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public Date getCreatedAtClient()
{
return createdAtClient;
}
public void setCreatedAtClient( Date createdAtClient )
{
this.createdAtClient = createdAtClient;
}
@JsonProperty
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public Date getLastUpdatedAtClient()
{
return lastUpdatedAtClient;
}
public void setLastUpdatedAtClient( Date lastUpdatedAtClient )
{
this.lastUpdatedAtClient = lastUpdatedAtClient;
}
@JsonProperty
@JsonSerialize( as = BaseIdentifiableObject.class )
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public OrganisationUnit getOrganisationUnit()
{
return organisationUnit;
}
public ProgramInstance setOrganisationUnit( OrganisationUnit organisationUnit )
{
this.organisationUnit = organisationUnit;
return this;
}
@JsonProperty
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public Date getIncidentDate()
{
return incidentDate;
}
public void setIncidentDate( Date incidentDate )
{
this.incidentDate = incidentDate;
}
@JsonProperty
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public Date getEnrollmentDate()
{
return enrollmentDate;
}
public void setEnrollmentDate( Date enrollmentDate )
{
this.enrollmentDate = enrollmentDate;
}
@JsonProperty
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public Date getEndDate()
{
return endDate;
}
public void setEndDate( Date endDate )
{
this.endDate = endDate;
}
@JsonProperty
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public UserInfoSnapshot getCreatedByUserInfo()
{
return createdByUserInfo;
}
public void setCreatedByUserInfo( UserInfoSnapshot createdByUserInfo )
{
this.createdByUserInfo = createdByUserInfo;
}
/**
 * Snapshot of the user who last updated this enrollment.
 */
@JsonProperty
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public UserInfoSnapshot getLastUpdatedByUserInfo()
{
    return this.lastUpdatedByUserInfo;
}

public void setLastUpdatedByUserInfo( UserInfoSnapshot lastUpdatedByUserInfo )
{
    this.lastUpdatedByUserInfo = lastUpdatedByUserInfo;
}
/**
 * Current status of this enrollment.
 */
@JsonProperty
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public ProgramStatus getStatus()
{
    return this.status;
}

public void setStatus( ProgramStatus status )
{
    this.status = status;
}
/**
 * The tracked entity instance enrolled in the program. Serialized under the
 * name "trackedEntityInstance" as an identifiable object reference.
 */
@JsonProperty( "trackedEntityInstance" )
@JsonSerialize( as = BaseIdentifiableObject.class )
@JacksonXmlProperty( localName = "trackedEntityInstance", namespace = DxfNamespaces.DXF_2_0 )
public TrackedEntityInstance getEntityInstance()
{
    return this.entityInstance;
}

public void setEntityInstance( TrackedEntityInstance entityInstance )
{
    this.entityInstance = entityInstance;
}
/**
 * The program this enrollment refers to. Serialized as an identifiable
 * object reference.
 */
@JsonProperty
@JsonSerialize( as = BaseIdentifiableObject.class )
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public Program getProgram()
{
    return this.program;
}

public void setProgram( Program program )
{
    this.program = program;
}
/**
 * Program stage instances (events) belonging to this enrollment.
 */
@JsonProperty
@JacksonXmlElementWrapper( localName = "programStageInstances", namespace = DxfNamespaces.DXF_2_0 )
@JacksonXmlProperty( localName = "programStageInstance", namespace = DxfNamespaces.DXF_2_0 )
public Set<ProgramStageInstance> getProgramStageInstances()
{
    return this.programStageInstances;
}

public void setProgramStageInstances( Set<ProgramStageInstance> programStageInstances )
{
    this.programStageInstances = programStageInstances;
}
/**
 * Follow-up flag for this enrollment; boxed, so may be {@code null} when unset.
 */
@JsonProperty
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public Boolean getFollowup()
{
    return this.followup;
}

public void setFollowup( Boolean followup )
{
    this.followup = followup;
}
/**
 * Message conversations linked to this enrollment.
 */
@JsonProperty
@JacksonXmlElementWrapper( localName = "messageConversations", namespace = DxfNamespaces.DXF_2_0 )
@JacksonXmlProperty( localName = "messageConversation", namespace = DxfNamespaces.DXF_2_0 )
public List<MessageConversation> getMessageConversations()
{
    return this.messageConversations;
}

public void setMessageConversations( List<MessageConversation> messageConversations )
{
    this.messageConversations = messageConversations;
}
/**
 * Notes/comments attached to this enrollment. Serialized under the name
 * "trackedEntityComments".
 */
@JsonProperty( "trackedEntityComments" )
@JacksonXmlElementWrapper( localName = "trackedEntityComments", namespace = DxfNamespaces.DXF_2_0 )
@JacksonXmlProperty( localName = "trackedEntityComment", namespace = DxfNamespaces.DXF_2_0 )
public List<TrackedEntityComment> getComments()
{
    return this.comments;
}

public void setComments( List<TrackedEntityComment> comments )
{
    this.comments = comments;
}
/**
 * Name of the user who completed this enrollment.
 */
@JsonProperty
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public String getCompletedBy()
{
    return this.completedBy;
}

public void setCompletedBy( String completedBy )
{
    this.completedBy = completedBy;
}
/**
 * Geographic geometry associated with this enrollment.
 */
@JsonProperty
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public Geometry getGeometry()
{
    return this.geometry;
}

public void setGeometry( Geometry geometry )
{
    this.geometry = geometry;
}
/**
 * Name of the user who stored this enrollment.
 */
@JsonProperty
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public String getStoredBy()
{
    return this.storedBy;
}

public void setStoredBy( String storedBy )
{
    this.storedBy = storedBy;
}
/**
 * Relationship items in which this enrollment participates.
 */
@JsonProperty
@JacksonXmlElementWrapper( localName = "relationshipItems", namespace = DxfNamespaces.DXF_2_0 )
@JacksonXmlProperty( localName = "relationshipItem", namespace = DxfNamespaces.DXF_2_0 )
public Set<RelationshipItem> getRelationshipItems()
{
    return this.relationshipItems;
}

public void setRelationshipItems( Set<RelationshipItem> relationshipItems )
{
    this.relationshipItems = relationshipItems;
}
/**
 * Debug representation. Prints uid references for the organisation unit and
 * tracked entity instance (null-safe) rather than their full toString output.
 */
@Override
public String toString()
{
    StringBuilder sb = new StringBuilder( "ProgramInstance{" );
    sb.append( "id=" ).append( id );
    sb.append( ", uid='" ).append( uid ).append( '\'' );
    sb.append( ", code='" ).append( code ).append( '\'' );
    sb.append( ", name='" ).append( name ).append( '\'' );
    sb.append( ", created=" ).append( created );
    sb.append( ", lastUpdated=" ).append( lastUpdated );
    sb.append( ", status=" ).append( status );
    sb.append( ", organisationUnit=" ).append( organisationUnit != null ? organisationUnit.getUid() : "null" );
    sb.append( ", incidentDate=" ).append( incidentDate );
    sb.append( ", enrollmentDate=" ).append( enrollmentDate );
    sb.append( ", entityInstance=" ).append( entityInstance != null ? entityInstance.getUid() : "null" );
    sb.append( ", program=" ).append( program );
    sb.append( ", deleted=" ).append( isDeleted() );
    sb.append( ", storedBy='" ).append( storedBy ).append( '\'' );
    sb.append( '}' );
    return sb.toString();
}
}
| |
/*
Copyright 2013 Industrie IT Pty Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.industrieit.ohr;
import com.industrieit.ohr.array.LongInlineOHRArray;
import com.industrieit.ohr.array.OHRLongArray;
import com.industrieit.ohr.array.BooleanInlineOHRArray;
import com.industrieit.ohr.array.OHRBooleanArray;
import com.industrieit.ohr.array.ByteInlineOHRArray;
import com.industrieit.ohr.array.OHRByteArray;
import com.industrieit.ohr.array.DoubleInlineOHRArray;
import com.industrieit.ohr.array.OHRDoubleArray;
import com.industrieit.ohr.array.FloatInlineOHRArray;
import com.industrieit.ohr.array.OHRFloatArray;
import com.industrieit.ohr.array.IntInlineOHRArray;
import com.industrieit.ohr.array.IntInlineOHRArrayUnchecked;
import com.industrieit.ohr.array.OHRIntArray;
import com.industrieit.ohr.array.ShortInlineOHRArray;
import com.industrieit.ohr.array.OHRShortArray;
import java.beans.BeanInfo;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.io.Externalizable;
import javassist.CtClass;
import javassist.CtMethod;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.StringTokenizer;
import javassist.CannotCompileException;
import static javassist.ClassPool.getDefault;
import javassist.CtConstructor;
import javassist.CtField;
import javassist.CtNewMethod;
import javassist.NotFoundException;
import org.apache.commons.io.IOUtils;
import sun.misc.Unsafe;
/**
 * Runtime bytecode generator ("reifier") for OHR off-heap objects.
 *
 * <p>For each abstract bean class handed to {@link #ohr(Class)} this emits and
 * loads a concrete subclass named {@code ohr.<original name>} whose property
 * accessors read/write a fixed byte offset in an off-heap record via
 * {@code sun.misc.Unsafe}, instead of ordinary Java fields. Inline strings,
 * inline primitive arrays, primitives and object references each get their own
 * layout treatment; object references are recorded as "handle offsets".
 *
 * <p>NOTE(review): {@code processed2} and {@code cls} are read and written in
 * {@link #ohr(Class)} without synchronization; only the class-id counter is
 * guarded. Confirm that callers serialize class generation externally.
 */
public class OHRJavassister {

    /** Registry of generated/registered classes, indexed by assigned class id. */
    private static Class[] cls = new Class[1000];

    /** Next class id; ids up to 33 are reserved. */
    private static int clcounter = 33;

    /** Cache: original class -> generated class, and generated -> generated. */
    private static IdentityHashMap<Class, Class> processed2 = new IdentityHashMap<Class, Class>();

    /**
     * Layout order for properties in the off-heap record. Wider and
     * reference-like types come first so natural alignment is preserved as the
     * byte offset advances through the record.
     */
    private static Class[] propertyOrdering = {
        OHRBase.class,
        OHRLongArray.class,
        OHRDoubleArray.class,
        Object.class,
        long.class, double.class,
        int.class,
        OHRIntArray.class,
        float.class,
        OHRFloatArray.class,
        short.class, OHRShortArray.class, String.class, CharSequence.class, byte.class, OHRByteArray.class, boolean.class, OHRBooleanArray.class };

    /** Returns the class registered under the given class id. */
    public static Class getClassForId(int id) {
        return cls[id];
    }

    private static Object incMutex = new Object();

    /** Atomically increments and returns the next class id. */
    public static int incrementClsCounter() {
        synchronized (incMutex) {
            clcounter++;
            return clcounter;
        }
    }

    /** Registers an externally generated OHR class and returns its assigned id. */
    static int registerExternalOHR(Class cl) {
        int cnt = incrementClsCounter();
        cls[cnt] = cl;
        return cnt;
    }

    /**
     * Returns (generating, writing to {@code target/classes} and loading on
     * first use) the concrete off-heap implementation class for the given
     * abstract bean class. The generated class implements {@link OHRBase} and
     * {@link Externalizable} and maps each bean property to a fixed byte
     * offset, starting after an 8-byte header.
     *
     * @param cll the abstract class to reify (with or without the {@code ohr.} prefix)
     * @return the generated implementation class
     * @throws RuntimeException wrapping any reflection/javassist/IO failure,
     *         or if the class is not abstract
     */
    public static Class ohr(Class cll) {
        try {
            if (processed2.containsKey(cll)) {
                return processed2.get(cll);
            }
            // byte offsets that hold references (handles) to other OHR objects
            HashSet<Long> handleOffsets = new HashSet<Long>();
            String cnam = cll.getName();
            if (!cnam.startsWith("ohr.")) {
                cnam = "ohr." + cll.getName();
            }
            // the original abstract class (name without the "ohr." prefix)
            Class cl = Class.forName(cnam.substring(4));
            int clnumber = incrementClsCounter();
            // indexes of properties annotated @Owned (freed with the parent)
            List<Integer> owned = new ArrayList<Integer>();
            // remove the old implementation if it is around from another process
            String fname = "target/classes/" + cnam.replace(".", "/") + ".class";
            System.out.println("deleted" + fname + " " + (new File(fname).delete()));
            if (!Modifier.isAbstract(cl.getModifiers())) {
                throw new RuntimeException("not an abstract class " + cl.getName());
            }
            System.out.println("processing ohr " + cnam);
            CtClass bc = getDefault().getCtClass(cl.getName());
            CtClass cc = getDefault().makeClass(cnam, bc);
            // bodies of the generated internalInit() method and instance initializer
            StringBuilder initBuilder = new StringBuilder();
            initBuilder.append("public void internalInit() {\n");
            StringBuilder constructBuilder = new StringBuilder();
            constructBuilder.append("{");
            String intname = OHRBase.class.getName();
            System.out.println("intername is " + intname);
            CtClass ci = getDefault().getCtClass(intname);
            CtClass extern = getDefault().getCtClass(Externalizable.class.getName());
            cc.setInterfaces(new CtClass[]{ci, extern});
            cc.setSuperclass(bc);
            // add base implementation methods and properties
            setBaseMixinsPre(cc, false);
            // first long of the record is reserved for id/header data
            long offset = 8;
            BeanInfo bi = Introspector.getBeanInfo(cl);
            PropertyDescriptor[] pds = bi.getPropertyDescriptors();
            // outer loop fixes the layout order; inner loop picks the matching properties
            for (int co = 0; co < propertyOrdering.length; co++) {
                Class cprop = propertyOrdering[co];
                for (int i = 0; i < pds.length; i++) {
                    String propName = pds[i].getName();
                    if (propName.equals("class")) {
                        continue;
                    }
                    String typ = pds[i].getPropertyType().getName();
                    Class type = pds[i].getPropertyType();
                    if (cprop == Object.class) {
                        // the Object pass handles plain references only; every
                        // specially-laid-out type is skipped here
                        if (type.isPrimitive()) {
                            continue;
                        }
                        if (type == String.class) {
                            continue;
                        }
                        if (type == CharSequence.class) {
                            continue;
                        }
                        if (type == OHRLongArray.class) {
                            continue;
                        }
                        if (type == OHRIntArray.class) {
                            continue;
                        }
                        if (type == OHRShortArray.class) {
                            continue;
                        }
                        if (type == OHRByteArray.class) {
                            continue;
                        }
                        if (type == OHRBooleanArray.class) {
                            continue;
                        }
                        if (type == OHRDoubleArray.class) {
                            continue;
                        }
                        if (type == OHRFloatArray.class) {
                            continue;
                        }
                    } else if (cprop != type) {
                        // not this pass's type
                        continue;
                    }
                    String rname = pds[i].getReadMethod().getName();
                    String wname = null;
                    if (pds[i].getWriteMethod() != null) {
                        wname = pds[i].getWriteMethod().getName();
                    }
                    boolean reifread = isMethodReifAnnotated(pds[i].getReadMethod());
                    boolean reifwrite = isMethodReifAnnotated(pds[i].getWriteMethod());
                    String wcons = getConsistencyAsString(pds[i].getWriteMethod());
                    String rcons = getConsistencyAsString(pds[i].getReadMethod());
                    System.out.println("TYPE " + pds[i].getPropertyType().getName() + " " + pds[i].getPropertyType().getInterfaces());
                    if (pds[i].getPropertyType() == String.class && isInlineString(pds[i])) {
                        // inline string: characters stored directly in the record;
                        // plain Strings are handled as extrefs like any other object
                        System.out.println("ITS An inline string!!!!");
                        int length = pds[i].getWriteMethod().getAnnotation(InlineStringReify.class).length();
                        boolean trim = pds[i].getWriteMethod().getAnnotation(InlineStringReify.class).trimOverflow();
                        boolean ascii = pds[i].getWriteMethod().getAnnotation(InlineStringReify.class).asciiOnly();
                        String wmeth = "public void " + wname + "(" + typ + " o) { ohwritestr" + wcons + "(" + offset + "l,o," + length + "," + trim + "," + ascii + "); }";
                        CtMethod wmethod = CtNewMethod.make(wmeth, cc);
                        cc.addMethod(wmethod);
                        System.out.println(wmeth);
                        String rmeth = "public " + typ + " " + rname + "() { return (" + typ + ") ohreadstr" + rcons + "(" + offset + "l," + ascii + "); }";
                        CtMethod rmethod = CtNewMethod.make(rmeth, cc);
                        cc.addMethod(rmethod);
                        System.out.println(rmeth);
                        int bytesperchar = ascii ? 1 : 2;
                        // 4-byte length marker plus characters, padded to 16 bits
                        int ll = 4 + length * bytesperchar;
                        if (ll % 2 != 0) {
                            ll++;
                        }
                        offset += ll;
                    } else if (pds[i].getPropertyType() == CharSequence.class && isInlineString(pds[i])) {
                        // inline CharSequence: same layout as inline string but
                        // read back via ohreadcs, padded to an 8-byte boundary
                        System.out.println("ITS An inline charsequence!!!!");
                        int length = pds[i].getWriteMethod().getAnnotation(InlineStringReify.class).length();
                        boolean trim = pds[i].getWriteMethod().getAnnotation(InlineStringReify.class).trimOverflow();
                        boolean ascii = pds[i].getWriteMethod().getAnnotation(InlineStringReify.class).asciiOnly();
                        String wmeth = "public void " + wname + "(" + typ + " o) { ohwritestr" + wcons + "(" + offset + "l,o," + length + "," + trim + "," + ascii + "); }";
                        CtMethod wmethod = CtNewMethod.make(wmeth, cc);
                        cc.addMethod(wmethod);
                        System.out.println(wmeth);
                        String rmeth = "public " + typ + " " + rname + "() { return (" + typ + ") ohreadcs" + rcons + "(" + offset + "l," + ascii + "); }";
                        CtMethod rmethod = CtNewMethod.make(rmeth, cc);
                        cc.addMethod(rmethod);
                        System.out.println(rmeth);
                        int bytesperchar = ascii ? 1 : 2;
                        // pad to 8 byte boundary
                        int ll = (int) Math.ceil((4.0 + length * bytesperchar) / 8) * 8;
                        offset += ll;
                    } else if ((pds[i].getPropertyType() == OHRLongArray.class
                            || pds[i].getPropertyType() == OHRIntArray.class
                            || pds[i].getPropertyType() == OHRShortArray.class
                            || pds[i].getPropertyType() == OHRByteArray.class
                            || pds[i].getPropertyType() == OHRFloatArray.class
                            || pds[i].getPropertyType() == OHRDoubleArray.class
                            || pds[i].getPropertyType() == OHRBooleanArray.class
                    ) && pds[i].getReadMethod().isAnnotationPresent(InlineArrayReify.class)) {
                        // inline primitive array: backed by a per-instance view
                        // object over a fixed slice of the record
                        int bitsperitem = 0;
                        String cldef = null;
                        Class at = pds[i].getPropertyType();
                        boolean unchecked = pds[i].getReadMethod().isAnnotationPresent(UncheckedBoundsXXX.class);
                        if (at == OHRLongArray.class) {
                            bitsperitem = 8 * 8;
                            cldef = LongInlineOHRArray.class.getName();
                        } else if (at == OHRIntArray.class) {
                            bitsperitem = 4 * 8;
                            if (unchecked) {
                                cldef = IntInlineOHRArrayUnchecked.class.getName();
                            } else {
                                cldef = IntInlineOHRArray.class.getName();
                            }
                        }
                        if (at == OHRDoubleArray.class) {
                            bitsperitem = 8 * 8;
                            cldef = DoubleInlineOHRArray.class.getName();
                        }
                        if (at == OHRFloatArray.class) {
                            bitsperitem = 4 * 8;
                            cldef = FloatInlineOHRArray.class.getName();
                        }
                        if (at == OHRShortArray.class) {
                            bitsperitem = 2 * 8;
                            cldef = ShortInlineOHRArray.class.getName();
                        }
                        if (at == OHRByteArray.class) {
                            bitsperitem = 1 * 8;
                            cldef = ByteInlineOHRArray.class.getName();
                        }
                        if (at == OHRBooleanArray.class) {
                            bitsperitem = 1;
                            cldef = BooleanInlineOHRArray.class.getName();
                        }
                        System.out.println("ITS An inline array!!!!");
                        int length = pds[i].getReadMethod().getAnnotation(InlineArrayReify.class).length();
                        long bytealloc = OHRInlineArrayHandler.getGenericArrayAllocationSize(bitsperitem, length);
                        CtClass ctc = getDefault().getCtClass(cldef);
                        String varname = "var" + i;
                        CtField cf = new CtField(ctc, varname, cc);
                        cf.setModifiers(Modifier.PRIVATE);
                        cc.addField(cf);
                        // wire the slice initialization into internalInit() and
                        // the view construction into the instance initializer
                        initBuilder.append("com.industrieit.ohr.OHRInlineArrayHandler.initialiseInlineGenericArray(this.basePtr+" + offset + "l," + length + "l," + bitsperitem + ");\n");
                        constructBuilder.append(varname + "=new " + cldef + "(this," + offset + "l);\n");
                        String rmeth = "public " + typ + " " + rname + "() { return " + varname + "; }";
                        CtMethod rmethod = CtNewMethod.make(rmeth, cc);
                        cc.addMethod(rmethod);
                        System.out.println("||||||||" + rmeth + "|||||||||");
                        offset += bytealloc;
                    } else if (pds[i].getPropertyType().isPrimitive()) {
                        // primitive: fixed-width slot, size determined by the pass type
                        int vv = 0;
                        if (cprop == long.class) {
                            vv = 8;
                        }
                        if (cprop == double.class) {
                            vv = 8;
                        }
                        if (cprop == int.class) {
                            vv = 4;
                        }
                        if (cprop == float.class) {
                            vv = 4;
                        }
                        if (cprop == short.class) {
                            vv = 2;
                        }
                        if (cprop == byte.class) {
                            vv = 1;
                        }
                        System.out.println("for " + pds[i].getName() + " typ is " + pds[i].getPropertyType().getName());
                        String wmeth = "public void " + wname + "(" + typ + " o) { ohwrite" + wcons + "(" + offset + "l,o); }";
                        if (reifwrite) {
                            CtMethod wmethod = CtNewMethod.make(wmeth, cc);
                            cc.addMethod(wmethod);
                            System.out.println("&&&&&&&" + wmeth);
                        }
                        String rmeth = "public " + typ + " " + rname + "() { return (" + typ + ") ohread" + typ + rcons + "(" + offset + "l); }";
                        if (reifread) {
                            CtMethod rmethod = CtNewMethod.make(rmeth, cc);
                            cc.addMethod(rmethod);
                            System.out.println("&&&&&&&&&&&&&&&&&&&&&&&&&&&" + rmeth + vv);
                        }
                        offset += vv;
                    } else {
                        // any other reference: stored as an 8-byte handle (extref)
                        System.out.println("ITS AN ASSUMED REIFY!!!");
                        if (pds[i].getWriteMethod().isAnnotationPresent(Owned.class)) {
                            owned.add(i);
                        }
                        CtClass tc = getDefault().getCtClass(OHRBase.class.getName());
                        String wmeth = "public void " + wname + "(" + typ + " o) { ohwritere" + wcons + "(" + offset + "l,o); }";
                        CtMethod wmethod = CtNewMethod.make(wmeth, cc);
                        if (reifwrite) {
                            cc.addMethod(wmethod);
                        }
                        System.out.println(wmeth);
                        String rmeth = "public " + typ + " " + rname + "() { return (" + typ + ") ohreadre" + rcons + "(" + offset + "l); };";
                        CtMethod rmethod = CtNewMethod.make(rmeth, cc);
                        if (reifread) {
                            cc.addMethod(rmethod);
                        }
                        System.out.println(rmeth);
                        handleOffsets.add(offset);
                        offset += 8;
                    }
                }
            }
            // generate handleOffsets(): the byte offsets holding object handles
            StringBuilder sb = new StringBuilder();
            sb.append("public long[] handleOffsets() { ");
            sb.append("long a[] = new long[").append(handleOffsets.size()).append("];");
            int c = 0;
            for (long l : handleOffsets) {
                sb.append("a[").append(c).append("]=").append(l).append("l;");
                c++;
            }
            sb.append("return a; }");
            System.out.println(sb.toString());
            CtMethod om = CtNewMethod.make(sb.toString(), cc);
            cc.addMethod(om);
            // generate gsize(): total bytes allocated for one record
            String sizem = "public long gsize() { return " + (offset) + "l; }";
            CtMethod sm = CtNewMethod.make(sizem, cc);
            cc.addMethod(sm);
            // generate ohclassId()
            CtMethod cmid = CtNewMethod.make("public int ohclassId() { return " + clnumber + "; }", cc);
            cc.addMethod(cmid);
            setBaseMixinsPost(cc, false, owned, pds, constructBuilder, initBuilder);
            cc.writeFile("target/classes");
            // load the written class and inject the Unsafe instance into its static field
            Class ppp = Class.forName(cnam);
            Field f = ppp.getDeclaredField("u");
            f.setAccessible(true);
            f.set(ppp.newInstance(), USafe.getUnsafe());
            processed2.put(cl, ppp);
            processed2.put(ppp, ppp);
            cls[clnumber] = ppp;
            return ppp;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    private static Object mutex = new Object();

    private static HashMap<Class, Boolean> reifcache = new HashMap<Class, Boolean>();

    /**
     * Legacy reification check; now unconditionally answers {@code true}
     * (the interface-based test was disabled) while still populating the cache.
     */
    private static boolean isAlreadyReifXXXXX(Class cl) {
        Boolean b = reifcache.get(cl);
        if (b != null) {
            return b.booleanValue();
        }
        reifcache.put(cl, Boolean.TRUE);
        return true;
    }

    static Unsafe u;

    static {
        u = USafe.getUnsafe();
    }

    /** True if the (possibly null) method carries the {@link Reify} annotation. */
    private static boolean isMethodReifAnnotated(Method m) {
        if (m == null) {
            return false;
        }
        return m.isAnnotationPresent(Reify.class);
    }

    /** True if the property's setter is annotated {@link InlineStringReify}. */
    private static boolean isInlineString(PropertyDescriptor pd) {
        return pd.getWriteMethod().isAnnotationPresent(InlineStringReify.class);
    }

    /** Name of the consistency mode, used as a suffix on generated accessor calls. */
    private static String getConsistencyAsString(Method meth) {
        return getConsistency(meth).name();
    }

    /**
     * Consistency mode declared by the method's {@link Reify} annotation;
     * defaults to NORMAL when the method is absent.
     */
    private static Consistency getConsistency(Method meth) {
        try {
            if (meth == null) {
                return Consistency.NORMAL;
            }
            Reify r = (Reify) meth.getAnnotation(Reify.class);
            return r.consistency();
        } catch (Exception e) {
            // never happens
            throw new RuntimeException(e);
        }
    }

    /**
     * Adds the base fields (basePtr, instmarker, static Unsafe), the stale-handle
     * check method and the pre-mixin method bodies loaded from
     * {@code /ohr/base/basemethodspre.txt} to the class under construction.
     *
     * @param includeChecks when true, every generated read/write is prefixed
     *        with a doOHRCheck() stale-handle guard
     */
    private static void setBaseMixinsPre(CtClass cc, boolean includeChecks) throws CannotCompileException, IOException, NotFoundException {
        // off-heap base pointer of this instance
        CtField f = new CtField(CtClass.longType, "basePtr", cc);
        f.setModifiers(Modifier.PRIVATE | Modifier.VOLATILE);
        cc.addField(f);
        // instance marker used to detect stale handles
        CtField fm = new CtField(CtClass.intType, "instmarker", cc);
        fm.setModifiers(Modifier.PRIVATE | Modifier.VOLATILE);
        cc.addField(fm);
        CtClass cu = getDefault().get("sun.misc.Unsafe");
        CtField f2 = new CtField(cu, "u", cc);
        f2.setModifiers(Modifier.STATIC | Modifier.PRIVATE);
        cc.addField(f2);
        // stale-handle check: marker stored 4 bytes into the record must match
        String checkmeth = "public void doOHRCheck() { if (u.getInt(basePtr+4)!=instmarker) throw new com.industrieit.ohr.StaleHandleException(\"bad instmarker\"); }";
        CtMethod cm = CtNewMethod.make(checkmeth, cc);
        cc.addMethod(cm);
        String str = IOUtils.toString(OHRJavassister.class.getResourceAsStream("/ohr/base/basemethodspre.txt"));
        String checkplace = "";
        if (includeChecks) {
            checkplace = "doOHRCheck();";
        }
        // place in prechecks if desired
        str = str.replace("<<READCHECK>>", checkplace);
        str = str.replace("<<WRITECHECK>>", checkplace);
        // template methods are separated by ~~~
        StringTokenizer st = new StringTokenizer(str, "~~~");
        while (st.hasMoreElements()) {
            String meth = st.nextToken();
            CtMethod wmethod = CtNewMethod.make(meth, cc);
            cc.addMethod(wmethod);
        }
    }

    /**
     * Finalizes the class under construction: adds the no-arg constructor and
     * internalInit() accumulated during property layout, the
     * freeOwnedChildren() method for @Owned properties, and the post-mixin
     * method bodies loaded from {@code /ohr/base/basemethodspost.txt}.
     */
    private static void setBaseMixinsPost(CtClass cc, boolean includeChecks, List<Integer> owned, PropertyDescriptor[] pds, StringBuilder constructBuilder, StringBuilder initBuilder) throws CannotCompileException, IOException, NotFoundException {
        // empty constructor whose body was accumulated during layout
        constructBuilder.append("}");
        System.out.println(constructBuilder.toString());
        CtConstructor ctc = new CtConstructor(new CtClass[0], cc);
        ctc.setBody(constructBuilder.toString());
        cc.addConstructor(ctc);
        initBuilder.append("\n}");
        System.out.println(initBuilder.toString());
        CtMethod initmeth = CtNewMethod.make(initBuilder.toString(), cc);
        cc.addMethod(initmeth);
        StringBuilder sb = new StringBuilder();
        sb.append("public void freeOwnedChildren() {");
        for (Integer i : owned) {
            // free each @Owned child via its generated getter
            // (fixed: package was previously misspelled "com.industreiit",
            // which made the generated method body fail to compile)
            sb.append("com.industrieit.ohr.Reifier.freeOHR(").append(pds[i].getReadMethod().getName()).append("());");
        }
        sb.append("}");
        CtMethod owm = CtNewMethod.make(sb.toString(), cc);
        cc.addMethod(owm);
        String str = IOUtils.toString(OHRJavassister.class.getResourceAsStream("/ohr/base/basemethodspost.txt"));
        String checkplace = "";
        if (includeChecks) {
            checkplace = "doOHRCheck();";
        }
        str = str.replace("<<READCHECK>>", checkplace);
        str = str.replace("<<WRITECHECK>>", checkplace);
        StringTokenizer st = new StringTokenizer(str, "~~~");
        while (st.hasMoreElements()) {
            String meth = st.nextToken();
            CtMethod wmethod = CtNewMethod.make(meth, cc);
            cc.addMethod(wmethod);
        }
    }
}
| |
/*
* Licensed to GraphHopper GmbH under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper GmbH licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.util;
import java.io.*;
import java.util.*;
import java.util.Map.Entry;
import static com.graphhopper.util.Helper.*;
/**
* A class which manages the translations in-memory. See here for more information:
* ./docs/core/translations.md
* <p>
*
* @author Peter Karich
*/
/**
 * A class which manages the translations in-memory. See here for more information:
 * ./docs/core/translations.md
 * <p>
 *
 * @author Peter Karich
 */
public class TranslationMap {
    // ISO codes (639-1), use 'en_US' as reference
    private static final List<String> LOCALES = Arrays.asList("ar", "ast", "bg", "ca",
            "cs_CZ", "da_DK", "de_DE", "el", "eo", "es", "en_US", "fa", "fil", "fi",
            "fr_FR", "fr_CH", "gl", "he", "hr_HR", "hsb", "hu_HU", "it", "ja", "ko", "lt_LT", "ne",
            "nl", "pl_PL", "pt_BR", "pt_PT", "ro", "ru", "sk", "sl_SI", "sr_RS", "sv_SE", "tr", "uk",
            "vi_VI", "zh_CN", "zh_HK");
    private final Map<String, Translation> translations = new HashMap<String, Translation>();

    /**
     * Returns the number of tokens the trimmed phrase splits into around the
     * given regex. Used to compare format-argument counts ("%"-markers)
     * between the English reference and a translation; only relative
     * comparisons between two strings are meaningful.
     */
    public static int countOccurence(String phrase, String splitter) {
        if (isEmpty(phrase))
            return 0;
        return phrase.trim().split(splitter).length;
    }

    /**
     * This loads the translation files from the specified folder.
     */
    public TranslationMap doImport(File folder) {
        try {
            for (String locale : LOCALES) {
                TranslationHashMap trMap = new TranslationHashMap(getLocale(locale));
                // close the stream ourselves: doImport(InputStream) leaves it open
                InputStream is = new FileInputStream(new File(folder, locale + ".txt"));
                try {
                    trMap.doImport(is);
                } finally {
                    is.close();
                }
                add(trMap);
            }
            postImportHook();
            return this;
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    /**
     * This loads the translation files from classpath.
     */
    public TranslationMap doImport() {
        try {
            for (String locale : LOCALES) {
                TranslationHashMap trMap = new TranslationHashMap(getLocale(locale));
                // close the stream ourselves: doImport(InputStream) leaves it open
                InputStream is = TranslationMap.class.getResourceAsStream(locale + ".txt");
                try {
                    trMap.doImport(is);
                } finally {
                    if (is != null)
                        is.close();
                }
                add(trMap);
            }
            postImportHook();
            return this;
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    /**
     * Registers the translation under its full locale string and, when not
     * already taken, under its bare language code as a fallback.
     */
    public void add(Translation tr) {
        Locale locale = tr.getLocale();
        translations.put(locale.toString(), tr);
        if (!locale.getCountry().isEmpty() && !translations.containsKey(tr.getLanguage()))
            translations.put(tr.getLanguage(), tr);
        // Map old Java 'standard' to latest, Java is a bit ugly here: http://stackoverflow.com/q/13974169/194609
        // Hebrew
        if ("iw".equals(locale.getLanguage()))
            translations.put("he", tr);
        // Indonesia
        if ("in".equals(locale.getLanguage()))
            translations.put("id", tr);
    }

    /**
     * Returns the Translation object for the specified locale and falls back to English if the
     * locale was not found.
     */
    public Translation getWithFallBack(Locale locale) {
        Translation tr = get(locale.toString());
        if (tr == null) {
            tr = get(locale.getLanguage());
            if (tr == null)
                tr = get("en");
        }
        return tr;
    }

    /**
     * Returns the Translation object for the specified locale and returns null if not found.
     */
    public Translation get(String locale) {
        locale = locale.replace("-", "_");
        Translation tr = translations.get(locale);
        if (locale.contains("_") && tr == null)
            // fall back to the language part before the '_'. Using indexOf
            // instead of a hard-coded substring(0, 2) keeps three-letter
            // language codes (e.g. "fil", "ast", "hsb") intact.
            tr = translations.get(locale.substring(0, locale.indexOf('_')));
        return tr;
    }

    /**
     * This method does some checks and fills missing translation from en
     */
    private void postImportHook() {
        Map<String, String> enMap = get("en").asMap();
        StringBuilder sb = new StringBuilder();
        for (Translation tr : translations.values()) {
            Map<String, String> trMap = tr.asMap();
            for (Entry<String, String> enEntry : enMap.entrySet()) {
                String value = trMap.get(enEntry.getKey());
                if (isEmpty(value)) {
                    // missing translation: fall back to the English text
                    trMap.put(enEntry.getKey(), enEntry.getValue());
                    continue;
                }
                // the translation must use the same number of %-placeholders as English
                int expectedCount = countOccurence(enEntry.getValue(), "\\%");
                if (expectedCount != countOccurence(value, "\\%")) {
                    sb.append(tr.getLocale()).append(" - error in ").
                            append(enEntry.getKey()).append("->").
                            append(value).append("\n");
                } else {
                    // try if formatting works, many times e.g. '%1$' instead of '%1$s'
                    Object[] strs = new String[expectedCount];
                    Arrays.fill(strs, "tmp");
                    try {
                        String.format(Locale.ROOT, value, strs);
                    } catch (Exception ex) {
                        sb.append(tr.getLocale()).append(" - error ").append(ex.getMessage()).append("in ").
                                append(enEntry.getKey()).append("->").
                                append(value).append("\n");
                    }
                }
            }
        }
        if (sb.length() > 0) {
            System.out.println(sb);
            throw new IllegalStateException(sb.toString());
        }
    }

    @Override
    public String toString() {
        return translations.toString();
    }

    /**
     * Simple HashMap-backed Translation: keys are stored lower-cased and
     * {@link #tr} falls back to the key itself when no value is present.
     */
    public static class TranslationHashMap implements Translation {
        final Locale locale;
        private final Map<String, String> map = new HashMap<String, String>();

        public TranslationHashMap(Locale locale) {
            this.locale = locale;
        }

        public void clear() {
            map.clear();
        }

        @Override
        public Locale getLocale() {
            return locale;
        }

        @Override
        public String getLanguage() {
            return locale.getLanguage();
        }

        @Override
        public String tr(String key, Object... params) {
            String val = map.get(toLowerCase(key));
            if (isEmpty(val))
                return key;
            return String.format(Locale.ROOT, val, params);
        }

        public TranslationHashMap put(String key, String val) {
            String existing = map.put(toLowerCase(key), val);
            if (existing != null)
                throw new IllegalStateException("Cannot overwrite key " + key + " with " + val + ", was: " + existing);
            return this;
        }

        @Override
        public String toString() {
            return map.toString();
        }

        @Override
        public Map<String, String> asMap() {
            return map;
        }

        /**
         * Reads "key=value" lines from the stream (UTF-8); lines that are
         * empty or start with // or # are skipped. Does NOT close the stream.
         */
        public TranslationHashMap doImport(InputStream is) {
            if (is == null)
                throw new IllegalStateException("No input stream found in class path!?");
            try {
                for (String line : readFile(new InputStreamReader(is, UTF_CS))) {
                    if (line.isEmpty() || line.startsWith("//") || line.startsWith("#"))
                        continue;
                    int index = line.indexOf('=');
                    if (index < 0)
                        continue;
                    String key = line.substring(0, index);
                    if (key.isEmpty())
                        throw new IllegalStateException("No key provided:" + line);
                    String value = line.substring(index + 1);
                    if (!value.isEmpty())
                        put(key, value);
                }
            } catch (IOException ex) {
                throw new RuntimeException(ex);
            }
            return this;
        }
    }
}
| |
/*
* Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.lucene;
import io.crate.analyze.WhereClause;
import io.crate.analyze.relations.AnalyzedRelation;
import io.crate.analyze.relations.TableRelation;
import io.crate.auth.user.User;
import io.crate.lucene.match.CrateRegexQuery;
import io.crate.metadata.RelationName;
import io.crate.metadata.doc.DocSchemaInfo;
import io.crate.metadata.doc.DocTableInfo;
import io.crate.testing.SQLExecutor;
import io.crate.testing.SqlExpressions;
import io.crate.types.DataType;
import io.crate.types.DataTypes;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.PointInSetQuery;
import org.apache.lucene.search.PointRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.spatial.prefix.IntersectsPrefixTreeQuery;
import org.junit.Test;
import java.util.Arrays;
import java.util.Map;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.startsWith;
/**
 * Tests the conversion of analyzed {@code WHERE} clause symbols into Lucene
 * {@link Query} objects for operators common to all data types: equality,
 * ranges, {@code ANY}, regex, {@code MATCH}, NULL checks and array access.
 *
 * <p>Fix: {@code testRangeQueryOnDocThrowsException} was missing its
 * {@code @Test} annotation, so JUnit silently never executed it.
 */
public class CommonQueryBuilderTest extends LuceneQueryBuilderTest {

    @Test
    public void testNoMatchWhereClause() throws Exception {
        Query query = convert(WhereClause.NO_MATCH.queryOrFallback());
        assertThat(query, instanceOf(MatchNoDocsQuery.class));
    }

    @Test
    public void testWhereRefEqNullWithDifferentTypes() throws Exception {
        for (DataType type : DataTypes.PRIMITIVE_TYPES) {
            if (DataTypes.STORAGE_UNSUPPORTED.contains(type)) {
                continue;
            }
            // ensure the test is operating on a fresh, empty cluster state (no existing tables)
            resetClusterService();
            DocTableInfo tableInfo = SQLExecutor.tableInfo(
                new RelationName(DocSchemaInfo.NAME, "test_primitive"),
                "create table doc.test_primitive (" +
                " x " + type.getName() +
                ")",
                clusterService);
            TableRelation tableRelation = new TableRelation(tableInfo);
            Map<RelationName, AnalyzedRelation> tableSources = Map.of(tableInfo.ident(), tableRelation);
            SqlExpressions sqlExpressions = new SqlExpressions(tableSources, tableRelation, User.CRATE_USER);
            Query query = convert(sqlExpressions.normalize(sqlExpressions.asSymbol("x = null")));

            // must always become a MatchNoDocsQuery
            // string: term query with null would cause NPE
            // int/numeric: rangeQuery from null to null would match all
            // bool: term would match false too because of the condition in the eq query builder
            assertThat(query, instanceOf(MatchNoDocsQuery.class));
        }
    }

    @Test
    public void testWhereRefEqRef() throws Exception {
        Query query = convert("name = name");
        assertThat(query, instanceOf(GenericFunctionQuery.class));
    }

    @Test
    public void testWhereRefEqLiteral() throws Exception {
        Query query = convert("10 = x");
        assertThat(query.toString(), is("x:[10 TO 10]"));
    }

    @Test
    public void testWhereLiteralEqReference() throws Exception {
        Query query = convert("x = 10");
        assertThat(query.toString(), is("x:[10 TO 10]"));
    }

    @Test
    public void testLteQuery() throws Exception {
        Query query = convert("x <= 10");
        assertThat(query.toString(), is("x:[-2147483648 TO 10]"));
    }

    @Test
    public void testNotEqOnNotNullableColumnQuery() throws Exception {
        Query query = convert("x != 10");
        assertThat(query, instanceOf(BooleanQuery.class));
        assertThat(query.toString(), is("+(+*:* -x:[10 TO 10])"));

        query = convert("not x = 10");
        assertThat(query, instanceOf(BooleanQuery.class));
        assertThat(query.toString(), is("+(+*:* -x:[10 TO 10])"));
    }

    @Test
    public void testEqOnTwoArraysBecomesGenericFunctionQuery() throws Exception {
        Query query = convert("y_array = [10, 20, 30]");
        assertThat(query, instanceOf(BooleanQuery.class));
        BooleanQuery booleanQuery = (BooleanQuery) query;
        assertThat(booleanQuery.clauses().get(0).getQuery(), instanceOf(PointInSetQuery.class));
        assertThat(booleanQuery.clauses().get(1).getQuery(), instanceOf(GenericFunctionQuery.class));
    }

    @Test
    public void testEqOnTwoArraysBecomesGenericFunctionQueryAllValuesNull() throws Exception {
        Query query = convert("y_array = [null, null, null]");
        assertThat(query, instanceOf(GenericFunctionQuery.class));
    }

    @Test
    public void testEqOnArrayWithTooManyClauses() throws Exception {
        Object[] values = new Object[2000]; // should trigger the TooManyClauses exception
        Arrays.fill(values, 10L);
        Query query = convert("y_array = ?", new Object[] { values });
        assertThat(query, instanceOf(BooleanQuery.class));
        BooleanQuery booleanQuery = (BooleanQuery) query;
        assertThat(booleanQuery.clauses().get(0).getQuery(), instanceOf(PointInSetQuery.class));
        assertThat(booleanQuery.clauses().get(1).getQuery(), instanceOf(GenericFunctionQuery.class));
    }

    @Test
    public void testGteQuery() throws Exception {
        Query query = convert("x >= 10");
        assertThat(query.toString(), is("x:[10 TO 2147483647]"));
    }

    @Test
    public void testGtQuery() throws Exception {
        Query query = convert("x > 10");
        assertThat(query.toString(), is("x:[11 TO 2147483647]"));
    }

    @Test
    public void testWhereRefInSetLiteralIsConvertedToTermsQuery() throws Exception {
        Query query = convert("x in (1, 3)");
        assertThat(query, instanceOf(PointInSetQuery.class));
    }

    @Test
    public void testWhereStringRefInSetLiteralIsConvertedToTermsQuery() throws Exception {
        Query query = convert("name in ('foo', 'bar')");
        assertThat(query, instanceOf(TermInSetQuery.class));
    }

    /**
     * Make sure we still support the fast Lucene regular
     * expression engine when not using PCRE features.
     */
    @Test
    public void testRegexQueryFast() throws Exception {
        Query query = convert("name ~ '[a-z]'");
        assertThat(query, instanceOf(ConstantScoreQuery.class));
        ConstantScoreQuery scoreQuery = (ConstantScoreQuery) query;
        assertThat(scoreQuery.getQuery(), instanceOf(RegexpQuery.class));
    }

    /**
     * When using PCRE features, switch to different
     * regex implementation on top of java.util.regex.
     */
    @Test
    public void testRegexQueryPcre() throws Exception {
        Query query = convert("name ~ '\\D'");
        assertThat(query, instanceOf(CrateRegexQuery.class));
    }

    @Test
    public void testIdQuery() throws Exception {
        Query query = convert("_id = 'i1'");
        assertThat(query, instanceOf(TermInSetQuery.class));

        query = convert("_id = 1");
        assertThat(query, instanceOf(TermInSetQuery.class));
    }

    @Test
    public void testAnyEqArrayLiteral() throws Exception {
        Query query = convert("d = any([-1.5, 0.0, 1.5])");
        assertThat(query, instanceOf(PointInSetQuery.class));

        query = convert("_id in ('test','test2')");
        assertThat(query, instanceOf(TermInSetQuery.class));
        query = convert("_id in (1, 2)");
        assertThat(query, instanceOf(TermInSetQuery.class));
        query = convert("_id = any (['test','test2'])");
        assertThat(query, instanceOf(TermInSetQuery.class));
        query = convert("_id = any ([1, 2])");
        assertThat(query, instanceOf(TermInSetQuery.class));
    }

    @Test
    public void testAnyEqArrayReference() throws Exception {
        Query query = convert("1.5 = any(d_array)");
        assertThat(query, instanceOf(PointRangeQuery.class));
        assertThat(query.toString(), startsWith("d_array"));
    }

    @Test
    public void testAnyGreaterAndSmaller() throws Exception {
        Query ltQuery = convert("1.5 < any(d_array)");
        assertThat(ltQuery.toString(), is("d_array:[1.5000000000000002 TO Infinity]"));

        // d < ANY ([1.2, 3.5])
        Query ltQuery2 = convert("d < any ([1.2, 3.5])");
        assertThat(ltQuery2.toString(), is("(d:[-Infinity TO 1.1999999999999997] d:[-Infinity TO 3.4999999999999996])~1"));

        // 1.5d <= ANY (d_array)
        Query lteQuery = convert("1.5 <= any(d_array)");
        assertThat(lteQuery.toString(), is("d_array:[1.5 TO Infinity]"));

        // d <= ANY ([1.2, 3.5])
        Query lteQuery2 = convert("d <= any([1.2, 3.5])");
        assertThat(lteQuery2.toString(), is("(d:[-Infinity TO 1.2] d:[-Infinity TO 3.5])~1"));

        // 1.5d > ANY (d_array)
        Query gtQuery = convert("1.5 > any(d_array)");
        assertThat(gtQuery.toString(), is("d_array:[-Infinity TO 1.4999999999999998]"));

        // d > ANY ([1.2, 3.5])
        Query gtQuery2 = convert("d > any ([1.2, 3.5])");
        assertThat(gtQuery2.toString(), is("(d:[1.2000000000000002 TO Infinity] d:[3.5000000000000004 TO Infinity])~1"));

        // 1.5d >= ANY (d_array)
        Query gteQuery = convert("1.5 >= any(d_array)");
        assertThat(gteQuery.toString(), is("d_array:[-Infinity TO 1.5]"));

        // d >= ANY ([1.2, 3.5])
        Query gteQuery2 = convert("d >= any ([1.2, 3.5])");
        assertThat(gteQuery2.toString(), is("(d:[1.2 TO Infinity] d:[3.5 TO Infinity])~1"));
    }

    @Test
    public void testNeqAnyOnArrayLiteral() throws Exception {
        Query neqQuery = convert("name != any (['a', 'b', 'c'])");
        assertThat(neqQuery, instanceOf(BooleanQuery.class));
        BooleanClause booleanClause = ((BooleanQuery) neqQuery).clauses().get(1);
        assertThat(booleanClause.getOccur(), is(BooleanClause.Occur.MUST_NOT));
        assertThat(booleanClause.getQuery().toString(), is("+name:a +name:b +name:c"));
    }

    @Test
    public void testLessThanAnyOnArrayLiteral() throws Exception {
        Query ltQuery2 = convert("name < any (['a', 'b', 'c'])");
        assertThat(ltQuery2, instanceOf(BooleanQuery.class));
        BooleanQuery ltBQuery = (BooleanQuery) ltQuery2;
        assertThat(ltBQuery.toString(), is("(name:{* TO a} name:{* TO b} name:{* TO c})~1"));
    }

    /**
     * geo match tests below... error cases (wrong matchType, etc.) are not tested here because validation is done in the
     * analyzer
     */
    @Test
    public void testGeoShapeMatchWithDefaultMatchType() throws Exception {
        Query query = convert("match(shape, 'POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))')");
        assertThat(query, instanceOf(IntersectsPrefixTreeQuery.class));
    }

    @Test
    public void testGeoShapeMatchDisJoint() throws Exception {
        Query query = convert("match(shape, 'POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))') using disjoint");
        assertThat(query, instanceOf(ConstantScoreQuery.class));
        Query booleanQuery = ((ConstantScoreQuery) query).getQuery();
        assertThat(booleanQuery, instanceOf(BooleanQuery.class));

        BooleanClause existsClause = ((BooleanQuery) booleanQuery).clauses().get(0);
        BooleanClause intersectsClause = ((BooleanQuery) booleanQuery).clauses().get(1);

        assertThat(existsClause.getQuery(), instanceOf(TermRangeQuery.class));
        assertThat(intersectsClause.getQuery(), instanceOf(IntersectsPrefixTreeQuery.class));
    }

    @Test
    public void testWhereInIsOptimized() throws Exception {
        Query query = convert("name in ('foo', 'bar')");
        assertThat(query, instanceOf(TermInSetQuery.class));
        assertThat(query.toString(), is("name:(bar foo)"));
    }

    @Test
    public void testIsNullOnObjectArray() throws Exception {
        Query isNull = convert("o_array IS NULL");
        assertThat(isNull.toString(), is("+*:* -ConstantScore(_field_names:o_array)"));
        Query isNotNull = convert("o_array IS NOT NULL");
        assertThat(isNotNull.toString(), is("ConstantScore(DocValuesFieldExistsQuery [field=o_array.xs])"));
    }

    @Test
    public void testRewriteDocReferenceInWhereClause() throws Exception {
        Query query = convert("_doc['name'] = 'foo'");
        assertThat(query, instanceOf(TermQuery.class));
        assertThat(query.toString(), is("name:foo"));
        query = convert("_doc = {\"name\"='foo'}");
        assertThat(query, instanceOf(GenericFunctionQuery.class));
    }

    @Test
    public void testMatchQueryTermMustNotBeNull() throws Exception {
        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectMessage("cannot use NULL as query term in match predicate");
        convert("match(name, null)");
    }

    @Test
    public void testMatchQueryTermMustBeALiteral() throws Exception {
        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectMessage("queryTerm must be a literal");
        convert("match(name, name)");
    }

    @Test
    public void testRangeQueryForId() throws Exception {
        Query query = convert("_id > 'foo'");
        assertThat(query, instanceOf(TermRangeQuery.class));
    }

    @Test
    public void testNiceErrorIsThrownOnInvalidTopLevelLiteral() {
        expectedException.expectMessage("Can't build query from symbol 'yes'");
        convert("'yes'");
    }

    @Test
    public void testRangeQueryForUid() throws Exception {
        Query query = convert("_uid > 'foo'");
        assertThat(query, instanceOf(TermRangeQuery.class));
        TermRangeQuery rangeQuery = (TermRangeQuery) query;
        assertThat(rangeQuery.getField(), is("_id"));
        assertThat(rangeQuery.getLowerTerm().utf8ToString(), is("foo"));
    }

    // Fix: the @Test annotation was missing, so JUnit never ran this test.
    @Test
    public void testRangeQueryOnDocThrowsException() throws Exception {
        expectedException.expect(UnsupportedOperationException.class);
        expectedException.expectMessage("Unknown function: (doc.users._doc > _map('name', 'foo'))," +
            " no overload found for matching argument types: (object, object).");
        convert("_doc > {\"name\"='foo'}");
    }

    @Test
    public void testIsNullOnGeoPoint() throws Exception {
        Query query = convert("point is null");
        assertThat(query.toString(), is("+*:* -ConstantScore(DocValuesFieldExistsQuery [field=point])"));
    }

    @Test
    public void testIpRange() throws Exception {
        Query query = convert("addr between '192.168.0.1' and '192.168.0.255'");
        assertThat(query.toString(), is("+addr:[192.168.0.1 TO ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff] +addr:[0:0:0:0:0:0:0:0 TO 192.168.0.255]"));

        query = convert("addr < 'fe80::1'");
        assertThat(query.toString(), is("addr:[0:0:0:0:0:0:0:0 TO fe80:0:0:0:0:0:0:0]"));
    }

    @Test
    public void testAnyEqOnTimestampArrayColumn() {
        assertThat(
            convert("1129224512000 = ANY(ts_array)").toString(),
            is("ts_array:[1129224512000 TO 1129224512000]")
        );
    }

    @Test
    public void testAnyNotEqOnTimestampColumn() {
        assertThat(
            convert("ts != ANY([1129224512000])").toString(),
            is("+*:* -(+ts:[1129224512000 TO 1129224512000])")
        );
    }

    @Test
    public void testArrayAccessResultsInTermAndFunctionQuery() {
        assertThat(
            convert("ts_array[1] = 1129224512000").toString(),
            is("+ts_array:[1129224512000 TO 1129224512000] " +
               "#(ts_array[1] = 1129224512000::bigint)")
        );
        assertThat(
            convert("ts_array[1] >= 1129224512000").toString(),
            is("+ts_array:[1129224512000 TO 9223372036854775807] " +
               "#(ts_array[1] >= 1129224512000::bigint)")
        );
        assertThat(
            convert("ts_array[1] > 1129224512000").toString(),
            is("+ts_array:[1129224512001 TO 9223372036854775807] " +
               "#(ts_array[1] > 1129224512000::bigint)")
        );
        assertThat(
            convert("ts_array[1] <= 1129224512000").toString(),
            is("+ts_array:[-9223372036854775808 TO 1129224512000] " +
               "#(ts_array[1] <= 1129224512000::bigint)")
        );
        assertThat(
            convert("ts_array[1] < 1129224512000").toString(),
            is("+ts_array:[-9223372036854775808 TO 1129224511999] " +
               "#(ts_array[1] < 1129224512000::bigint)")
        );
    }

    @Test
    public void testObjectArrayAccessResultsInFunctionQuery() {
        assertThat(
            convert("o_array[1] = {x=1}").toString(),
            is("(o_array[1] = {\"x\"=1})")
        );
    }

    @Test
    public void testMatchWithOperator() {
        assertThat(
            convert("match(tags, 'foo bar') using best_fields with (operator='and')").toString(),
            is("+tags:foo +tags:bar")
        );
    }

    @Test
    public void testMultiMatchWithOperator() {
        assertThat(
            convert("match((tags, name), 'foo bar') using best_fields with (operator='and')").toString(),
            is("(name:foo bar | (+tags:foo +tags:bar))")
        );
    }

    @Test
    public void testEqOnObjectPreFiltersOnKnownObjectLiteralContents() {
        // termQuery for obj.x; nothing for obj.z because it's missing in the mapping
        assertThat(
            convert("obj = {x=10, z=20}").toString(),
            is("+obj.x:[10 TO 10] #(obj = {\"x\"=10, \"z\"=20})")
        );
    }

    @Test
    public void testEqOnObjectDoesBoolTermQueryForContents() {
        assertThat(
            convert("obj = {x=10, y=20}").toString(),
            is("+obj.x:[10 TO 10] +obj.y:[20 TO 20]")
        );
    }

    @Test
    public void testEqAnyOnNestedArray() {
        assertThat(
            convert("[1, 2] = any(o_array['xs'])").toString(),
            is("+o_array.xs:{1 2} #([1, 2] = ANY(o_array['xs']))")
        );
    }

    @Test
    public void testGtAnyOnNestedArrayIsNotSupported() {
        expectedException.expectMessage("Cannot use any_> when the left side is an array");
        convert("[1, 2] > any(o_array['xs'])");
    }

    @Test
    public void testGteAnyOnNestedArrayIsNotSupported() {
        expectedException.expectMessage("Cannot use any_>= when the left side is an array");
        convert("[1, 2] >= any(o_array['xs'])");
    }

    @Test
    public void testLtAnyOnNestedArrayIsNotSupported() {
        expectedException.expectMessage("Cannot use any_< when the left side is an array");
        convert("[1, 2] < any(o_array['xs'])");
    }

    @Test
    public void testLteAnyOnNestedArrayIsNotSupported() {
        expectedException.expectMessage("Cannot use any_<= when the left side is an array");
        convert("[1, 2] <= any(o_array['xs'])");
    }

    @Test
    public void testAnyOnObjectArrayResultsInXY() {
        Query query = convert("{xs=[1, 1]} = ANY(o_array)");
        assertThat(query, instanceOf(GenericFunctionQuery.class));
    }

    @Test
    public void test_is_null_on_ignored_results_in_function_query() throws Exception {
        Query query = convert("obj_ignored is null");
        assertThat(query.toString(), is("(_doc['obj_ignored'] IS NULL)"));
    }

    @Test
    public void test_is_not_null_on_ignored_results_in_function_query() throws Exception {
        Query query = convert("obj_ignored is not null");
        assertThat(query.toString(), is("(NOT (_doc['obj_ignored'] IS NULL))"));
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.coverage.view;
import com.intellij.coverage.*;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.NullableComputable;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.ui.ColumnInfo;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
* User: anna
* Date: 1/5/12
*/
/**
 * Java-specific behaviour for the IDE "Coverage" tool-window tree: builds the
 * package/class node hierarchy for a coverage suites bundle and renders the
 * summary texts and percentage columns supplied by {@link JavaCoverageAnnotator}.
 */
public class JavaCoverageViewExtension extends CoverageViewExtension {
  // Holds the precomputed per-class / per-package coverage numbers.
  private final JavaCoverageAnnotator myAnnotator;

  public JavaCoverageViewExtension(JavaCoverageAnnotator annotator,
                                   Project project,
                                   CoverageSuitesBundle suitesBundle,
                                   CoverageViewManager.StateBean stateBean) {
    super(project, suitesBundle, stateBean);
    myAnnotator = annotator;
  }

  // Summary line for a package node, e.g. "85% covered in package 'foo'".
  // NOTE(review): unconditionally casts node.getValue() to PsiPackage —
  // presumably only package nodes reach this path; confirm against callers.
  @Override
  public String getSummaryForNode(AbstractTreeNode node) {
    // Annotator results are computed in the background; show a placeholder until done.
    if (!myCoverageViewManager.isReady()) return "Loading...";
    final String coverageInformationString = myAnnotator
      .getPackageCoverageInformationString((PsiPackage)node.getValue(), null, myCoverageDataManager, myStateBean.myFlattenPackages);
    return getNotCoveredMessage(coverageInformationString) + " in package \'" + node.toString() + "\'";
  }

  // Summary line for the root node. If the annotator has no direct answer for
  // the root package, aggregate the coverage of the root's child nodes instead.
  @Override
  public String getSummaryForRootNode(AbstractTreeNode childNode) {
    final Object value = childNode.getValue();
    String coverageInformationString = myAnnotator.getPackageCoverageInformationString((PsiPackage)value, null,
                                                                                       myCoverageDataManager);
    if (coverageInformationString == null) {
      if (!myCoverageViewManager.isReady()) return "Loading...";
      // Fall back to merging the summary info of every child node.
      PackageAnnotator.SummaryCoverageInfo info = new PackageAnnotator.PackageCoverageInfo();
      final Collection children = childNode.getChildren();
      for (Object child : children) {
        final Object childValue = ((CoverageListNode)child).getValue();
        PackageAnnotator.SummaryCoverageInfo childInfo = getSummaryCoverageForNodeValue(childValue);
        info = JavaCoverageAnnotator.merge(info, childInfo);
      }
      coverageInformationString = JavaCoverageAnnotator.getCoverageInformationString(info, false);
    }
    return getNotCoveredMessage(coverageInformationString) + " in 'all classes in scope'";
  }

  // Replaces a null summary with the "No coverage" placeholder.
  private static String getNotCoveredMessage(String coverageInformationString) {
    if (coverageInformationString == null) {
      coverageInformationString = "No coverage";
    }
    return coverageInformationString;
  }

  // Column 1 = class coverage, column 2 = method coverage, anything else = line coverage
  // (matches the column order produced by createColumnInfos()).
  @Override
  public String getPercentage(int columnIndex, AbstractTreeNode node) {
    final Object value = node.getValue();
    PackageAnnotator.SummaryCoverageInfo info = getSummaryCoverageForNodeValue(value);

    if (columnIndex == 1) {
      return myAnnotator.getClassCoveredPercentage(info);
    } else if (columnIndex == 2){
      return myAnnotator.getMethodCoveredPercentage(info);
    }
    return myAnnotator.getLineCoveredPercentage(info);
  }

  // Resolves the coverage summary for a node value; returns null for
  // unsupported value types.
  public PackageAnnotator.SummaryCoverageInfo getSummaryCoverageForNodeValue(Object value) {
    if (value instanceof PsiClass) {
      final String qualifiedName = ((PsiClass)value).getQualifiedName();
      return myAnnotator.getClassCoverageInfo(qualifiedName);
    }
    if (value instanceof PsiPackage) {
      return myAnnotator.getPackageCoverageInfo((PsiPackage)value, myStateBean.myFlattenPackages);
    }
    if (value instanceof PsiNamedElement) {
      return myAnnotator.getExtensionCoverageInfo((PsiNamedElement) value);
    }
    return null;
  }

  // Maps an arbitrary selection to the PsiClass that should be highlighted in
  // the coverage tree: the file's only class, or the class containing the element.
  @Override
  public PsiElement getElementToSelect(Object object) {
    PsiElement psiElement = super.getElementToSelect(object);
    if (psiElement != null) {
      final PsiFile containingFile = psiElement.getContainingFile();
      if (containingFile instanceof PsiClassOwner) {
        final PsiClass[] classes = ((PsiClassOwner)containingFile).getClasses();
        if (classes.length == 1) return classes[0];
        for (PsiClass aClass : classes) {
          if (PsiTreeUtil.isAncestor(aClass, psiElement, false)) return aClass;
        }
      }
    }
    return psiElement;
  }

  // For packages, navigate to the first directory backing the package.
  @Override
  public VirtualFile getVirtualFile(Object object) {
    if (object instanceof PsiPackage) {
      final PsiDirectory[] directories = ((PsiPackage)object).getDirectories();
      return directories.length > 0 ? directories[0].getVirtualFile() : null;
    }
    return super.getVirtualFile(object);
  }

  // Tree parent: a class's parent is its package; a package's parent is the
  // enclosing package. NOTE(review): the final cast assumes element is always
  // either PsiClass or PsiPackage — confirm against callers.
  @Nullable
  @Override
  public PsiElement getParentElement(PsiElement element) {
    if (element instanceof PsiClass) {
      final PsiDirectory containingDirectory = element.getContainingFile().getContainingDirectory();
      return containingDirectory != null ? JavaDirectoryService.getInstance().getPackage(containingDirectory) : null;
    }
    return ((PsiPackage)element).getParentPackage();
  }

  // Root of the coverage tree: the default (unnamed) package.
  @Override
  public AbstractTreeNode createRootNode() {
    return new CoverageListRootNode(myProject, JavaPsiFacade.getInstance(myProject).findPackage(""), mySuitesBundle, myStateBean);
  }

  @Override
  public List<AbstractTreeNode> createTopLevelNodes() {
    final List<AbstractTreeNode> topLevelNodes = new ArrayList<>();
    // Collect all packages and classes referenced by any suite in the bundle.
    final LinkedHashSet<PsiPackage> packages = new LinkedHashSet<>();
    final LinkedHashSet<PsiClass> classes = new LinkedHashSet<>();
    for (CoverageSuite suite : mySuitesBundle.getSuites()) {
      packages.addAll(((JavaCoverageSuite)suite).getCurrentSuitePackages(myProject));
      classes.addAll(((JavaCoverageSuite)suite).getCurrentSuiteClasses(myProject));
    }

    // Drop packages that are sub-packages of another collected package:
    // they will be reached through their ancestor's sub-package traversal.
    final Set<PsiPackage> packs = new HashSet<>();
    for (PsiPackage aPackage : packages) {
      final String qualifiedName = aPackage.getQualifiedName();
      for (PsiPackage psiPackage : packages) {
        if (psiPackage.getQualifiedName().startsWith(qualifiedName + ".")) {
          packs.add(psiPackage);
          break;
        }
      }
    }
    packages.removeAll(packs);

    for (PsiPackage aPackage : packages) {
      final GlobalSearchScope searchScope = mySuitesBundle.getSearchScope(myProject);
      // Only packages that actually contain classes in scope get their own node.
      if (aPackage.getClasses(searchScope).length != 0) {
        final CoverageListNode node = new CoverageListNode(myProject, aPackage, mySuitesBundle, myStateBean);
        topLevelNodes.add(node);
      }
      collectSubPackages(topLevelNodes, aPackage);
    }

    for (PsiClass aClass : classes) {
      topLevelNodes.add(new CoverageListNode(myProject, aClass, mySuitesBundle, myStateBean));
    }
    return topLevelNodes;
  }

  // Enumerates the direct sub-packages of rootPackage (inside a read action,
  // as required by the PSI threading model) and processes each one.
  private void collectSubPackages(List<AbstractTreeNode> children, final PsiPackage rootPackage) {
    final GlobalSearchScope searchScope = mySuitesBundle.getSearchScope(rootPackage.getProject());
    final PsiPackage[] subPackages = ApplicationManager.getApplication().runReadAction(new Computable<PsiPackage[]>() {
      @NotNull
      public PsiPackage[] compute() {
        return rootPackage.getSubPackages(searchScope);
      }
    });
    for (final PsiPackage aPackage : subPackages) {
      processSubPackage(aPackage, children);
    }
  }

  // Adds a node for aPackage if it is in the coverage scope; otherwise (or
  // additionally, in flattened mode) recurses into its sub-packages.
  private void processSubPackage(final PsiPackage aPackage, List<AbstractTreeNode> children) {
    if (ApplicationManager.getApplication().runReadAction(new Computable<Boolean>() {
      public Boolean compute() {
        return isInCoverageScope(aPackage);
      }
    })) {
      final CoverageListNode node = new CoverageListNode(aPackage.getProject(), aPackage, mySuitesBundle, myStateBean);
      children.add(node);
    }
    else if (!myStateBean.myFlattenPackages) {
      collectSubPackages(children, aPackage);
    }
    // In flattened mode every package becomes a sibling, so always descend.
    if (myStateBean.myFlattenPackages) {
      collectSubPackages(children, aPackage);
    }
  }

  @Override
  public List<AbstractTreeNode> getChildrenNodes(final AbstractTreeNode node) {
    List<AbstractTreeNode> children = new ArrayList<>();
    if (node instanceof CoverageListNode) {
      final Object val = node.getValue();
      // Classes are leaves in this tree.
      if (val instanceof PsiClass) return Collections.emptyList();

      //append package classes
      if (val instanceof PsiPackage) {
        final PsiPackage psiPackage = (PsiPackage) val;
        if (ApplicationManager.getApplication().runReadAction(new Computable<Boolean>() {
          public Boolean compute() {
            return isInCoverageScope(psiPackage);
          }
        })) {
          // Sub-package nodes first... (PSI access wrapped in read actions;
          // isValid() guards against the package being invalidated concurrently)
          final PsiPackage[] subPackages = ApplicationManager.getApplication().runReadAction(new Computable<PsiPackage[]>() {
            @NotNull
            public PsiPackage[] compute() {
              return psiPackage.isValid()
                     ? psiPackage.getSubPackages(mySuitesBundle.getSearchScope(node.getProject()))
                     : PsiPackage.EMPTY_ARRAY;
            }
          });
          for (PsiPackage subPackage: subPackages) {
            processSubPackage(subPackage, children);
          }

          // ...then the classes declared in the package's files.
          final PsiFile[] childFiles = ApplicationManager.getApplication().runReadAction(new Computable<PsiFile[]>() {
            @NotNull
            public PsiFile[] compute() {
              return psiPackage.isValid()
                     ? psiPackage.getFiles(mySuitesBundle.getSearchScope(node.getProject()))
                     : PsiFile.EMPTY_ARRAY;
            }
          });
          for (final PsiFile file : childFiles) {
            collectFileChildren(file, node, children);
          }
        }
        else if (!myStateBean.myFlattenPackages) {
          collectSubPackages(children, (PsiPackage)val);
        }
      }
      // The root additionally lists the classes configured directly on the suites.
      if (node instanceof CoverageListRootNode) {
        for (CoverageSuite suite : mySuitesBundle.getSuites()) {
          final List<PsiClass> classes = ((JavaCoverageSuite)suite).getCurrentSuiteClasses(myProject);
          for (PsiClass aClass : classes) {
            children.add(new CoverageListNode(myProject, aClass, mySuitesBundle, myStateBean));
          }
        }
      }
      for (AbstractTreeNode childNode : children) {
        childNode.setParent(node);
      }
    }
    return children;
  }

  // Adds a child node per class in the file; below the root, classes without
  // recorded coverage info are skipped.
  protected void collectFileChildren(final PsiFile file, AbstractTreeNode node, List<AbstractTreeNode> children) {
    if (file instanceof PsiClassOwner) {
      PsiClass[] classes = ApplicationManager.getApplication().runReadAction(new Computable<PsiClass[]>() {
        @NotNull
        public PsiClass[] compute() {
          return file.isValid() ? ((PsiClassOwner) file).getClasses() : PsiClass.EMPTY_ARRAY;
        }
      });
      for (PsiClass aClass : classes) {
        if (!(node instanceof CoverageListRootNode) && getClassCoverageInfo(aClass) == null) continue;
        children.add(new CoverageListNode(myProject, aClass, mySuitesBundle, myStateBean));
      }
    }
  }

  // Looks up coverage info by the class's qualified name (read inside a read
  // action; null for invalid/anonymous classes).
  @Nullable
  private PackageAnnotator.ClassCoverageInfo getClassCoverageInfo(final PsiClass aClass) {
    return myAnnotator.getClassCoverageInfo(ApplicationManager.getApplication().runReadAction(new NullableComputable<String>() {
      public String compute() {
        return aClass.isValid() ? aClass.getQualifiedName() : null;
      }
    }));
  }

  // Column order must match getPercentage(): element, class %, method %, line %.
  @Override
  public ColumnInfo[] createColumnInfos() {
    return new ColumnInfo[]{
      new ElementColumnInfo(),
      new PercentageCoverageColumnInfo(1, "Class, %", mySuitesBundle, myStateBean),
      new PercentageCoverageColumnInfo(2, "Method, %", mySuitesBundle, myStateBean),
      new PercentageCoverageColumnInfo(3, "Line, %", mySuitesBundle, myStateBean)
    };
  }

  // A package is in scope when any suite's package filter accepts it;
  // non-package elements (including null) are never in scope.
  private boolean isInCoverageScope(PsiElement element) {
    if (element instanceof PsiPackage) {
      final PsiPackage psiPackage = (PsiPackage)element;
      final String qualifiedName = psiPackage.getQualifiedName();
      for (CoverageSuite suite : mySuitesBundle.getSuites()) {
        if (((JavaCoverageSuite)suite).isPackageFiltered(qualifiedName)) return true;
      }
    }
    return false;
  }

  @Override
  public boolean canSelectInCoverageView(Object object) {
    final PsiFile psiFile = object instanceof VirtualFile ? PsiManager.getInstance(myProject).findFile((VirtualFile)object) : null;
    if (psiFile instanceof PsiClassOwner) {
      final String packageName = ((PsiClassOwner)psiFile).getPackageName();
      return isInCoverageScope(JavaPsiFacade.getInstance(myProject).findPackage(packageName));
    }
    if (object instanceof PsiPackage) {
      return isInCoverageScope((PsiElement)object);
    }
    return false;
  }

  @Override
  public boolean supportFlattenPackages() {
    return true;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.security.token.block;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.security.SecureRandom;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.commons.lang3.ArrayUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.datatransfer.InvalidEncryptionKeyException;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.Timer;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;
/**
* BlockTokenSecretManager can be instantiated in 2 modes, master mode
* and worker mode. Master can generate new block keys and export block
* keys to workers, while workers can only import and use block keys
* received from master. Both master and worker can generate and verify
* block tokens. Typically, master mode is used by NN and worker mode
* is used by DN.
*/
@InterfaceAudience.Private
public class BlockTokenSecretManager extends
SecretManager<BlockTokenIdentifier> {
public static final Logger LOG =
LoggerFactory.getLogger(BlockTokenSecretManager.class);
public static final Token<BlockTokenIdentifier> DUMMY_TOKEN = new Token<BlockTokenIdentifier>();
private final boolean isMaster;
/**
* keyUpdateInterval is the interval that NN updates its block keys. It should
* be set long enough so that all live DN's and Balancer should have sync'ed
* their block keys with NN at least once during each interval.
*/
private long keyUpdateInterval;
private volatile long tokenLifetime;
private int serialNo;
private BlockKey currentKey;
private BlockKey nextKey;
private final Map<Integer, BlockKey> allKeys;
private String blockPoolId;
private final String encryptionAlgorithm;
private final int intRange;
private final int nnRangeStart;
private final boolean useProto;
private final SecureRandom nonceGenerator = new SecureRandom();
/**
* Timer object for querying the current time. Separated out for
* unit testing.
*/
private Timer timer;
/**
 * Constructor for workers. Workers never mint block keys themselves; they
 * import keys exported by the master (see class javadoc).
 *
 * @param keyUpdateInterval how often a new key will be generated
 * @param tokenLifetime how long an individual token is valid
 * @param blockPoolId block pool ID that tokens are scoped to
 * @param encryptionAlgorithm encryption algorithm to use
 * @param useProto should we use new protobuf style tokens
 */
public BlockTokenSecretManager(long keyUpdateInterval,
    long tokenLifetime, String blockPoolId, String encryptionAlgorithm,
    boolean useProto) {
  // isMaster = false; a worker occupies the single-NN serial range (index 0 of 1).
  this(false, keyUpdateInterval, tokenLifetime, blockPoolId,
      encryptionAlgorithm, 0, 1, useProto);
}
/**
* Constructor for masters.
*
* @param keyUpdateInterval how often a new key will be generated
* @param tokenLifetime how long an individual token is valid
* @param nnIndex namenode index of the namenode for which we are creating the manager
* @param blockPoolId block pool ID
* @param encryptionAlgorithm encryption algorithm to use
* @param numNNs number of namenodes possible
* @param useProto should we use new protobuf style tokens
*/
public BlockTokenSecretManager(long keyUpdateInterval,
long tokenLifetime, int nnIndex, int numNNs, String blockPoolId,
String encryptionAlgorithm, boolean useProto) {
this(true, keyUpdateInterval, tokenLifetime, blockPoolId,
encryptionAlgorithm, nnIndex, numNNs, useProto);
Preconditions.checkArgument(nnIndex >= 0);
Preconditions.checkArgument(numNNs > 0);
setSerialNo(new SecureRandom().nextInt());
generateKeys();
}
private BlockTokenSecretManager(boolean isMaster, long keyUpdateInterval,
long tokenLifetime, String blockPoolId, String encryptionAlgorithm,
int nnIndex, int numNNs, boolean useProto) {
this.intRange = Integer.MAX_VALUE / numNNs;
this.nnRangeStart = intRange * nnIndex;
this.isMaster = isMaster;
this.keyUpdateInterval = keyUpdateInterval;
this.tokenLifetime = tokenLifetime;
this.allKeys = new HashMap<Integer, BlockKey>();
this.blockPoolId = blockPoolId;
this.encryptionAlgorithm = encryptionAlgorithm;
this.useProto = useProto;
this.timer = new Timer();
generateKeys();
}
@VisibleForTesting
public synchronized void setSerialNo(int serialNo) {
// we mod the serial number by the range and then add that times the index
this.serialNo = (serialNo % intRange) + (nnRangeStart);
}
public void setBlockPoolId(String blockPoolId) {
this.blockPoolId = blockPoolId;
}
/** Initialize block keys */
private synchronized void generateKeys() {
if (!isMaster) {
return;
}
/*
* Need to set estimated expiry dates for currentKey and nextKey so that if
* NN crashes, DN can still expire those keys. NN will stop using the newly
* generated currentKey after the first keyUpdateInterval, however it may
* still be used by DN and Balancer to generate new tokens before they get a
* chance to sync their keys with NN. Since we require keyUpdInterval to be
* long enough so that all live DN's and Balancer will sync their keys with
* NN at least once during the period, the estimated expiry date for
* currentKey is set to now() + 2 * keyUpdateInterval + tokenLifetime.
* Similarly, the estimated expiry date for nextKey is one keyUpdateInterval
* more.
*/
setSerialNo(serialNo + 1);
currentKey = new BlockKey(serialNo, timer.now() + 2
* keyUpdateInterval + tokenLifetime, generateSecret());
setSerialNo(serialNo + 1);
nextKey = new BlockKey(serialNo, timer.now() + 3
* keyUpdateInterval + tokenLifetime, generateSecret());
allKeys.put(currentKey.getKeyId(), currentKey);
allKeys.put(nextKey.getKeyId(), nextKey);
}
/** Export block keys, only to be used in master mode */
public synchronized ExportedBlockKeys exportKeys() {
if (!isMaster) {
return null;
}
LOG.debug("Exporting access keys");
return new ExportedBlockKeys(true, keyUpdateInterval, tokenLifetime,
currentKey, allKeys.values().toArray(new BlockKey[0]));
}
private synchronized void removeExpiredKeys() {
long now = timer.now();
for (Iterator<Map.Entry<Integer, BlockKey>> it = allKeys.entrySet()
.iterator(); it.hasNext();) {
Map.Entry<Integer, BlockKey> e = it.next();
if (e.getValue().getExpiryDate() < now) {
it.remove();
}
}
}
/**
* Set block keys, only to be used in worker mode
*/
public synchronized void addKeys(ExportedBlockKeys exportedKeys)
throws IOException {
if (isMaster || exportedKeys == null) {
return;
}
LOG.info("Setting block keys");
removeExpiredKeys();
this.currentKey = exportedKeys.getCurrentKey();
BlockKey[] receivedKeys = exportedKeys.getAllKeys();
for (int i = 0; i < receivedKeys.length; i++) {
if (receivedKeys[i] != null) {
this.allKeys.put(receivedKeys[i].getKeyId(), receivedKeys[i]);
}
}
}
/**
* Update block keys if update time > update interval.
* @return true if the keys are updated.
*/
public synchronized boolean updateKeys(final long updateTime) throws IOException {
if (updateTime > keyUpdateInterval) {
return updateKeys();
}
return false;
}
/**
* Update block keys, only to be used in master mode
*/
synchronized boolean updateKeys() throws IOException {
if (!isMaster) {
return false;
}
LOG.info("Updating block keys");
removeExpiredKeys();
// set final expiry date of retiring currentKey
allKeys.put(currentKey.getKeyId(), new BlockKey(currentKey.getKeyId(),
timer.now() + keyUpdateInterval + tokenLifetime,
currentKey.getKey()));
// update the estimated expiry date of new currentKey
currentKey = new BlockKey(nextKey.getKeyId(), timer.now()
+ 2 * keyUpdateInterval + tokenLifetime, nextKey.getKey());
allKeys.put(currentKey.getKeyId(), currentKey);
// generate a new nextKey
setSerialNo(serialNo + 1);
nextKey = new BlockKey(serialNo, timer.now() + 3
* keyUpdateInterval + tokenLifetime, generateSecret());
allKeys.put(nextKey.getKeyId(), nextKey);
return true;
}
/** Generate an block token for current user */
public Token<BlockTokenIdentifier> generateToken(ExtendedBlock block,
EnumSet<BlockTokenIdentifier.AccessMode> modes,
StorageType[] storageTypes, String[] storageIds) throws IOException {
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
String userID = (ugi == null ? null : ugi.getShortUserName());
return generateToken(userID, block, modes, storageTypes, storageIds);
}
/** Generate a block token for a specified user */
public Token<BlockTokenIdentifier> generateToken(String userId,
ExtendedBlock block, EnumSet<BlockTokenIdentifier.AccessMode> modes,
StorageType[] storageTypes, String[] storageIds) throws IOException {
BlockTokenIdentifier id = new BlockTokenIdentifier(userId, block
.getBlockPoolId(), block.getBlockId(), modes, storageTypes,
storageIds, useProto);
return new Token<BlockTokenIdentifier>(id, this);
}
/**
* Check if access should be allowed. userID is not checked if null. This
* method doesn't check if token password is correct. It should be used only
* when token password has already been verified (e.g., in the RPC layer).
*
* Some places need to check the access using StorageTypes and for other
* places the StorageTypes is not relevant.
*/
public void checkAccess(BlockTokenIdentifier id, String userId,
ExtendedBlock block, BlockTokenIdentifier.AccessMode mode,
StorageType[] storageTypes, String[] storageIds) throws InvalidToken {
checkAccess(id, userId, block, mode);
if (ArrayUtils.isNotEmpty(storageTypes)) {
checkAccess(id.getStorageTypes(), storageTypes, "StorageTypes");
}
if (ArrayUtils.isNotEmpty(storageIds)) {
checkAccess(id.getStorageIds(), storageIds, "StorageIDs");
}
}
public void checkAccess(BlockTokenIdentifier id, String userId,
ExtendedBlock block, BlockTokenIdentifier.AccessMode mode)
throws InvalidToken {
if (LOG.isDebugEnabled()) {
LOG.debug("Checking access for user=" + userId + ", block=" + block
+ ", access mode=" + mode + " using " + id);
}
if (userId != null && !userId.equals(id.getUserId())) {
throw new InvalidToken("Block token with " + id
+ " doesn't belong to user " + userId);
}
if (!id.getBlockPoolId().equals(block.getBlockPoolId())) {
throw new InvalidToken("Block token with " + id
+ " doesn't apply to block " + block);
}
if (id.getBlockId() != block.getBlockId()) {
throw new InvalidToken("Block token with " + id
+ " doesn't apply to block " + block);
}
if (isExpired(id.getExpiryDate())) {
throw new InvalidToken("Block token with " + id
+ " is expired.");
}
if (!id.getAccessModes().contains(mode)) {
throw new InvalidToken("Block token with " + id
+ " doesn't have " + mode + " permission");
}
}
/**
* Check if the requested values can be satisfied with the values in the
* BlockToken. This is intended for use with StorageTypes and StorageIDs.
*
* The current node can only verify that one of the storage [Type|ID] is
* available. The rest will be on different nodes.
*/
public static <T> void checkAccess(T[] candidates, T[] requested, String msg)
throws InvalidToken {
if (ArrayUtils.isEmpty(requested)) {
throw new InvalidToken("The request has no " + msg + ". "
+ "This is probably a configuration error.");
}
if (ArrayUtils.isEmpty(candidates)) {
return;
}
Multiset<T> c = HashMultiset.create(Arrays.asList(candidates));
for (T req : requested) {
if (!c.remove(req)) {
throw new InvalidToken("Block token with " + msg + " "
+ Arrays.toString(candidates)
+ " not valid for access with " + msg + " "
+ Arrays.toString(requested));
}
}
}
/** Check if access should be allowed. userID is not checked if null */
public void checkAccess(Token<BlockTokenIdentifier> token, String userId,
ExtendedBlock block, BlockTokenIdentifier.AccessMode mode,
StorageType[] storageTypes, String[] storageIds) throws InvalidToken {
BlockTokenIdentifier id = new BlockTokenIdentifier();
try {
id.readFields(new DataInputStream(new ByteArrayInputStream(token
.getIdentifier())));
} catch (IOException e) {
throw new InvalidToken(
"Unable to de-serialize block token identifier for user=" + userId
+ ", block=" + block + ", access mode=" + mode);
}
checkAccess(id, userId, block, mode, storageTypes, storageIds);
if (!Arrays.equals(retrievePassword(id), token.getPassword())) {
throw new InvalidToken("Block token with " + id
+ " doesn't have the correct token password");
}
}
private static boolean isExpired(long expiryDate) {
return Time.now() > expiryDate;
}
/**
* check if a token is expired. for unit test only. return true when token is
* expired, false otherwise
*/
static boolean isTokenExpired(Token<BlockTokenIdentifier> token)
throws IOException {
ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
DataInputStream in = new DataInputStream(buf);
long expiryDate = WritableUtils.readVLong(in);
return isExpired(expiryDate);
}
/** set token lifetime. */
public void setTokenLifetime(long tokenLifetime) {
this.tokenLifetime = tokenLifetime;
}
/**
* Create an empty block token identifier
*
* @return a newly created empty block token identifier
*/
@Override
public BlockTokenIdentifier createIdentifier() {
return new BlockTokenIdentifier();
}
/**
* Create a new password/secret for the given block token identifier.
*
* @param identifier
* the block token identifier
* @return token password/secret
*/
@Override
protected byte[] createPassword(BlockTokenIdentifier identifier) {
BlockKey key = null;
synchronized (this) {
key = currentKey;
}
if (key == null) {
throw new IllegalStateException("currentKey hasn't been initialized.");
}
identifier.setExpiryDate(timer.now() + tokenLifetime);
identifier.setKeyId(key.getKeyId());
if (LOG.isDebugEnabled()) {
LOG.debug("Generating block token for " + identifier);
}
return createPassword(identifier.getBytes(), key.getKey());
}
/**
* Look up the token password/secret for the given block token identifier.
*
* @param identifier
* the block token identifier to look up
* @return token password/secret as byte[]
* @throws InvalidToken
*/
@Override
public byte[] retrievePassword(BlockTokenIdentifier identifier)
throws InvalidToken {
if (isExpired(identifier.getExpiryDate())) {
throw new InvalidToken("Block token with " + identifier
+ " is expired.");
}
BlockKey key = null;
synchronized (this) {
key = allKeys.get(identifier.getKeyId());
}
if (key == null) {
throw new InvalidToken("Can't re-compute password for "
+ identifier + ", since the required block key (keyID="
+ identifier.getKeyId() + ") doesn't exist.");
}
return createPassword(identifier.getBytes(), key.getKey());
}
/**
* Generate a data encryption key for this block pool, using the current
* BlockKey.
*
* @return a data encryption key which may be used to encrypt traffic
* over the DataTransferProtocol
*/
public DataEncryptionKey generateDataEncryptionKey() {
byte[] nonce = new byte[8];
nonceGenerator.nextBytes(nonce);
BlockKey key = null;
synchronized (this) {
key = currentKey;
}
byte[] encryptionKey = createPassword(nonce, key.getKey());
return new DataEncryptionKey(key.getKeyId(), blockPoolId, nonce,
encryptionKey, timer.now() + tokenLifetime,
encryptionAlgorithm);
}
/**
* Recreate an encryption key based on the given key id and nonce.
*
* @param keyId identifier of the secret key used to generate the encryption key.
* @param nonce random value used to create the encryption key
* @return the encryption key which corresponds to this (keyId, blockPoolId, nonce)
* @throws InvalidEncryptionKeyException
*/
public byte[] retrieveDataEncryptionKey(int keyId, byte[] nonce)
throws InvalidEncryptionKeyException {
BlockKey key = null;
synchronized (this) {
key = allKeys.get(keyId);
if (key == null) {
throw new InvalidEncryptionKeyException("Can't re-compute encryption key"
+ " for nonce, since the required block key (keyID=" + keyId
+ ") doesn't exist. Current key: " + currentKey.getKeyId());
}
}
return createPassword(nonce, key.getKey());
}
@VisibleForTesting
public synchronized void setKeyUpdateIntervalForTesting(long millis) {
this.keyUpdateInterval = millis;
}
@VisibleForTesting
public void clearAllKeysForTesting() {
allKeys.clear();
}
@VisibleForTesting
public synchronized boolean hasKey(int keyId) {
BlockKey key = allKeys.get(keyId);
return key != null;
}
@VisibleForTesting
public synchronized int getSerialNoForTesting() {
return serialNo;
}
}
| |
/*
* Copyright 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.dataflow.server.service.impl;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties;
import org.springframework.cloud.dataflow.core.TaskDefinition;
import org.springframework.cloud.dataflow.core.dsl.TaskNode;
import org.springframework.cloud.dataflow.core.dsl.TaskParser;
import org.springframework.cloud.dataflow.server.controller.WhitelistProperties;
import org.springframework.cloud.deployer.spi.core.AppDefinition;
import org.springframework.core.io.Resource;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
/**
* Verifies the behavior of the methods in the utility.
*
* @author Glenn Renfro
*/
/**
 * Verifies the behavior of the methods in the TaskServiceUtils utility.
 *
 * @author Glenn Renfro
 */
public class TaskServiceUtilsTests {

    /** Simple two-task composed graph used across the tests. */
    public static final String BASE_GRAPH = "AAA && BBB";

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    @Test
    public void testCreateComposedTaskDefinition() {
        // A valid graph is wrapped into a composed-task-runner definition.
        // (Removed an unused TaskConfigurationProperties local that was
        // configured but never passed to the method under test.)
        assertThat(TaskServiceUtils.createComposedTaskDefinition(BASE_GRAPH))
                .isEqualTo("composed-task-runner --graph=\"AAA && BBB\"");
    }

    @Test
    public void testCreateComposeTaskDefinitionNullNameCheck() {
        // A null graph must be rejected. The redundant call with BASE_GRAPH
        // (and an unused TaskConfigurationProperties local) were removed so
        // the expected exception can only originate from the null argument.
        this.expectedException.expect(IllegalArgumentException.class);
        TaskServiceUtils.createComposedTaskDefinition(null);
    }

    @Test
    public void testCreateComposeTaskDefinitionNullProperties() {
        // Null properties must be rejected even when the graph is valid.
        this.expectedException.expect(IllegalArgumentException.class);
        TaskServiceUtils.createComposedTaskDefinition(BASE_GRAPH, null);
    }

    @Test
    public void testCTRPropertyReplacement() {
        // app.* and deployer.* properties addressed to sub-tasks are folded
        // into a single composed-task-properties entry for the runner app.
        TaskNode node = parse("AAA && BBB");
        Map<String, String> taskDeploymentProperties = new HashMap<>();
        taskDeploymentProperties.put("app.test.BBB.timestamp.format", "aformat");
        taskDeploymentProperties.put("deployer.test.BBB.foo", "bar");
        taskDeploymentProperties = TaskServiceUtils.establishComposedTaskProperties(
                taskDeploymentProperties,
                node);
        assertThat(taskDeploymentProperties.size()).isEqualTo(1);
        assertThat(taskDeploymentProperties.get(
                "app.composed-task-runner.composed-task-properties"))
                .isEqualTo("app.test-BBB.app.BBB.timestamp.format=aformat, deployer.test-BBB.deployer.BBB.foo=bar");
    }

    @Test
    public void testDatabasePropUpdate() {
        // By default all four datasource properties (plus the original app
        // property) are copied onto the task definition.
        TaskDefinition taskDefinition = new TaskDefinition("testTask", "testApp");
        DataSourceProperties dataSourceProperties = createDataSourceProperties();
        TaskDefinition definition = TaskServiceUtils.updateTaskProperties(
                taskDefinition,
                dataSourceProperties);
        assertThat(definition.getProperties().size()).isEqualTo(5);
        assertThat(definition.getProperties().get("spring.datasource.url")).isEqualTo("myUrl");
        assertThat(definition.getProperties().get("spring.datasource.driverClassName")).isEqualTo("myDriver");
        assertThat(definition.getProperties().get("spring.datasource.username")).isEqualTo("myUser");
        assertThat(definition.getProperties().get("spring.datasource.password")).isEqualTo("myPassword");
    }

    @Test
    public void testDatabasePropUpdateWithPlatform() {
        // When credentials are not to be included (third argument false),
        // username and password must be omitted from the definition.
        TaskDefinition taskDefinition = new TaskDefinition("testTask", "testApp");
        DataSourceProperties dataSourceProperties = createDataSourceProperties();
        TaskDefinition definition = TaskServiceUtils.updateTaskProperties(
                taskDefinition,
                dataSourceProperties, false);
        assertThat(definition.getProperties().size()).isEqualTo(3);
        assertThat(definition.getProperties().get("spring.datasource.url")).isEqualTo("myUrl");
        assertThat(definition.getProperties().get("spring.datasource.driverClassName")).isEqualTo("myDriver");
        assertThat(definition.getProperties().get("spring.datasource.username")).isNull();
        assertThat(definition.getProperties().get("spring.datasource.password")).isNull();
    }

    @Test
    public void testExtractAppProperties() {
        // Only keys of the form app.<appName>.* are extracted, with the
        // prefix stripped; everything else is ignored.
        Map<String, String> taskDeploymentProperties = new HashMap<>();
        taskDeploymentProperties.put("app.test.foo", "bar");
        taskDeploymentProperties.put("test.none", "boo");
        taskDeploymentProperties.put("app.test.test", "baz");
        taskDeploymentProperties.put("app.none.test", "boo");
        Map<String, String> result = TaskServiceUtils.extractAppProperties("test",
                taskDeploymentProperties);
        assertThat(result.size()).isEqualTo(2);
        assertThat(result.get("foo")).isEqualTo("bar");
        assertThat(result.get("test")).isEqualTo("baz");
    }

    @Test
    public void testMergeAndExpandAppProperties() {
        // Properties returned by the whitelist qualifier end up verbatim on
        // the resulting AppDefinition.
        TaskDefinition taskDefinition = new TaskDefinition("testTask", "testApp");
        Map<String, String> appDeploymentProperties = new HashMap<>();
        appDeploymentProperties.put("propA", "valA");
        appDeploymentProperties.put("propB", "valB");
        WhitelistProperties whitelistProperties = mock(WhitelistProperties.class);
        org.mockito.BDDMockito.given(whitelistProperties
                .qualifyProperties(any(), any()))
                .willReturn(appDeploymentProperties);
        AppDefinition appDefinition = TaskServiceUtils.mergeAndExpandAppProperties(
                taskDefinition,
                mock(Resource.class),
                appDeploymentProperties,
                whitelistProperties);
        assertThat(appDefinition.getProperties().size()).isEqualTo(2);
        assertThat(appDefinition.getProperties().get("propA")).isEqualTo("valA");
        assertThat(appDefinition.getProperties().get("propB")).isEqualTo("valB");
    }

    @Test
    public void testDataFlowUriProperty() throws Exception {
        final String DATA_FLOW_SERVICE_URI = "https://myserver:9191";
        List<String> cmdLineArgs = new ArrayList<>();
        Map<String, String> appDeploymentProperties = new HashMap<>();
        // No user-provided URI anywhere: the server URI is injected.
        TaskServiceUtils.updateDataFlowUriIfNeeded(DATA_FLOW_SERVICE_URI, appDeploymentProperties, cmdLineArgs);
        assertTrue(appDeploymentProperties.containsKey("dataflowServerUri"));
        assertTrue("dataflowServerUri is expected to be in the app deployment properties",
                appDeploymentProperties.get("dataflowServerUri").equals("https://myserver:9191"));
        // kebab-case key already present: nothing is injected or overwritten.
        appDeploymentProperties.clear();
        appDeploymentProperties.put("dataflow-server-uri", "http://localhost:8080");
        TaskServiceUtils.updateDataFlowUriIfNeeded(DATA_FLOW_SERVICE_URI, appDeploymentProperties, cmdLineArgs);
        assertTrue(!appDeploymentProperties.containsKey("dataflowServerUri"));
        assertTrue("dataflowServerUri is incorrect",
                appDeploymentProperties.get("dataflow-server-uri").equals("http://localhost:8080"));
        // camelCase key already present: kept as-is.
        appDeploymentProperties.clear();
        appDeploymentProperties.put("dataflowServerUri", "http://localhost:8191");
        TaskServiceUtils.updateDataFlowUriIfNeeded(DATA_FLOW_SERVICE_URI, appDeploymentProperties, cmdLineArgs);
        assertTrue(appDeploymentProperties.containsKey("dataflowServerUri"));
        assertTrue("dataflowServerUri is incorrect",
                appDeploymentProperties.get("dataflowServerUri").equals("http://localhost:8191"));
        // env-style key already present: kept as-is, nothing injected.
        appDeploymentProperties.clear();
        appDeploymentProperties.put("DATAFLOW_SERVER_URI", "http://localhost:9000");
        TaskServiceUtils.updateDataFlowUriIfNeeded(DATA_FLOW_SERVICE_URI, appDeploymentProperties, cmdLineArgs);
        assertTrue(!appDeploymentProperties.containsKey("dataflowServerUri"));
        assertTrue("dataflowServerUri is incorrect",
                appDeploymentProperties.get("DATAFLOW_SERVER_URI").equals("http://localhost:9000"));
        // URI supplied via command-line argument: properties stay untouched.
        appDeploymentProperties.clear();
        cmdLineArgs.add("--dataflowServerUri=http://localhost:8383");
        TaskServiceUtils.updateDataFlowUriIfNeeded(DATA_FLOW_SERVICE_URI, appDeploymentProperties, cmdLineArgs);
        assertTrue(!appDeploymentProperties.containsKey("dataflowServerUri"));
        cmdLineArgs.clear();
        cmdLineArgs.add("DATAFLOW_SERVER_URI=http://localhost:8383");
        TaskServiceUtils.updateDataFlowUriIfNeeded(DATA_FLOW_SERVICE_URI, appDeploymentProperties, cmdLineArgs);
        assertTrue(!appDeploymentProperties.containsKey("dataflowServerUri"));
        assertTrue(!appDeploymentProperties.containsKey("DATAFLOW-SERVER-URI"));
    }

    /** Builds the DataSourceProperties fixture shared by the database tests. */
    private static DataSourceProperties createDataSourceProperties() {
        DataSourceProperties dataSourceProperties = new DataSourceProperties();
        dataSourceProperties.setUsername("myUser");
        dataSourceProperties.setDriverClassName("myDriver");
        dataSourceProperties.setPassword("myPassword");
        dataSourceProperties.setUrl("myUrl");
        return dataSourceProperties;
    }

    /** Parses the composed-task DSL text into a TaskNode for the tests. */
    private TaskNode parse(String dsltext) {
        TaskNode ctn = new TaskParser("test", dsltext, true, true).parse();
        return ctn;
    }
}
| |
/*
* Copyright 2013 Thomas Hoffmann
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.j4velin.pedometer;
import android.app.AlarmManager;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.IBinder;
import java.text.NumberFormat;
import java.util.Locale;
import de.j4velin.pedometer.ui.Activity_Main;
import de.j4velin.pedometer.util.Logger;
import de.j4velin.pedometer.util.Util;
import de.j4velin.pedometer.widget.WidgetUpdateService;
/**
* Background service which keeps the step-sensor listener alive to always get
* the number of steps since boot.
* <p/>
* This service won't be needed any more if there is a way to read the
* step-value without waiting for a sensor event
*/
public class SensorListener extends Service implements SensorEventListener {

    private final static int NOTIFICATION_ID = 1;
    public final static String ACTION_PAUSE = "pause";

    // Set on (re)start: the next sensor event with steps > 0 is used to
    // (re)initialize today's database entry before normal bookkeeping resumes.
    private static boolean WAIT_FOR_VALID_STEPS = false;
    // Latest raw step-counter value (steps since boot, per TYPE_STEP_COUNTER).
    private static int steps;
    private final static int MICROSECONDS_IN_ONE_MINUTE = 60000000;

    @Override
    public void onAccuracyChanged(final Sensor sensor, int accuracy) {
        // nobody knows what happens here: step value might magically decrease
        // when this method is called...
        if (BuildConfig.DEBUG) Logger.log(sensor.getName() + " accuracy changed: " + accuracy);
    }

    @Override
    public void onSensorChanged(final SensorEvent event) {
        // Guard against bogus sensor readings that would overflow an int.
        if (event.values[0] > Integer.MAX_VALUE) {
            if (BuildConfig.DEBUG) Logger.log("probably not a real value: " + event.values[0]);
            return;
        } else {
            steps = (int) event.values[0];
            if (WAIT_FOR_VALID_STEPS && steps > 0) {
                WAIT_FOR_VALID_STEPS = false;
                Database db = Database.getInstance(this);
                // Integer.MIN_VALUE marks "no entry yet" for today's date.
                if (db.getSteps(Util.getToday()) == Integer.MIN_VALUE) {
                    // Steps taken while counting was paused must not count
                    // toward the new day's offset.
                    int pauseDifference = steps -
                            getSharedPreferences("pedometer", Context.MODE_MULTI_PROCESS)
                                    .getInt("pauseCount", steps);
                    db.insertNewDay(Util.getToday(), steps - pauseDifference);
                    if (pauseDifference > 0) {
                        // update pauseCount for the new day
                        getSharedPreferences("pedometer", Context.MODE_MULTI_PROCESS).edit()
                                .putInt("pauseCount", steps).commit();
                    }
                    reRegisterSensor();
                }
                db.saveCurrentSteps(steps);
                db.close();
                updateNotificationState();
                // Keep home-screen widgets in sync with the new value.
                startService(new Intent(this, WidgetUpdateService.class));
            }
        }
    }

    @Override
    public IBinder onBind(final Intent intent) {
        // Pure started service; binding is not supported.
        return null;
    }

    @Override
    public int onStartCommand(final Intent intent, int flags, int startId) {
        // The pause action toggles: presence of "pauseCount" in prefs means we
        // are currently paused and should resume; absence means start a pause.
        if (intent != null && ACTION_PAUSE.equals(intent.getStringExtra("action"))) {
            if (BuildConfig.DEBUG)
                Logger.log("onStartCommand action: " + intent.getStringExtra("action"));
            if (steps == 0) {
                // No sensor event received yet; fall back to the saved value.
                Database db = Database.getInstance(this);
                steps = db.getCurrentSteps();
                db.close();
            }
            SharedPreferences prefs = getSharedPreferences("pedometer", Context.MODE_MULTI_PROCESS);
            if (prefs.contains("pauseCount")) { // resume counting
                int difference = steps -
                        prefs.getInt("pauseCount", steps); // number of steps taken during the pause
                Database db = Database.getInstance(this);
                // Subtract the pause-time steps from today's total.
                db.updateSteps(Util.getToday(), -difference);
                db.close();
                prefs.edit().remove("pauseCount").commit();
                updateNotificationState();
            } else { // pause counting
                // cancel restart
                ((AlarmManager) getApplicationContext().getSystemService(Context.ALARM_SERVICE))
                        .cancel(PendingIntent.getService(getApplicationContext(), 2,
                                new Intent(this, SensorListener.class),
                                PendingIntent.FLAG_UPDATE_CURRENT));
                prefs.edit().putInt("pauseCount", steps).commit();
                updateNotificationState();
                stopSelf();
                return START_NOT_STICKY;
            }
        }
        // restart service every hour to get the current step count
        ((AlarmManager) getApplicationContext().getSystemService(Context.ALARM_SERVICE))
                .set(AlarmManager.RTC, System.currentTimeMillis() + AlarmManager.INTERVAL_HOUR,
                        PendingIntent.getService(getApplicationContext(), 2,
                                new Intent(this, SensorListener.class),
                                PendingIntent.FLAG_UPDATE_CURRENT));
        WAIT_FOR_VALID_STEPS = true;
        return START_STICKY;
    }

    @Override
    public void onCreate() {
        super.onCreate();
        if (BuildConfig.DEBUG) Logger.log("SensorListener onCreate");
        reRegisterSensor();
        updateNotificationState();
    }

    @Override
    public void onTaskRemoved(final Intent rootIntent) {
        super.onTaskRemoved(rootIntent);
        if (BuildConfig.DEBUG) Logger.log("sensor service task removed");
        // Restart service in 500 ms
        ((AlarmManager) getSystemService(Context.ALARM_SERVICE))
                .set(AlarmManager.RTC, System.currentTimeMillis() + 500, PendingIntent
                        .getService(this, 3, new Intent(this, SensorListener.class), 0));
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        if (BuildConfig.DEBUG) Logger.log("SensorListener onDestroy");
        try {
            SensorManager sm = (SensorManager) getSystemService(SENSOR_SERVICE);
            sm.unregisterListener(this);
        } catch (Exception e) {
            // Best effort: unregistering may fail if we were never registered.
            if (BuildConfig.DEBUG) Logger.log(e);
            e.printStackTrace();
        }
    }

    /**
     * Shows, updates or removes the persistent progress notification depending
     * on the "notification" preference and the current step counts.
     */
    private void updateNotificationState() {
        if (BuildConfig.DEBUG) Logger.log("SensorListener updateNotificationState");
        SharedPreferences prefs = getSharedPreferences("pedometer", Context.MODE_MULTI_PROCESS);
        NotificationManager nm =
                (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
        if (prefs.getBoolean("notification", true)) {
            int goal = prefs.getInt("goal", 10000);
            Database db = Database.getInstance(this);
            // today_offset converts the boot-relative counter into today's count.
            int today_offset = db.getSteps(Util.getToday());
            if (steps == 0)
                steps = db.getCurrentSteps(); // use saved value if we haven't anything better
            db.close();
            Notification.Builder notificationBuilder = new Notification.Builder(this);
            if (steps > 0) {
                if (today_offset == Integer.MIN_VALUE) today_offset = -steps;
                notificationBuilder.setProgress(goal, today_offset + steps, false).setContentText(
                        today_offset + steps >= goal ? getString(R.string.goal_reached_notification,
                                NumberFormat.getInstance(Locale.getDefault())
                                        .format((today_offset + steps))) :
                                getString(R.string.notification_text,
                                        NumberFormat.getInstance(Locale.getDefault())
                                                .format((goal - today_offset - steps))));
            } else { // still no step value?
                notificationBuilder
                        .setContentText(getString(R.string.your_progress_will_be_shown_here_soon));
            }
            boolean isPaused = prefs.contains("pauseCount");
            notificationBuilder.setPriority(Notification.PRIORITY_MIN).setShowWhen(false)
                    .setContentTitle(isPaused ? getString(R.string.ispaused) :
                            getString(R.string.notification_title)).setContentIntent(PendingIntent
                    .getActivity(this, 0, new Intent(this, Activity_Main.class),
                            PendingIntent.FLAG_UPDATE_CURRENT))
                    .setSmallIcon(R.drawable.ic_notification)
                    .addAction(isPaused ? R.drawable.ic_resume : R.drawable.ic_pause,
                            isPaused ? getString(R.string.resume) : getString(R.string.pause),
                            PendingIntent.getService(this, 4, new Intent(this, SensorListener.class)
                                            .putExtra("action", ACTION_PAUSE),
                                    PendingIntent.FLAG_UPDATE_CURRENT)).setOngoing(true);
            nm.notify(NOTIFICATION_ID, notificationBuilder.build());
        } else {
            nm.cancel(NOTIFICATION_ID);
        }
    }

    /**
     * (Re)attaches this service as listener of the hardware step counter,
     * unregistering any previous registration first.
     */
    private void reRegisterSensor() {
        if (BuildConfig.DEBUG) Logger.log("re-register sensor listener");
        SensorManager sm = (SensorManager) getSystemService(SENSOR_SERVICE);
        try {
            sm.unregisterListener(this);
        } catch (Exception e) {
            if (BuildConfig.DEBUG) Logger.log(e);
            e.printStackTrace();
        }
        if (BuildConfig.DEBUG) {
            Logger.log("step sensors: " + sm.getSensorList(Sensor.TYPE_STEP_COUNTER).size());
            if (sm.getSensorList(Sensor.TYPE_STEP_COUNTER).size() < 1) return; // emulator
            Logger.log("default: " + sm.getDefaultSensor(Sensor.TYPE_STEP_COUNTER).getName());
        }
        // enable batching with delay of max 5 min
        sm.registerListener(this, sm.getDefaultSensor(Sensor.TYPE_STEP_COUNTER),
                SensorManager.SENSOR_DELAY_NORMAL, 5 * MICROSECONDS_IN_ONE_MINUTE);
    }
}
| |
/************************************************************************************
* @file BpTreeMap.java
*
* @author John Miller
*/
import java.io.*;
import java.lang.reflect.Array;
import static java.lang.System.out;
import java.util.*;
/************************************************************************************
* This class provides B+Tree maps. B+Trees are used as multi-level index structures
* that provide efficient access for both point queries and range queries.
*/
public class BpTreeMap <K extends Comparable <K>, V>
extends AbstractMap <K, V>
implements Serializable, Cloneable, SortedMap <K, V>
{
/** The maximum fanout (maximum number of children) for a B+Tree node.
 */
private static final int ORDER = 5;

/** The class for type K; needed to create generic arrays via reflection.
 */
private final Class <K> classK;

/** The class for type V; needed to create generic arrays via reflection.
 */
private final Class <V> classV;
/********************************************************************************
 * This inner class defines nodes that are stored in the B+tree map.
 * Leaf nodes hold values in ref[0..nKeys-1] and use ref[ORDER] as a link to
 * the next leaf; internal nodes hold child Node references in ref.
 */
private class Node
{
    boolean isLeaf;     // true for leaf nodes, false for internal nodes
    int     nKeys;      // number of keys currently stored in this node
    K []    key;        // the keys (capacity ORDER)
    Object [] ref;      // values/leaf-link (leaf) or child nodes (internal)

    @SuppressWarnings("unchecked")
    Node (boolean _isLeaf)
    {
        isLeaf = _isLeaf;
        nKeys  = 0;
        key    = (K []) Array.newInstance (classK, ORDER);
        if (isLeaf) {
            //ref = (V []) Array.newInstance (classV, ORDER);
            // extra slot (index ORDER) chains this leaf to the next one
            ref = new Object [ORDER + 1];
        } else {
            ref = (Node []) Array.newInstance (Node.class, ORDER + 1);
        } // if
    } // constructor
} // Node inner class
/** The root of the B+Tree
*/
private Node root;
/** The counter for the number nodes accessed (for performance testing).
*/
private int count = 0;
private int treeSize = 0;
/********************************************************************************
* Construct an empty B+Tree map.
* @param _classK the class for keys (K)
* @param _classV the class for values (V)
*/
public BpTreeMap (Class <K> _classK, Class <V> _classV)
{
classK = _classK;
classV = _classV;
root = new Node (true);
} // constructor
/********************************************************************************
* Return null to use the natural order based on the key type. This requires the
* key type to implement Comparable.
*/
public Comparator <? super K> comparator ()
{
return null;
} // comparator
/********************************************************************************
* Return a set containing all the entries as pairs of keys and values.
* @return the set view of the map
*/
public Set <Map.Entry <K, V>> entrySet ()
{
return subSet(root);
} // entrySet
/**
* Creates a Set of keys and values from the leaves of the tree
* @param n
* @return the set of values of tree rooted at n
*/
public Set <Map.Entry <K, V>> subSet (Node n)
{
if (!n.isLeaf)
{
return subSet((Node) n.ref[0]);
}
else
{
Node current = n;
Set <Map.Entry <K, V>> returnSet = new HashSet <> ();
while (current != null)
{
for(int i = 0;i < n.nKeys;i ++)
{
returnSet.add(new AbstractMap.SimpleEntry(current.key[i], current.ref[i]));
}
current = (Node) current.ref[ORDER];
}
return returnSet;
}
}
/********************************************************************************
* Given the key, look up the value in the B+Tree map.
* @param key the key used for look up
* @return the value associated with the key
*/
@SuppressWarnings("unchecked")
public V get (Object key)
{
return find ((K) key, root);
} // get
/********************************************************************************
* Put the key-value pair in the B+Tree map.
* @param key the key to insert
* @param value the value to insert
* @return null (not the previous value)
*/
public V put (K key, V value)
{
insert (key, value, root, null);
treeSize++;
return null;
} // put
/********************************************************************************
* Return the first (smallest) key in the B+Tree map.
* @return the first key in the B+Tree map.
*/
public K firstKey ()
{
Node n = root;
K smallestKey = n.key[0];
//if node is a leaf return the smallest key(should be the first)
if(n.isLeaf){
return smallestKey;
}
//if node is not a leaf then set n to first node referenced by the current node
while(!n.isLeaf){
n = (Node)n.ref[0];
smallestKey = n.key[0];
}
return smallestKey;
} // firstKey
/********************************************************************************
* Return the last (largest) key in the B+Tree map.
* @return the last key in the B+Tree map.
*/
public K lastKey ()
{
Node n = root;
K largestKey = n.key[n.key.length - 2];
//if node is a leaf then loop through keys in reverse looking
//for first non-null value
if(n.isLeaf){
for(int i = n.key.length - 2; i >= 0; i--){
if(n.key[i] != null){
largestKey = n.key[i];
return largestKey;
}
}
}
//if node is not a leaf loop through keys in reverse and set n to
//the last reference
while(!n.isLeaf){
for(int i = n.nKeys - 1 ; i >= 0; i--){
if(n.key[i]!= null){
n = (Node)n.ref[i + 1];
i = n.nKeys;
if(n.isLeaf){
for(int j = n.nKeys; j >= 0; j-- ){
if(n.key[j]!= null){
largestKey = n.key[j];
break;
}
}
break;
}
}
}
}
return largestKey;
} // lastKey
/********************************************************************************
* Return the portion of the B+Tree map where key < toKey.
* @return the submap with keys in the range [firstKey, toKey)
*/
public SortedMap <K,V> headMap (K toKey)
{
return subMap(null, toKey);
} // headMap
/********************************************************************************
* Return the portion of the B+Tree map where fromKey <= key.
* @return the submap with keys in the range [fromKey, lastKey]
*/
public SortedMap <K,V> tailMap (K fromKey)
{
return subMap(fromKey, null);
} // tailMap
/********************************************************************************
* Return the portion of the B+Tree map whose keys are between fromKey and toKey,
* i.e., fromKey <= key < toKey.
* @return the submap with keys in the range [fromKey, toKey)
*/
public SortedMap <K,V> subMap (K fromKey, K toKey)
{
// Create empty submap
BpTreeMap <K, V> newMap = new BpTreeMap <> (classK, classV);
// Set start and end keys
K first = fromKey == null ? firstKey() : fromKey;
K second = toKey == null ? lastKey() : toKey;
Node temp = root;
boolean foundLeaf = false;
while (!foundLeaf)
{
if (temp.ref[0].getClass() != Node.class)
{
break;
}
for (int i = 0;i < temp.nKeys;i ++)
{
K key = temp.key[i];
if (first.compareTo(key) < 0)
{
temp = (Node) temp.ref[i];
break;
}
temp = (Node) temp.ref[temp.nKeys];
}
}
int index = 0;
for (int i = 0;i < temp.nKeys;i ++)
{
if (temp.key[i].compareTo(first) == 0)
{
index = i;
break;
}
}
while (temp != null)
{
for (int i = index;i < temp.nKeys;i ++)
{
K key = temp.key[i];
if (key.compareTo(second) > 0)
{
return newMap;
}
newMap.put(key, (V) temp.ref[i]);
}
temp = (Node) temp.ref[ORDER];
index = 0;
}
return newMap;
} // subMap
/********************************************************************************
* Return the size (number of keys) in the B+Tree.
* @return the size of the B+Tree
*/
public int size ()
{
return treeSize;
} // size
/********************************************************************************
* Print the B+Tree using a pre-order traveral and indenting each level.
* @param n the current node to print
* @param level the current level of the B+Tree
*/
@SuppressWarnings("unchecked")
private void print (Node n, int level)
{
out.println ("BpTreeMap");
out.println ("-------------------------------------------");
for (int j = 0; j < level; j++) out.print ("\t");
out.print ("[ . ");
for (int i = 0; i < n.nKeys; i++) out.print (n.key [i] + " . ");
out.println ("]");
if ( ! n.isLeaf) {
for (int i = 0; i <= n.nKeys; i++) print ((Node) n.ref [i], level + 1);
} // if
out.println ("-------------------------------------------");
} // print
/********************************************************************************
* Recursive helper function for finding a key in B+trees.
* @param key the key to find
* @param ney the current node
*/
@SuppressWarnings("unchecked")
private V find (K key, Node n)
{
count++;
for (int i = 0; i < n.nKeys; i++) {
K k_i = n.key [i];
if (key.compareTo(k_i) <= 0) {
if (n.isLeaf) {
return (key.compareTo(k_i) == 0) ? (V) n.ref [i] : null;
} else {
return find (key, (Node) n.ref [i]);
} // if
} // if
} // for
return (n.isLeaf) ? null : find (key, (Node) n.ref [n.nKeys]);
} // find
/********************************************************************************
* Recursive helper function for inserting a key in B+trees.
* @param key the key to insert
* @param ref the value/node to insert
* @param n the current node
* @param p the parent node
*/
private void insert (K key, V ref, Node n, Node p)
{
//if node is a leaf then wedge into its proper place in n.key[]
if (n.isLeaf)
{
//if node does not have max number of keys then loop through n.key
if (n.nKeys < ORDER - 1) {
for (int i = 0; i < n.nKeys; i++) {
K k_i = n.key [i];
//if key is less then k_i wedge in key
if (key.compareTo(k_i) < 0) {
wedge (key, ref, n, i);
return;
//if key is equal to k_i then notify as duplicate
} else if (key.equals (k_i)) {
out.println ("BpTreeMap:insert: attempt to insert duplicate key = " + key);
}
}
wedge (key, ref, n, n.nKeys);
}
//if node has max number of keys then split it
else {
Node sib = split (key, ref, n);
sib.ref[ORDER] = n.ref[ORDER];
n.ref[ORDER] = sib;
parentInsert(n, sib, p);
}
}
//if node is not leaf
else
{
boolean didInsert = false;
//loop through keys
for (int i = 0;i < n.nKeys;i ++)
{
K nKey = n.key[i];
//insert if key is less then nKey, set didInsert to true
if (key.compareTo(nKey) < 0)
{
insert (key, ref, (Node) n.ref[i], n);
didInsert = true;
break;
}
}
//if node has not been inserted then insert
if (!didInsert) {
Node tmpNode = (Node) n.ref[n.nKeys];
insert (key, ref, tmpNode, n);
}
//if node is full then split it
if (n.nKeys > ORDER - 1)
{
Node sib = split (null, null, n);
parentInsert(n, sib, p);
}
}
} // insert
/******************************************************************************
* Creates a new prent node with a right and left child
* @param sib the sibling node
* @param p the parent node
*/
private void parentInsert (Node n, Node sib, Node p)
{
Node parent = p;
//if node p is the root then set it equal to sib
if (parent == null)
{
parent = new Node(false);
root = parent;
parent.ref[0] = n;
}
//create variable middle to the key where to split the node
K middle = n.key[n.nKeys - 1];
// Remove the key if a non leaf node
if (!n.isLeaf)
{
n.key[n.nKeys] = null;
n.nKeys--;
}
boolean didWedge = false;
//loop through p.key[]
for (int i = 0;i < parent.nKeys;i ++)
{
K pKey = parent.key[i];
//wedge middle in its appropriate place
if (middle.compareTo(pKey) < 0)
{
parentWedge(sib, middle, parent, i);
didWedge = true;
break;
}
}
//wedge node if it has not been wedged yet
if (!didWedge)
{
parentWedge(sib, middle, parent, parent.nKeys);
}
}
private void parentWedge(Node n, K middle, Node p, int i)
{
for (int j = p.nKeys; j > i; j--)
{
p.key [j] = p.key [j - 1];
p.ref [j + 1] = p.ref [j];
}
p.key [i] = middle;
p.ref [i + 1] = n;
p.nKeys++;
}
/********************************************************************************
* Wedge the key-ref pair into node n.
* @param key the key to insert
* @param ref the value/node to insert
* @param n the current node
* @param i the insertion position within node n
*/
private void wedge (K key, Object ref, Node n, int i)
{
for (int j = n.nKeys; j > i; j--) {
n.key [j] = n.key [j - 1];
n.ref [j] = n.ref [j - 1];
} // for
n.key [i] = key;
n.ref [i] = ref;
n.nKeys++;
} // wedge
/********************************************************************************
* Split node n and return the newly created node.
* @param key the key to insert
* @param ref the value/node to insert
* @param n the current node
*/
private Node split (K key, Object ref, Node n)
{
//out.println ("split not implemented yet");
//loop through node to wedge in new key before splitting
if(key != null)
{
boolean didWedge = false;
for (int i = 0; i < n.nKeys; i++ )
{
if(key.compareTo(n.key[i]) < 0)
{
wedge(key, ref, n, i);
didWedge = true;
break;
}
}
if (!didWedge)
{
wedge (key, ref, n, n.nKeys);
}
}
//split on the middle key
int middleIndex = (int)(ORDER/2);
K middleKey = n.key[middleIndex];
// Create sibling node
Node sibling = new Node(n.isLeaf);
//filling new children with keys and values
for (int i = 0; i < n.nKeys; i++)
{
if (n.key[i].compareTo(middleKey) > 0) {
wedge(n.key[i], n.ref[i], sibling, sibling.nKeys);
n.key[i] = null;
n.ref[i] = null;
}
}
if (!n.isLeaf) {
sibling.ref[sibling.nKeys] = n.ref[n.nKeys];
n.ref[n.nKeys] = null;
}
n.nKeys = (int) Math.ceil(ORDER / 2.0);
return sibling;
} // split
/********************************************************************************
* The main method used for testing.
* @param the command-line arguments (args [0] gives number of keys to insert)
*/
public static void main (String [] args)
{
BpTreeMap <Integer, Integer> bpt = new BpTreeMap <> (Integer.class, Integer.class);
int totKeys = 10;
if (args.length == 1) totKeys = Integer.valueOf (args [0]);
int values[] = {700701, 458642, 738714, 406377, 312281, 534527, 979993, 370723, 57288, 580918};
for (int value : values)
{
bpt.put (value, value * value);
}
//for (int i = 1; i < totKeys; i += 2) {
//bpt.put (i, i * i);
// }
Set <Map.Entry <Integer, Integer>> set = bpt.entrySet();
bpt.print (bpt.root, 0);
for (int i = 0; i < totKeys; i++) {
out.println ("key = " + i + " value = " + bpt.get (i));
} // for
out.println ("-------------------------------------------");
out.println ("Average number of nodes accessed = " + bpt.count / (double) totKeys);
} // main
} // BpTreeMap class
| |
/*
* Copyright 2002-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.http.converter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpInputMessage;
import org.springframework.http.HttpOutputMessage;
import org.springframework.http.MediaType;
import org.springframework.util.Assert;
/**
* Abstract base class for most {@link HttpMessageConverter} implementations.
*
* <p>This base class adds support for setting supported {@code MediaTypes}, through the
* {@link #setSupportedMediaTypes(List) supportedMediaTypes} bean property. It also adds
* support for {@code Content-Type} and {@code Content-Length} when writing to output messages.
*
* @author Arjen Poutsma
* @author Juergen Hoeller
* @since 1.0
*/
public abstract class AbstractHttpMessageConverter<T> implements HttpMessageConverter<T> {
private List<MediaType> supportedMediaTypes = Collections.emptyList();
/**
* Construct an {@code AbstractHttpMessageConverter} with no supported media types.
* @see #setSupportedMediaTypes
*/
protected AbstractHttpMessageConverter() {
}
/**
* Construct an {@code AbstractHttpMessageConverter} with one supported media type.
* @param supportedMediaType the supported media type
*/
protected AbstractHttpMessageConverter(MediaType supportedMediaType) {
setSupportedMediaTypes(Collections.singletonList(supportedMediaType));
}
/**
* Construct an {@code AbstractHttpMessageConverter} with multiple supported media type.
* @param supportedMediaTypes the supported media types
*/
protected AbstractHttpMessageConverter(MediaType... supportedMediaTypes) {
setSupportedMediaTypes(Arrays.asList(supportedMediaTypes));
}
/**
* Set the list of {@link MediaType} objects supported by this converter.
*/
public void setSupportedMediaTypes(List<MediaType> supportedMediaTypes) {
Assert.notEmpty(supportedMediaTypes, "'supportedMediaTypes' must not be empty");
this.supportedMediaTypes = new ArrayList<MediaType>(supportedMediaTypes);
}
public List<MediaType> getSupportedMediaTypes() {
return Collections.unmodifiableList(this.supportedMediaTypes);
}
/**
* This implementation checks if the given class is {@linkplain #supports(Class) supported},
* and if the {@linkplain #getSupportedMediaTypes() supported media types}
* {@linkplain MediaType#includes(MediaType) include} the given media type.
*/
public boolean canRead(Class<?> clazz, MediaType mediaType) {
return supports(clazz) && canRead(mediaType);
}
/**
* Returns true if any of the {@linkplain #setSupportedMediaTypes(List) supported media types}
* include the given media type.
* @param mediaType the media type to read, can be {@code null} if not specified.
* Typically the value of a {@code Content-Type} header.
* @return true if the supported media types include the media type,
* or if the media type is {@code null}
*/
protected boolean canRead(MediaType mediaType) {
if (mediaType == null) {
return true;
}
for (MediaType supportedMediaType : getSupportedMediaTypes()) {
if (supportedMediaType.includes(mediaType)) {
return true;
}
}
return false;
}
/**
* This implementation checks if the given class is {@linkplain #supports(Class) supported},
* and if the {@linkplain #getSupportedMediaTypes() supported media types}
* {@linkplain MediaType#includes(MediaType) include} the given media type.
*/
public boolean canWrite(Class<?> clazz, MediaType mediaType) {
return supports(clazz) && canWrite(mediaType);
}
/**
* Returns true if the given media type includes any of the
* {@linkplain #setSupportedMediaTypes(List) supported media types}.
* @param mediaType the media type to write, can be {@code null} if not specified.
* Typically the value of an {@code Accept} header.
* @return true if the supported media types are compatible with the media type,
* or if the media type is {@code null}
*/
protected boolean canWrite(MediaType mediaType) {
if (mediaType == null || MediaType.ALL.equals(mediaType)) {
return true;
}
for (MediaType supportedMediaType : getSupportedMediaTypes()) {
if (supportedMediaType.isCompatibleWith(mediaType)) {
return true;
}
}
return false;
}
/**
* This implementation simple delegates to {@link #readInternal(Class, HttpInputMessage)}.
* Future implementations might add some default behavior, however.
*/
public final T read(Class<? extends T> clazz, HttpInputMessage inputMessage) throws IOException {
return readInternal(clazz, inputMessage);
}
/**
* This implementation delegates to {@link #getDefaultContentType(Object)} if a content
* type was not provided, calls {@link #getContentLength}, and sets the corresponding headers
* on the output message. It then calls {@link #writeInternal}.
*/
public final void write(T t, MediaType contentType, HttpOutputMessage outputMessage)
throws IOException, HttpMessageNotWritableException {
HttpHeaders headers = outputMessage.getHeaders();
if (headers.getContentType() == null) {
if (contentType == null || contentType.isWildcardType() || contentType.isWildcardSubtype()) {
contentType = getDefaultContentType(t);
}
if (contentType != null) {
headers.setContentType(contentType);
}
}
if (headers.getContentLength() == -1) {
Long contentLength = getContentLength(t, headers.getContentType());
if (contentLength != null) {
headers.setContentLength(contentLength);
}
}
writeInternal(t, outputMessage);
outputMessage.getBody().flush();
}
/**
* Returns the default content type for the given type. Called when {@link #write}
* is invoked without a specified content type parameter.
* <p>By default, this returns the first element of the
* {@link #setSupportedMediaTypes(List) supportedMediaTypes} property, if any.
* Can be overridden in subclasses.
* @param t the type to return the content type for
* @return the content type, or <code>null</code> if not known
*/
protected MediaType getDefaultContentType(T t) throws IOException {
List<MediaType> mediaTypes = getSupportedMediaTypes();
return (!mediaTypes.isEmpty() ? mediaTypes.get(0) : null);
}
/**
* Returns the content length for the given type.
* <p>By default, this returns {@code null}, meaning that the content length is unknown.
* Can be overridden in subclasses.
* @param t the type to return the content length for
* @return the content length, or {@code null} if not known
*/
protected Long getContentLength(T t, MediaType contentType) throws IOException {
return null;
}
/**
* Indicates whether the given class is supported by this converter.
* @param clazz the class to test for support
* @return <code>true</code> if supported; <code>false</code> otherwise
*/
protected abstract boolean supports(Class<?> clazz);
/**
* Abstract template method that reads the actualy object. Invoked from {@link #read}.
* @param clazz the type of object to return
* @param inputMessage the HTTP input message to read from
* @return the converted object
* @throws IOException in case of I/O errors
* @throws HttpMessageNotReadableException in case of conversion errors
*/
protected abstract T readInternal(Class<? extends T> clazz, HttpInputMessage inputMessage)
throws IOException, HttpMessageNotReadableException;
/**
* Abstract template method that writes the actual body. Invoked from {@link #write}.
* @param t the object to write to the output message
* @param outputMessage the message to write to
* @throws IOException in case of I/O errors
* @throws HttpMessageNotWritableException in case of conversion errors
*/
protected abstract void writeInternal(T t, HttpOutputMessage outputMessage)
throws IOException, HttpMessageNotWritableException;
}
| |
/**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package org.oep.core.processmgt.service.base;
import com.liferay.portal.kernel.bean.BeanReference;
import com.liferay.portal.kernel.bean.IdentifiableBean;
import com.liferay.portal.kernel.dao.jdbc.SqlUpdate;
import com.liferay.portal.kernel.dao.jdbc.SqlUpdateFactoryUtil;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.service.BaseServiceImpl;
import com.liferay.portal.service.persistence.UserPersistence;
import org.oep.core.processmgt.model.UserAssignment;
import org.oep.core.processmgt.service.UserAssignmentService;
import org.oep.core.processmgt.service.persistence.DossierProcessPersistence;
import org.oep.core.processmgt.service.persistence.DossierStep2RolePersistence;
import org.oep.core.processmgt.service.persistence.DossierStepPersistence;
import org.oep.core.processmgt.service.persistence.ProcessOrder2UserPersistence;
import org.oep.core.processmgt.service.persistence.ProcessOrderPersistence;
import org.oep.core.processmgt.service.persistence.StepTransitionPersistence;
import org.oep.core.processmgt.service.persistence.TransitionHistoryPersistence;
import org.oep.core.processmgt.service.persistence.UserAssignmentPersistence;
import javax.sql.DataSource;
/**
* Provides the base implementation for the user assignment remote service.
*
* <p>
* This implementation exists only as a container for the default service methods generated by ServiceBuilder. All custom service methods should be put in {@link org.oep.core.processmgt.service.impl.UserAssignmentServiceImpl}.
* </p>
*
* @author trungdk
* @see org.oep.core.processmgt.service.impl.UserAssignmentServiceImpl
* @see org.oep.core.processmgt.service.UserAssignmentServiceUtil
* @generated
*/
public abstract class UserAssignmentServiceBaseImpl extends BaseServiceImpl
implements UserAssignmentService, IdentifiableBean {
/*
* NOTE FOR DEVELOPERS:
*
* Never modify or reference this class directly. Always use {@link org.oep.core.processmgt.service.UserAssignmentServiceUtil} to access the user assignment remote service.
*/
// ServiceBuilder-generated (@generated) accessors for the dossier process
// local service, remote service, and persistence; regenerate rather than hand-edit.
/**
 * Returns the dossier process local service.
 *
 * @return the dossier process local service
 */
public org.oep.core.processmgt.service.DossierProcessLocalService getDossierProcessLocalService() {
return dossierProcessLocalService;
}
/**
 * Sets the dossier process local service.
 *
 * @param dossierProcessLocalService the dossier process local service
 */
public void setDossierProcessLocalService(
org.oep.core.processmgt.service.DossierProcessLocalService dossierProcessLocalService) {
this.dossierProcessLocalService = dossierProcessLocalService;
}
/**
 * Returns the dossier process remote service.
 *
 * @return the dossier process remote service
 */
public org.oep.core.processmgt.service.DossierProcessService getDossierProcessService() {
return dossierProcessService;
}
/**
 * Sets the dossier process remote service.
 *
 * @param dossierProcessService the dossier process remote service
 */
public void setDossierProcessService(
org.oep.core.processmgt.service.DossierProcessService dossierProcessService) {
this.dossierProcessService = dossierProcessService;
}
/**
 * Returns the dossier process persistence.
 *
 * @return the dossier process persistence
 */
public DossierProcessPersistence getDossierProcessPersistence() {
return dossierProcessPersistence;
}
/**
 * Sets the dossier process persistence.
 *
 * @param dossierProcessPersistence the dossier process persistence
 */
public void setDossierProcessPersistence(
DossierProcessPersistence dossierProcessPersistence) {
this.dossierProcessPersistence = dossierProcessPersistence;
}
// ServiceBuilder-generated (@generated) accessors for the dossier step
// local service, remote service, and persistence; regenerate rather than hand-edit.
/**
 * Returns the dossier step local service.
 *
 * @return the dossier step local service
 */
public org.oep.core.processmgt.service.DossierStepLocalService getDossierStepLocalService() {
return dossierStepLocalService;
}
/**
 * Sets the dossier step local service.
 *
 * @param dossierStepLocalService the dossier step local service
 */
public void setDossierStepLocalService(
org.oep.core.processmgt.service.DossierStepLocalService dossierStepLocalService) {
this.dossierStepLocalService = dossierStepLocalService;
}
/**
 * Returns the dossier step remote service.
 *
 * @return the dossier step remote service
 */
public org.oep.core.processmgt.service.DossierStepService getDossierStepService() {
return dossierStepService;
}
/**
 * Sets the dossier step remote service.
 *
 * @param dossierStepService the dossier step remote service
 */
public void setDossierStepService(
org.oep.core.processmgt.service.DossierStepService dossierStepService) {
this.dossierStepService = dossierStepService;
}
/**
 * Returns the dossier step persistence.
 *
 * @return the dossier step persistence
 */
public DossierStepPersistence getDossierStepPersistence() {
return dossierStepPersistence;
}
/**
 * Sets the dossier step persistence.
 *
 * @param dossierStepPersistence the dossier step persistence
 */
public void setDossierStepPersistence(
DossierStepPersistence dossierStepPersistence) {
this.dossierStepPersistence = dossierStepPersistence;
}
// ServiceBuilder-generated (@generated) accessors for the dossier step2 role
// local service, remote service, and persistence; regenerate rather than hand-edit.
/**
 * Returns the dossier step2 role local service.
 *
 * @return the dossier step2 role local service
 */
public org.oep.core.processmgt.service.DossierStep2RoleLocalService getDossierStep2RoleLocalService() {
return dossierStep2RoleLocalService;
}
/**
 * Sets the dossier step2 role local service.
 *
 * @param dossierStep2RoleLocalService the dossier step2 role local service
 */
public void setDossierStep2RoleLocalService(
org.oep.core.processmgt.service.DossierStep2RoleLocalService dossierStep2RoleLocalService) {
this.dossierStep2RoleLocalService = dossierStep2RoleLocalService;
}
/**
 * Returns the dossier step2 role remote service.
 *
 * @return the dossier step2 role remote service
 */
public org.oep.core.processmgt.service.DossierStep2RoleService getDossierStep2RoleService() {
return dossierStep2RoleService;
}
/**
 * Sets the dossier step2 role remote service.
 *
 * @param dossierStep2RoleService the dossier step2 role remote service
 */
public void setDossierStep2RoleService(
org.oep.core.processmgt.service.DossierStep2RoleService dossierStep2RoleService) {
this.dossierStep2RoleService = dossierStep2RoleService;
}
/**
 * Returns the dossier step2 role persistence.
 *
 * @return the dossier step2 role persistence
 */
public DossierStep2RolePersistence getDossierStep2RolePersistence() {
return dossierStep2RolePersistence;
}
/**
 * Sets the dossier step2 role persistence.
 *
 * @param dossierStep2RolePersistence the dossier step2 role persistence
 */
public void setDossierStep2RolePersistence(
DossierStep2RolePersistence dossierStep2RolePersistence) {
this.dossierStep2RolePersistence = dossierStep2RolePersistence;
}
// ServiceBuilder-generated (@generated) accessors for the process order
// local service, remote service, and persistence; regenerate rather than hand-edit.
/**
 * Returns the process order local service.
 *
 * @return the process order local service
 */
public org.oep.core.processmgt.service.ProcessOrderLocalService getProcessOrderLocalService() {
return processOrderLocalService;
}
/**
 * Sets the process order local service.
 *
 * @param processOrderLocalService the process order local service
 */
public void setProcessOrderLocalService(
org.oep.core.processmgt.service.ProcessOrderLocalService processOrderLocalService) {
this.processOrderLocalService = processOrderLocalService;
}
/**
 * Returns the process order remote service.
 *
 * @return the process order remote service
 */
public org.oep.core.processmgt.service.ProcessOrderService getProcessOrderService() {
return processOrderService;
}
/**
 * Sets the process order remote service.
 *
 * @param processOrderService the process order remote service
 */
public void setProcessOrderService(
org.oep.core.processmgt.service.ProcessOrderService processOrderService) {
this.processOrderService = processOrderService;
}
/**
 * Returns the process order persistence.
 *
 * @return the process order persistence
 */
public ProcessOrderPersistence getProcessOrderPersistence() {
return processOrderPersistence;
}
/**
 * Sets the process order persistence.
 *
 * @param processOrderPersistence the process order persistence
 */
public void setProcessOrderPersistence(
ProcessOrderPersistence processOrderPersistence) {
this.processOrderPersistence = processOrderPersistence;
}
// ServiceBuilder-generated (@generated) accessors for the process order2 user
// local service, remote service, and persistence; regenerate rather than hand-edit.
/**
 * Returns the process order2 user local service.
 *
 * @return the process order2 user local service
 */
public org.oep.core.processmgt.service.ProcessOrder2UserLocalService getProcessOrder2UserLocalService() {
return processOrder2UserLocalService;
}
/**
 * Sets the process order2 user local service.
 *
 * @param processOrder2UserLocalService the process order2 user local service
 */
public void setProcessOrder2UserLocalService(
org.oep.core.processmgt.service.ProcessOrder2UserLocalService processOrder2UserLocalService) {
this.processOrder2UserLocalService = processOrder2UserLocalService;
}
/**
 * Returns the process order2 user remote service.
 *
 * @return the process order2 user remote service
 */
public org.oep.core.processmgt.service.ProcessOrder2UserService getProcessOrder2UserService() {
return processOrder2UserService;
}
/**
 * Sets the process order2 user remote service.
 *
 * @param processOrder2UserService the process order2 user remote service
 */
public void setProcessOrder2UserService(
org.oep.core.processmgt.service.ProcessOrder2UserService processOrder2UserService) {
this.processOrder2UserService = processOrder2UserService;
}
/**
 * Returns the process order2 user persistence.
 *
 * @return the process order2 user persistence
 */
public ProcessOrder2UserPersistence getProcessOrder2UserPersistence() {
return processOrder2UserPersistence;
}
/**
 * Sets the process order2 user persistence.
 *
 * @param processOrder2UserPersistence the process order2 user persistence
 */
public void setProcessOrder2UserPersistence(
ProcessOrder2UserPersistence processOrder2UserPersistence) {
this.processOrder2UserPersistence = processOrder2UserPersistence;
}
// ServiceBuilder-generated (@generated) accessors for the step transition
// local service, remote service, and persistence; regenerate rather than hand-edit.
/**
 * Returns the step transition local service.
 *
 * @return the step transition local service
 */
public org.oep.core.processmgt.service.StepTransitionLocalService getStepTransitionLocalService() {
return stepTransitionLocalService;
}
/**
 * Sets the step transition local service.
 *
 * @param stepTransitionLocalService the step transition local service
 */
public void setStepTransitionLocalService(
org.oep.core.processmgt.service.StepTransitionLocalService stepTransitionLocalService) {
this.stepTransitionLocalService = stepTransitionLocalService;
}
/**
 * Returns the step transition remote service.
 *
 * @return the step transition remote service
 */
public org.oep.core.processmgt.service.StepTransitionService getStepTransitionService() {
return stepTransitionService;
}
/**
 * Sets the step transition remote service.
 *
 * @param stepTransitionService the step transition remote service
 */
public void setStepTransitionService(
org.oep.core.processmgt.service.StepTransitionService stepTransitionService) {
this.stepTransitionService = stepTransitionService;
}
/**
 * Returns the step transition persistence.
 *
 * @return the step transition persistence
 */
public StepTransitionPersistence getStepTransitionPersistence() {
return stepTransitionPersistence;
}
/**
 * Sets the step transition persistence.
 *
 * @param stepTransitionPersistence the step transition persistence
 */
public void setStepTransitionPersistence(
StepTransitionPersistence stepTransitionPersistence) {
this.stepTransitionPersistence = stepTransitionPersistence;
}
// --- TransitionHistory: local/remote service and persistence accessors (service-builder generated) ---
/**
* Returns the transition history local service.
*
* @return the transition history local service
*/
public org.oep.core.processmgt.service.TransitionHistoryLocalService getTransitionHistoryLocalService() {
return transitionHistoryLocalService;
}
/**
* Sets the transition history local service.
*
* @param transitionHistoryLocalService the transition history local service
*/
public void setTransitionHistoryLocalService(
org.oep.core.processmgt.service.TransitionHistoryLocalService transitionHistoryLocalService) {
this.transitionHistoryLocalService = transitionHistoryLocalService;
}
/**
* Returns the transition history remote service.
*
* @return the transition history remote service
*/
public org.oep.core.processmgt.service.TransitionHistoryService getTransitionHistoryService() {
return transitionHistoryService;
}
/**
* Sets the transition history remote service.
*
* @param transitionHistoryService the transition history remote service
*/
public void setTransitionHistoryService(
org.oep.core.processmgt.service.TransitionHistoryService transitionHistoryService) {
this.transitionHistoryService = transitionHistoryService;
}
/**
* Returns the transition history persistence.
*
* @return the transition history persistence
*/
public TransitionHistoryPersistence getTransitionHistoryPersistence() {
return transitionHistoryPersistence;
}
/**
* Sets the transition history persistence.
*
* @param transitionHistoryPersistence the transition history persistence
*/
public void setTransitionHistoryPersistence(
TransitionHistoryPersistence transitionHistoryPersistence) {
this.transitionHistoryPersistence = transitionHistoryPersistence;
}
// --- UserAssignment: local/remote service and persistence accessors (service-builder generated) ---
/**
* Returns the user assignment local service.
*
* @return the user assignment local service
*/
public org.oep.core.processmgt.service.UserAssignmentLocalService getUserAssignmentLocalService() {
return userAssignmentLocalService;
}
/**
* Sets the user assignment local service.
*
* @param userAssignmentLocalService the user assignment local service
*/
public void setUserAssignmentLocalService(
org.oep.core.processmgt.service.UserAssignmentLocalService userAssignmentLocalService) {
this.userAssignmentLocalService = userAssignmentLocalService;
}
/**
* Returns the user assignment remote service.
*
* @return the user assignment remote service
*/
public org.oep.core.processmgt.service.UserAssignmentService getUserAssignmentService() {
return userAssignmentService;
}
/**
* Sets the user assignment remote service.
*
* @param userAssignmentService the user assignment remote service
*/
public void setUserAssignmentService(
org.oep.core.processmgt.service.UserAssignmentService userAssignmentService) {
this.userAssignmentService = userAssignmentService;
}
/**
* Returns the user assignment persistence.
*
* @return the user assignment persistence
*/
public UserAssignmentPersistence getUserAssignmentPersistence() {
return userAssignmentPersistence;
}
/**
* Sets the user assignment persistence.
*
* @param userAssignmentPersistence the user assignment persistence
*/
public void setUserAssignmentPersistence(
UserAssignmentPersistence userAssignmentPersistence) {
this.userAssignmentPersistence = userAssignmentPersistence;
}
// --- Portal infrastructure: counter and resource local service accessors (service-builder generated) ---
/**
* Returns the counter local service.
*
* @return the counter local service
*/
public com.liferay.counter.service.CounterLocalService getCounterLocalService() {
return counterLocalService;
}
/**
* Sets the counter local service.
*
* @param counterLocalService the counter local service
*/
public void setCounterLocalService(
com.liferay.counter.service.CounterLocalService counterLocalService) {
this.counterLocalService = counterLocalService;
}
/**
* Returns the resource local service.
*
* @return the resource local service
*/
public com.liferay.portal.service.ResourceLocalService getResourceLocalService() {
return resourceLocalService;
}
/**
* Sets the resource local service.
*
* @param resourceLocalService the resource local service
*/
public void setResourceLocalService(
com.liferay.portal.service.ResourceLocalService resourceLocalService) {
this.resourceLocalService = resourceLocalService;
}
// --- Portal User: local/remote service and persistence accessors (service-builder generated) ---
/**
* Returns the user local service.
*
* @return the user local service
*/
public com.liferay.portal.service.UserLocalService getUserLocalService() {
return userLocalService;
}
/**
* Sets the user local service.
*
* @param userLocalService the user local service
*/
public void setUserLocalService(
com.liferay.portal.service.UserLocalService userLocalService) {
this.userLocalService = userLocalService;
}
/**
* Returns the user remote service.
*
* @return the user remote service
*/
public com.liferay.portal.service.UserService getUserService() {
return userService;
}
/**
* Sets the user remote service.
*
* @param userService the user remote service
*/
public void setUserService(
com.liferay.portal.service.UserService userService) {
this.userService = userService;
}
/**
* Returns the user persistence.
*
* @return the user persistence
*/
public UserPersistence getUserPersistence() {
return userPersistence;
}
/**
* Sets the user persistence.
*
* @param userPersistence the user persistence
*/
public void setUserPersistence(UserPersistence userPersistence) {
this.userPersistence = userPersistence;
}
/**
 * Spring initialization callback: caches this bean's defining class loader so
 * {@link #invokeMethod(String, String[], Object[])} can install it as the
 * thread context class loader while dispatching CLP calls.
 */
public void afterPropertiesSet() {
// Use getClass() so a subclass's loader is captured, not this base class's.
_classLoader = getClass().getClassLoader();
}
/**
 * Spring destruction callback. Intentionally a no-op: this service holds no
 * resources that require explicit release.
 */
public void destroy() {
}
// Identifier used by the portal's bean locator to look this service bean up.
/**
* Returns the Spring bean ID for this bean.
*
* @return the Spring bean ID for this bean
*/
@Override
public String getBeanIdentifier() {
return _beanIdentifier;
}
/**
* Sets the Spring bean ID for this bean.
*
* @param beanIdentifier the Spring bean ID for this bean
*/
@Override
public void setBeanIdentifier(String beanIdentifier) {
_beanIdentifier = beanIdentifier;
}
/**
 * Dispatches a reflective CLP invocation to the underlying service, making
 * sure the call executes with this service's own class loader installed as
 * the thread context class loader.
 *
 * @param name the target method name
 * @param parameterTypes the fully qualified parameter type names
 * @param arguments the invocation arguments
 * @return the result of the invoked method
 * @throws Throwable if the target method throws
 */
@Override
public Object invokeMethod(String name, String[] parameterTypes,
Object[] arguments) throws Throwable {
Thread thread = Thread.currentThread();
ClassLoader previousLoader = thread.getContextClassLoader();
boolean swapLoader = (previousLoader != _classLoader);
if (swapLoader) {
thread.setContextClassLoader(_classLoader);
}
try {
return _clpInvoker.invokeMethod(name, parameterTypes, arguments);
}
finally {
if (swapLoader) {
// Always restore the caller's original context class loader.
thread.setContextClassLoader(previousLoader);
}
}
}
/**
 * Returns the model class this service operates on.
 */
protected Class<?> getModelClass() {
return UserAssignment.class;
}
/**
 * Returns the fully qualified name of the model class this service operates on.
 */
protected String getModelClassName() {
// Delegate so both accessors always agree on the model class.
return getModelClass().getName();
}
/**
 * Executes a raw SQL update statement against this service's data source.
 *
 * @param sql the SQL statement to run (executed verbatim, no bind parameters)
 * @throws SystemException if the statement could not be executed
 */
protected void runSQL(String sql) throws SystemException {
try {
DataSource ds = userAssignmentPersistence.getDataSource();
SqlUpdate update = SqlUpdateFactoryUtil.getSqlUpdate(ds, sql,
new int[0]);
update.update();
}
catch (Exception e) {
// Wrap any driver/framework failure in the portal's checked exception type.
throw new SystemException(e);
}
}
// Service-builder injected bean references for related entity services and
// persistence beans; populated by the portal's Spring container via @BeanReference.
@BeanReference(type = org.oep.core.processmgt.service.DossierProcessLocalService.class)
protected org.oep.core.processmgt.service.DossierProcessLocalService dossierProcessLocalService;
@BeanReference(type = org.oep.core.processmgt.service.DossierProcessService.class)
protected org.oep.core.processmgt.service.DossierProcessService dossierProcessService;
@BeanReference(type = DossierProcessPersistence.class)
protected DossierProcessPersistence dossierProcessPersistence;
@BeanReference(type = org.oep.core.processmgt.service.DossierStepLocalService.class)
protected org.oep.core.processmgt.service.DossierStepLocalService dossierStepLocalService;
@BeanReference(type = org.oep.core.processmgt.service.DossierStepService.class)
protected org.oep.core.processmgt.service.DossierStepService dossierStepService;
@BeanReference(type = DossierStepPersistence.class)
protected DossierStepPersistence dossierStepPersistence;
@BeanReference(type = org.oep.core.processmgt.service.DossierStep2RoleLocalService.class)
protected org.oep.core.processmgt.service.DossierStep2RoleLocalService dossierStep2RoleLocalService;
@BeanReference(type = org.oep.core.processmgt.service.DossierStep2RoleService.class)
protected org.oep.core.processmgt.service.DossierStep2RoleService dossierStep2RoleService;
@BeanReference(type = DossierStep2RolePersistence.class)
protected DossierStep2RolePersistence dossierStep2RolePersistence;
@BeanReference(type = org.oep.core.processmgt.service.ProcessOrderLocalService.class)
protected org.oep.core.processmgt.service.ProcessOrderLocalService processOrderLocalService;
@BeanReference(type = org.oep.core.processmgt.service.ProcessOrderService.class)
protected org.oep.core.processmgt.service.ProcessOrderService processOrderService;
@BeanReference(type = ProcessOrderPersistence.class)
protected ProcessOrderPersistence processOrderPersistence;
@BeanReference(type = org.oep.core.processmgt.service.ProcessOrder2UserLocalService.class)
protected org.oep.core.processmgt.service.ProcessOrder2UserLocalService processOrder2UserLocalService;
@BeanReference(type = org.oep.core.processmgt.service.ProcessOrder2UserService.class)
protected org.oep.core.processmgt.service.ProcessOrder2UserService processOrder2UserService;
@BeanReference(type = ProcessOrder2UserPersistence.class)
protected ProcessOrder2UserPersistence processOrder2UserPersistence;
@BeanReference(type = org.oep.core.processmgt.service.StepTransitionLocalService.class)
protected org.oep.core.processmgt.service.StepTransitionLocalService stepTransitionLocalService;
@BeanReference(type = org.oep.core.processmgt.service.StepTransitionService.class)
protected org.oep.core.processmgt.service.StepTransitionService stepTransitionService;
@BeanReference(type = StepTransitionPersistence.class)
protected StepTransitionPersistence stepTransitionPersistence;
@BeanReference(type = org.oep.core.processmgt.service.TransitionHistoryLocalService.class)
protected org.oep.core.processmgt.service.TransitionHistoryLocalService transitionHistoryLocalService;
@BeanReference(type = org.oep.core.processmgt.service.TransitionHistoryService.class)
protected org.oep.core.processmgt.service.TransitionHistoryService transitionHistoryService;
@BeanReference(type = TransitionHistoryPersistence.class)
protected TransitionHistoryPersistence transitionHistoryPersistence;
@BeanReference(type = org.oep.core.processmgt.service.UserAssignmentLocalService.class)
protected org.oep.core.processmgt.service.UserAssignmentLocalService userAssignmentLocalService;
@BeanReference(type = org.oep.core.processmgt.service.UserAssignmentService.class)
protected org.oep.core.processmgt.service.UserAssignmentService userAssignmentService;
@BeanReference(type = UserAssignmentPersistence.class)
protected UserAssignmentPersistence userAssignmentPersistence;
@BeanReference(type = com.liferay.counter.service.CounterLocalService.class)
protected com.liferay.counter.service.CounterLocalService counterLocalService;
@BeanReference(type = com.liferay.portal.service.ResourceLocalService.class)
protected com.liferay.portal.service.ResourceLocalService resourceLocalService;
@BeanReference(type = com.liferay.portal.service.UserLocalService.class)
protected com.liferay.portal.service.UserLocalService userLocalService;
@BeanReference(type = com.liferay.portal.service.UserService.class)
protected com.liferay.portal.service.UserService userService;
@BeanReference(type = UserPersistence.class)
protected UserPersistence userPersistence;
// Internal CLP bridge state (see afterPropertiesSet/invokeMethod).
private String _beanIdentifier;
private ClassLoader _classLoader;
private UserAssignmentServiceClpInvoker _clpInvoker = new UserAssignmentServiceClpInvoker();
}
| |
/*
* Copyright 2016 Sam Sun <me@samczsun.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.javadeobfuscator.deobfuscator.transformers.zelix;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import com.javadeobfuscator.deobfuscator.config.TransformerConfig;
import com.javadeobfuscator.deobfuscator.exceptions.NoClassInPathException;
import com.javadeobfuscator.deobfuscator.executor.Context;
import com.javadeobfuscator.deobfuscator.executor.MethodExecutor;
import com.javadeobfuscator.deobfuscator.executor.defined.JVMComparisonProvider;
import com.javadeobfuscator.deobfuscator.executor.defined.JVMMethodProvider;
import com.javadeobfuscator.deobfuscator.executor.defined.MappedFieldProvider;
import com.javadeobfuscator.deobfuscator.executor.defined.MappedMethodProvider;
import com.javadeobfuscator.deobfuscator.executor.defined.PrimitiveFieldProvider;
import com.javadeobfuscator.deobfuscator.executor.defined.types.JavaClass;
import com.javadeobfuscator.deobfuscator.executor.defined.types.JavaField;
import com.javadeobfuscator.deobfuscator.executor.defined.types.JavaFieldHandle;
import com.javadeobfuscator.deobfuscator.executor.defined.types.JavaHandle;
import com.javadeobfuscator.deobfuscator.executor.defined.types.JavaMethod;
import com.javadeobfuscator.deobfuscator.executor.defined.types.JavaMethodHandle;
import com.javadeobfuscator.deobfuscator.executor.providers.ComparisonProvider;
import com.javadeobfuscator.deobfuscator.executor.providers.DelegatingProvider;
import com.javadeobfuscator.deobfuscator.executor.values.JavaLong;
import com.javadeobfuscator.deobfuscator.executor.values.JavaObject;
import com.javadeobfuscator.deobfuscator.executor.values.JavaValue;
import com.javadeobfuscator.deobfuscator.transformers.Transformer;
import com.javadeobfuscator.deobfuscator.utils.Utils;
import org.objectweb.asm.Handle;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.tree.*;
import org.objectweb.asm.tree.analysis.Analyzer;
import org.objectweb.asm.tree.analysis.AnalyzerException;
import org.objectweb.asm.tree.analysis.SourceInterpreter;
import org.objectweb.asm.tree.analysis.SourceValue;
@TransformerConfig.ConfigOptions(configClass = ReflectionObfuscationTransformer.Config.class)
public class ReflectionObfuscationTransformer extends Transformer<ReflectionObfuscationTransformer.Config> {
/**
 * Configuration for {@link ReflectionObfuscationTransformer}.
 */
public static class Config extends TransformerConfig {
// When true (the default), reflection-obfuscation helper artifacts are
// cleaned up after inlining. NOTE(review): the cleanup itself happens in
// code outside this view -- confirm exact semantics there.
private boolean cleanup = true;
public Config() {
super(ReflectionObfuscationTransformer.class);
}
public boolean isCleanup() {
return cleanup;
}
public void setCleanup(boolean cleanup) {
this.cleanup = cleanup;
}
}
// Maps Java primitive type names to the JVM internal names of their boxed
// wrapper classes (e.g. "int" -> "java/lang/Integer").
// NOTE(review): presumably consulted when rebuilding reflective descriptors
// during inlining; all uses are outside this view -- confirm before changing.
static Map<String, String> PRIMITIVES = new HashMap<>();
static {
PRIMITIVES.put("boolean", "java/lang/Boolean");
PRIMITIVES.put("byte", "java/lang/Byte");
PRIMITIVES.put("char", "java/lang/Character");
PRIMITIVES.put("short", "java/lang/Short");
PRIMITIVES.put("int", "java/lang/Integer");
PRIMITIVES.put("float", "java/lang/Float");
PRIMITIVES.put("double", "java/lang/Double");
PRIMITIVES.put("long", "java/lang/Long");
}
/**
 * Transformer entry point: scans for Zelix reflection-obfuscation call sites
 * and, if any are found, inlines them back into direct member accesses.
 *
 * @return {@code true} if at least one reflection call site was inlined
 * @throws Throwable if executing the obfuscated decryption helpers fails
 */
@Override
public boolean transform() throws Throwable {
System.out.println("[Zelix] [ReflectionObfuscationTransformer] Starting");
System.out.println("[Zelix] [ReflectionObfuscationTransformer] Finding reflection obfuscation");
int found = findReflectionObfuscation();
System.out.println("[Zelix] [ReflectionObfuscationTransformer] Found " + found + " reflection obfuscation instructions");
// Only attempt (and report) inlining when something was actually detected.
int inlined = (found > 0) ? inlineReflection(found) : 0;
if (found > 0) {
System.out.println("[Zelix] [ReflectionObfuscationTransformer] Inlined " + inlined + " reflection obfuscation instructions");
}
System.out.println("[Zelix] [ReflectionObfuscationTransformer] Done");
return inlined > 0;
}
public int inlineReflection(int expected) throws Throwable {
AtomicInteger count = new AtomicInteger(0);
final boolean[] alerted = new boolean[100];
DelegatingProvider provider = new DelegatingProvider();
provider.register(new PrimitiveFieldProvider());
provider.register(new MappedFieldProvider());
provider.register(new JVMMethodProvider());
provider.register(new JVMComparisonProvider());
provider.register(new MappedMethodProvider(classes));
provider.register(new ComparisonProvider() {
@Override
public boolean instanceOf(JavaValue target, Type type, Context context) {
return type.getDescriptor().equals("Ljava/lang/String;") && target.value() instanceof String;
}
@Override
public boolean checkcast(JavaValue target, Type type, Context context) {
if (type.getInternalName().equals("java/lang/String")) {
return target.value() instanceof String;
} else if (type.getInternalName().equals("java/lang/Class")) {
return target.value() instanceof JavaClass || target.value() instanceof Type; //TODO consolidate types
} else if (type.getInternalName().equals("java/lang/reflect/Method")) {
return target.value() instanceof JavaMethod;
} else if (type.getInternalName().equals("java/lang/reflect/Field")) {
return target.value() instanceof JavaField;
} else if (type.getInternalName().equals("[Ljava/lang/reflect/Method;")) {
return target.value() instanceof Object[];
} else if (type.getInternalName().equals("[Ljava/lang/Class;")) {
return target.value() instanceof Object[];
}
return false;
}
@Override
public boolean checkEquality(JavaValue first, JavaValue second, Context context) {
return false;
}
@Override
public boolean canCheckInstanceOf(JavaValue target, Type type, Context context) {
return true;
}
@Override
public boolean canCheckcast(JavaValue target, Type type, Context context) {
return type.getInternalName().equals("java/lang/String")
|| type.getInternalName().equals("java/lang/Class")
|| type.getInternalName().equals("java/lang/reflect/Method")
|| type.getInternalName().equals("java/lang/reflect/Field")
|| type.getInternalName().equals("[Ljava/lang/reflect/Method;")
|| type.getInternalName().equals("[Ljava/lang/Class;");
}
@Override
public boolean canCheckEquality(JavaValue first, JavaValue second, Context context) {
return false;
}
});
Set<ClassNode> initted = new HashSet<>();
Set<ClassNode> reflectionClasses = new HashSet<>();
Map<ClassNode, Set<MethodNode>> indyReflectionMethods = new HashMap<>();
Map<ClassNode, Set<MethodNode>> argsReflectionMethods = new HashMap<>();
Map<ClassNode, Set<MethodNode>> initReflectionMethod = new HashMap<>();
Map<ClassNode, MethodNode> fieldReflectionMethod = new HashMap<>();
Map<ClassNode, MethodNode> methodReflectionMethod = new HashMap<>();
for(ClassNode classNode : classNodes())
for(MethodNode methodNode : classNode.methods)
for(AbstractInsnNode current : methodNode.instructions.toArray())
{
if (current instanceof MethodInsnNode
&& !methodNode.desc.equals("(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/invoke/MutableCallSite;"
+ "Ljava/lang/String;Ljava/lang/invoke/MethodType;J)Ljava/lang/invoke/MethodHandle;")
&& !methodNode.desc.equals("(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/invoke/MutableCallSite;"
+ "Ljava/lang/String;Ljava/lang/invoke/MethodType;JJ)Ljava/lang/invoke/MethodHandle;")) {
MethodInsnNode methodInsnNode = (MethodInsnNode) current;
if (methodInsnNode.desc.equals("(J)Ljava/lang/reflect/Method;")) {
long ldc = (long) ((LdcInsnNode) current.getPrevious()).cst;
String strCl = methodInsnNode.owner;
ClassNode innerClassNode = classpath.get(strCl);
if (initted.add(innerClassNode)) {
try
{
List<MethodNode> init = new ArrayList<>();
MethodNode decryptorNode = innerClassNode.methods.stream().filter(mn -> mn.desc.equals("()V") && isInitMethod(innerClassNode, mn)).findFirst().orElse(null);
FieldInsnNode fieldInsn = null;
List<AbstractInsnNode> removed = new ArrayList<>();
if(decryptorNode != null)
{
init.add(decryptorNode);
fieldInsn = (FieldInsnNode)getObjectList(decryptorNode);
}else
{
MethodNode clinit = innerClassNode.methods.stream().filter(mn -> mn.name.equals("<clinit>")).findFirst().orElse(null);
for(AbstractInsnNode ain : clinit.instructions.toArray())
{
if(Utils.isInteger(ain) && ain.getNext() != null
&& (ain.getNext().getOpcode() == Opcodes.NEWARRAY ||
(ain.getNext().getOpcode() == Opcodes.ANEWARRAY && ((TypeInsnNode)ain.getNext()).desc.equals("java/lang/Object")))
&& ain.getNext().getNext() != null
&& ain.getNext().getNext().getOpcode() == Opcodes.PUTSTATIC && ((FieldInsnNode)ain.getNext().getNext()).owner.equals(innerClassNode.name)
&& ain.getNext().getNext().getNext() != null
&& Utils.isInteger(ain.getNext().getNext().getNext())
&& Utils.getIntValue(ain.getNext().getNext().getNext()) == Utils.getIntValue(ain)
&& ain.getNext().getNext().getNext().getNext() != null
&& ain.getNext().getNext().getNext().getNext().getOpcode() == Opcodes.ANEWARRAY
&& ((TypeInsnNode)ain.getNext().getNext().getNext().getNext()).desc.equals("java/lang/String")
&& ain.getNext().getNext().getNext().getNext().getNext() != null
&& ain.getNext().getNext().getNext().getNext().getNext().getOpcode() == Opcodes.PUTSTATIC
&& ((FieldInsnNode)ain.getNext().getNext().getNext().getNext().getNext()).owner.equals(innerClassNode.name))
{
((TypeInsnNode)ain.getNext()).desc = "java/lang/String";
((TypeInsnNode)ain.getNext().getNext().getNext().getNext()).desc = "java/lang/Object";
((FieldInsnNode)ain.getNext().getNext()).desc = "[Ljava/lang/String;";
((FieldInsnNode)ain.getNext().getNext().getNext().getNext().getNext()).desc = "[Ljava/lang/Object;";
String temp = ((FieldInsnNode)ain.getNext().getNext()).name;
((FieldInsnNode)ain.getNext().getNext()).name = ((FieldInsnNode)ain.getNext().getNext().getNext().getNext().getNext()).name;
((FieldInsnNode)ain.getNext().getNext().getNext().getNext().getNext()).name = temp;
fieldInsn = (FieldInsnNode)ain.getNext().getNext().getNext().getNext().getNext();
while(ain.getNext() != fieldInsn)
{
removed.add(ain.getNext());
clinit.instructions.remove(ain.getNext());
}
removed.add(fieldInsn);
clinit.instructions.remove(fieldInsn);
removed.add(0, ain);
clinit.instructions.remove(ain);
break;
}
}
}
FieldInsnNode fieldInsn1 = fieldInsn;
Object[] otherInit = innerClassNode.methods.stream().filter(mn -> mn.desc.equals("()V") && isOtherInitMethod(innerClassNode, mn, fieldInsn1)).toArray();
if(decryptorNode == null)
{
MethodNode firstinit = (MethodNode)Arrays.stream(otherInit).filter(mn ->
Utils.isInteger(getFirstIndex((MethodNode)mn))
&& Utils.getIntValue(getFirstIndex((MethodNode)mn)) == 0).findFirst().orElse(null);
firstinit.instructions.remove(firstinit.instructions.getFirst());
Collections.reverse(removed);
boolean first = false;
for(AbstractInsnNode ain : removed)
{
firstinit.instructions.insert(ain);
if(!first)
{
firstinit.instructions.insert(new InsnNode(Opcodes.DUP));
first = true;
}
}
}
for(Object o : otherInit)
init.add((MethodNode)o);
if(!innerClassNode.equals(classNode))
reflectionClasses.add(innerClassNode);
else
{
argsReflectionMethods.put(innerClassNode, new HashSet<>());
initReflectionMethod.put(innerClassNode, new HashSet<>());
for(MethodNode method : init)
initReflectionMethod.get(innerClassNode).add(method);
}
Context context = new Context(provider);
context.dictionary = this.classpath;
for(MethodNode method1 : init)
MethodExecutor.execute(innerClassNode, method1, Collections.emptyList(), null, context);
}catch(Throwable t)
{
System.out.println("Error while fully initializing " + classNode.name);
t.printStackTrace(System.out);
}
}
MethodNode decrypterNode = innerClassNode.methods.stream().filter(mn -> mn.name.equals(methodInsnNode.name) && mn.desc.equals(methodInsnNode.desc)).findFirst().orElse(null);
Context ctx = new Context(provider);
ctx.dictionary = classpath;
JavaMethod javaMethod;
try {
javaMethod = MethodExecutor.execute(innerClassNode, decrypterNode, Arrays.asList(new JavaLong(ldc)), null, ctx);
} catch (Exception ex) {
if (!(ex instanceof NoClassInPathException)) {
ex.printStackTrace();
}
continue;
}
if(!methodReflectionMethod.containsKey(innerClassNode))
methodReflectionMethod.put(innerClassNode, decrypterNode);
MethodInsnNode methodInsn = null;
if(current.getPrevious().getPrevious().getOpcode() == Opcodes.CHECKCAST)
{
methodInsn = (MethodInsnNode)current.getPrevious().getPrevious().getPrevious();
methodNode.instructions.remove(current.getPrevious().getPrevious().getPrevious());
}
if(methodInsn == null)
methodInsn = (MethodInsnNode)current.getPrevious().getPrevious();
methodNode.instructions.remove(current.getPrevious().getPrevious());
if(argsReflectionMethods.containsKey(innerClassNode))
{
MethodInsnNode finalMethodInsn = methodInsn;
MethodNode method = innerClassNode.methods.stream().filter(
m -> m.name.equals(finalMethodInsn.name) && m.desc.equals(finalMethodInsn.desc)).findFirst().orElse(null);
argsReflectionMethods.get(innerClassNode).add(method);
}
methodNode.instructions.remove(current.getPrevious());
int opcode = -1;
if(current.getNext().getNext().getOpcode() == Opcodes.ACONST_NULL)
opcode = Opcodes.INVOKESTATIC;
else if((classpath.get(javaMethod.getOwner()).access & Opcodes.ACC_INTERFACE) != 0)
opcode = Opcodes.INVOKEINTERFACE;
else
opcode = Opcodes.INVOKEVIRTUAL;
LabelNode label = null;
while(current.getNext() != null)
{
if(current.getNext() instanceof LabelNode)
{
methodNode.instructions.remove(label = (LabelNode)current.getNext());
methodNode.instructions.remove(current.getNext());
break;
}
methodNode.instructions.remove(current.getNext());
}
while(!(current.getNext() instanceof LabelNode))
methodNode.instructions.remove(current.getNext());
LabelNode nextLabel = (LabelNode)current.getNext();
methodNode.instructions.set(current, new MethodInsnNode(opcode, javaMethod.getOwner(), javaMethod.getName(), javaMethod.getDesc(), opcode == Opcodes.INVOKEINTERFACE));
//Remove exception thing
List<TryCatchBlockNode> beginTryCatch = new ArrayList<>();
List<TryCatchBlockNode> additionalRemove = new ArrayList<>();
Iterator<TryCatchBlockNode> itr = methodNode.tryCatchBlocks.iterator();
while(itr.hasNext())
{
TryCatchBlockNode trycatch = itr.next();
if(trycatch.start.equals(label) && trycatch.end.equals(nextLabel))
{
LabelNode begin = trycatch.handler;
while(begin.getNext() != null && !(begin.getNext() instanceof LabelNode))
methodNode.instructions.remove(begin.getNext());
//Find all trycatch nodes that begin with handler
for(TryCatchBlockNode tc : methodNode.tryCatchBlocks)
if(tc != trycatch && tc.end == begin)
{
beginTryCatch.add(tc);
tc.end = (LabelNode)begin.getNext();
}
//Find all trycatch nodes that try-catch exception block
for(TryCatchBlockNode tc : methodNode.tryCatchBlocks)
if(tc.start == begin && tc.end == begin.getNext())
additionalRemove.add(tc);
//Find all trycatch nodes that is a continuation of beginTryCatch
for(TryCatchBlockNode tc : methodNode.tryCatchBlocks)
if(tc.start == begin.getNext())
{
TryCatchBlockNode before = null;
for(TryCatchBlockNode tc2 : beginTryCatch)
if(tc2.end == begin.getNext() && tc2.type.equals(tc.type))
{
before = tc2;
break;
}
if(before != null)
{
additionalRemove.add(before);
tc.start = before.start;
}
}
methodNode.instructions.remove(begin);
itr.remove();
break;
}
}
for(TryCatchBlockNode trycatch : additionalRemove)
methodNode.tryCatchBlocks.remove(trycatch);
count.incrementAndGet();
int x = (int) ((count.get() * 1.0d / expected) * 100);
if (x != 0 && x % 10 == 0 && !alerted[x - 1]) {
System.out.println("[Zelix] [ReflectionObfucationTransformer] Done " + x + "%");
alerted[x - 1] = true;
}
} else if (methodInsnNode.desc.equals("(J)Ljava/lang/reflect/Field;")) {
long ldc = (long) ((LdcInsnNode) current.getPrevious()).cst;
String strCl = methodInsnNode.owner;
ClassNode innerClassNode = classpath.get(strCl);
if (initted.add(innerClassNode)) {
try
{
List<MethodNode> init = new ArrayList<>();
MethodNode decryptorNode = innerClassNode.methods.stream().filter(mn -> mn.desc.equals("()V") && isInitMethod(innerClassNode, mn)).findFirst().orElse(null);
FieldInsnNode fieldInsn = null;
List<AbstractInsnNode> removed = new ArrayList<>();
if(decryptorNode != null)
{
init.add(decryptorNode);
fieldInsn = (FieldInsnNode)getObjectList(decryptorNode);
}else
{
MethodNode clinit = innerClassNode.methods.stream().filter(mn -> mn.name.equals("<clinit>")).findFirst().orElse(null);
for(AbstractInsnNode ain : clinit.instructions.toArray())
{
if(Utils.isInteger(ain) && ain.getNext() != null
&& (ain.getNext().getOpcode() == Opcodes.NEWARRAY ||
(ain.getNext().getOpcode() == Opcodes.ANEWARRAY && ((TypeInsnNode)ain.getNext()).desc.equals("java/lang/Object")))
&& ain.getNext().getNext() != null
&& ain.getNext().getNext().getOpcode() == Opcodes.PUTSTATIC && ((FieldInsnNode)ain.getNext().getNext()).owner.equals(innerClassNode.name)
&& ain.getNext().getNext().getNext() != null
&& Utils.isInteger(ain.getNext().getNext().getNext())
&& Utils.getIntValue(ain.getNext().getNext().getNext()) == Utils.getIntValue(ain)
&& ain.getNext().getNext().getNext().getNext() != null
&& ain.getNext().getNext().getNext().getNext().getOpcode() == Opcodes.ANEWARRAY
&& ((TypeInsnNode)ain.getNext().getNext().getNext().getNext()).desc.equals("java/lang/String")
&& ain.getNext().getNext().getNext().getNext().getNext() != null
&& ain.getNext().getNext().getNext().getNext().getNext().getOpcode() == Opcodes.PUTSTATIC
&& ((FieldInsnNode)ain.getNext().getNext().getNext().getNext().getNext()).owner.equals(innerClassNode.name))
{
((TypeInsnNode)ain.getNext()).desc = "java/lang/String";
((TypeInsnNode)ain.getNext().getNext().getNext().getNext()).desc = "java/lang/Object";
((FieldInsnNode)ain.getNext().getNext()).desc = "[Ljava/lang/String;";
((FieldInsnNode)ain.getNext().getNext().getNext().getNext().getNext()).desc = "[Ljava/lang/Object;";
String temp = ((FieldInsnNode)ain.getNext().getNext()).name;
((FieldInsnNode)ain.getNext().getNext()).name = ((FieldInsnNode)ain.getNext().getNext().getNext().getNext().getNext()).name;
((FieldInsnNode)ain.getNext().getNext().getNext().getNext().getNext()).name = temp;
fieldInsn = (FieldInsnNode)ain.getNext().getNext().getNext().getNext().getNext();
while(ain.getNext() != fieldInsn)
{
removed.add(ain.getNext());
clinit.instructions.remove(ain.getNext());
}
removed.add(fieldInsn);
clinit.instructions.remove(fieldInsn);
removed.add(0, ain);
clinit.instructions.remove(ain);
break;
}
}
}
FieldInsnNode fieldInsn1 = fieldInsn;
Object[] otherInit = innerClassNode.methods.stream().filter(mn -> mn.desc.equals("()V") && isOtherInitMethod(innerClassNode, mn, fieldInsn1)).toArray();
if(decryptorNode == null)
{
MethodNode firstinit = (MethodNode)Arrays.stream(otherInit).filter(mn ->
Utils.isInteger(getFirstIndex((MethodNode)mn))
&& Utils.getIntValue(getFirstIndex((MethodNode)mn)) == 0).findFirst().orElse(null);
firstinit.instructions.remove(firstinit.instructions.getFirst());
Collections.reverse(removed);
boolean first = false;
for(AbstractInsnNode ain : removed)
{
firstinit.instructions.insert(ain);
if(!first)
{
firstinit.instructions.insert(new InsnNode(Opcodes.DUP));
first = true;
}
}
}
for(Object o : otherInit)
init.add((MethodNode)o);
if(!innerClassNode.equals(classNode))
reflectionClasses.add(innerClassNode);
else
{
argsReflectionMethods.put(innerClassNode, new HashSet<>());
initReflectionMethod.put(innerClassNode, new HashSet<>());
for(MethodNode method : init)
initReflectionMethod.get(innerClassNode).add(method);
}
Context context = new Context(provider);
context.dictionary = this.classpath;
for(MethodNode method1 : init)
MethodExecutor.execute(innerClassNode, method1, Collections.emptyList(), null, context);
}catch(Throwable t)
{
System.out.println("Error while fully initializing " + classNode.name);
t.printStackTrace(System.out);
}
}
MethodNode decrypterNode = innerClassNode.methods.stream().filter(mn -> mn.name.equals(methodInsnNode.name) && mn.desc.equals(methodInsnNode.desc)).findFirst().orElse(null);
Context ctx = new Context(provider);
ctx.dictionary = classpath;
JavaField javaField;
try {
javaField = MethodExecutor.execute(innerClassNode, decrypterNode, Collections.singletonList(new JavaLong(ldc)), null, ctx);
} catch (Exception ex) {
ex.printStackTrace();
continue;
}
if(!fieldReflectionMethod.containsKey(innerClassNode))
fieldReflectionMethod.put(innerClassNode, decrypterNode);
methodNode.instructions.remove(current.getPrevious());
if(current.getNext().getNext().getOpcode() == Opcodes.INVOKEVIRTUAL)
{
//Getstatic/field
int opcode = -1;
if(current.getNext().getOpcode() == Opcodes.SWAP)
opcode = Opcodes.GETFIELD;
else
opcode = Opcodes.GETSTATIC;
if(current.getNext().getNext().getNext() != null
&& current.getNext().getNext().getNext().getOpcode() == Opcodes.CHECKCAST)
methodNode.instructions.remove(current.getNext().getNext().getNext());
methodNode.instructions.remove(current.getNext().getNext());
methodNode.instructions.remove(current.getNext());
methodNode.instructions.set(current, new FieldInsnNode(
opcode, javaField.getClassName(), javaField.getName(), javaField.getDesc()));
}else
{
//Putstatic/field
int opcode = -1;
if(current.getNext().getOpcode() == Opcodes.ACONST_NULL)
opcode = Opcodes.PUTSTATIC;
else
opcode = Opcodes.PUTFIELD;
if(opcode == Opcodes.PUTSTATIC)
methodNode.instructions.remove(current.getNext());
else if(current.getNext().getOpcode() == Opcodes.SWAP)
{
//Long/Double
methodNode.instructions.remove(current.getNext());
methodNode.instructions.remove(current.getPrevious().getPrevious());
methodNode.instructions.remove(current.getPrevious());
}
methodNode.instructions.remove(current.getNext().getNext().getNext());
methodNode.instructions.remove(current.getNext().getNext());
methodNode.instructions.remove(current.getNext());
methodNode.instructions.set(current, new FieldInsnNode(
opcode, javaField.getClassName(), javaField.getName(), javaField.getDesc()));
}
count.incrementAndGet();
int x = (int) ((count.get() * 1.0d / expected) * 100);
if (x != 0 && x % 10 == 0 && !alerted[x - 1]) {
System.out.println("[Zelix] [ReflectionObfucationTransformer] Done " + x + "%");
alerted[x - 1] = true;
}
}
}else if (current instanceof InvokeDynamicInsnNode) {
InvokeDynamicInsnNode invokeDynamicInsnNode = (InvokeDynamicInsnNode) current;
boolean isRightDesc = invokeDynamicInsnNode.bsm.getDesc().equals("(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;");
String lastArgs = invokeDynamicInsnNode.desc.substring(0, invokeDynamicInsnNode.desc.lastIndexOf(")"));
boolean isMPC = ((lastArgs.endsWith("IJ") && Utils.getPrevious(current).getOpcode() == Opcodes.LXOR)
|| (lastArgs.endsWith("JJ") && current.getPrevious().getOpcode() == Opcodes.LLOAD));
if(!isMPC && lastArgs.endsWith("IJ"))
{
org.objectweb.asm.tree.analysis.Frame<SourceValue>[] frames;
try
{
frames = new Analyzer<>(new SourceInterpreter()).analyze(classNode.name, methodNode);
}catch(AnalyzerException e)
{
throw new RuntimeException(e);
}
org.objectweb.asm.tree.analysis.Frame<SourceValue> f = frames[methodNode.instructions.indexOf(current)];
if(f.getStack(f.getStackSize() - 1).insns.size() == 1)
{
AbstractInsnNode a1 = f.getStack(f.getStackSize() - 1).insns.iterator().next();
if(a1.getOpcode() == Opcodes.LXOR)
isMPC = true;
}
}
if (isRightDesc && !isMPC) {
long ldc = (long) ((LdcInsnNode) current.getPrevious()).cst;
String strCl = invokeDynamicInsnNode.bsm.getOwner();
ClassNode innerClassNode = classpath.get(strCl);
if (initted.add(innerClassNode)) {
try
{
List<MethodNode> init = new ArrayList<>();
MethodNode decryptorNode = innerClassNode.methods.stream().filter(mn -> mn.desc.equals("()V") && isInitMethod(innerClassNode, mn)).findFirst().orElse(null);
FieldInsnNode fieldInsn = null;
List<AbstractInsnNode> removed = new ArrayList<>();
if(decryptorNode != null)
{
init.add(decryptorNode);
fieldInsn = (FieldInsnNode)getObjectList(decryptorNode);
}else
{
MethodNode clinit = innerClassNode.methods.stream().filter(mn -> mn.name.equals("<clinit>")).findFirst().orElse(null);
for(AbstractInsnNode ain : clinit.instructions.toArray())
{
if(Utils.isInteger(ain) && ain.getNext() != null
&& (ain.getNext().getOpcode() == Opcodes.NEWARRAY ||
(ain.getNext().getOpcode() == Opcodes.ANEWARRAY && ((TypeInsnNode)ain.getNext()).desc.equals("java/lang/Object")))
&& ain.getNext().getNext() != null
&& ain.getNext().getNext().getOpcode() == Opcodes.PUTSTATIC && ((FieldInsnNode)ain.getNext().getNext()).owner.equals(innerClassNode.name)
&& ain.getNext().getNext().getNext() != null
&& Utils.isInteger(ain.getNext().getNext().getNext())
&& Utils.getIntValue(ain.getNext().getNext().getNext()) == Utils.getIntValue(ain)
&& ain.getNext().getNext().getNext().getNext() != null
&& ain.getNext().getNext().getNext().getNext().getOpcode() == Opcodes.ANEWARRAY
&& ((TypeInsnNode)ain.getNext().getNext().getNext().getNext()).desc.equals("java/lang/String")
&& ain.getNext().getNext().getNext().getNext().getNext() != null
&& ain.getNext().getNext().getNext().getNext().getNext().getOpcode() == Opcodes.PUTSTATIC
&& ((FieldInsnNode)ain.getNext().getNext().getNext().getNext().getNext()).owner.equals(innerClassNode.name))
{
((TypeInsnNode)ain.getNext()).desc = "java/lang/String";
((TypeInsnNode)ain.getNext().getNext().getNext().getNext()).desc = "java/lang/Object";
((FieldInsnNode)ain.getNext().getNext()).desc = "[Ljava/lang/String;";
((FieldInsnNode)ain.getNext().getNext().getNext().getNext().getNext()).desc = "[Ljava/lang/Object;";
String temp = ((FieldInsnNode)ain.getNext().getNext()).name;
((FieldInsnNode)ain.getNext().getNext()).name = ((FieldInsnNode)ain.getNext().getNext().getNext().getNext().getNext()).name;
((FieldInsnNode)ain.getNext().getNext().getNext().getNext().getNext()).name = temp;
fieldInsn = (FieldInsnNode)ain.getNext().getNext().getNext().getNext().getNext();
while(ain.getNext() != fieldInsn)
{
removed.add(ain.getNext());
clinit.instructions.remove(ain.getNext());
}
removed.add(fieldInsn);
clinit.instructions.remove(fieldInsn);
removed.add(0, ain);
clinit.instructions.remove(ain);
break;
}
}
}
FieldInsnNode fieldInsn1 = fieldInsn;
Object[] otherInit = innerClassNode.methods.stream().filter(mn -> mn.desc.equals("()V") && isOtherInitMethod(innerClassNode, mn, fieldInsn1)).toArray();
if(decryptorNode == null)
{
MethodNode firstinit = (MethodNode)Arrays.stream(otherInit).filter(mn ->
Utils.isInteger(getFirstIndex((MethodNode)mn))
&& Utils.getIntValue(getFirstIndex((MethodNode)mn)) == 0).findFirst().orElse(null);
firstinit.instructions.remove(firstinit.instructions.getFirst());
Collections.reverse(removed);
boolean first = false;
for(AbstractInsnNode ain : removed)
{
firstinit.instructions.insert(ain);
if(!first)
{
firstinit.instructions.insert(new InsnNode(Opcodes.DUP));
first = true;
}
}
}
for(Object o : otherInit)
init.add((MethodNode)o);
if(!innerClassNode.equals(classNode))
reflectionClasses.add(innerClassNode);
else
{
indyReflectionMethods.put(innerClassNode, new HashSet<>());
initReflectionMethod.put(innerClassNode, new HashSet<>());
for(MethodNode method : init)
initReflectionMethod.get(innerClassNode).add(method);
}
Context context = new Context(provider);
context.dictionary = this.classpath;
for(MethodNode method1 : init)
MethodExecutor.execute(innerClassNode, method1, Collections.emptyList(), null, context);
}catch(Throwable t)
{
System.out.println("Error while fully initializing " + classNode.name);
t.printStackTrace(System.out);
}
}
MethodNode indyNode = innerClassNode.methods.stream().filter(mn -> mn.name.equals(invokeDynamicInsnNode.bsm.getName())
&& mn.desc.equals(invokeDynamicInsnNode.bsm.getDesc())).findFirst().orElse(null);
MethodNode indyNode2 = null;
for(AbstractInsnNode ain : indyNode.instructions.toArray())
if(ain.getOpcode() == Opcodes.LDC && ((LdcInsnNode)ain).cst instanceof Handle)
{
Handle handle = (Handle)((LdcInsnNode)ain).cst;
indyNode2 = innerClassNode.methods.stream().filter(mn -> mn.name.equals(handle.getName())
&& mn.desc.equals(handle.getDesc())).findFirst().orElse(null);
break;
}
MethodNode indyNode3 = null;
for(AbstractInsnNode ain : indyNode2.instructions.toArray())
if(ain.getOpcode() == Opcodes.INVOKESTATIC && ((MethodInsnNode)ain).owner.equals(innerClassNode.name))
{
indyNode3 = innerClassNode.methods.stream().filter(mn -> mn.name.equals(((MethodInsnNode)ain).name)
&& mn.desc.equals(((MethodInsnNode)ain).desc)).findFirst().orElse(null);
break;
}
List<JavaValue> args = new ArrayList<>();
args.add(new JavaObject(null, "java/lang/invoke/MethodHandles$Lookup")); //Lookup
args.add(new JavaObject(null, "java/lang/invoke/MutableCallSite")); //CallSite
args.add(JavaValue.valueOf(invokeDynamicInsnNode.name)); //dyn method name
args.add(new JavaObject(invokeDynamicInsnNode.desc, "java/lang/invoke/MethodType")); //dyn method type
args.add(new JavaLong(ldc));
Context ctx = new Context(provider);
ctx.dictionary = classpath;
JavaHandle handle = MethodExecutor.execute(innerClassNode, indyNode3, args, null, ctx);
if(indyReflectionMethods.containsKey(innerClassNode))
{
indyReflectionMethods.get(innerClassNode).add(indyNode);
indyReflectionMethods.get(innerClassNode).add(indyNode2);
indyReflectionMethods.get(innerClassNode).add(indyNode3);
if(!fieldReflectionMethod.containsKey(innerClassNode))
for(AbstractInsnNode ain : indyNode3.instructions.toArray())
if(ain instanceof MethodInsnNode && ((MethodInsnNode)ain).desc.equals("(J)Ljava/lang/reflect/Field;"))
{
MethodNode refMethod = innerClassNode.methods.stream().filter(m -> m.name.equals(((MethodInsnNode)ain).name)
&& m.desc.equals(((MethodInsnNode)ain).desc)).findFirst().orElse(null);
fieldReflectionMethod.put(innerClassNode, refMethod);
}
if(!methodReflectionMethod.containsKey(innerClassNode))
for(AbstractInsnNode ain : indyNode3.instructions.toArray())
if(ain instanceof MethodInsnNode && ((MethodInsnNode)ain).desc.equals("(J)Ljava/lang/reflect/Method;"))
{
MethodNode refMethod = innerClassNode.methods.stream().filter(m -> m.name.equals(((MethodInsnNode)ain).name)
&& m.desc.equals(((MethodInsnNode)ain).desc)).findFirst().orElse(null);
methodReflectionMethod.put(innerClassNode, refMethod);
}
}
methodNode.instructions.remove(current.getPrevious());
AbstractInsnNode replacement = null;
if(handle instanceof JavaMethodHandle)
{
JavaMethodHandle jmh = (JavaMethodHandle)handle;
String clazz = jmh.clazz.replace('.', '/');
switch (jmh.type) {
case "virtual":
replacement = new MethodInsnNode((classpath.get(clazz).access & Opcodes.ACC_INTERFACE) != 0 ?
Opcodes.INVOKEINTERFACE : Opcodes.INVOKEVIRTUAL, clazz, jmh.name, jmh.desc,
(classpath.get(clazz).access & Opcodes.ACC_INTERFACE) != 0);
break;
case "static":
replacement = new MethodInsnNode(Opcodes.INVOKESTATIC, clazz, jmh.name, jmh.desc, false);
break;
case "special":
replacement = new MethodInsnNode(Opcodes.INVOKESPECIAL, clazz, jmh.name, jmh.desc, false);
break;
}
}else
{
JavaFieldHandle jfh = (JavaFieldHandle)handle;
String clazz = jfh.clazz.replace('.', '/');
switch (jfh.type) {
case "virtual":
replacement = new FieldInsnNode(jfh.setter ?
Opcodes.PUTFIELD : Opcodes.GETFIELD, clazz, jfh.name, jfh.desc);
break;
case "static":
replacement = new FieldInsnNode(jfh.setter ?
Opcodes.PUTSTATIC : Opcodes.GETSTATIC, clazz, jfh.name, jfh.desc);
break;
}
}
methodNode.instructions.set(current, replacement);
count.incrementAndGet();
int x = (int) ((count.get() * 1.0d / expected) * 100);
if (x != 0 && x % 10 == 0 && !alerted[x - 1]) {
System.out.println("[Zelix] [ReflectionObfucationTransformer] Done " + x + "%");
alerted[x - 1] = true;
}
}
}
}
//Remove all decryption class/methods
if (getConfig().isCleanup()) {
Set<ClassNode> remove = classNodes().stream().filter(classNode -> reflectionClasses.contains(classNode)).collect(Collectors.toSet());
classNodes().removeAll(remove);
for (Entry<ClassNode, Set<MethodNode>> entry : argsReflectionMethods.entrySet())
for (MethodNode method : entry.getValue())
entry.getKey().methods.remove(method);
for (Entry<ClassNode, Set<MethodNode>> entry : indyReflectionMethods.entrySet())
for (MethodNode method : entry.getValue())
entry.getKey().methods.remove(method);
for (Entry<ClassNode, Set<MethodNode>> entry : initReflectionMethod.entrySet()) {
List<MethodNode> list = new ArrayList<>(entry.getValue());
for (MethodNode method : list) {
int fieldCount = 0;
if (list.indexOf(method) == 0)
for (AbstractInsnNode ain : method.instructions.toArray()) {
if (ain.getOpcode() == Opcodes.PUTSTATIC) {
FieldInsnNode fieldInsn = (FieldInsnNode) ain;
FieldNode field = entry.getKey().fields.stream().filter(f ->
f.name.equals(fieldInsn.name) && f.desc.equals(fieldInsn.desc)).findFirst().orElse(null);
entry.getKey().fields.remove(field);
}
if (fieldCount >= 2)
break;
}
MethodNode clinit = entry.getKey().methods.stream().filter(m -> m.name.equals("<clinit>")).findFirst().orElse(null);
if (clinit != null)
for (AbstractInsnNode ain : clinit.instructions.toArray())
if (ain.getOpcode() == Opcodes.INVOKESTATIC) {
MethodInsnNode methodInsn = (MethodInsnNode) ain;
if (methodInsn.desc.equals(method.desc) && methodInsn.name.equals(method.name)) {
clinit.instructions.remove(methodInsn);
break;
}
}
entry.getKey().methods.remove(method);
}
}
for (Entry<ClassNode, MethodNode> entry : methodReflectionMethod.entrySet()) {
List<MethodNode> reflectionReferences = new ArrayList<>();
for (AbstractInsnNode ain : entry.getValue().instructions.toArray()) {
if (ain.getOpcode() == Opcodes.INVOKESTATIC && ((MethodInsnNode) ain).owner.equals(entry.getKey().name)) {
MethodInsnNode methodInsn = (MethodInsnNode) ain;
MethodNode method = entry.getKey().methods.stream().filter(
m -> m.name.equals(methodInsn.name) && m.desc.equals(methodInsn.desc)).findFirst().orElse(null);
if (!reflectionReferences.contains(method))
reflectionReferences.add(method);
}
if (reflectionReferences.size() >= 4)
break;
}
for (int i = 0; i < reflectionReferences.size(); i++) {
MethodNode method = reflectionReferences.get(i);
if (i == 2)
for (AbstractInsnNode ain : method.instructions.toArray())
if (ain.getOpcode() == Opcodes.INVOKESTATIC && ((MethodInsnNode) ain).owner.equals(entry.getKey().name)) {
MethodInsnNode methodInsn = (MethodInsnNode) ain;
MethodNode method1 = entry.getKey().methods.stream().filter(
m -> m.name.equals(methodInsn.name) && m.desc.equals(methodInsn.desc)).findFirst().orElse(null);
if (method1 != null)
entry.getKey().methods.remove(method1);
}
entry.getKey().methods.remove(method);
}
entry.getKey().methods.remove(entry.getValue());
}
for (Entry<ClassNode, MethodNode> entry : fieldReflectionMethod.entrySet()) {
List<MethodNode> reflectionReferences = new ArrayList<>();
for (AbstractInsnNode ain : entry.getValue().instructions.toArray()) {
if (ain.getOpcode() == Opcodes.INVOKESTATIC && ((MethodInsnNode) ain).owner.equals(entry.getKey().name)) {
MethodInsnNode methodInsn = (MethodInsnNode) ain;
MethodNode method = entry.getKey().methods.stream().filter(
m -> m.name.equals(methodInsn.name) && m.desc.equals(methodInsn.desc)).findFirst().orElse(null);
if (method != null && !reflectionReferences.contains(method))
reflectionReferences.add(method);
}
if (reflectionReferences.size() >= 4)
break;
}
for (MethodNode method : reflectionReferences)
entry.getKey().methods.remove(method);
entry.getKey().methods.remove(entry.getValue());
}
}
return count.get();
}
/**
 * Heuristically recognizes the generated "init" method that allocates the
 * paired string/object arrays used by the reflection obfuscation. The method
 * must open with: push length, NEWARRAY (or ANEWARRAY java/lang/String),
 * PUTSTATIC into this class, push the same length again, ANEWARRAY
 * java/lang/Object, DUP, PUTSTATIC into this class.
 */
public static boolean isInitMethod(ClassNode classNode, MethodNode method)
{
    // Collect the first 7 real instructions, skipping pseudo-instructions
    // (labels/frames/line numbers report opcode -1).
    List<AbstractInsnNode> head = new ArrayList<>();
    for(AbstractInsnNode insn : method.instructions.toArray())
    {
        if(insn.getOpcode() == -1)
            continue;
        head.add(insn);
        if(head.size() >= 7)
            break;
    }
    if(head.size() < 7)
        return false;
    // First instruction must push the array length.
    // NOTE: a literal -1 is indistinguishable from the "not an int" sentinel
    // and is rejected, matching the original behavior.
    int length = Utils.isInteger(head.get(0)) ? Utils.getIntValue(head.get(0)) : -1;
    if(length == -1)
        return false;
    boolean stringArrayForm = head.get(1).getOpcode() == Opcodes.ANEWARRAY
        && ((TypeInsnNode)head.get(1)).desc.equals("java/lang/String");
    boolean primArrayForm = head.get(1).getOpcode() == Opcodes.NEWARRAY;
    if(!stringArrayForm && !primArrayForm)
        return false;
    // Remaining shape is identical for both array forms.
    return head.get(2).getOpcode() == Opcodes.PUTSTATIC
        && ((FieldInsnNode)head.get(2)).owner.equals(classNode.name)
        && Utils.isInteger(head.get(3)) && Utils.getIntValue(head.get(3)) == length
        && head.get(4).getOpcode() == Opcodes.ANEWARRAY
        && ((TypeInsnNode)head.get(4)).desc.equals("java/lang/Object")
        && head.get(5).getOpcode() == Opcodes.DUP
        && head.get(6).getOpcode() == Opcodes.PUTSTATIC
        && ((FieldInsnNode)head.get(6)).owner.equals(classNode.name);
}
/**
 * Returns the 7th real (non-pseudo) instruction of the method. For a method
 * accepted by isInitMethod this is the final PUTSTATIC that stores the
 * Object[] array.
 */
public static AbstractInsnNode getObjectList(MethodNode method)
{
    List<AbstractInsnNode> real = new ArrayList<>();
    for(AbstractInsnNode insn : method.instructions.toArray())
    {
        if(insn.getOpcode() != -1)
        {
            real.add(insn);
            if(real.size() == 7)
                break;
        }
    }
    // Fewer than 7 real instructions throws IndexOutOfBoundsException,
    // exactly like the original direct get(6).
    return real.get(6);
}
/**
 * Recognizes the secondary init helpers that fill the array created by the
 * main init method: they must open with GETSTATIC of the given array field,
 * DUP, then an integer index push. {@code classNode} is unused but kept for
 * signature compatibility.
 */
public static boolean isOtherInitMethod(ClassNode classNode, MethodNode method, FieldInsnNode fieldInsn)
{
    // Gather the first three real instructions (pseudo-instructions have
    // opcode -1 and are skipped).
    List<AbstractInsnNode> head = new ArrayList<>();
    for(AbstractInsnNode insn : method.instructions.toArray())
    {
        if(insn.getOpcode() != -1)
        {
            head.add(insn);
            if(head.size() >= 3)
                break;
        }
    }
    if(head.size() < 3)
        return false;
    if(head.get(0).getOpcode() != Opcodes.GETSTATIC)
        return false;
    FieldInsnNode getstatic = (FieldInsnNode)head.get(0);
    return getstatic.desc.equals(fieldInsn.desc)
        && getstatic.name.equals(fieldInsn.name)
        && getstatic.owner.equals(fieldInsn.owner)
        && head.get(1).getOpcode() == Opcodes.DUP
        && Utils.isInteger(head.get(2));
}
/**
 * Returns the 3rd real (non-pseudo) instruction of the method. For a helper
 * accepted by isOtherInitMethod this is the integer index pushed after the
 * GETSTATIC/DUP prefix.
 */
public static AbstractInsnNode getFirstIndex(MethodNode method)
{
    List<AbstractInsnNode> real = new ArrayList<>();
    for(AbstractInsnNode insn : method.instructions.toArray())
    {
        if(insn.getOpcode() != -1)
        {
            real.add(insn);
            if(real.size() == 3)
                break;
        }
    }
    // Fewer than 3 real instructions throws IndexOutOfBoundsException,
    // exactly like the original direct get(2).
    return real.get(2);
}
/**
 * Dry-run pass that counts every reflection-obfuscation call site the main
 * transform is expected to rewrite; the total drives the percentage printed
 * during transformation. Nothing is modified here.
 *
 * @return number of detected reflection-obfuscation call sites
 * @throws Throwable declared for symmetry with the transform entry points;
 *         analysis failures actually surface as RuntimeException
 */
private int findReflectionObfuscation() throws Throwable {
    AtomicInteger count = new AtomicInteger(0);
    classNodes().stream().forEach(classNode -> {
        classNode.methods.forEach(methodNode -> {
            for (int i = 0; i < methodNode.instructions.size(); i++) {
                AbstractInsnNode current = methodNode.instructions.get(i);
                // Case 1: direct decryptor calls returning a reflect Method/Field.
                // Skip methods that are themselves the (J)/(JJ) MethodHandle
                // bootstrap helpers so internal plumbing is not double-counted.
                if (current instanceof MethodInsnNode
                    && !methodNode.desc.equals("(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/invoke/MutableCallSite;"
                        + "Ljava/lang/String;Ljava/lang/invoke/MethodType;J)Ljava/lang/invoke/MethodHandle;")
                    && !methodNode.desc.equals("(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/invoke/MutableCallSite;"
                        + "Ljava/lang/String;Ljava/lang/invoke/MethodType;JJ)Ljava/lang/invoke/MethodHandle;")) {
                    MethodInsnNode methodInsnNode = (MethodInsnNode) current;
                    if (methodInsnNode.desc.equals("(J)Ljava/lang/reflect/Method;") || methodInsnNode.desc.equals("(J)Ljava/lang/reflect/Field;")) {
                        count.incrementAndGet();
                    }
                }else if (current instanceof InvokeDynamicInsnNode) {
                    // Case 2: invokedynamic sites using the standard CallSite
                    // bootstrap descriptor.
                    InvokeDynamicInsnNode invokeDynamicInsnNode = (InvokeDynamicInsnNode) current;
                    boolean isRightDesc = invokeDynamicInsnNode.bsm.getDesc().equals("(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;");
                    String lastArgs = invokeDynamicInsnNode.desc.substring(0, invokeDynamicInsnNode.desc.lastIndexOf(")"));
                    // "MPC" sites (extra long key fed by LXOR, or JJ arg fed by
                    // LLOAD) are handled by a different transform and must not
                    // be counted here.
                    boolean isMPC = ((lastArgs.endsWith("IJ") && Utils.getPrevious(current).getOpcode() == Opcodes.LXOR)
                        || (lastArgs.endsWith("JJ") && current.getPrevious().getOpcode() == Opcodes.LLOAD));
                    if(!isMPC && lastArgs.endsWith("IJ"))
                    {
                        // The producer of the long key may sit further back than
                        // the immediately preceding instruction; run a source-value
                        // frame analysis to see whether the top of stack originates
                        // from an LXOR (which also marks the site as MPC).
                        org.objectweb.asm.tree.analysis.Frame<SourceValue>[] frames;
                        try
                        {
                            frames = new Analyzer<>(new SourceInterpreter()).analyze(classNode.name, methodNode);
                        }catch(AnalyzerException e)
                        {
                            throw new RuntimeException(e);
                        }
                        org.objectweb.asm.tree.analysis.Frame<SourceValue> f = frames[methodNode.instructions.indexOf(current)];
                        if(f.getStack(f.getStackSize() - 1).insns.size() == 1)
                        {
                            AbstractInsnNode a1 = f.getStack(f.getStackSize() - 1).insns.iterator().next();
                            if(a1.getOpcode() == Opcodes.LXOR)
                                isMPC = true;
                        }
                    }
                    if (isRightDesc && !isMPC) {
                        count.incrementAndGet();
                    }
                }
            }
        });
    });
    return count.get();
}
}
| |
/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.facebook.react.modules.camera;
import javax.annotation.Nullable;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.net.URLConnection;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import android.annotation.SuppressLint;
import android.content.Context;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.AsyncTask;
import android.provider.MediaStore;
import android.text.TextUtils;
import com.facebook.common.logging.FLog;
import com.facebook.react.bridge.Callback;
import com.facebook.react.bridge.GuardedAsyncTask;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.JSApplicationIllegalArgumentException;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.infer.annotation.Assertions;
import com.facebook.react.common.ReactConstants;
import com.facebook.react.module.annotations.ReactModule;
/**
* Native module that provides image cropping functionality.
*/
@ReactModule(name = ImageEditingManager.NAME)
public class ImageEditingManager extends ReactContextBaseJavaModule {
// Module name this class is registered under on the JS side.
protected static final String NAME = "ImageEditingManager";

// URI schemes that are resolved through the ContentResolver instead of HTTP.
private static final List<String> LOCAL_URI_PREFIXES = Arrays.asList(
    "file://", "content://");

// Prefix of temp output files, used by CleanTask to find and delete them later.
private static final String TEMP_FILE_PREFIX = "ReactNative_cropped_image_";

/** Compress quality of the output file. */
private static final int COMPRESS_QUALITY = 90;

// EXIF attributes copied from the source image onto the cropped JPEG output.
// NOTE(review): InlinedApi is suppressed on the declaration — presumably some
// tags are newer than the min SDK; confirm against the supported API levels.
@SuppressLint("InlinedApi") private static final String[] EXIF_ATTRIBUTES = new String[] {
    ExifInterface.TAG_APERTURE,
    ExifInterface.TAG_DATETIME,
    ExifInterface.TAG_DATETIME_DIGITIZED,
    ExifInterface.TAG_EXPOSURE_TIME,
    ExifInterface.TAG_FLASH,
    ExifInterface.TAG_FOCAL_LENGTH,
    ExifInterface.TAG_GPS_ALTITUDE,
    ExifInterface.TAG_GPS_ALTITUDE_REF,
    ExifInterface.TAG_GPS_DATESTAMP,
    ExifInterface.TAG_GPS_LATITUDE,
    ExifInterface.TAG_GPS_LATITUDE_REF,
    ExifInterface.TAG_GPS_LONGITUDE,
    ExifInterface.TAG_GPS_LONGITUDE_REF,
    ExifInterface.TAG_GPS_PROCESSING_METHOD,
    ExifInterface.TAG_GPS_TIMESTAMP,
    ExifInterface.TAG_IMAGE_LENGTH,
    ExifInterface.TAG_IMAGE_WIDTH,
    ExifInterface.TAG_ISO,
    ExifInterface.TAG_MAKE,
    ExifInterface.TAG_MODEL,
    ExifInterface.TAG_ORIENTATION,
    ExifInterface.TAG_SUBSEC_TIME,
    ExifInterface.TAG_SUBSEC_TIME_DIG,
    ExifInterface.TAG_SUBSEC_TIME_ORIG,
    ExifInterface.TAG_WHITE_BALANCE
};
/**
 * Creates the module and immediately schedules a cleanup of stale temp files
 * possibly left behind by a previous process (e.g. after a crash).
 */
public ImageEditingManager(ReactApplicationContext reactContext) {
  super(reactContext);
  new CleanTask(getReactApplicationContext()).executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
}
/** Returns the name under which this module is exposed to JavaScript. */
@Override
public String getName() {
  return NAME;
}
/** This module exports no constants to JavaScript. */
@Override
public Map<String, Object> getConstants() {
  return Collections.emptyMap();
}
/** Cleans up temp files created by crop operations when the app is shutting down. */
@Override
public void onCatalystInstanceDestroy() {
  new CleanTask(getReactApplicationContext()).executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
}
/**
 * Background task that deletes leftover cropped-image temp files from the
 * internal cache dir and, when available, the external cache dir. It runs
 * when the catalyst instance is destroyed (app shutting down) and again when
 * the module is created, to recover from a previous crash.
 */
private static class CleanTask extends GuardedAsyncTask<Void, Void> {
  private final Context mContext;

  private CleanTask(ReactContext context) {
    super(context);
    mContext = context;
  }

  @Override
  protected void doInBackgroundGuarded(Void... params) {
    cleanDirectory(mContext.getCacheDir());
    File external = mContext.getExternalCacheDir();
    if (external != null) {
      cleanDirectory(external);
    }
  }

  /** Deletes every file in {@code directory} whose name marks it as one of our temp files. */
  private void cleanDirectory(File directory) {
    FilenameFilter tempFileFilter =
        new FilenameFilter() {
          @Override
          public boolean accept(File dir, String filename) {
            return filename.startsWith(TEMP_FILE_PREFIX);
          }
        };
    File[] candidates = directory.listFiles(tempFileFilter);
    // listFiles returns null when the directory is unreadable or not a directory.
    if (candidates == null) {
      return;
    }
    for (File candidate : candidates) {
      candidate.delete();
    }
  }
}
/**
 * Crop an image. If all goes well, the success callback will be called with the file:// URI of
 * the new image as the only argument. This is a temporary file - consider using
 * CameraRollManager.saveImageWithTag to save it in the gallery.
 *
 * @param uri the MediaStore URI of the image to crop
 * @param options crop parameters specified as {@code {offset: {x, y}, size: {width, height}}}.
 *        Optionally this also contains {@code {displaySize: {width, height}}}. If this is
 *        specified, the cropped image will be resized to that size.
 *        All units are in pixels (not DPs).
 * @param success callback to be invoked when the image has been cropped; the only argument that
 *        is passed to this callback is the file:// URI of the new image
 * @param error callback to be invoked when an error occurs (e.g. can't create file etc.)
 */
@ReactMethod
public void cropImage(
    String uri,
    ReadableMap options,
    final Callback success,
    final Callback error) {
  // Both offset and size are mandatory; missing/incomplete values fail fast
  // as a JS argument exception rather than going through the error callback.
  ReadableMap offset = options.hasKey("offset") ? options.getMap("offset") : null;
  ReadableMap size = options.hasKey("size") ? options.getMap("size") : null;
  if (offset == null || size == null ||
      !offset.hasKey("x") || !offset.hasKey("y") ||
      !size.hasKey("width") || !size.hasKey("height")) {
    throw new JSApplicationIllegalArgumentException("Please specify offset and size");
  }
  if (uri == null || uri.isEmpty()) {
    throw new JSApplicationIllegalArgumentException("Please specify a URI");
  }
  CropTask cropTask = new CropTask(
      getReactApplicationContext(),
      uri,
      (int) offset.getDouble("x"),
      (int) offset.getDouble("y"),
      (int) size.getDouble("width"),
      (int) size.getDouble("height"),
      success,
      error);
  // Optional output size; when present the cropped result is scaled to it.
  if (options.hasKey("displaySize")) {
    ReadableMap targetSize = options.getMap("displaySize");
    cropTask.setTargetSize(
        (int) targetSize.getDouble("width"),
        (int) targetSize.getDouble("height"));
  }
  cropTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
}
/**
 * Background task that decodes the source image, crops it to the requested
 * rectangle (optionally scaling to a target display size), writes the result
 * to a temp file, and invokes the success callback with the file:// URI.
 * Any failure is routed to the error callback with the exception message.
 */
private static class CropTask extends GuardedAsyncTask<Void, Void> {
  final Context mContext;
  // Source image location: local file/content URI or a remote URL.
  final String mUri;
  // Crop rectangle in pixels of the source image.
  final int mX;
  final int mY;
  final int mWidth;
  final int mHeight;
  // Optional output size; 0 means "no resize" (set via setTargetSize).
  int mTargetWidth = 0;
  int mTargetHeight = 0;
  final Callback mSuccess;
  final Callback mError;

  private CropTask(
      ReactContext context,
      String uri,
      int x,
      int y,
      int width,
      int height,
      Callback success,
      Callback error) {
    super(context);
    // Reject degenerate rectangles up front, before any decoding work.
    if (x < 0 || y < 0 || width <= 0 || height <= 0) {
      throw new JSApplicationIllegalArgumentException(String.format(
          "Invalid crop rectangle: [%d, %d, %d, %d]", x, y, width, height));
    }
    mContext = context;
    mUri = uri;
    mX = x;
    mY = y;
    mWidth = width;
    mHeight = height;
    mSuccess = success;
    mError = error;
  }

  /** Sets the output size the cropped image will be scaled to; both must be positive. */
  public void setTargetSize(int width, int height) {
    if (width <= 0 || height <= 0) {
      throw new JSApplicationIllegalArgumentException(String.format(
          "Invalid target size: [%d, %d]", width, height));
    }
    mTargetWidth = width;
    mTargetHeight = height;
  }

  /**
   * Opens a stream to the source image: ContentResolver for local URIs,
   * a URLConnection otherwise.
   *
   * @throws IOException when the stream cannot be opened
   */
  private InputStream openBitmapInputStream() throws IOException {
    InputStream stream;
    if (isLocalUri(mUri)) {
      stream = mContext.getContentResolver().openInputStream(Uri.parse(mUri));
    } else {
      URLConnection connection = new URL(mUri).openConnection();
      stream = connection.getInputStream();
    }
    if (stream == null) {
      throw new IOException("Cannot open bitmap: " + mUri);
    }
    return stream;
  }

  @Override
  protected void doInBackgroundGuarded(Void... params) {
    try {
      BitmapFactory.Options outOptions = new BitmapFactory.Options();
      // If we're downscaling, we can decode the bitmap more efficiently, using less memory
      boolean hasTargetSize = (mTargetWidth > 0) && (mTargetHeight > 0);
      Bitmap cropped;
      if (hasTargetSize) {
        cropped = cropAndResize(mTargetWidth, mTargetHeight, outOptions);
      } else {
        cropped = crop(outOptions);
      }
      // outMimeType is filled in by the decode; we need it to pick the
      // compression format and file extension.
      String mimeType = outOptions.outMimeType;
      if (mimeType == null || mimeType.isEmpty()) {
        throw new IOException("Could not determine MIME type");
      }
      File tempFile = createTempFile(mContext, mimeType);
      writeCompressedBitmapToFile(cropped, mimeType, tempFile);
      // EXIF only exists for JPEG; carry the source metadata over.
      if (mimeType.equals("image/jpeg")) {
        copyExif(mContext, Uri.parse(mUri), tempFile);
      }
      mSuccess.invoke(Uri.fromFile(tempFile).toString());
    } catch (Exception e) {
      mError.invoke(e.getMessage());
    }
  }

  /**
   * Reads and crops the bitmap.
   * @param outOptions Bitmap options, useful to determine {@code outMimeType}.
   */
  private Bitmap crop(BitmapFactory.Options outOptions) throws IOException {
    InputStream inputStream = openBitmapInputStream();
    try {
      // This can use a lot of memory
      Bitmap fullResolutionBitmap = BitmapFactory.decodeStream(inputStream, null, outOptions);
      if (fullResolutionBitmap == null) {
        throw new IOException("Cannot decode bitmap: " + mUri);
      }
      return Bitmap.createBitmap(fullResolutionBitmap, mX, mY, mWidth, mHeight);
    } finally {
      if (inputStream != null) {
        inputStream.close();
      }
    }
  }

  /**
   * Crop the rectangle given by {@code mX, mY, mWidth, mHeight} within the source bitmap
   * and scale the result to {@code targetWidth, targetHeight}.
   * @param outOptions Bitmap options, useful to determine {@code outMimeType}.
   */
  private Bitmap cropAndResize(
      int targetWidth,
      int targetHeight,
      BitmapFactory.Options outOptions)
      throws IOException {
    Assertions.assertNotNull(outOptions);
    // Loading large bitmaps efficiently:
    // http://developer.android.com/training/displaying-bitmaps/load-bitmap.html
    // Just decode the dimensions
    BitmapFactory.Options options = new BitmapFactory.Options();
    options.inJustDecodeBounds = true;
    InputStream inputStream = openBitmapInputStream();
    try {
      BitmapFactory.decodeStream(inputStream, null, options);
    } finally {
      if (inputStream != null) {
        inputStream.close();
      }
    }
    // This uses scaling mode COVER
    // Where would the crop rect end up within the scaled bitmap?
    float newWidth, newHeight, newX, newY, scale;
    float cropRectRatio = mWidth / (float) mHeight;
    float targetRatio = targetWidth / (float) targetHeight;
    if (cropRectRatio > targetRatio) {
      // e.g. source is landscape, target is portrait
      newWidth = mHeight * targetRatio;
      newHeight = mHeight;
      newX = mX + (mWidth - newWidth) / 2;
      newY = mY;
      scale = targetHeight / (float) mHeight;
    } else {
      // e.g. source is portrait, target is landscape
      newWidth = mWidth;
      newHeight = mWidth / targetRatio;
      newX = mX;
      newY = mY + (mHeight - newHeight) / 2;
      scale = targetWidth / (float) mWidth;
    }
    // Decode the bitmap. We have to open the stream again, like in the example linked above.
    // Is there a way to just continue reading from the stream?
    outOptions.inSampleSize = getDecodeSampleSize(mWidth, mHeight, targetWidth, targetHeight);
    // NOTE(review): this resets the bounds-only flag on 'options', but the
    // decode below uses 'outOptions' (whose flag defaults to false) — looks
    // like a no-op; confirm the original intent.
    options.inJustDecodeBounds = false;
    inputStream = openBitmapInputStream();
    Bitmap bitmap;
    try {
      // This can use significantly less memory than decoding the full-resolution bitmap
      bitmap = BitmapFactory.decodeStream(inputStream, null, outOptions);
      if (bitmap == null) {
        throw new IOException("Cannot decode bitmap: " + mUri);
      }
    } finally {
      if (inputStream != null) {
        inputStream.close();
      }
    }
    // Translate the crop rect into the subsampled bitmap's coordinate space.
    int cropX = (int) Math.floor(newX / (float) outOptions.inSampleSize);
    int cropY = (int) Math.floor(newY / (float) outOptions.inSampleSize);
    int cropWidth = (int) Math.floor(newWidth / (float) outOptions.inSampleSize);
    int cropHeight = (int) Math.floor(newHeight / (float) outOptions.inSampleSize);
    float cropScale = scale * outOptions.inSampleSize;
    Matrix scaleMatrix = new Matrix();
    scaleMatrix.setScale(cropScale, cropScale);
    boolean filter = true;
    return Bitmap.createBitmap(bitmap, cropX, cropY, cropWidth, cropHeight, scaleMatrix, filter);
  }
}
// Utils
/**
 * Copies the EXIF attributes listed in {@code EXIF_ATTRIBUTES} (orientation, GPS data,
 * timestamps, ...) from the source image to a newly written file.
 *
 * @param context used to resolve a content Uri to a concrete file path
 * @param oldImage Uri of the original image; must resolve to a local file
 * @param newFile destination file that receives the copied attributes
 * @throws IOException if reading or saving EXIF data fails
 */
private static void copyExif(Context context, Uri oldImage, File newFile) throws IOException {
  File oldFile = getFileFromUri(context, oldImage);
  if (oldFile == null) {
    // Best-effort: without a real path we cannot read EXIF; warn and skip rather than fail.
    FLog.w(ReactConstants.TAG, "Couldn't get real path for uri: " + oldImage);
    return;
  }
  ExifInterface oldExif = new ExifInterface(oldFile.getAbsolutePath());
  ExifInterface newExif = new ExifInterface(newFile.getAbsolutePath());
  // Copy only attributes that are actually present on the source image.
  for (String attribute : EXIF_ATTRIBUTES) {
    String value = oldExif.getAttribute(attribute);
    if (value != null) {
      newExif.setAttribute(attribute, value);
    }
  }
  // Persist all copied attributes to the new file in one write.
  newExif.saveAttributes();
}
/**
 * Resolves a {@code file://} or {@code content://} Uri to a concrete {@link File} on disk.
 *
 * @param context used to query the content resolver for {@code content://} Uris
 * @param uri the Uri to resolve
 * @return the backing file, or null if the Uri cannot be resolved to a local path
 */
private static @Nullable File getFileFromUri(Context context, Uri uri) {
  // Fix: Uri.getScheme() may return null for relative/opaque Uris; the original
  // uri.getScheme().equals(...) would throw NPE. Cache it and compare constant-first.
  String scheme = uri.getScheme();
  if ("file".equals(scheme)) {
    return new File(uri.getPath());
  } else if ("content".equals(scheme)) {
    Cursor cursor = context.getContentResolver()
        .query(uri, new String[] { MediaStore.MediaColumns.DATA }, null, null, null);
    if (cursor != null) {
      try {
        if (cursor.moveToFirst()) {
          String path = cursor.getString(0);
          if (!TextUtils.isEmpty(path)) {
            return new File(path);
          }
        }
      } finally {
        // Always release the cursor, even when no row/path was found.
        cursor.close();
      }
    }
  }
  return null;
}
/** Returns true when {@code uri} starts with one of the known local-scheme prefixes. */
private static boolean isLocalUri(String uri) {
  for (int i = 0; i < LOCAL_URI_PREFIXES.length; i++) {
    if (uri.startsWith(LOCAL_URI_PREFIXES[i])) {
      return true;
    }
  }
  return false;
}
/**
 * Maps an image MIME type to a file extension.
 * Anything unrecognized (including null) falls back to ".jpg".
 */
private static String getFileExtensionForType(@Nullable String mimeType) {
  if ("image/png".equals(mimeType)) {
    return ".png";
  }
  return "image/webp".equals(mimeType) ? ".webp" : ".jpg";
}
/** Chooses the Bitmap compression format for an image MIME type; defaults to JPEG. */
private static Bitmap.CompressFormat getCompressFormatForType(String type) {
  // Guard against null before switching (the original equals() calls were null-safe).
  switch (type == null ? "" : type) {
    case "image/png":
      return Bitmap.CompressFormat.PNG;
    case "image/webp":
      return Bitmap.CompressFormat.WEBP;
    default:
      return Bitmap.CompressFormat.JPEG;
  }
}
/**
 * Compresses {@code cropped} into {@code tempFile} using the format implied by the MIME type
 * and the configured COMPRESS_QUALITY.
 *
 * @param cropped the bitmap to write
 * @param mimeType the target MIME type (selects PNG/WEBP/JPEG)
 * @param tempFile destination file
 * @throws IOException if the file cannot be opened or closed
 */
private static void writeCompressedBitmapToFile(Bitmap cropped, String mimeType, File tempFile)
    throws IOException {
  // try-with-resources closes the stream on all paths. The old `if (out != null)` guard was
  // dead code: `new FileOutputStream(...)` either throws or yields a non-null stream.
  try (OutputStream out = new FileOutputStream(tempFile)) {
    cropped.compress(getCompressFormatForType(mimeType), COMPRESS_QUALITY, out);
  }
}
/**
 * Create a temporary file in the cache directory on either internal or external storage,
 * whichever is available and has more free space.
 *
 * @param mimeType the MIME type of the file to create (image/*)
 */
private static File createTempFile(Context context, @Nullable String mimeType)
    throws IOException {
  File external = context.getExternalCacheDir();
  File internal = context.getCacheDir();
  if (external == null && internal == null) {
    throw new IOException("No cache directory available");
  }
  File cacheDir;
  if (external == null) {
    cacheDir = internal;
  } else if (internal == null) {
    cacheDir = external;
  } else {
    // Both exist: pick whichever has more free space.
    cacheDir = external.getFreeSpace() > internal.getFreeSpace() ? external : internal;
  }
  return File.createTempFile(TEMP_FILE_PREFIX, getFileExtensionForType(mimeType), cacheDir);
}
/**
 * When scaling down the bitmap, decode only every n-th pixel in each dimension.
 * Calculate the largest {@code inSampleSize} value that is a power of 2 and keeps both
 * {@code width, height} larger or equal to {@code targetWidth, targetHeight}.
 * This can significantly reduce memory usage.
 */
private static int getDecodeSampleSize(int width, int height, int targetWidth, int targetHeight) {
  int inSampleSize = 1;
  // Fix: compare each dimension against its own target. The original guard compared
  // height against targetWidth and width against targetHeight (swapped). The while-loop
  // below already bounded the result correctly, so this aligns the early-out with the
  // documented contract without changing observable behavior.
  if (height > targetHeight || width > targetWidth) {
    int halfHeight = height / 2;
    int halfWidth = width / 2;
    // Keep doubling while a (width/inSampleSize x height/inSampleSize) decode would
    // still be at least as large as the target in both dimensions.
    while ((halfWidth / inSampleSize) >= targetWidth
        && (halfHeight / inSampleSize) >= targetHeight) {
      inSampleSize *= 2;
    }
  }
  return inSampleSize;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.highlight;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import org.apache.lucene.analysis.CachingTokenFilter;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.highlight.Encoder;
import org.apache.lucene.search.highlight.Formatter;
import org.apache.lucene.search.highlight.Fragmenter;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.InvalidTokenOffsetsException;
import org.apache.lucene.search.highlight.OffsetLimitTokenFilter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.QueryTermScorer;
import org.apache.lucene.search.highlight.Scorer;
import org.apache.lucene.search.highlight.TextFragment;
import org.apache.lucene.search.highlight.TokenSources;
import org.apache.lucene.search.vectorhighlight.BoundaryScanner;
import org.apache.lucene.search.vectorhighlight.FastVectorHighlighter;
import org.apache.lucene.search.vectorhighlight.FieldQuery;
import org.apache.lucene.search.vectorhighlight.FragListBuilder;
import org.apache.lucene.search.vectorhighlight.FragmentsBuilder;
import org.apache.lucene.util.AttributeSource.State;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.HighlightParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocList;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.util.plugin.PluginInfoInitialized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @since solr 1.3
*/
public class DefaultSolrHighlighter extends SolrHighlighter implements PluginInfoInitialized
{
public static Logger log = LoggerFactory.getLogger(DefaultSolrHighlighter.class);
protected final SolrCore solrCore;
//Will be invoked via reflection
public DefaultSolrHighlighter(SolrCore solrCore) {
  this.solrCore = solrCore;
}
// Plugin registries: each maps a configured plugin name to its implementation, with the
// default registered under both "" and null (see init()).
// NOTE(review): plain HashMap is NOT intrinsically thread-safe despite the original
// "Thread safe registry" comments; safety relies on init() fully populating the maps
// before concurrent reads begin — confirm against core-reload semantics.
protected final Map<String,SolrFormatter> formatters =
  new HashMap<>();
protected final Map<String,SolrEncoder> encoders =
  new HashMap<>();
protected final Map<String,SolrFragmenter> fragmenters =
  new HashMap<>() ;
protected final Map<String, SolrFragListBuilder> fragListBuilders =
  new HashMap<>() ;
protected final Map<String, SolrFragmentsBuilder> fragmentsBuilders =
  new HashMap<>() ;
protected final Map<String, SolrBoundaryScanner> boundaryScanners =
  new HashMap<>() ;
@Override
public void init(PluginInfo info) {
  // Reset all registries so a re-init (e.g. core reload) starts from a clean slate.
  formatters.clear();
  encoders.clear();
  fragmenters.clear();
  fragListBuilders.clear();
  fragmentsBuilders.clear();
  boundaryScanners.clear();
  // For each plugin type: load configured implementations into the registry, then register
  // a default under both "" and null so lookups with a missing/empty request param resolve.
  // Load the fragmenters
  SolrFragmenter frag = solrCore.initPlugins(info.getChildren("fragmenter") , fragmenters,SolrFragmenter.class,null);
  if (frag == null) frag = new GapFragmenter();
  fragmenters.put("", frag);
  fragmenters.put(null, frag);
  // Load the formatters
  SolrFormatter fmt = solrCore.initPlugins(info.getChildren("formatter"), formatters,SolrFormatter.class,null);
  if (fmt == null) fmt = new HtmlFormatter();
  formatters.put("", fmt);
  formatters.put(null, fmt);
  // Load the encoders
  SolrEncoder enc = solrCore.initPlugins(info.getChildren("encoder"), encoders,SolrEncoder.class,null);
  if (enc == null) enc = new DefaultEncoder();
  encoders.put("", enc);
  encoders.put(null, enc);
  // Load the FragListBuilders
  SolrFragListBuilder fragListBuilder = solrCore.initPlugins(info.getChildren("fragListBuilder"),
      fragListBuilders, SolrFragListBuilder.class, null );
  if( fragListBuilder == null ) fragListBuilder = new SimpleFragListBuilder();
  fragListBuilders.put( "", fragListBuilder );
  fragListBuilders.put( null, fragListBuilder );
  // Load the FragmentsBuilders
  SolrFragmentsBuilder fragsBuilder = solrCore.initPlugins(info.getChildren("fragmentsBuilder"),
      fragmentsBuilders, SolrFragmentsBuilder.class, null);
  if( fragsBuilder == null ) fragsBuilder = new ScoreOrderFragmentsBuilder();
  fragmentsBuilders.put( "", fragsBuilder );
  fragmentsBuilders.put( null, fragsBuilder );
  // Load the BoundaryScanners
  SolrBoundaryScanner boundaryScanner = solrCore.initPlugins(info.getChildren("boundaryScanner"),
      boundaryScanners, SolrBoundaryScanner.class, null);
  if(boundaryScanner == null) boundaryScanner = new SimpleBoundaryScanner();
  boundaryScanners.put("", boundaryScanner);
  boundaryScanners.put(null, boundaryScanner);
}
/**
 * Return a phrase {@link org.apache.lucene.search.highlight.Highlighter} appropriate for this field.
 * @param query The current Query
 * @param fieldName The name of the field
 * @param request The current SolrQueryRequest
 * @param tokenStream document text tokenStream that implements reset() efficiently (e.g. CachingTokenFilter).
 * If it's used, call reset() first.
 * @throws IOException If there is a low-level I/O error.
 */
protected Highlighter getPhraseHighlighter(Query query, String fieldName, SolrQueryRequest request, TokenStream tokenStream) throws IOException {
  SolrParams solrParams = request.getParams();
  // Assemble the highlighter from the per-field formatter, encoder, and span-aware scorer.
  Formatter fieldFormatter = getFormatter(fieldName, solrParams);
  Encoder fieldEncoder = getEncoder(fieldName, solrParams);
  QueryScorer spanScorer = getSpanQueryScorer(query, fieldName, tokenStream, request);
  Highlighter phraseHighlighter = new Highlighter(fieldFormatter, fieldEncoder, spanScorer);
  phraseHighlighter.setTextFragmenter(getFragmenter(fieldName, solrParams));
  return phraseHighlighter;
}
/**
 * Return a {@link org.apache.lucene.search.highlight.Highlighter} appropriate for this field.
 * @param query The current Query
 * @param fieldName The name of the field
 * @param request The current SolrQueryRequest
 */
protected Highlighter getHighlighter(Query query, String fieldName, SolrQueryRequest request) {
  SolrParams solrParams = request.getParams();
  // Non-phrase variant: uses a plain term scorer instead of the span-aware one.
  Formatter fieldFormatter = getFormatter(fieldName, solrParams);
  Encoder fieldEncoder = getEncoder(fieldName, solrParams);
  Scorer termScorer = getQueryScorer(query, fieldName, request);
  Highlighter termHighlighter = new Highlighter(fieldFormatter, fieldEncoder, termScorer);
  termHighlighter.setTextFragmenter(getFragmenter(fieldName, solrParams));
  return termHighlighter;
}
/**
 * Return a {@link org.apache.lucene.search.highlight.QueryScorer} suitable for this Query and field.
 * @param query The current query
 * @param tokenStream document text tokenStream that implements reset() efficiently (e.g. CachingTokenFilter).
 * If it's used, call reset() first.
 * @param fieldName The name of the field
 * @param request The SolrQueryRequest
 */
protected QueryScorer getSpanQueryScorer(Query query, String fieldName, TokenStream tokenStream, SolrQueryRequest request) {
  // hl.requireFieldMatch=true restricts scoring to terms from this field only (null = any field).
  QueryScorer scorer = new QueryScorer(query,
      request.getParams().getFieldBool(fieldName, HighlightParams.FIELD_MATCH, false) ? fieldName : null);
  scorer.setExpandMultiTermQuery(request.getParams().getBool(HighlightParams.HIGHLIGHT_MULTI_TERM, true));
  boolean defaultPayloads = true;//overwritten below
  try {
    // It'd be nice to know if payloads are on the tokenStream but the presence of the attribute isn't a good
    // indicator.
    final Terms terms = request.getSearcher().getLeafReader().fields().terms(fieldName);
    if (terms != null) {
      defaultPayloads = terms.hasPayloads();
    }
  } catch (IOException e) {
    // Non-fatal: log and fall back to the default rather than failing the whole request.
    log.error("Couldn't check for existence of payloads", e);
  }
  scorer.setUsePayloads(request.getParams().getFieldBool(fieldName, HighlightParams.PAYLOADS, defaultPayloads));
  return scorer;
}
/**
 * Return a {@link org.apache.lucene.search.highlight.Scorer} suitable for this Query and field.
 * @param query The current query
 * @param fieldName The name of the field
 * @param request The SolrQueryRequest
 */
protected Scorer getQueryScorer(Query query, String fieldName, SolrQueryRequest request) {
  // With hl.requireFieldMatch, only terms belonging to this field contribute to the score.
  boolean fieldMatchRequired = request.getParams().getFieldBool(fieldName, HighlightParams.FIELD_MATCH, false);
  return fieldMatchRequired
      ? new QueryTermScorer(query, request.getSearcher().getIndexReader(), fieldName)
      : new QueryTermScorer(query);
}
/**
 * Return the max number of snippets for this field. If this has not
 * been configured for this field, fall back to the configured default
 * or the solr default.
 * @param fieldName The name of the field
 * @param params The params controlling Highlighting
 */
protected int getMaxSnippets(String fieldName, SolrParams params) {
  // hl.snippets, per-field overridable; solr-wide default is 1.
  return params.getFieldInt(fieldName, HighlightParams.SNIPPETS, 1);
}
/**
 * Return whether adjacent fragments should be merged.
 * @param fieldName The name of the field
 * @param params The params controlling Highlighting
 */
protected boolean isMergeContiguousFragments(String fieldName, SolrParams params){
  // hl.mergeContiguous, per-field overridable; defaults to false.
  return params.getFieldBool(fieldName, HighlightParams.MERGE_CONTIGUOUS_FRAGMENTS, false);
}
/**
 * Return a {@link org.apache.lucene.search.highlight.Formatter} appropriate for this field. If a formatter
 * has not been configured for this field, fall back to the configured
 * default or the solr default ({@link org.apache.lucene.search.highlight.SimpleHTMLFormatter}).
 *
 * @param fieldName The name of the field
 * @param params The params controlling Highlighting
 * @return An appropriate {@link org.apache.lucene.search.highlight.Formatter}.
 */
protected Formatter getFormatter(String fieldName, SolrParams params )
{
  // A missing hl.formatter param yields null, which resolves to the registered default.
  String formatterName = params.getFieldParam(fieldName, HighlightParams.FORMATTER);
  SolrFormatter registered = formatters.get(formatterName);
  if (registered == null) {
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown formatter: " + formatterName);
  }
  return registered.getFormatter(fieldName, params);
}
/**
 * Return an {@link org.apache.lucene.search.highlight.Encoder} appropriate for this field. If an encoder
 * has not been configured for this field, fall back to the configured
 * default or the solr default ({@link org.apache.lucene.search.highlight.DefaultEncoder}).
 *
 * @param fieldName The name of the field
 * @param params The params controlling Highlighting
 * @return An appropriate {@link org.apache.lucene.search.highlight.Encoder}.
 */
protected Encoder getEncoder(String fieldName, SolrParams params){
  // A missing hl.encoder param yields null, which resolves to the registered default.
  String encoderName = params.getFieldParam(fieldName, HighlightParams.ENCODER);
  SolrEncoder registered = encoders.get(encoderName);
  if (registered == null) {
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown encoder: " + encoderName);
  }
  return registered.getEncoder(fieldName, params);
}
/**
 * Return a {@link org.apache.lucene.search.highlight.Fragmenter} appropriate for this field. If a fragmenter
 * has not been configured for this field, fall back to the configured
 * default or the solr default ({@link GapFragmenter}).
 *
 * @param fieldName The name of the field
 * @param params The params controlling Highlighting
 * @return An appropriate {@link org.apache.lucene.search.highlight.Fragmenter}.
 */
protected Fragmenter getFragmenter(String fieldName, SolrParams params)
{
  // A missing hl.fragmenter param yields null, which resolves to the registered default.
  String fragmenterName = params.getFieldParam(fieldName, HighlightParams.FRAGMENTER);
  SolrFragmenter registered = fragmenters.get(fragmenterName);
  if (registered == null) {
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown fragmenter: " + fragmenterName);
  }
  return registered.getFragmenter(fieldName, params);
}
/** Resolves the FVH FragListBuilder for this field; unknown names are a client error. */
protected FragListBuilder getFragListBuilder( String fieldName, SolrParams params ){
  String builderName = params.getFieldParam(fieldName, HighlightParams.FRAG_LIST_BUILDER);
  SolrFragListBuilder registered = fragListBuilders.get(builderName);
  if (registered == null) {
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown fragListBuilder: " + builderName);
  }
  return registered.getFragListBuilder(params);
}
/** Builds the FVH FragmentsBuilder for this field, wired to the field's boundary scanner. */
protected FragmentsBuilder getFragmentsBuilder( String fieldName, SolrParams params ){
  return getSolrFragmentsBuilder(fieldName, params)
      .getFragmentsBuilder(params, getBoundaryScanner(fieldName, params));
}
/** Resolves the SolrFragmentsBuilder plugin for this field; unknown names are a client error. */
protected SolrFragmentsBuilder getSolrFragmentsBuilder( String fieldName, SolrParams params ){
  String builderName = params.getFieldParam(fieldName, HighlightParams.FRAGMENTS_BUILDER);
  SolrFragmentsBuilder registered = fragmentsBuilders.get(builderName);
  if (registered == null) {
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown fragmentsBuilder: " + builderName);
  }
  return registered;
}
/** Resolves the BoundaryScanner for this field; unknown names are a client error. */
protected BoundaryScanner getBoundaryScanner(String fieldName, SolrParams params){
  String scannerName = params.getFieldParam(fieldName, HighlightParams.BOUNDARY_SCANNER);
  SolrBoundaryScanner registered = boundaryScanners.get(scannerName);
  if (registered == null) {
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown boundaryScanner: " + scannerName);
  }
  return registered.getBoundaryScanner(fieldName, params);
}
/**
 * Generates a list of Highlighted query fragments for each item in a list
 * of documents, or returns null if highlighting is disabled.
 *
 * @param docs query results
 * @param query the query
 * @param req the current request
 * @param defaultFields default list of fields to summarize
 *
 * @return NamedList containing a NamedList for each document, which in
 * turns contains sets (field, summary) pairs.
 */
@Override
@SuppressWarnings("unchecked")
public NamedList<Object> doHighlighting(DocList docs, Query query, SolrQueryRequest req, String[] defaultFields) throws IOException {
  SolrParams params = req.getParams();
  if (!isHighlightingEnabled(params)) // also returns early if no unique key field
    return null;
  SolrIndexSearcher searcher = req.getSearcher();
  IndexSchema schema = searcher.getSchema();
  // fetch unique key if one exists.
  SchemaField keyField = schema.getUniqueKeyField();
  if (keyField == null) {
    return null;//exit early; we need a unique key field to populate the response
  }
  String[] fieldNames = getHighlightFields(query, req, defaultFields);
  Set<String> preFetchFieldNames = getDocPrefetchFieldNames(fieldNames, req);
  if (preFetchFieldNames != null) {
    preFetchFieldNames.add(keyField.getName());
  }
  // FVH state is created lazily, only if some field actually selects the FastVectorHighlighter.
  FastVectorHighlighter fvh = null; // lazy
  FieldQuery fvhFieldQuery = null; // lazy
  IndexReader reader = new TermVectorReusingLeafReader(req.getSearcher().getLeafReader()); // SOLR-5855
  // Highlight each document
  NamedList fragments = new SimpleOrderedMap();
  DocIterator iterator = docs.iterator();
  for (int i = 0; i < docs.size(); i++) {
    int docId = iterator.nextDoc();
    Document doc = searcher.doc(docId, preFetchFieldNames);
    @SuppressWarnings("rawtypes")
    NamedList docHighlights = new SimpleOrderedMap();
    // Highlight per-field
    for (String fieldName : fieldNames) {
      SchemaField schemaField = schema.getFieldOrNull(fieldName);
      Object fieldHighlights; // object type allows flexibility for subclassers
      if (schemaField == null) {
        fieldHighlights = null;
      } else if (schemaField.getType() instanceof org.apache.solr.schema.TrieField) {
        // TODO: highlighting numeric fields is broken (Lucene) - so we disable them until fixed (see LUCENE-3080)!
        fieldHighlights = null;
      } else if (useFastVectorHighlighter(params, schemaField)) {
        if (fvhFieldQuery == null) {
          fvh = new FastVectorHighlighter(
              // FVH cannot process hl.usePhraseHighlighter parameter per-field basis
              params.getBool(HighlightParams.USE_PHRASE_HIGHLIGHTER, true),
              // FVH cannot process hl.requireFieldMatch parameter per-field basis
              params.getBool(HighlightParams.FIELD_MATCH, false));
          fvh.setPhraseLimit(params.getInt(HighlightParams.PHRASE_LIMIT, SolrHighlighter.DEFAULT_PHRASE_LIMIT));
          fvhFieldQuery = fvh.getFieldQuery(query, reader);
        }
        fieldHighlights =
            doHighlightingByFastVectorHighlighter(doc, docId, schemaField, fvh, fvhFieldQuery, reader, req);
      } else { // standard/default highlighter
        fieldHighlights = doHighlightingByHighlighter(doc, docId, schemaField, query, reader, req);
      }
      if (fieldHighlights == null) {
        // no summaries made; copy text from alternate field
        fieldHighlights = alternateField(doc, fieldName, req);
      }
      if (fieldHighlights != null) {
        docHighlights.add(fieldName, fieldHighlights);
      }
    } // for each field
    fragments.add(schema.printableUniqueKey(doc), docHighlights);
  } // for each doc
  return fragments;
}
/** Returns the field names to be passed to {@link SolrIndexSearcher#doc(int, Set)}.
 * Subclasses might over-ride to include fields in search-results and other stored field values needed so as to avoid
 * the possibility of extra trips to disk. The uniqueKey will be added after if the result isn't null. */
protected Set<String> getDocPrefetchFieldNames(String[] hlFieldNames, SolrQueryRequest req) {
  Set<String> preFetchFieldNames = new HashSet<>(hlFieldNames.length + 1);//+1 for uniqueKey added after
  Collections.addAll(preFetchFieldNames, hlFieldNames);
  return preFetchFieldNames;
}
/**
 * Determines if we should use the FastVectorHighlighter for this field.
 */
protected boolean useFastVectorHighlighter(SolrParams params, SchemaField schemaField) {
  // hl.useFastVectorHighlighter must be explicitly enabled for this field.
  if (!params.getFieldBool(schemaField.getName(), HighlightParams.USE_FVH, false)) {
    return false;
  }
  // FVH also needs term positions AND offsets stored in the term vectors.
  boolean hasPositionsAndOffsets = schemaField.storeTermPositions() && schemaField.storeTermOffsets();
  if (!hasPositionsAndOffsets) {
    log.warn("Solr will not use FastVectorHighlighter because {} field does not store TermPositions and "
        + "TermOffsets.", schemaField.getName());
  }
  return hasPositionsAndOffsets;
}
/** Highlights and returns the highlight object for this field -- a String[] by default. Null if none. */
@SuppressWarnings("unchecked")
protected Object doHighlightingByFastVectorHighlighter(Document doc, int docId,
    SchemaField schemaField, FastVectorHighlighter highlighter,
    FieldQuery fieldQuery,
    IndexReader reader, SolrQueryRequest req) throws IOException {
  SolrParams params = req.getParams();
  String fieldName = schemaField.getName();
  SolrFragmentsBuilder solrFb = getSolrFragmentsBuilder(fieldName, params);
  // Delegate to Lucene's FVH, configured entirely from (per-field) request params.
  String[] snippets = highlighter.getBestFragments( fieldQuery, reader, docId, fieldName,
      params.getFieldInt( fieldName, HighlightParams.FRAGSIZE, 100 ),
      params.getFieldInt( fieldName, HighlightParams.SNIPPETS, 1 ),
      getFragListBuilder( fieldName, params ),
      getFragmentsBuilder( fieldName, params ),
      solrFb.getPreTags( params, fieldName ),
      solrFb.getPostTags( params, fieldName ),
      getEncoder( fieldName, params ) );
  // Normalize "no snippets" to null so the caller can fall back to the alternate field.
  if (snippets != null && snippets.length > 0 )
    return snippets;
  return null;
}
/** Highlights and returns the highlight object for this field -- a String[] by default. Null if none. */
@SuppressWarnings("unchecked")
protected Object doHighlightingByHighlighter(Document doc, int docId, SchemaField schemaField, Query query,
    IndexReader reader, SolrQueryRequest req) throws IOException {
  final SolrParams params = req.getParams();
  final String fieldName = schemaField.getName();
  // hl.maxMultiValuedToExamine: how many values of a multi-valued field to consider at all.
  final int mvToExamine =
      req.getParams().getFieldInt(fieldName, HighlightParams.MAX_MULTIVALUED_TO_EXAMINE,
          (schemaField.multiValued()) ? Integer.MAX_VALUE : 1);
  // Technically this is the max *fragments* (snippets), not max values:
  int mvToMatch =
      req.getParams().getFieldInt(fieldName, HighlightParams.MAX_MULTIVALUED_TO_MATCH, Integer.MAX_VALUE);
  if (mvToExamine <= 0 || mvToMatch <= 0) {
    return null;
  }
  int maxCharsToAnalyze = params.getFieldInt(fieldName,
      HighlightParams.MAX_CHARS,
      Highlighter.DEFAULT_MAX_CHARS_TO_ANALYZE);
  if (maxCharsToAnalyze < 0) {//e.g. -1
    maxCharsToAnalyze = Integer.MAX_VALUE;
  }
  List<String> fieldValues = getFieldValues(doc, fieldName, mvToExamine, maxCharsToAnalyze, req);
  if (fieldValues.isEmpty()) {
    return null;
  }
  // preserve order of values in a multiValued list
  boolean preserveMulti = params.getFieldBool(fieldName, HighlightParams.PRESERVE_MULTI, false);
  int numFragments = getMaxSnippets(fieldName, params);
  boolean mergeContiguousFragments = isMergeContiguousFragments(fieldName, params);
  List<TextFragment> frags = new ArrayList<>();
  //Try term vectors, which is faster
  // note: offsets are minimally sufficient for this HL.
  final Fields tvFields = schemaField.storeTermOffsets() ? reader.getTermVectors(docId) : null;
  final TokenStream tvStream =
      TokenSources.getTermVectorTokenStreamOrNull(fieldName, tvFields, maxCharsToAnalyze - 1);
  // We need to wrap in OffsetWindowTokenFilter if multi-valued
  final OffsetWindowTokenFilter tvWindowStream;
  if (tvStream != null && fieldValues.size() > 1) {
    tvWindowStream = new OffsetWindowTokenFilter(tvStream);
  } else {
    tvWindowStream = null;
  }
  // Process each stored value of the field, accumulating scored fragments into `frags`.
  for (String thisText : fieldValues) {
    if (mvToMatch <= 0 || maxCharsToAnalyze <= 0) {
      break;
    }
    // Token source priority: term-vector offset window (multi-valued) > raw term-vector
    // stream (single-valued) > re-analyzing the stored text.
    TokenStream tstream;
    if (tvWindowStream != null) {
      // if we have a multi-valued field with term vectors, then get the next offset window
      tstream = tvWindowStream.advanceToNextWindowOfLength(thisText.length());
    } else if (tvStream != null) {
      tstream = tvStream; // single-valued with term vectors
    } else {
      // fall back to analyzer
      tstream = createAnalyzerTStream(schemaField, thisText);
    }
    Highlighter highlighter;
    if (req.getParams().getFieldBool(fieldName, HighlightParams.USE_PHRASE_HIGHLIGHTER, true)) {
      // We're going to call getPhraseHighlighter and it might consume the tokenStream. If it does, the tokenStream
      // needs to implement reset() efficiently.
      //If the tokenStream is right from the term vectors, then CachingTokenFilter is unnecessary.
      // It should be okay if OffsetLimit won't get applied in this case.
      final TokenStream tempTokenStream;
      if (tstream != tvStream) {
        if (maxCharsToAnalyze >= thisText.length()) {
          tempTokenStream = new CachingTokenFilter(tstream);
        } else {
          tempTokenStream = new CachingTokenFilter(new OffsetLimitTokenFilter(tstream, maxCharsToAnalyze));
        }
      } else {
        tempTokenStream = tstream;
      }
      // get highlighter
      highlighter = getPhraseHighlighter(query, fieldName, req, tempTokenStream);
      // if the CachingTokenFilter was consumed then use it going forward.
      if (tempTokenStream instanceof CachingTokenFilter && ((CachingTokenFilter) tempTokenStream).isCached()) {
        tstream = tempTokenStream;
      }
      //tstream.reset(); not needed; getBestTextFragments will reset it.
    } else {
      // use "the old way"
      highlighter = getHighlighter(query, fieldName, req);
    }
    highlighter.setMaxDocCharsToAnalyze(maxCharsToAnalyze);
    // The character budget is shared across all values of a multi-valued field.
    maxCharsToAnalyze -= thisText.length();
    // Highlight!
    try {
      TextFragment[] bestTextFragments =
          highlighter.getBestTextFragments(tstream, thisText, mergeContiguousFragments, numFragments);
      for (TextFragment bestTextFragment : bestTextFragments) {
        if (bestTextFragment == null)//can happen via mergeContiguousFragments
          continue;
        // normally we want a score (must be highlighted), but if preserveMulti then we return a snippet regardless.
        if (bestTextFragment.getScore() > 0 || preserveMulti) {
          frags.add(bestTextFragment);
          if (bestTextFragment.getScore() > 0)
            --mvToMatch; // note: limits fragments (for multi-valued fields), not quite the number of values
        }
      }
    } catch (InvalidTokenOffsetsException e) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
    }
  }//end field value loop
  // Put the fragments onto the Solr response (docSummaries)
  if (frags.size() > 0) {
    // sort such that the fragments with the highest score come first
    if (!preserveMulti) {
      Collections.sort(frags, new Comparator<TextFragment>() {//TODO make TextFragment Comparable
        @Override
        public int compare(TextFragment arg0, TextFragment arg1) {
          return Float.compare(arg1.getScore(), arg0.getScore());
        }
      });
    }
    // Truncate list to hl.snippets, but not when hl.preserveMulti
    if (frags.size() > numFragments && !preserveMulti) {
      frags = frags.subList(0, numFragments);
    }
    return getResponseForFragments(frags, req);
  }
  return null;//no highlights for this field
}
/** Fetches field values to highlight. If the field value should come from an atypical place (or another aliased
 * field name, then a subclass could override to implement that.
 */
protected List<String> getFieldValues(Document doc, String fieldName, int maxValues, int maxCharsToAnalyze,
    SolrQueryRequest req) {
  // Collect the Fields we will examine (could be more than one if multi-valued)
  List<String> result = new ArrayList<>();
  for (IndexableField thisField : doc.getFields()) {
    if (! thisField.name().equals(fieldName)) {
      continue;
    }
    String value = thisField.stringValue();
    // Fix: binary-only stored fields return null from stringValue(); previously this added
    // a null entry and then threw NPE at value.length(). Skip such values instead.
    if (value == null) {
      continue;
    }
    result.add(value);
    maxCharsToAnalyze -= value.length();//we exit early if we'll never get to analyze the value
    maxValues--;
    if (maxValues <= 0 || maxCharsToAnalyze <= 0) {
      break;
    }
  }
  return result;
}
/** Given the fragments, return the result to be put in the field {@link NamedList}. This is an extension
 * point to allow adding other metadata like the absolute offsets or scores.
 */
protected Object getResponseForFragments(List<TextFragment> frags, SolrQueryRequest req) {
  // TODO: we can include score and position information in output as snippet attributes
  String[] snippetTexts = new String[frags.size()];
  for (int i = 0; i < snippetTexts.length; i++) {
    snippetTexts[i] = frags.get(i).toString();
  }
  return snippetTexts;
}
/** Returns the alternate highlight object for this field -- a String[] by default. Null if none. */
@SuppressWarnings("unchecked")
protected Object alternateField(Document doc, String fieldName, SolrQueryRequest req) {
  SolrParams params = req.getParams();
  String alternateField = params.getFieldParam(fieldName, HighlightParams.ALTERNATE_FIELD);
  if (alternateField == null || alternateField.length() == 0) {
    return null;
  }
  IndexableField[] docFields = doc.getFields(alternateField);
  if (docFields.length == 0) {
    // The alternate field did not exist, treat the original field as fallback instead
    docFields = doc.getFields(fieldName);
  }
  List<String> listFields = new ArrayList<>();
  for (IndexableField field : docFields) {
    // Only plain stored text is usable as a snippet; skip binary-valued fields.
    if (field.binaryValue() == null)
      listFields.add(field.stringValue());
  }
  if (listFields.isEmpty()) {
    return null;
  }
  String[] altTexts = listFields.toArray(new String[listFields.size()]);
  Encoder encoder = getEncoder(fieldName, params);
  // hl.maxAlternateFieldLength caps the TOTAL character count across all returned values
  // (<= 0 means unlimited).
  int alternateFieldLen = params.getFieldInt(fieldName, HighlightParams.ALTERNATE_FIELD_LENGTH, 0);
  List<String> altList = new ArrayList<>();
  int len = 0;
  for( String altText: altTexts ){
    if( alternateFieldLen <= 0 ){
      altList.add(encoder.encodeText(altText));
    } else{
      // Truncate the value that would exceed the remaining budget; the break below
      // guarantees len < alternateFieldLen here, so the substring bound is non-negative.
      //note: seemingly redundant new String(...) releases memory to the larger text. But is copying better?
      altList.add( len + altText.length() > alternateFieldLen ?
          encoder.encodeText(new String(altText.substring( 0, alternateFieldLen - len ))) :
          encoder.encodeText(altText) );
      len += altText.length();
      if( len >= alternateFieldLen ) break;
    }
  }
  return altList;
}
/** Re-analyzes the stored text with the field's index analyzer, reordering tokens by startOffset
 * (window of 10) to keep the highlighter happy. */
protected TokenStream createAnalyzerTStream(SchemaField schemaField, String docText) throws IOException {
  return new TokenOrderingFilter(
      schemaField.getType().getIndexAnalyzer().tokenStream(schemaField.getName(), docText), 10);
}
}
/** Orders Tokens in a window first by their startOffset ascending.
 * endOffset is currently ignored.
 * This is meant to work around fickleness in the highlighter only. It
 * can mess up token positions and should not be used for indexing or querying.
 */
final class TokenOrderingFilter extends TokenFilter {
  // Max number of buffered tokens; tokens can only be reordered within this window.
  private final int windowSize;
  private final LinkedList<OrderedToken> queue = new LinkedList<>(); //TODO replace with Deque, Array impl
  // Set once the underlying stream is exhausted.
  private boolean done=false;
  private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
  protected TokenOrderingFilter(TokenStream input, int windowSize) {
    super(input);
    this.windowSize = windowSize;
  }
  @Override
  public void reset() throws IOException {
    super.reset();
    // Discard any buffered state so the filter can be reused on a fresh stream.
    queue.clear();
    done = false;
  }
  @Override
  public boolean incrementToken() throws IOException {
    // Fill the buffer up to windowSize tokens, inserting each new token at its
    // startOffset-sorted position within the queue.
    while (!done && queue.size() < windowSize) {
      if (!input.incrementToken()) {
        done = true;
        break;
      }
      // reverse iterating for better efficiency since we know the
      // list is already sorted, and most token start offsets will be too.
      ListIterator<OrderedToken> iter = queue.listIterator(queue.size());
      while(iter.hasPrevious()) {
        if (offsetAtt.startOffset() >= iter.previous().startOffset) {
          // insertion will be before what next() would return (what
          // we just compared against), so move back one so the insertion
          // will be after.
          iter.next();
          break;
        }
      }
      // Snapshot the full attribute state; restored when this token is finally emitted.
      OrderedToken ot = new OrderedToken();
      ot.state = captureState();
      ot.startOffset = offsetAtt.startOffset();
      iter.add(ot);
    }
    // Emit the buffered token with the smallest startOffset, if any remain.
    if (queue.isEmpty()) {
      return false;
    } else {
      restoreState(queue.removeFirst().state);
      return true;
    }
  }
}
// for TokenOrderingFilter, so it can easily sort by startOffset
class OrderedToken {
    // Captured attribute state of the token, restored when the token is emitted.
    State state;
    // Start offset of the token; the sort key used by TokenOrderingFilter.
    int startOffset;
}
/** For use with term vectors of multi-valued fields. We want an offset based window into its TokenStream. */
final class OffsetWindowTokenFilter extends TokenFilter {
    private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
    private final PositionIncrementAttribute posIncAtt = addAttribute(PositionIncrementAttribute.class);
    // Inclusive start offset of the current window (one value of the multi-valued field).
    private int windowStartOffset;
    private int windowEndOffset = -1;//exclusive
    // Guards against reset() being called after tokens were consumed for a window.
    private boolean windowTokenIncremented = false;
    // The underlying stream is reset exactly once, on the first window.
    private boolean inputWasReset = false;
    private State capturedState;//only used for first token of each subsequent window

    OffsetWindowTokenFilter(TokenStream input) {//input should not have been reset already
        super(input);
    }

    //Called at the start of each value/window
    OffsetWindowTokenFilter advanceToNextWindowOfLength(int length) {
        windowStartOffset = windowEndOffset + 1;//unclear why there's a single offset gap between values, but tests show it
        windowEndOffset = windowStartOffset + length;
        windowTokenIncremented = false;//thereby permit reset()
        return this;
    }

    @Override
    public void reset() throws IOException {
        //we do some state checking to ensure this is being used correctly
        if (windowTokenIncremented) {
            throw new IllegalStateException("This TokenStream does not support being subsequently reset()");
        }
        if (!inputWasReset) {
            super.reset();
            inputWasReset = true;
        }
    }

    /**
     * Emits tokens whose start offset falls inside the current window, with
     * offsets re-based so each window appears to start at 0. A token past the
     * window's end is captured and replayed as the first token of the next window.
     */
    @Override
    public boolean incrementToken() throws IOException {
        assert inputWasReset;
        windowTokenIncremented = true;
        while (true) {
            //increment Token
            if (capturedState == null) {
                if (!input.incrementToken()) {
                    return false;
                }
            } else {
                // Replay the token saved when the previous window ended.
                restoreState(capturedState);
                capturedState = null;
                //Set posInc to 1 on first token of subsequent windows. To be thorough, we could subtract posIncGap?
                posIncAtt.setPositionIncrement(1);
            }
            final int startOffset = offsetAtt.startOffset();
            final int endOffset = offsetAtt.endOffset();
            if (startOffset >= windowEndOffset) {//end of window
                capturedState = captureState();
                return false;
            }
            if (startOffset >= windowStartOffset) {//in this window
                // Re-base offsets so they are relative to this window/value.
                offsetAtt.setOffset(startOffset - windowStartOffset, endOffset - windowStartOffset);
                return true;
            }
            //otherwise this token is before the window; continue to advance
        }
    }
}
/** Wraps a DirectoryReader that caches the {@link LeafReader#getTermVectors(int)} so that
* if the next call has the same ID, then it is reused.
*/
class TermVectorReusingLeafReader extends FilterLeafReader {

    /** Doc ID of the cached term vectors; -1 means nothing cached yet. */
    private int lastDocId = -1;
    /** Term vectors fetched for {@link #lastDocId} (may be null). */
    private Fields tvFields;

    public TermVectorReusingLeafReader(LeafReader in) {
        super(in);
    }

    /**
     * Returns the term vectors for {@code docID}, reusing the previously
     * fetched result when the same doc ID is requested consecutively.
     */
    @Override
    public Fields getTermVectors(int docID) throws IOException {
        if (docID == lastDocId) {
            return tvFields; // cache hit: same doc as last call
        }
        lastDocId = docID;
        tvFields = in.getTermVectors(docID);
        return tvFields;
    }
}
| |
package com.vaadin.tests.components.upload;
import java.io.OutputStream;
import com.vaadin.server.VaadinRequest;
import com.vaadin.tests.components.AbstractTestUI;
import com.vaadin.ui.Button;
import com.vaadin.ui.FormLayout;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.Label;
import com.vaadin.ui.ProgressBar;
import com.vaadin.ui.UI;
import com.vaadin.ui.Upload;
import com.vaadin.ui.Upload.FailedEvent;
import com.vaadin.ui.Upload.FinishedEvent;
import com.vaadin.ui.Upload.Receiver;
import com.vaadin.ui.Upload.StartedEvent;
import com.vaadin.ui.Upload.SucceededEvent;
import com.vaadin.ui.Window;
public class InterruptUpload extends AbstractTestUI {
private Upload sample;
private UploadInfoWindow uploadInfoWindow;
@Override
protected void setup(VaadinRequest request) {
LineBreakCounter lineBreakCounter = new LineBreakCounter();
lineBreakCounter.setSlow(true);
sample = new Upload(null, lineBreakCounter);
sample.setImmediateMode(true);
sample.setButtonCaption("Upload File");
uploadInfoWindow = new UploadInfoWindow(sample, lineBreakCounter);
sample.addStartedListener(event -> {
if (uploadInfoWindow.getParent() == null) {
UI.getCurrent().addWindow(uploadInfoWindow);
}
uploadInfoWindow.setClosable(false);
});
sample.addFinishedListener(event -> uploadInfoWindow.setClosable(true));
addComponent(sample);
}
private static class UploadInfoWindow extends Window
implements Upload.StartedListener, Upload.ProgressListener,
Upload.FailedListener, Upload.SucceededListener,
Upload.FinishedListener {
private final Label state = new Label();
private final Label result = new Label();
private final Label fileName = new Label();
private final Label textualProgress = new Label();
private final ProgressBar progressBar = new ProgressBar();
private final Button cancelButton;
private final LineBreakCounter counter;
private UploadInfoWindow(final Upload upload,
final LineBreakCounter lineBreakCounter) {
super("Status");
counter = lineBreakCounter;
addStyleName("upload-info");
setResizable(false);
setDraggable(false);
final FormLayout uploadInfoLayout = new FormLayout();
setContent(uploadInfoLayout);
uploadInfoLayout.setMargin(true);
final HorizontalLayout stateLayout = new HorizontalLayout();
stateLayout.setSpacing(true);
stateLayout.addComponent(state);
cancelButton = new Button("Cancel");
cancelButton.addClickListener(event -> upload.interruptUpload());
cancelButton.setVisible(false);
cancelButton.setStyleName("small");
stateLayout.addComponent(cancelButton);
stateLayout.setCaption("Current state");
state.setValue("Idle");
uploadInfoLayout.addComponent(stateLayout);
fileName.setCaption("File name");
uploadInfoLayout.addComponent(fileName);
result.setCaption("Line breaks counted");
uploadInfoLayout.addComponent(result);
progressBar.setCaption("Progress");
progressBar.setVisible(false);
uploadInfoLayout.addComponent(progressBar);
textualProgress.setVisible(false);
uploadInfoLayout.addComponent(textualProgress);
upload.addStartedListener(this);
upload.addProgressListener(this);
upload.addFailedListener(this);
upload.addSucceededListener(this);
upload.addFinishedListener(this);
}
@Override
public void uploadFinished(final FinishedEvent event) {
state.setValue("Idle");
progressBar.setVisible(false);
textualProgress.setVisible(false);
cancelButton.setVisible(false);
UI.getCurrent().setPollInterval(-1);
}
@Override
public void uploadStarted(final StartedEvent event) {
// this method gets called immediately after upload is started
progressBar.setValue(0f);
progressBar.setVisible(true);
UI.getCurrent().setPollInterval(500);
textualProgress.setVisible(true);
// updates to client
state.setValue("Uploading");
fileName.setValue(event.getFilename());
cancelButton.setVisible(true);
}
@Override
public void updateProgress(final long readBytes,
final long contentLength) {
// this method gets called several times during the update
progressBar.setValue(readBytes / (float) contentLength);
textualProgress.setValue(
"Processed " + readBytes + " bytes of " + contentLength);
result.setValue(counter.getLineBreakCount() + " (counting...)");
}
@Override
public void uploadSucceeded(final SucceededEvent event) {
result.setValue(counter.getLineBreakCount() + " (total)");
}
@Override
public void uploadFailed(final FailedEvent event) {
result.setValue(
counter.getLineBreakCount() + " (counting interrupted at "
+ Math.round(100 * progressBar.getValue()) + "%)");
}
}
private static class LineBreakCounter implements Receiver {
private int counter;
private int total;
private boolean sleep;
/**
* return an OutputStream that simply counts lineends
*/
@Override
public OutputStream receiveUpload(final String filename,
final String MIMEType) {
counter = 0;
total = 0;
return new OutputStream() {
private static final int searchedByte = '\n';
@Override
public void write(final int b) {
total++;
if (b == searchedByte) {
counter++;
}
if (sleep && total % 1000 == 0) {
try {
Thread.sleep(100);
} catch (final InterruptedException e) {
e.printStackTrace();
}
}
}
};
}
private int getLineBreakCount() {
return counter;
}
private void setSlow(boolean value) {
sleep = value;
}
}
@Override
protected Integer getTicketNumber() {
return 9635;
}
@Override
public String getDescription() {
return "Interrupting an upload shouldn't prevent uploading that same file immediately afterwards.";
}
}
| |
package org.apache.solr.client.solrj.response;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.util.NamedList;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* Encapsulates responses from SpellCheckComponent
*
*
* @since solr 1.3
*/
public class SpellCheckResponse {
  // True when the response reports the query as correctly spelled
  // (also assumed true when no "suggestions" section is present).
  private boolean correctlySpelled;
  // Collated (whole-query) corrections; null if the response had none.
  private List<Collation> collations;
  private List<Suggestion> suggestions = new ArrayList<>();
  // Token -> Suggestion, in response order.
  Map<String, Suggestion> suggestionMap = new LinkedHashMap<>();

  /**
   * Parses the "suggestions" section of a SpellCheckComponent response,
   * populating per-token suggestions and any collations.
   *
   * @param spellInfo the raw spellcheck section of the Solr response
   */
  public SpellCheckResponse(NamedList<NamedList<Object>> spellInfo) {
    NamedList<Object> sugg = spellInfo.get("suggestions");
    if (sugg == null) {
      // No suggestions section at all: nothing was misspelled.
      correctlySpelled = true;
      return;
    }
    for (int i = 0; i < sugg.size(); i++) {
      String n = sugg.getName(i);
      if ("correctlySpelled".equals(n)) {
        correctlySpelled = (Boolean) sugg.getVal(i);
      } else if ("collationInternalRank".equals(n)){
        // internal ranking detail; deliberately ignored
        //continue;
      } else if ("collation".equals(n)) {
        List<Object> collationInfo = sugg.getAll(n);
        collations = new ArrayList<>(collationInfo.size());
        for (Object o : collationInfo) {
          if (o instanceof String) {
            // Simple (non-extended) collation: just the corrected query string.
            collations.add(new Collation()
                .setCollationQueryString((String) o));
          } else if (o instanceof NamedList) {
            // Extended collation: includes hit count and per-word corrections
            // (returned when spellcheck.collateExtendedResults=true).
            @SuppressWarnings("unchecked")
            NamedList<Object> expandedCollation = (NamedList<Object>) o;
            String collationQuery
              = (String) expandedCollation.get("collationQuery");
            int hits = (Integer) expandedCollation.get("hits");
            @SuppressWarnings("unchecked")
            NamedList<String> misspellingsAndCorrections
              = (NamedList<String>) expandedCollation.get("misspellingsAndCorrections");
            Collation collation = new Collation();
            collation.setCollationQueryString(collationQuery);
            collation.setNumberOfHits(hits);
            for (int ii = 0; ii < misspellingsAndCorrections.size(); ii++) {
              String misspelling = misspellingsAndCorrections.getName(ii);
              String correction = misspellingsAndCorrections.getVal(ii);
              collation.addMisspellingsAndCorrection(new Correction(
                  misspelling, correction));
            }
            collations.add(collation);
          } else {
            throw new AssertionError(
                "Should get Lists of Strings or List of NamedLists here.");
          }
        }
      } else {
        // Any other entry is a per-token suggestion keyed by the token text.
        @SuppressWarnings("unchecked")
        Suggestion s = new Suggestion(n, (NamedList<Object>) sugg.getVal(i));
        suggestionMap.put(n, s);
        suggestions.add(s);
      }
    }
  }

  /** @return true if the query was reported as correctly spelled */
  public boolean isCorrectlySpelled() {
    return correctlySpelled;
  }

  /** @return all per-token suggestions, in response order */
  public List<Suggestion> getSuggestions() {
    return suggestions;
  }

  /** @return per-token suggestions keyed by token, in response order */
  public Map<String, Suggestion> getSuggestionMap() {
    return suggestionMap;
  }

  /** @return the suggestion for the given token, or null if none */
  public Suggestion getSuggestion(String token) {
    return suggestionMap.get(token);
  }

  /** @return the first alternative for the given token, or null if none */
  public String getFirstSuggestion(String token) {
    Suggestion s = suggestionMap.get(token);
    if (s==null || s.getAlternatives().isEmpty()) return null;
    return s.getAlternatives().get(0);
  }

  /**
   * <p>
   * Return the first collated query string. For convenience and backwards-compatibility. Use getCollatedResults() for full data.
   * </p>
   * @return first collated query string, or null if there are no collations
   */
  public String getCollatedResult() {
    return collations==null || collations.size()==0 ? null : collations.get(0).collationQueryString;
  }

  /**
   * <p>
   * Return all collations.
   * Will include # of hits and misspelling-to-correction details if "spellcheck.collateExtendedResults" was true.
   * </p>
   * @return all collations, or null if the response contained none
   */
  public List<Collation> getCollatedResults() {
    return collations;
  }

  /** A spellcheck suggestion for a single query token. */
  public static class Suggestion {
    private String token;
    private int numFound;
    private int startOffset;
    private int endOffset;
    private int originalFrequency;
    private List<String> alternatives = new ArrayList<>();
    // Null unless extended results were returned by the server.
    private List<Integer> alternativeFrequencies;

    /**
     * Parses one per-token suggestion entry.
     *
     * @param token the misspelled token this suggestion applies to
     * @param suggestion the raw suggestion data from the response
     */
    public Suggestion(String token, NamedList<Object> suggestion) {
      this.token = token;
      for (int i = 0; i < suggestion.size(); i++) {
        String n = suggestion.getName(i);
        if ("numFound".equals(n)) {
          numFound = (Integer) suggestion.getVal(i);
        } else if ("startOffset".equals(n)) {
          startOffset = (Integer) suggestion.getVal(i);
        } else if ("endOffset".equals(n)) {
          endOffset = (Integer) suggestion.getVal(i);
        } else if ("origFreq".equals(n)) {
          originalFrequency = (Integer) suggestion.getVal(i);
        } else if ("suggestion".equals(n)) {
          @SuppressWarnings("unchecked")
          List list = (List)suggestion.getVal(i);
          if (list.size() > 0 && list.get(0) instanceof NamedList) {
            // extended results detected: each entry carries word + frequency
            @SuppressWarnings("unchecked")
            List<NamedList> extended = (List<NamedList>)list;
            alternativeFrequencies = new ArrayList<>();
            for (NamedList nl : extended) {
              alternatives.add((String)nl.get("word"));
              alternativeFrequencies.add((Integer)nl.get("freq"));
            }
          } else {
            // plain results: a bare list of alternative spellings
            @SuppressWarnings("unchecked")
            List<String> alts = (List<String>) list;
            alternatives.addAll(alts);
          }
        }
      }
    }

    /** @return the misspelled token this suggestion applies to */
    public String getToken() {
      return token;
    }

    /** @return number of alternatives found by the server */
    public int getNumFound() {
      return numFound;
    }

    /** @return start offset of the token in the original query */
    public int getStartOffset() {
      return startOffset;
    }

    /** @return end offset of the token in the original query */
    public int getEndOffset() {
      return endOffset;
    }

    /** @return frequency of the original token in the corpus (extended results only) */
    public int getOriginalFrequency() {
      return originalFrequency;
    }

    /** The list of alternatives */
    public List<String> getAlternatives() {
      return alternatives;
    }

    /** The frequencies of the alternatives in the corpus, or null if this information was not returned */
    public List<Integer> getAlternativeFrequencies() {
      return alternativeFrequencies;
    }

    @Deprecated
    /** @see #getAlternatives */
    public List<String> getSuggestions() {
      return alternatives;
    }

    @Deprecated
    /** @see #getAlternativeFrequencies */
    public List<Integer> getSuggestionFrequencies() {
      return alternativeFrequencies;
    }
  }

  /** A collated (whole-query) correction, optionally with hit count and per-word details. */
  public class Collation {
    private String collationQueryString;
    private List<Correction> misspellingsAndCorrections = new ArrayList<>();
    private long numberOfHits;

    /** @return number of hits the collated query would return (extended results only) */
    public long getNumberOfHits() {
      return numberOfHits;
    }

    public void setNumberOfHits(long numberOfHits) {
      this.numberOfHits = numberOfHits;
    }

    /** @return the corrected query string */
    public String getCollationQueryString() {
      return collationQueryString;
    }

    /** @return this, for call chaining */
    public Collation setCollationQueryString(String collationQueryString) {
      this.collationQueryString = collationQueryString;
      return this;
    }

    /** @return per-word misspelling/correction pairs (extended results only) */
    public List<Correction> getMisspellingsAndCorrections() {
      return misspellingsAndCorrections;
    }

    /** @return this, for call chaining */
    public Collation addMisspellingsAndCorrection(Correction correction) {
      this.misspellingsAndCorrections.add(correction);
      return this;
    }
  }

  /** A single misspelled word and its correction within a collation. */
  public class Correction {
    private String original;
    private String correction;

    public Correction(String original, String correction) {
      this.original = original;
      this.correction = correction;
    }

    /** @return the misspelled word as it appeared in the query */
    public String getOriginal() {
      return original;
    }

    public void setOriginal(String original) {
      this.original = original;
    }

    /** @return the suggested replacement word */
    public String getCorrection() {
      return correction;
    }

    public void setCorrection(String correction) {
      this.correction = correction;
    }
  }
}
| |
/*
* Copyright 2005 The Apache Software Foundation or its licensors, as applicable.
*
* Licensed under the Apache License, Version 2.0 (the License);
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ar.org.fitc.test.rmi.integration.fase2.test;
import java.net.MalformedURLException;
import java.rmi.AccessException;
import java.rmi.AlreadyBoundException;
import java.rmi.NotBoundException;
import java.rmi.RemoteException;
import java.rmi.ServerException;
import java.rmi.registry.LocateRegistry;
import java.rmi.registry.Registry;
import ar.org.fitc.test.rmi.integration.fase2.AutoBindITCRemoteUnicast;
import ar.org.fitc.test.rmi.integration.fase2.ITCRemoteUnicast;
import ar.org.fitc.test.rmi.integration.fase2.Net;
import ar.org.fitc.test.rmi.integration.fase2.executor.ReportIPServer;
import ar.org.fitc.test.rmi.integration.fase2.interfaces.ITCRemote;
/**
* Testing class for a remote <code>Registry</code>.
*
* @author Jorge Rafael
* @author Marcelo Arcidiacono
*
* @version 1.0
*/
public class RemoteRegistryTestCase extends ITCTestCase {

    /**
     * A simple <code>Registry</code>.
     */
    public Registry reg;

    /**
     * An <code>ITCRemote</code> for exporting.
     */
    public ITCRemote exportObj;

    /**
     * Default constructor.
     */
    public RemoteRegistryTestCase() {
        super();
    }

    /**
     * Constructs a <code>RemoteRegistryTestCase</code> with a name.
     *
     * @param arg0 a name.
     */
    public RemoteRegistryTestCase(String arg0) {
        super(arg0);
    }

    /**
     * Assigns a <code>Registry</code> for each reported server and
     * exports an object on them.
     *
     * @throws Exception if a failure occurs during exportation
     */
    protected void setUp() throws Exception {
        String[] hosts = ReportIPServer.getit();
        // Use the registry of the first reported host that is not this machine.
        for (String host : hosts) {
            if (!Net.isOwnHost(host)) {
                reg = LocateRegistry.getRegistry(host, Net.getRegistryPort());
                break;
            }
        }
        exportObj = new ITCRemoteUnicast();
        super.setUp();
    }

    /**
     * Tries to bind a remote object in a <code>Registry</code> of a
     * non-local host; the registry stub must reject the modification.
     *
     * @throws RemoteException if remote communication with the
     * registry failed
     * @throws AlreadyBoundException if the name is already bound
     */
    public void testBind001() throws RemoteException, AlreadyBoundException {
        try {
            reg.bind("echo", exportObj);
            fail("Stub of registry can't change registry");
        } catch (ServerException e) {
            // expected: a remote registry refuses bind() from a non-local host
        }
    }

    /**
     * This test verifies that all elements in a non-local <code>
     * Registry</code> are functional.
     *
     * @throws AccessException if this registry is local and it
     * denies the caller access to perform this operation
     * @throws RemoteException if remote communication with the
     * registry failed
     * @throws NotBoundException if the name is not currently bound
     */
    public void testLookup001() throws AccessException, RemoteException,
            NotBoundException {
        try {
            for (String bindName : reg.list()) {
                ITCRemote o = (ITCRemote) reg.lookup(bindName);
                o.getString();
            }
        } catch (Exception e) {
            // Fail the test instead of killing the whole JVM (System.exit
            // would abort every remaining test in the run).
            fail("Lookup of bound objects failed with: " + e);
        }
    }

    /**
     * This test tries lookup a non-bounded remote object.
     *
     * @throws RemoteException if remote communication with the
     * registry failed
     * @throws MalformedURLException if the name is not an
     * appropriately formatted URL
     * @throws NotBoundException if the name is not currently bound
     */
    public void testLookup002() throws RemoteException, MalformedURLException,
            NotBoundException {
        try {
            reg.lookup("echo123413");
            fail("Mal formed URL");
        } catch (NotBoundException e) {
            // expected: the name was never bound
        } catch (Throwable e) {
            fail("Failed with:" + e);
        }
    }

    /**
     * This test tries lookup a remote object with a non-existing name.
     */
    public void testLookup003() {
        try {
            reg.lookup("#$%$%echo");
            fail("Mal formed URL");
        } catch (NotBoundException e) {
            // expected: the name was never bound
        } catch (Throwable e) {
            fail("Failed with:" + e);
        }
    }

    /**
     * This test tries to re-bind a remote object with a same name
     * in a Registry; the registry stub must reject the modification.
     *
     * @throws RemoteException if remote communication with the
     * registry failed
     */
    public void testRebind001() throws RemoteException, MalformedURLException,
            NotBoundException {
        try {
            reg.rebind("echo", exportObj);
            fail("Stub of registry can't change registry");
        } catch (ServerException e) {
            // expected: a remote registry refuses rebind() from a non-local host
        }
    }

    /**
     * This test tries to un-bind remote objects from a non-local
     * <code>Registry</code>. The object must to be registred.
     *
     * @throws AccessException if this registry is local and it
     * denies the caller access to perform this operation
     * @throws RemoteException if remote communication with the
     * registry failed
     * @throws NotBoundException if the name is not currently bound
     */
    public void testUnbind() throws AccessException, RemoteException,
            NotBoundException {
        for (String bindName : reg.list()) {
            try {
                reg.unbind(bindName);
                fail("Stub of registry can't change registry");
            } catch (ServerException e) {
                // expected; verify the binding is still intact and functional
                ITCRemote o = (ITCRemote) reg.lookup(bindName);
                o.getString();
            }
        }
    }

    /**
     * This case makes a test on a non-exported object. If this
     * object is deserialized, it is exported and bounded. This object
     * is sent as parameter to a non-local <code>Registry</code>. The
     * binding fails. However the object will be bounded because
     * will be deserialized.
     *
     * @throws RemoteException if remote communication with the
     * registry failed
     */
    public void testAutoBindAndExportionUsingRemoteRegistry() throws RemoteException {
        exportObj = new AutoBindITCRemoteUnicast();
        exportObj.clean(true);
        try {
            reg.rebind(AutoBindITCRemoteUnicast.BIND_NAME, exportObj);
            fail("can't bind remotelly");
        } catch (ServerException e) {
            // expected: the remote rebind itself must fail
        }
        try {
            reg.lookup(AutoBindITCRemoteUnicast.BIND_NAME);
        } catch (NotBoundException e) {
            // deserialization on the server should have auto-bound the object
            fail("Object not binded");
        }
    }
}
| |
// ----------------------------------------------------------------------------
// Copyright 2007-2017, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Description:
// Merge new Localization strings from 'LocalStrings_en.properties' into existing
// 'LocalStrings_XX.properties' files.
// Notes:
// - This module loads and merges all 'LocalStrings_XX.properties' files in memory before saving
// any of the merged files. If any errors are encountered, the merge and save are
// aborted. This also means that if the number of LocalStrings files is large, this
// module may run out of memory, requiring that the java option '-Xmx<memory>' be
// used to boost the amount of memory available to the Java process (ie. '-Xmx256m')
// Example Usage:
// To find (but not save) all LocalString_XX.properties files which need to be merged:
// > java -classpath <classpath> org.opengts.tools.MergeLocalStrings -scan=src
// To merge and save all LocalString_XX.properties files:
// > java -classpath <classpath> org.opengts.tools.MergeLocalStrings -merge=src
// ----------------------------------------------------------------------------
// Change History:
// 2008/02/17 Martin D. Flynn
// -Initial release
// ----------------------------------------------------------------------------
package org.opengts.tools;
import java.lang.*;
import java.util.*;
import java.io.*;
import org.opengts.util.*;
public class MergeLocalStrings
{
// ------------------------------------------------------------------------
private static final String LOCALSTRINGS_EN_PROPERTIES = "LocalStrings_en.properties";
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
/* this class holds a single 'LocalStrings_XX.properties' instance for merging */
/* this class holds a single 'LocalStrings_XX.properties' instance for merging */
protected class MergeItem
{
    private File   xxFile           = null;
    private String xxFileString_old = null;
    private String xxFileString_new = null;

    /**
     * Loads the existing 'LocalStrings_XX.properties' file (if any) and builds
     * the merged file contents in memory; nothing is written to disk here.
     *
     * @param xx          the 'LocalStrings_XX.properties' file to merge into
     * @param enFileName  the English properties filename, replaced by the XX
     *                    filename wherever it appears in the template lines
     * @param mergeProps  the lines of 'LocalStrings_en.properties' used as the
     *                    merge template
     * @throws IOException if the existing XX file cannot be read
     */
    public MergeItem(File xx, String enFileName, String mergeProps[]) throws IOException {
        this.xxFile = xx;
        /* read old file contents (null if the file cannot be read) */
        byte xxFileData_old[] = FileTools.readFile(this.xxFile);
        this.xxFileString_old = (xxFileData_old != null)? StringTools.toStringValue(xxFileData_old) : null;
        /* load "LocalStrings_XX.properties" */
        Properties xxProps = new Properties();
        if (this.xxFile.exists()) {
            FileInputStream fis = null;
            try {
                fis = new FileInputStream(this.xxFile);
                xxProps.load(fis);
            } catch (IOException ioe) {
                Print.logError("Locale read error: " + ioe);
                throw ioe;
            } finally {
                // null-check before close: the constructor itself may have thrown,
                // leaving 'fis' null (the old catch-all Throwable masked this).
                if (fis != null) {
                    try { fis.close(); } catch (IOException ce) { /* ignore close failure */ }
                }
            }
        } else {
            // new file: start from an empty property set
        }
        /* copy template lines, substituting the XX filename for the EN filename */
        // TODO: could stand some optimization
        String xxFileStrArry[] = new String[mergeProps.length];
        for (int i = 0; i < xxFileStrArry.length; i++) {
            xxFileStrArry[i] = mergeProps[i];
            int p = xxFileStrArry[i].indexOf(enFileName);
            if (p >= 0) {
                String e = xxFileStrArry[i];
                xxFileStrArry[i] = e.substring(0,p) + this.xxFile.getName() + e.substring(p + enFileName.length());
            }
        }
        /* merge: uncomment template keys ("#key=") that the XX file already translates */
        // TODO: could stand some optimization
        for (Enumeration<?> e = xxProps.propertyNames(); e.hasMoreElements();) {
            String propKey = e.nextElement().toString();
            String propVal = StringTools.replace(xxProps.getProperty(propKey), "\n", "\\n");
            if (propVal != null) {
                String enk = "#" + propKey + "=";
                for (int i = 0; i < xxFileStrArry.length; i++) {
                    if (xxFileStrArry[i].startsWith(enk)) {
                        xxFileStrArry[i] = propKey + "=" + propVal;
                    }
                }
            }
        }
        /* new file contents */
        this.xxFileString_new = StringTools.join(xxFileStrArry,'\n') + "\n";
    }

    /** Returns the 'LocalStrings_XX.properties' file this item merges into. */
    public File getFile() {
        return this.xxFile;
    }

    /** Returns true if the merged contents differ from the file's old contents. */
    public boolean hasChanged() {
        if (this.xxFileString_new == null) {
            return false;
        } else
        if ((this.xxFileString_old != null) && this.xxFileString_old.equals(this.xxFileString_new)) {
            return false;
        } else {
            return true;
        }
    }

    /**
     * Renames the old file to "*.old" and writes the merged contents in its place.
     *
     * @return true if the file was saved, false if nothing had changed
     * @throws IOException if there is no merged data, the ".old" file already
     *         exists, or the rename/write fails
     */
    public boolean save() throws IOException {
        /* do we have data to save? */
        if (this.xxFileString_new == null) {
            throw new IOException("No new 'LocalStrings_XX.properties' data to save");
        }
        /* has file changed? */
        if (!this.hasChanged()) {
            //Print.logInfo("New file contents is same as old file contents: " + this.xxFile);
            return false;
        }
        /* rename old */
        File xxFile_old = new File(this.xxFile.toString() + ".old");
        if (xxFile_old.exists()) {
            throw new IOException("Old 'LocalStrings_XX.properties' already exists: " + xxFile_old);
        }
        if (!this.xxFile.renameTo(xxFile_old)) {
            throw new IOException("Unable to rename 'LocalStrings_XX.properties' file: " + this.xxFile);
        }
        if (!xxFile_old.exists()) {
            throw new IOException("Renamed, but 'LocalStrings_XX.properties.old' does not exist: " + xxFile_old);
        }
        if (this.xxFile.exists()) {
            throw new IOException("Renamed, but 'LocalStrings_XX.properties' still exist: " + this.xxFile);
        }
        /* save new (removed a redundant catch that just rethrew the IOException) */
        byte xxFileData_new[] = this.xxFileString_new.getBytes();
        FileTools.writeFile(xxFileData_new, this.xxFile);
        /* success */
        Print.logInfo("Saved: " + this.xxFile);
        return true;
    }

    /** Returns the merged file contents, or "" if the merge has not run. */
    public String toString() {
        return (this.xxFileString_new != null)? this.xxFileString_new : "";
    }
}
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
private File locale_en_props = null;
private String enFileStrArry[] = null;
private MergeItem mergeItems[] = null;
/**
 * Validates the English source file and each target locale file, then builds
 * (in memory only) a MergeItem for every 'LocalStrings_XX.properties' file.
 * Any validation or read failure aborts the whole merge before anything is saved.
 *
 * @param enFile the 'LocalStrings_en.properties' template file
 * @param xxFile the 'LocalStrings_XX.properties' files to merge into
 * @throws IOException if any file is missing, misnamed, in a different
 *         directory than the EN file, unreadable, or a stale ".old" file exists
 */
public MergeLocalStrings(File enFile, File xxFile[])
    throws IOException
{
    /* "LocalStrings_en.properties" exists? */
    this.locale_en_props = enFile;
    if ((this.locale_en_props == null) || !this.locale_en_props.isFile()) {
        // EN locale file not specified, or does not exist
        throw new FileNotFoundException("LocalStrings_en.properties not found: " + this.locale_en_props);
    }
    /* "LocalStrings_en.properties" filename */
    String enFilePath = this.locale_en_props.getParent();
    String enFileName = this.locale_en_props.getName();
    if ((enFilePath == null) || !enFileName.equals(LOCALSTRINGS_EN_PROPERTIES)) {
        // EN locale filename not "LocalStrings_en.properties"
        throw new IOException("Invalid 'LocalStrings_en.properties' filename: " + this.locale_en_props);
    }
    /* Invalid "LocalStrings_XX.properties" file specified? */
    if ((xxFile == null) || (xxFile.length == 0)) {
        // XX locale files not specified
        throw new FileNotFoundException("'LocalStrings_XX.properties' files not specified");
    }
    /* read "LocalStrings_en.properties" */
    byte enFileData[] = FileTools.readFile(this.locale_en_props);
    if (enFileData == null) {
        // unable to read EN locale file
        throw new IOException("Invalid 'LocalStrings_en.properties' file: " + this.locale_en_props);
    }
    this.enFileStrArry = StringTools.split(StringTools.toStringValue(enFileData),'\n');
    /* validate and merge each "LocalStrings_XX.properties" file */
    this.mergeItems = new MergeItem[xxFile.length];
    for (int i = 0; i < xxFile.length; i++) {
        /* Invalid "LocalStrings_XX.properties" file specified? */
        if (xxFile[i] == null) {
            // XX locale file not specified
            throw new FileNotFoundException("Invalid 'LocalStrings_XX.properties' file: " + xxFile[i]);
        }
        /* "LocalStrings_XX.properties" filename */
        String xxFilePath = xxFile[i].getParent();
        String xxFileName = xxFile[i].getName();
        if ((xxFilePath == null) || !xxFileName.startsWith("LocalStrings_") || !xxFileName.endsWith(".properties")) {
            // invalid file name
            throw new IOException("Invalid 'LocalStrings_XX.properties' filename: " + xxFile[i]);
        }
        /* old file exists? (a leftover ".old" would block the later save/rename) */
        File xxFile_old = new File(xxFile[i].toString() + ".old");
        if (xxFile_old.exists()) {
            throw new IOException("Old 'LocalStrings_XX.properties' already exists: " + xxFile_old);
        }
        /* same path? (XX files must live next to the EN file) */
        if (!enFilePath.equals(xxFilePath)) {
            // paths do not match
            throw new IOException("Invalid 'LocalStrings_XX.properties' filename: " + xxFile[i]);
        }
        /* merge */
        this.mergeItems[i] = new MergeItem(xxFile[i], enFileName, this.enFileStrArry);
    }
}
// ------------------------------------------------------------------------
/* return true if something has changed */
/* return true if any merged locale file differs from its on-disk contents */
public boolean hasChanged()
{
    if (this.mergeItems == null) {
        return false;
    }
    for (MergeItem mi : this.mergeItems) {
        if (mi.hasChanged()) {
            return true;
        }
    }
    return false;
}
// ------------------------------------------------------------------------
/* save */
/* save all changed locale files; returns false if nothing needed saving */
public boolean save()
    throws IOException
{
    /* nothing to save */
    if (this.mergeItems == null) {
        throw new IOException("Nothing to save");
    }
    /* nothing has changed? */
    if (!this.hasChanged()) {
        return false;
    }
    /* save each merged item (unchanged items are skipped by MergeItem.save) */
    for (MergeItem mi : this.mergeItems) {
        mi.save();
    }
    /* saved */
    return true;
}
// ------------------------------------------------------------------------
/* string representation */
public String toString()
{
StringBuffer sb = new StringBuffer();
sb.append("English: ");
sb.append(this.locale_en_props);
if (this.mergeItems != null) {
for (int i = 0; i < this.mergeItems.length; i++) {
sb.append("\n");
MergeItem mi = this.mergeItems[i];
sb.append(" --> ");
sb.append(mi.getFile());
if (mi.hasChanged()) {
sb.append(" [changed]");
}
}
}
return sb.toString();
}
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
/* filter which accepts directories and "LocalStrings_*.properties" files */
private static FileFilter localStringsFilter = null;
protected static FileFilter getLocalStringsFilter()
{
if (localStringsFilter == null) {
localStringsFilter = new FileFilter() {
public boolean accept(File file) {
if (file.isDirectory()) {
return true;
} else
if (file.isFile()) {
String name = file.getName();
return (name.startsWith("LocalStrings_") && name.endsWith(".properties"));
} else {
return false;
}
}
};
}
return localStringsFilter;
}
/* recursively decend directory to find all "LocalStrings_*.properties" files */
protected static boolean _findMergeItems(File subDir, java.util.List<MergeLocalStrings> mergeList)
throws IOException
{
boolean ok = true;
/* is a directory? */
if (!subDir.isDirectory()) {
return ok;
}
/* get files in this directory */
FileFilter filter = getLocalStringsFilter();
File filesInSubdir[] = subDir.listFiles(filter);
if ((filesInSubdir == null) || (filesInSubdir.length <= 0)) {
return ok;
}
/* parse files/dirs */
boolean foundProps = false;
File enFile = null;
java.util.List<File> files = new Vector<File>();
java.util.List<File> dirs = new Vector<File>();
for (int i = 0; i < filesInSubdir.length; i++) {
File f = filesInSubdir[i];
if (f.isFile()) {
foundProps = true;
if (f.getName().equals(LOCALSTRINGS_EN_PROPERTIES)) {
enFile = f;
} else {
files.add(f);
}
} else
if (f.isDirectory()) {
dirs.add(f);
}
}
/* display discovered LocalStrings files */
if (foundProps) {
if (enFile != null) {
File xxFile[] = files.toArray(new File[files.size()]);
mergeList.add(new MergeLocalStrings(enFile,xxFile));
} else {
ok = false;
Print.sysPrintln("*** MISSING: " + LOCALSTRINGS_EN_PROPERTIES);
for (Iterator<File> f = files.iterator(); f.hasNext();) {
File xxFile = f.next();
Print.sysPrintln(" ==> " + xxFile + " [skipped]");
}
}
}
/* recurse/decend into sub-directories */
for (Iterator<File> d = dirs.iterator(); d.hasNext();) {
if (!_findMergeItems(d.next(),mergeList)) {
ok = false;
}
}
/* return ok? */
return ok;
}
/* return a list of all discovered merged "LocalStrings_*.properties" files */
protected static java.util.List<MergeLocalStrings> findMergeItems(File subDir)
throws IOException
{
java.util.List<MergeLocalStrings> mergeItems = new Vector<MergeLocalStrings>();
if (_findMergeItems(subDir,mergeItems)) {
return mergeItems;
} else {
return null;
}
}
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
// Main entry point
private static final String ARG_SCAN[] = new String[] { "scan" };
private static final String ARG_MERGE[] = new String[] { "merge" };
public static void main(String argv[])
{
RTConfig.setCommandLineArgs(argv);
/* scan for "LocalStrings_en.properties" */
if (RTConfig.hasProperty(ARG_SCAN)) {
try {
File scanDir = RTConfig.getFile(ARG_SCAN,new File("."));
java.util.List<MergeLocalStrings> list = MergeLocalStrings.findMergeItems(scanDir);
if (list != null) {
Print.sysPrintln("");
for (Iterator<MergeLocalStrings> i = list.iterator(); i.hasNext();) {
MergeLocalStrings mls = i.next();
Print.sysPrintln(mls.toString());
Print.sysPrintln("");
}
}
} catch (IOException ioe) {
Print.sysPrintln("");
Print.sysPrintln("Scan error: " + ioe.getMessage());
}
System.exit(0);
}
/* merge/save "LocalStrings_en.properties" */
if (RTConfig.hasProperty(ARG_MERGE)) {
try {
File scanDir = RTConfig.getFile(ARG_MERGE,new File("."));
java.util.List<MergeLocalStrings> list = MergeLocalStrings.findMergeItems(scanDir);
if (list != null) {
for (Iterator<MergeLocalStrings> i = list.iterator(); i.hasNext();) {
MergeLocalStrings mls = i.next();
Print.sysPrintln(mls.toString());
mls.save();
Print.sysPrintln("");
}
}
} catch (IOException ioe) {
Print.sysPrintln("");
Print.sysPrintln("Merge error: " + ioe.getMessage());
}
System.exit(0);
}
/* usage */
Print.sysPrintln("Missing options:");
Print.sysPrintln(" -scan=<sourceDir> Display list of LocalStrings_XX.properties files");
Print.sysPrintln(" -merge=<sourceDir> Merged/save LocalStrings_XX.properties files");
Print.sysPrintln("Notes:");
Print.sysPrintln(" 1) Scan/Merge process will terminate if any errors are encountered");
}
}
| |
package com.fireflysource.net.http.common.v1.encoder;
import com.fireflysource.common.collection.trie.ArrayTrie;
import com.fireflysource.common.collection.trie.Trie;
import com.fireflysource.common.io.BufferUtils;
import com.fireflysource.common.slf4j.LazyLogger;
import com.fireflysource.common.string.StringUtils;
import com.fireflysource.common.sys.ProjectVersion;
import com.fireflysource.common.sys.SystemLogger;
import com.fireflysource.net.http.common.codec.PreEncodedHttpField;
import com.fireflysource.net.http.common.exception.BadMessageException;
import com.fireflysource.net.http.common.model.*;
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.function.Supplier;
import static com.fireflysource.net.http.common.model.HttpStatus.INTERNAL_SERVER_ERROR_500;
import static com.fireflysource.net.http.common.model.HttpTokens.EndOfContent;
/**
* HttpGenerator. Builds HTTP Messages.
* <p>
* If the system property "com.fireflysource.net.http.common.v1.encoder.HttpGenerator.STRICT" is set to true,
* then the generator will strictly pass on the exact strings received from methods and header
* fields. Otherwise a fast case insensitive string lookup is used that may alter the
* case and white space of some methods/headers
*/
public class HttpGenerator {
    private final static LazyLogger LOG = SystemLogger.create(HttpGenerator.class);

    // Pre-built metadata for interim responses (content length -1 = unknown).
    public static final MetaData.Response CONTINUE_100_INFO = new MetaData.Response(HttpVersion.HTTP_1_1, 100, null, null, -1);
    public static final MetaData.Response PROGRESS_102_INFO = new MetaData.Response(HttpVersion.HTTP_1_1, 102, null, null, -1);
    // Canned 500 response that also closes the connection.
    public final static MetaData.Response RESPONSE_500_INFO =
            new MetaData.Response(HttpVersion.HTTP_1_1, INTERNAL_SERVER_ERROR_500, null, new HttpFields() {{
                put(HttpHeader.CONNECTION, HttpHeaderValue.CLOSE);
            }}, 0);

    // other statics
    public static final int CHUNK_SIZE = 12; // minimum buffer capacity for a chunk header
    private final static byte[] COLON_SPACE = new byte[]{':', ' '};
    private final static int SEND_SERVER = 0x01;       // bit flag: emit "Server" header
    private final static int SEND_XPOWERED_BY = 0x02;  // bit flag: emit "X-Powered-By" header
    // Methods assumed to carry a request body even without explicit length headers.
    private final static Trie<Boolean> ASSUMED_CONTENT_METHODS = new ArrayTrie<>(8);

    // common _content ('\015' = CR, '\012' = LF)
    private static final byte[] ZERO_CHUNK = {(byte) '0', (byte) '\015', (byte) '\012'};
    private static final byte[] LAST_CHUNK = {(byte) '0', (byte) '\015', (byte) '\012', (byte) '\015', (byte) '\012'};
    private static final byte[] CONTENT_LENGTH_0 = StringUtils.getBytes("Content-Length: 0\r\n");
    private static final byte[] CONNECTION_CLOSE = StringUtils.getBytes("Connection: close\r\n");
    private static final byte[] HTTP_1_1_SPACE = StringUtils.getBytes(HttpVersion.HTTP_1_1 + " ");
    private static final byte[] TRANSFER_ENCODING_CHUNKED = StringUtils.getBytes("Transfer-Encoding: chunked\r\n");
    // Indexed by the SEND_* bit flags: 0 = nothing, 1 = Server, 2 = X-Powered-By, 3 = both.
    private static final byte[][] SEND = new byte[][]{
            new byte[0],
            StringUtils.getBytes("Server: Firefly(" + ProjectVersion.getValue() + ")\r\n"),
            StringUtils.getBytes("X-Powered-By: Firefly(" + ProjectVersion.getValue() + ")\r\n"),
            StringUtils.getBytes(
                    "Server: Firefly(" + ProjectVersion.getValue() + ")\r\n" +
                            "X-Powered-By: Firefly(" + ProjectVersion.getValue() + ")\r\n")
    };
    // Pre-encoded response lines per status code (null for unassigned codes).
    private static final PreparedResponse[] PREPARED_RESPONSE = new PreparedResponse[HttpStatus.MAX_CODE + 1];

    static {
        ASSUMED_CONTENT_METHODS.put(HttpMethod.POST.getValue(), Boolean.TRUE);
        ASSUMED_CONTENT_METHODS.put(HttpMethod.PUT.getValue(), Boolean.TRUE);
    }

    static {
        // Pre-encode "HTTP/1.1 SP <3 digits> SP <reason> CRLF" for every known status.
        int versionLength = HttpVersion.HTTP_1_1.toString().length();
        for (int i = 0; i < PREPARED_RESPONSE.length; i++) {
            HttpStatus.Code code = HttpStatus.getCode(i);
            if (code == null) {
                continue;
            }
            String reason = code.getMessage();
            // layout: version + " NNN " (5 bytes) + reason + CRLF (2 bytes)
            byte[] line = new byte[versionLength + 5 + reason.length() + 2];
            ByteBuffer.wrap(HttpVersion.HTTP_1_1.getBytes()).get(line, 0, versionLength);
            line[versionLength] = ' ';
            line[versionLength + 1] = (byte) ('0' + i / 100);
            line[versionLength + 2] = (byte) ('0' + (i % 100) / 10);
            line[versionLength + 3] = (byte) ('0' + (i % 10));
            line[versionLength + 4] = ' ';
            for (int j = 0; j < reason.length(); j++) {
                // NOTE: char-to-byte cast truncates non-ASCII; standard reason
                // phrases are ASCII so this is safe for the built-in codes.
                line[versionLength + 5 + j] = (byte) reason.charAt(j);
            }
            line[versionLength + 5 + reason.length()] = HttpTokens.CARRIAGE_RETURN;
            line[versionLength + 6 + reason.length()] = HttpTokens.LINE_FEED;
            PREPARED_RESPONSE[i] = new PreparedResponse();
            // schemeCode = "HTTP/1.1 NNN ", reason = phrase only (no CRLF)
            PREPARED_RESPONSE[i].schemeCode = Arrays.copyOfRange(line, 0, versionLength + 5);
            PREPARED_RESPONSE[i].reason = Arrays.copyOfRange(line, versionLength + 5, line.length - 2);
            PREPARED_RESPONSE[i].responseLine = line;
        }
    }

    private final int send;            // SEND_* bit flags fixed at construction
    private State state = State.START;
    private EndOfContent endOfContent = EndOfContent.UNKNOWN_CONTENT;
    private long contentPrepared = 0;  // total content bytes generated so far
    private boolean noContentResponse = false;
    private Boolean persistent = null; // tri-state: null = not yet determined
    private Supplier<HttpFields> trailers = null;
    // data
    private boolean needCRLF = false;  // previous chunk body still needs its closing CRLF
    /** Creates a generator that sends neither "Server" nor "X-Powered-By" headers. */
    public HttpGenerator() {
        this(false, false);
    }

    /**
     * @param sendServerVersion whether generated responses include a "Server" header
     * @param sendXPoweredBy whether generated responses include an "X-Powered-By" header
     */
    public HttpGenerator(boolean sendServerVersion, boolean sendXPoweredBy) {
        send = (sendServerVersion ? SEND_SERVER : 0) | (sendXPoweredBy ? SEND_XPOWERED_BY : 0);
    }

    /**
     * Replaces the pre-encoded "Server"/"X-Powered-By" header values globally.
     * NOTE(review): mutates the shared static SEND array without synchronization —
     * intended to be called once at startup, before generators run on other threads.
     */
    public static void setServerVersion(String serverVersion) {
        SEND[SEND_SERVER] = StringUtils.getBytes("Server: " + serverVersion + "\r\n");
        SEND[SEND_XPOWERED_BY] = StringUtils.getBytes("X-Powered-By: " + serverVersion + "\r\n");
        SEND[SEND_SERVER | SEND_XPOWERED_BY] = StringUtils.getBytes(
                "Server: " + serverVersion + "\r\n" +
                        "X-Powered-By: " + serverVersion + "\r\n");
    }
private static void putContentLength(ByteBuffer header, long contentLength) {
if (contentLength == 0) {
header.put(CONTENT_LENGTH_0);
} else {
header.put(HttpHeader.CONTENT_LENGTH.getBytesColonSpace());
BufferUtils.putDecLong(header, contentLength);
header.put(HttpTokens.CRLF);
}
}
public static byte[] getReasonBuffer(int code) {
PreparedResponse status = code < PREPARED_RESPONSE.length ? PREPARED_RESPONSE[code] : null;
if (status != null) {
return status.reason;
} else {
return null;
}
}
private static void putSanitisedName(String s, ByteBuffer buffer) {
int l = s.length();
for (int i = 0; i < l; i++) {
char c = s.charAt(i);
if (c < 0 || c > 0xff || c == '\r' || c == '\n' || c == ':') {
buffer.put((byte) '?');
} else {
buffer.put((byte) (0xff & c));
}
}
}
private static void putSanitisedValue(String s, ByteBuffer buffer) {
int l = s.length();
for (int i = 0; i < l; i++) {
char c = s.charAt(i);
if (c < 0 || c > 0xff || c == '\r' || c == '\n') {
buffer.put((byte) ' ');
} else {
buffer.put((byte) (0xff & c));
}
}
}
    /**
     * Writes a single header field ("Name: value\r\n") into the buffer.
     * Pre-encoded fields copy their cached bytes directly; other fields have
     * name and value sanitised so they cannot break header framing.
     */
    public static void putTo(HttpField field, ByteBuffer bufferInFillMode) {
        if (field instanceof PreEncodedHttpField) {
            // NOTE(review): always selects the HTTP/1.0 pre-encoding — confirm
            // the 1.0 and 1.1 encodings are identical for these fields.
            ((PreEncodedHttpField) field).putTo(bufferInFillMode, HttpVersion.HTTP_1_0);
        } else {
            HttpHeader header = field.getHeader();
            if (header != null) {
                // known header: use the cached "Name: " bytes
                bufferInFillMode.put(header.getBytesColonSpace());
            } else {
                putSanitisedName(field.getName(), bufferInFillMode);
                bufferInFillMode.put(COLON_SPACE);
            }
            putSanitisedValue(field.getValue(), bufferInFillMode);
            BufferUtils.putCRLF(bufferInFillMode);
        }
    }
public static void putTo(HttpFields fields, ByteBuffer bufferInFillMode) {
for (HttpField field : fields) {
if (field != null) {
putTo(field, bufferInFillMode);
}
}
BufferUtils.putCRLF(bufferInFillMode);
}
public void reset() {
state = State.START;
endOfContent = EndOfContent.UNKNOWN_CONTENT;
noContentResponse = false;
persistent = null;
contentPrepared = 0;
needCRLF = false;
trailers = null;
}
    /** @return true if this generator was configured to send a "Server" header */
    @Deprecated
    public boolean getSendServerVersion() {
        return (send & SEND_SERVER) != 0;
    }

    /** @deprecated the flag is fixed at construction; this always throws. */
    @Deprecated
    public void setSendServerVersion(boolean sendServerVersion) {
        throw new UnsupportedOperationException();
    }

    public State getState() {
        return state;
    }

    public boolean isState(State state) {
        return this.state == state;
    }

    /** @return true when no generation has started (or after reset) */
    public boolean isIdle() {
        return state == State.START;
    }

    /** @return true when the current message is complete */
    public boolean isEnd() {
        return state == State.END;
    }

    /** @return true once the message header has been generated */
    public boolean isCommitted() {
        return state.ordinal() >= State.COMMITTED.ordinal();
    }

    /** @return true if the body is being sent with chunked transfer encoding */
    public boolean isChunking() {
        return endOfContent == EndOfContent.CHUNKED_CONTENT;
    }

    /** @return true if the message is known to have no body */
    public boolean isNoContent() {
        return noContentResponse;
    }

    /**
     * @return true, if known to be persistent
     */
    public boolean isPersistent() {
        return Boolean.TRUE.equals(persistent);
    }

    public void setPersistent(boolean persistent) {
        this.persistent = persistent;
    }

    /** @return true if any content bytes have been prepared for sending */
    public boolean isWritten() {
        return contentPrepared > 0;
    }

    /** @return total content bytes prepared so far for the current message */
    public long getContentPrepared() {
        return contentPrepared;
    }

    /**
     * Abandons the current message: marks the connection non-persistent and
     * jumps straight to END so no further output is generated.
     */
    public void abort() {
        persistent = false;
        state = State.END;
        endOfContent = null;
    }
    /**
     * Generates the next piece of an HTTP request.  Callers invoke this
     * repeatedly, acting on each returned {@link Result} (supplying metadata,
     * header or chunk buffers, flushing output) until DONE.
     *
     * @param info    request metadata (method, URI, version, fields)
     * @param header  buffer the request header is built into
     * @param chunk   small buffer used for chunk headers when chunking
     * @param content content to send with this call, may be null
     * @param last    true if this is the final piece of content
     * @return the next action required of the caller
     */
    public Result generateRequest(MetaData.Request info, ByteBuffer header, ByteBuffer chunk, ByteBuffer content, boolean last) {
        switch (state) {
            case START: {
                if (info == null) {
                    return Result.NEED_INFO;
                }
                if (header == null) {
                    return Result.NEED_HEADER;
                }
                // prepare the header
                int pos = BufferUtils.flipToFill(header);
                try {
                    // generate ResponseLine
                    generateRequestLine(info, header);
                    if (info.getHttpVersion() == HttpVersion.HTTP_0_9) {
                        throw new BadMessageException(INTERNAL_SERVER_ERROR_500, "HTTP/0.9 not supported");
                    }
                    generateHeaders(info, header, content, last);
                    boolean expect100 = info.getFields().contains(HttpHeader.EXPECT, HttpHeaderValue.CONTINUE.getValue());
                    if (expect100) {
                        // Expect: 100-continue — hold content until the interim response
                        state = State.COMMITTED;
                    } else {
                        // handle the content.
                        int len = BufferUtils.length(content);
                        if (len > 0) {
                            contentPrepared += len;
                            if (isChunking()) {
                                // chunk header goes into the header buffer this first time
                                prepareChunk(header, len);
                            }
                        }
                        state = last ? State.COMPLETING : State.COMMITTED;
                    }
                    return Result.FLUSH;
                } catch (BadMessageException e) {
                    throw e;
                } catch (BufferOverflowException e) {
                    throw new BadMessageException(INTERNAL_SERVER_ERROR_500, "Request header too large", e);
                } catch (Exception e) {
                    throw new BadMessageException(INTERNAL_SERVER_ERROR_500, e.getMessage(), e);
                } finally {
                    BufferUtils.flipToFlush(header, pos);
                }
            }
            case COMMITTED: {
                return committed(chunk, content, last);
            }
            case COMPLETING: {
                return completing(chunk, content);
            }
            case END:
                // message already finished: drop any stray content
                if (BufferUtils.hasContent(content)) {
                    if (LOG.isDebugEnabled()) {
                        // NOTE(review): message says COMPLETING but this is the END state
                        LOG.debug("discarding content in COMPLETING");
                    }
                    BufferUtils.clear(content);
                }
                return Result.DONE;
            default:
                throw new IllegalStateException();
        }
    }
    /**
     * Handles content while in the COMMITTED state: wraps it in a chunk
     * header when chunking and advances to COMPLETING when 'last' is set.
     */
    private Result committed(ByteBuffer chunk, ByteBuffer content, boolean last) {
        int len = BufferUtils.length(content);
        // handle the content.
        if (len > 0) {
            if (isChunking()) {
                if (chunk == null) {
                    return Result.NEED_CHUNK;
                }
                BufferUtils.clearToFill(chunk);
                prepareChunk(chunk, len);
                BufferUtils.flipToFlush(chunk, 0);
            }
            contentPrepared += len;
        }
        if (last) {
            // CONTINUE (not DONE) when there was nothing to flush, so the
            // caller re-enters and the COMPLETING work (last chunk) happens.
            state = State.COMPLETING;
            return len > 0 ? Result.FLUSH : Result.CONTINUE;
        }
        return len > 0 ? Result.FLUSH : Result.DONE;
    }
    /**
     * Handles the COMPLETING state: discards any stray content, then emits the
     * trailers (if a supplier was set) or the terminating last-chunk when
     * chunking, and finally moves to END.
     */
    private Result completing(ByteBuffer chunk, ByteBuffer content) {
        if (BufferUtils.hasContent(content)) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("discarding content in COMPLETING");
            }
            BufferUtils.clear(content);
        }
        if (isChunking()) {
            if (trailers != null) {
                // Do we need a chunk buffer?  Trailers need more room than a plain chunk header.
                if (chunk == null || chunk.capacity() <= CHUNK_SIZE) {
                    return Result.NEED_CHUNK_TRAILER;
                }
                HttpFields trailers = this.trailers.get();
                if (trailers != null) {
                    // Write the last chunk (zero chunk + trailer fields + CRLF)
                    BufferUtils.clearToFill(chunk);
                    generateTrailers(chunk, trailers);
                    BufferUtils.flipToFlush(chunk, 0);
                    // leave chunking mode so the next call falls through to END
                    endOfContent = EndOfContent.UNKNOWN_CONTENT;
                    return Result.FLUSH;
                }
            }
            // Do we need a chunk buffer?
            if (chunk == null) {
                return Result.NEED_CHUNK;
            }
            // Write the last chunk
            BufferUtils.clearToFill(chunk);
            prepareChunk(chunk, 0);
            BufferUtils.flipToFlush(chunk, 0);
            endOfContent = EndOfContent.UNKNOWN_CONTENT;
            return Result.FLUSH;
        }
        state = State.END;
        // non-persistent connections must be shut down to signal EOF
        return Boolean.TRUE.equals(persistent) ? Result.DONE : Result.SHUTDOWN_OUT;
    }
    /**
     * Generates the next piece of an HTTP response.  Callers invoke this
     * repeatedly, acting on each returned {@link Result} until DONE.
     *
     * @param info    response metadata (version, status, reason, fields)
     * @param head    true for a response to a HEAD request (no body chunks)
     * @param header  buffer the response header is built into
     * @param chunk   small buffer used for chunk headers when chunking
     * @param content content to send with this call, may be null
     * @param last    true if this is the final piece of content
     * @return the next action required of the caller
     */
    public Result generateResponse(MetaData.Response info, boolean head, ByteBuffer header, ByteBuffer chunk, ByteBuffer content, boolean last) {
        switch (state) {
            case START: {
                if (info == null) {
                    return Result.NEED_INFO;
                }
                HttpVersion version = info.getHttpVersion();
                if (version == null) {
                    throw new BadMessageException(INTERNAL_SERVER_ERROR_500, "No version");
                }
                if (version == HttpVersion.HTTP_0_9) {
                    // HTTP/0.9 has no header: content only, terminated by EOF
                    persistent = false;
                    endOfContent = EndOfContent.EOF_CONTENT;
                    if (BufferUtils.hasContent(content)) {
                        contentPrepared += content.remaining();
                    }
                    state = last ? State.COMPLETING : State.COMMITTED;
                    return Result.FLUSH;
                }
                // Do we need a response header
                if (header == null) {
                    return Result.NEED_HEADER;
                }
                // prepare the header
                int pos = BufferUtils.flipToFill(header);
                try {
                    // generate ResponseLine
                    generateResponseLine(info, header);
                    // Handle 1xx and no content responses
                    int status = info.getStatus();
                    if (status >= 100 && status < 200) {
                        noContentResponse = true;
                        // 101 Switching Protocols falls through and is treated
                        // as a final response; other 1xx are interim.
                        if (status != HttpStatus.SWITCHING_PROTOCOLS_101) {
                            header.put(HttpTokens.CRLF);
                            state = State.COMPLETING_1XX;
                            return Result.FLUSH;
                        }
                    } else if (status == HttpStatus.NO_CONTENT_204 || status == HttpStatus.NOT_MODIFIED_304) {
                        noContentResponse = true;
                    }
                    generateHeaders(info, header, content, last);
                    // handle the content.
                    int len = BufferUtils.length(content);
                    if (len > 0) {
                        contentPrepared += len;
                        // HEAD responses must not emit chunk framing
                        if (isChunking() && !head) {
                            prepareChunk(header, len);
                        }
                    }
                    state = last ? State.COMPLETING : State.COMMITTED;
                } catch (BadMessageException e) {
                    throw e;
                } catch (BufferOverflowException e) {
                    throw new BadMessageException(INTERNAL_SERVER_ERROR_500, "Response header too large", e);
                } catch (Exception e) {
                    throw new BadMessageException(INTERNAL_SERVER_ERROR_500, e.getMessage(), e);
                } finally {
                    BufferUtils.flipToFlush(header, pos);
                }
                return Result.FLUSH;
            }
            case COMMITTED: {
                return committed(chunk, content, last);
            }
            case COMPLETING_1XX: {
                // interim response flushed: reset so the real response can follow
                reset();
                return Result.DONE;
            }
            case COMPLETING: {
                return completing(chunk, content);
            }
            case END:
                // message already finished: drop any stray content
                if (BufferUtils.hasContent(content)) {
                    if (LOG.isDebugEnabled()) {
                        // NOTE(review): message says COMPLETING but this is the END state
                        LOG.debug("discarding content in COMPLETING");
                    }
                    BufferUtils.clear(content);
                }
                return Result.DONE;
            default:
                throw new IllegalStateException();
        }
    }
private void prepareChunk(ByteBuffer chunk, int remaining) {
// if we need CRLF add this to header
if (needCRLF) {
BufferUtils.putCRLF(chunk);
}
// Add the chunk size to the header
if (remaining > 0) {
BufferUtils.putHexInt(chunk, remaining);
BufferUtils.putCRLF(chunk);
needCRLF = true;
} else {
chunk.put(LAST_CHUNK);
needCRLF = false;
}
}
private void generateTrailers(ByteBuffer buffer, HttpFields trailer) {
// if we need CRLF add this to header
if (needCRLF) {
BufferUtils.putCRLF(buffer);
}
// Add the chunk size to the header
buffer.put(ZERO_CHUNK);
int n = trailer.size();
for (int f = 0; f < n; f++) {
HttpField field = trailer.getField(f);
putTo(field, buffer);
}
BufferUtils.putCRLF(buffer);
}
private void generateRequestLine(MetaData.Request request, ByteBuffer header) {
header.put(StringUtils.getBytes(request.getMethod()));
header.put((byte) ' ');
header.put(StringUtils.getBytes(request.getURIString()));
header.put((byte) ' ');
header.put(request.getHttpVersion().getBytes());
header.put(HttpTokens.CRLF);
}
    /**
     * Writes the response status line, using the pre-encoded line for known
     * status codes when no custom reason phrase is supplied.
     */
    private void generateResponseLine(MetaData.Response response, ByteBuffer header) {
        // Look for prepared response line
        int status = response.getStatus();
        PreparedResponse preprepared = status < PREPARED_RESPONSE.length ? PREPARED_RESPONSE[status] : null;
        String reason = response.getReason();
        if (preprepared != null) {
            if (reason == null) {
                // fully pre-encoded "HTTP/1.1 NNN Reason\r\n"
                header.put(preprepared.responseLine);
            } else {
                // pre-encoded "HTTP/1.1 NNN " prefix + custom reason
                header.put(preprepared.schemeCode);
                header.put(getReasonBytes(reason));
                header.put(HttpTokens.CRLF);
            }
        } else { // generate response line
            header.put(HTTP_1_1_SPACE);
            header.put((byte) ('0' + status / 100));
            header.put((byte) ('0' + (status % 100) / 10));
            header.put((byte) ('0' + (status % 10)));
            header.put((byte) ' ');
            if (reason == null) {
                // no reason phrase known: repeat the status digits as the reason
                header.put((byte) ('0' + status / 100));
                header.put((byte) ('0' + (status % 100) / 10));
                header.put((byte) ('0' + (status % 10)));
            } else {
                header.put(getReasonBytes(reason));
            }
            header.put(HttpTokens.CRLF);
        }
    }
private byte[] getReasonBytes(String reason) {
if (reason.length() > 1024) {
reason = reason.substring(0, 1024);
}
byte[] _bytes = StringUtils.getBytes(reason);
for (int i = _bytes.length; i-- > 0; ) {
if (_bytes[i] == '\r' || _bytes[i] == '\n') {
_bytes[i] = '?';
}
}
return _bytes;
}
    /**
     * Generates the header fields for a request or response and decides how
     * the body will be delimited (none / Content-Length / chunked / EOF) per
     * RFC 7230 section 3.3.3.  Also settles connection persistence and emits
     * the Server/X-Powered-By headers and the blank line ending the header.
     */
    private void generateHeaders(MetaData info, ByteBuffer header, ByteBuffer content, boolean last) {
        final MetaData.Request request = (info instanceof MetaData.Request) ? (MetaData.Request) info : null;
        final MetaData.Response response = (info instanceof MetaData.Response) ? (MetaData.Response) info : null;
        if (LOG.isDebugEnabled()) {
            LOG.debug("generateHeaders {} last={} content={}", info, last, BufferUtils.toDetailString(content));
            LOG.debug(info.getFields().toString());
        }
        // default field values
        int send = this.send;
        HttpField transfer_encoding = null;
        boolean http11 = info.getHttpVersion() == HttpVersion.HTTP_1_1;
        boolean close = false;
        // trailers are only legal with HTTP/1.1 chunking
        trailers = http11 ? info.getTrailerSupplier() : null;
        boolean chunked_hint = trailers != null;
        boolean content_type = false;
        long content_length = info.getContentLength();
        boolean content_length_field = false;
        // Generate fields
        HttpFields fields = info.getFields();
        if (fields != null) {
            int n = fields.size();
            for (int f = 0; f < n; f++) {
                HttpField field = fields.getField(f);
                HttpHeader h = field.getHeader();
                if (h == null) {
                    putTo(field, header);
                } else {
                    switch (h) {
                        case CONTENT_LENGTH:
                            // not written here: emitted below once the body
                            // delimiting strategy is decided
                            if (content_length < 0) {
                                content_length = field.getLongValue();
                            } else if (content_length != field.getLongValue()) {
                                throw new BadMessageException(INTERNAL_SERVER_ERROR_500, String.format("Incorrect Content-Length %d!=%d", content_length, field.getLongValue()));
                            }
                            content_length_field = true;
                            break;
                        case CONTENT_TYPE: {
                            // write the field to the header
                            content_type = true;
                            putTo(field, header);
                            break;
                        }
                        case TRANSFER_ENCODING: {
                            if (http11) {
                                // Don't add yet, treat this only as a hint that there is content
                                // with a preference to chunk if we can
                                transfer_encoding = field;
                                chunked_hint = field.contains(HttpHeaderValue.CHUNKED.getValue());
                            }
                            break;
                        }
                        case CONNECTION: {
                            putTo(field, header);
                            if (field.contains(HttpHeaderValue.CLOSE.getValue())) {
                                close = true;
                                persistent = false;
                            }
                            if (info.getHttpVersion() == HttpVersion.HTTP_1_0 && persistent == null && field.contains(HttpHeaderValue.KEEP_ALIVE.getValue())) {
                                persistent = true;
                            }
                            break;
                        }
                        case SERVER: {
                            // an explicit Server field suppresses the generated one
                            send = send & ~SEND_SERVER;
                            putTo(field, header);
                            break;
                        }
                        default:
                            putTo(field, header);
                    }
                }
            }
        }
        // Can we work out the content length?
        if (last && content_length < 0 && trailers == null) {
            content_length = contentPrepared + BufferUtils.length(content);
        }
        // Calculate how to end _content and connection, _content length and transfer encoding
        // settings from http://tools.ietf.org/html/rfc7230#section-3.3.3
        boolean assumed_content_request = request != null && Boolean.TRUE.equals(ASSUMED_CONTENT_METHODS.get(request.getMethod()));
        boolean assumed_content = assumed_content_request || content_type || chunked_hint;
        boolean nocontent_request = request != null && content_length <= 0 && !assumed_content;
        if (persistent == null) {
            persistent = http11 || (request != null && HttpMethod.CONNECT.is(request.getMethod()));
        }
        // If the message is known not to have content
        if (noContentResponse || nocontent_request) {
            // We don't need to indicate a body length
            endOfContent = EndOfContent.NO_CONTENT;
            // But it is an error if there actually is content
            if (contentPrepared > 0 || content_length > 0) {
                if (contentPrepared == 0 && last) {
                    // TODO discard content for backward compatibility with 9.3 releases
                    // TODO review if it is still needed in 9.4 or can we just throw.
                    content.clear();
                } else {
                    throw new BadMessageException(INTERNAL_SERVER_ERROR_500, "Content for no content response");
                }
            }
        }
        // Else if we are HTTP/1.1, and the content length is unknown, and we are either persistent
        // or it is a request with content (which cannot EOF), or the app has requested chunk
        else if (http11 && (chunked_hint || content_length < 0 && (persistent || assumed_content_request))) {
            // we use chunk
            endOfContent = EndOfContent.CHUNKED_CONTENT;
            // try to use user supplied encoding as it may have other values.
            if (transfer_encoding == null)
                header.put(TRANSFER_ENCODING_CHUNKED);
            else if (transfer_encoding.toString().endsWith(HttpHeaderValue.CHUNKED.toString())) {
                putTo(transfer_encoding, header);
                transfer_encoding = null;
            } else if (!chunked_hint) {
                // user encoding lacks chunked: append it so chunking is declared
                putTo(new HttpField(HttpHeader.TRANSFER_ENCODING, transfer_encoding.getValue() + ",chunked"), header);
                transfer_encoding = null;
            } else {
                // chunked present but not the final encoding — illegal per RFC 7230
                throw new BadMessageException(INTERNAL_SERVER_ERROR_500, "Bad Transfer-Encoding");
            }
        }
        // Else if we have known the content length and are a request or a persistent response,
        else if (content_length >= 0 && (request != null || persistent)) {
            // Use the content length
            endOfContent = EndOfContent.CONTENT_LENGTH;
            putContentLength(header, content_length);
        }
        // Else if we are a response
        else if (response != null) {
            // We must use EOF - even if we were trying to be persistent
            endOfContent = EndOfContent.EOF_CONTENT;
            persistent = false;
            if (content_length >= 0 && (content_length > 0 || assumed_content || content_length_field)) {
                putContentLength(header, content_length);
            }
            if (http11 && !close) {
                header.put(CONNECTION_CLOSE);
            }
        }
        // Else we must be a request
        else {
            // with no way to indicate body length
            throw new BadMessageException(INTERNAL_SERVER_ERROR_500, "Unknown content length for request");
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug(endOfContent.toString());
        }
        // Add transfer encoding if it is not chunking
        if (transfer_encoding != null) {
            if (chunked_hint) {
                String v = transfer_encoding.getValue();
                int c = v.lastIndexOf(',');
                // NOTE(review): String.lastIndexOf(str, fromIndex) searches
                // backwards from 'c', so its result is always <= c and this
                // condition appears to be always false (the field is silently
                // dropped).  Verify the intended check before changing.
                if (c > 0 && v.lastIndexOf(HttpHeaderValue.CHUNKED.toString(), c) > c) {
                    putTo(new HttpField(HttpHeader.TRANSFER_ENCODING, v.substring(0, c).trim()), header);
                }
            } else {
                putTo(transfer_encoding, header);
            }
        }
        // Send server?
        int status = response != null ? response.getStatus() : -1;
        if (status > 199) {
            // final responses only: emit Server / X-Powered-By as configured
            header.put(SEND[send]);
        }
        // end the header.
        header.put(HttpTokens.CRLF);
    }
@Override
public String toString() {
return String.format("%s@%x{s=%s}",
getClass().getSimpleName(),
hashCode(),
state);
}
    // states
    /** Generator lifecycle states. */
    public enum State {
        START,          // no message generation started (or reset)
        COMMITTED,      // header generated; content may follow
        COMPLETING,     // last content seen; finishing (last-chunk/trailers)
        COMPLETING_1XX, // interim 1xx flushed; next call resets the generator
        END             // message complete
    }

    /** Actions the caller must take between calls to the generate methods. */
    public enum Result {
        NEED_CHUNK, // Need a small chunk buffer of CHUNK_SIZE
        NEED_INFO, // Need the request/response metadata info
        NEED_HEADER, // Need buffer to build HTTP headers into
        NEED_CHUNK_TRAILER, // Need a large chunk buffer for last chunk and trailers
        FLUSH, // The buffers previously generated should be flushed
        CONTINUE, // Continue generating the message
        SHUTDOWN_OUT, // Need EOF to be signaled
        DONE // The current phase of generation is complete
    }

    // Build cache of response lines for status
    private static class PreparedResponse {
        byte[] reason;       // reason phrase bytes only (no CRLF)
        byte[] schemeCode;   // "HTTP/1.1 NNN " prefix
        byte[] responseLine; // complete pre-encoded status line including CRLF
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.operator;
import com.google.common.collect.ImmutableList;
import io.airlift.units.DataSize;
import io.prestosql.ExceededMemoryLimitException;
import io.prestosql.operator.TopNOperator.TopNOperatorFactory;
import io.prestosql.spi.Page;
import io.prestosql.sql.planner.plan.PlanNodeId;
import io.prestosql.testing.MaterializedResult;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.units.DataSize.Unit.BYTE;
import static io.prestosql.RowPagesBuilder.rowPagesBuilder;
import static io.prestosql.SessionTestUtils.TEST_SESSION;
import static io.prestosql.operator.OperatorAssertion.assertOperatorEquals;
import static io.prestosql.spi.block.SortOrder.ASC_NULLS_LAST;
import static io.prestosql.spi.block.SortOrder.DESC_NULLS_LAST;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.DoubleType.DOUBLE;
import static io.prestosql.spi.type.VarcharType.VARCHAR;
import static io.prestosql.testing.MaterializedResult.resultBuilder;
import static io.prestosql.testing.TestingTaskContext.createTaskContext;
import static io.prestosql.testing.assertions.Assert.assertEquals;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static org.testng.Assert.fail;
@Test(singleThreaded = true)
public class TestTopNOperator
{
private ExecutorService executor;
private ScheduledExecutorService scheduledExecutor;
private DriverContext driverContext;
@BeforeMethod
public void setUp()
{
executor = newCachedThreadPool(daemonThreadsNamed("test-executor-%s"));
scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed("test-scheduledExecutor-%s"));
driverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION)
.addPipelineContext(0, true, true, false)
.addDriverContext();
}
@AfterMethod
public void tearDown()
{
executor.shutdownNow();
scheduledExecutor.shutdownNow();
}
@Test
public void testSingleFieldKey()
{
List<Page> input = rowPagesBuilder(BIGINT, DOUBLE)
.row(1L, 0.1)
.row(2L, 0.2)
.pageBreak()
.row(-1L, -0.1)
.row(4L, 0.4)
.pageBreak()
.row(5L, 0.5)
.row(4L, 0.41)
.row(6L, 0.6)
.pageBreak()
.build();
TopNOperatorFactory operatorFactory = new TopNOperatorFactory(
0,
new PlanNodeId("test"),
ImmutableList.of(BIGINT, DOUBLE),
2,
ImmutableList.of(0),
ImmutableList.of(DESC_NULLS_LAST));
MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT, DOUBLE)
.row(6L, 0.6)
.row(5L, 0.5)
.build();
assertOperatorEquals(operatorFactory, driverContext, input, expected);
}
@Test
public void testMultiFieldKey()
{
List<Page> input = rowPagesBuilder(VARCHAR, BIGINT)
.row("a", 1L)
.row("b", 2L)
.pageBreak()
.row("f", 3L)
.row("a", 4L)
.pageBreak()
.row("d", 5L)
.row("d", 7L)
.row("e", 6L)
.build();
TopNOperatorFactory operatorFactory = new TopNOperatorFactory(
0,
new PlanNodeId("test"),
ImmutableList.of(VARCHAR, BIGINT),
3,
ImmutableList.of(0, 1),
ImmutableList.of(DESC_NULLS_LAST, DESC_NULLS_LAST));
MaterializedResult expected = MaterializedResult.resultBuilder(driverContext.getSession(), VARCHAR, BIGINT)
.row("f", 3L)
.row("e", 6L)
.row("d", 7L)
.build();
assertOperatorEquals(operatorFactory, driverContext, input, expected);
}
@Test
public void testReverseOrder()
{
List<Page> input = rowPagesBuilder(BIGINT, DOUBLE)
.row(1L, 0.1)
.row(2L, 0.2)
.pageBreak()
.row(-1L, -0.1)
.row(4L, 0.4)
.pageBreak()
.row(5L, 0.5)
.row(4L, 0.41)
.row(6L, 0.6)
.pageBreak()
.build();
TopNOperatorFactory operatorFactory = new TopNOperatorFactory(
0,
new PlanNodeId("test"),
ImmutableList.of(BIGINT, DOUBLE),
2,
ImmutableList.of(0),
ImmutableList.of(ASC_NULLS_LAST));
MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT, DOUBLE)
.row(-1L, -0.1)
.row(1L, 0.1)
.build();
assertOperatorEquals(operatorFactory, driverContext, input, expected);
}
@Test
public void testLimitZero()
        throws Exception
{
    // With n == 0 the operator must be finished immediately: it accepts no
    // input and produces no output. (The previously built input page was
    // never fed to the operator, so the unused local has been removed.)
    TopNOperatorFactory factory = new TopNOperatorFactory(
            0,
            new PlanNodeId("test"),
            ImmutableList.of(BIGINT),
            0,
            ImmutableList.of(0),
            ImmutableList.of(DESC_NULLS_LAST));

    try (Operator operator = factory.createOperator(driverContext)) {
        assertEquals(operator.isFinished(), true);
        assertEquals(operator.needsInput(), false);
        assertEquals(operator.getOutput(), null);
    }
}
@Test
public void testExceedMemoryLimit()
        throws Exception
{
    List<Page> input = rowPagesBuilder(BIGINT)
            .row(1L)
            .build();

    // A 1-byte memory limit guarantees the very first page blows the budget.
    // (Local renamed: "smallDiverContext" was a typo for "smallDriverContext".)
    DriverContext smallDriverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION, new DataSize(1, BYTE))
            .addPipelineContext(0, true, true, false)
            .addDriverContext();

    TopNOperatorFactory operatorFactory = new TopNOperatorFactory(
            0,
            new PlanNodeId("test"),
            ImmutableList.of(BIGINT),
            100,
            ImmutableList.of(0),
            ImmutableList.of(ASC_NULLS_LAST));

    try (Operator operator = operatorFactory.createOperator(smallDriverContext)) {
        operator.addInput(input.get(0));
        fail("must fail because of exceeding local memory limit");
    }
    catch (ExceededMemoryLimitException ignore) {
        // expected: exceeding the 1-byte limit is the point of this test
    }
}
}
| |
package com.wafflehaus.wearyouatt;
import android.app.Activity;
import android.content.Intent;
import android.content.IntentSender;
import android.content.res.Resources;
import android.os.Bundle;
import android.support.wearable.activity.ConfirmationActivity;
import android.support.wearable.view.GridViewPager;
import android.util.Log;
import android.view.View;
import android.view.View.OnApplyWindowInsetsListener;
import android.view.WindowInsets;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.GoogleApiClient.ConnectionCallbacks;
import com.google.android.gms.common.api.GoogleApiClient.OnConnectionFailedListener;
import com.google.android.gms.common.api.PendingResult;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.common.api.Status;
import com.google.android.gms.common.data.FreezableUtils;
import com.google.android.gms.wearable.DataApi;
import com.google.android.gms.wearable.DataApi.DataItemResult;
import com.google.android.gms.wearable.DataEvent;
import com.google.android.gms.wearable.DataEventBuffer;
import com.google.android.gms.wearable.MessageApi;
import com.google.android.gms.wearable.MessageEvent;
import com.google.android.gms.wearable.Node;
import com.google.android.gms.wearable.NodeApi;
import com.google.android.gms.wearable.PutDataMapRequest;
import com.google.android.gms.wearable.PutDataRequest;
import com.google.android.gms.wearable.Wearable;
import com.google.android.gms.wearable.WearableStatusCodes;
/**
 * Wear activity that lets the user pick a presence status from a grid pager
 * and broadcasts it to every connected node via the Wearable Message API.
 * Also listens for data/message/node events while connected.
 */
public class MainActivity extends Activity implements DataApi.DataListener,
        MessageApi.MessageListener, NodeApi.NodeListener, ConnectionCallbacks,
        OnConnectionFailedListener {

    private static final String TAG = "MainActivity";

    /** Request code for launching the Intent to resolve Google Play services errors. */
    private static final int REQUEST_RESOLVE_ERROR = 1000;

    private GoogleApiClient mGoogleApiClient;

    // True while a Play-services connection error is being resolved through an
    // Intent; suppresses reconnect attempts until the resolution completes.
    private boolean mResolvingError = false;

    // Periodic DataItem generator. NOTE: scheduling is currently disabled
    // (see the commented-out code in onResume()), so these may stay null.
    private ScheduledExecutorService mGeneratorExecutor;
    private ScheduledFuture<?> mDataItemGeneratorFuture;

    private static final String COUNT_KEY = "count";
    private static final String COUNT_PATH = "/count";
    private static final String PRESENCE_KEY = "presence";
    private static final String PRESENCE_PATH = "/presence";

    private SampleGridPagerAdapter sampleGridPagerAdapter;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        final Resources res = getResources();
        final GridViewPager pager = (GridViewPager) findViewById(R.id.pager);
        pager.setOnApplyWindowInsetsListener(new OnApplyWindowInsetsListener() {
            @Override
            public WindowInsets onApplyWindowInsets(View v, WindowInsets insets) {
                // Adjust page margins: a little extra horizontal spacing
                // between pages looks less crowded on a round display.
                final boolean round = insets.isRound();
                int rowMargin = res.getDimensionPixelOffset(R.dimen.page_row_margin);
                int colMargin = res.getDimensionPixelOffset(round ?
                        R.dimen.page_column_margin_round : R.dimen.page_column_margin);
                pager.setPageMargins(rowMargin, colMargin);
                return insets;
            }
        });
        sampleGridPagerAdapter = new SampleGridPagerAdapter(this, getFragmentManager());
        pager.setAdapter(sampleGridPagerAdapter);
        mGoogleApiClient = new GoogleApiClient.Builder(this)
                .addApi(Wearable.API)
                .addConnectionCallbacks(this)
                .addOnConnectionFailedListener(this)
                .build();
    }

    /**
     * Sends {@code presence} as an RPC message to every currently connected node.
     *
     * @param presence presence status text; its UTF bytes become the message payload
     */
    private void fireMessage(final String presence) {
        // Send the RPC to all connected nodes.
        PendingResult<NodeApi.GetConnectedNodesResult> nodes =
                Wearable.NodeApi.getConnectedNodes(mGoogleApiClient);
        nodes.setResultCallback(new ResultCallback<NodeApi.GetConnectedNodesResult>() {
            @Override
            public void onResult(NodeApi.GetConnectedNodesResult result) {
                for (int i = 0; i < result.getNodes().size(); i++) {
                    Node node = result.getNodes().get(i);
                    String nName = node.getDisplayName();
                    String nId = node.getId();
                    Log.d(TAG, "Node name and ID: " + nName + " | " + nId);
                    // NOTE(review): a brand-new MessageListener is registered on
                    // every call (and per node) and is never removed, which leaks
                    // listeners over time — consider registering once in onConnected.
                    Wearable.MessageApi.addListener(mGoogleApiClient, new MessageApi.MessageListener() {
                        @Override
                        public void onMessageReceived(MessageEvent messageEvent) {
                            Log.d(TAG, "Message received: " + messageEvent);
                        }
                    });
                    PendingResult<MessageApi.SendMessageResult> messageResult =
                            Wearable.MessageApi.sendMessage(mGoogleApiClient, node.getId(),
                                    "/ContactListActivity", presence.getBytes());
                    messageResult.setResultCallback(new ResultCallback<MessageApi.SendMessageResult>() {
                        @Override
                        public void onResult(MessageApi.SendMessageResult sendMessageResult) {
                            Status status = sendMessageResult.getStatus();
                            Log.d(TAG, "Status: " + status.toString());
                            if (status.getStatusCode() != WearableStatusCodes.SUCCESS) {
                                //alertButton.setProgress(-1);
                                //label.setText("Tap to retry. Alert not sent :(");
                            }
                        }
                    });
                }
            }
        });
    }

    @Override
    protected void onStart() {
        super.onStart();
        // Do not reconnect while an error resolution Intent is in flight.
        if (!mResolvingError) {
            mGoogleApiClient.connect();
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        /*mDataItemGeneratorFuture = mGeneratorExecutor.scheduleWithFixedDelay(
                new DataItemGenerator(), 1, 5, TimeUnit.SECONDS);*/
    }

    @Override
    public void onPause() {
        super.onPause();
        // BUGFIX: the generator is never scheduled (scheduling in onResume() is
        // commented out), so this future can be null; the previous unconditional
        // cancel() crashed with a NullPointerException on every pause.
        if (mDataItemGeneratorFuture != null) {
            mDataItemGeneratorFuture.cancel(true /* mayInterruptIfRunning */);
        }
    }

    @Override
    protected void onStop() {
        // Deregister all Wearable listeners before disconnecting, unless an
        // error resolution is in progress (the client may not be connected).
        if (!mResolvingError) {
            Wearable.DataApi.removeListener(mGoogleApiClient, this);
            Wearable.MessageApi.removeListener(mGoogleApiClient, this);
            Wearable.NodeApi.removeListener(mGoogleApiClient, this);
            mGoogleApiClient.disconnect();
        }
        super.onStop();
    }

    /** Click handler: broadcasts the currently selected presence option. */
    public void confirm(View view) {
        fireMessage(sampleGridPagerAdapter.selectedOption);
    }

    /** Shows the standard Wear success confirmation animation. */
    public void successConfirmationActivityAnimation() {
        startConfirmationActivity(ConfirmationActivity.SUCCESS_ANIMATION,
                "Status set!");
    }

    private void startConfirmationActivity(int animationType, String message) {
        Intent confirmationActivity = new Intent(this, ConfirmationActivity.class)
                .setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_NO_ANIMATION)
                .putExtra(ConfirmationActivity.EXTRA_ANIMATION_TYPE, animationType)
                .putExtra(ConfirmationActivity.EXTRA_MESSAGE, message);
        startActivity(confirmationActivity);
    }

    @Override //OnConnectionFailedListener
    public void onConnectionFailed(ConnectionResult result) {
        if (mResolvingError) {
            // Already attempting to resolve an error.
            return;
        } else if (result.hasResolution()) {
            try {
                mResolvingError = true;
                result.startResolutionForResult(this, REQUEST_RESOLVE_ERROR);
            } catch (IntentSender.SendIntentException e) {
                // There was an error with the resolution intent. Try again.
                mGoogleApiClient.connect();
            }
        } else {
            Log.e(TAG, "Connection to Google API client has failed");
            mResolvingError = false;
            Wearable.DataApi.removeListener(mGoogleApiClient, this);
            Wearable.MessageApi.removeListener(mGoogleApiClient, this);
            Wearable.NodeApi.removeListener(mGoogleApiClient, this);
        }
    }

    @Override
    public void onConnected(Bundle arg0) {
        LOGD(TAG, "Google API Client was connected");
        mResolvingError = false;
        Wearable.DataApi.addListener(mGoogleApiClient, this);
        Wearable.MessageApi.addListener(mGoogleApiClient, this);
        Wearable.NodeApi.addListener(mGoogleApiClient, this);
    }

    @Override
    public void onConnectionSuspended(int arg0) {
        LOGD(TAG, "Connection to Google API client was suspended");
    }

    @Override
    public void onPeerConnected(Node arg0) {
        // No-op: peer connection events are not used by this activity.
    }

    @Override
    public void onPeerDisconnected(Node arg0) {
        // No-op: peer disconnection events are not used by this activity.
    }

    @Override
    public void onMessageReceived(MessageEvent arg0) {
        // No-op: incoming messages are handled by ad-hoc listeners in fireMessage().
    }

    @Override
    public void onDataChanged(DataEventBuffer dataEvents) {
        LOGD(TAG, "onDataChanged: " + dataEvents);
        // Freeze the events before closing the buffer so they remain usable
        // after this callback returns.
        final List<DataEvent> events = FreezableUtils.freezeIterable(dataEvents);
        dataEvents.close();
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                for (DataEvent event : events) {
                    /*if (event.getType() == DataEvent.TYPE_CHANGED) {
                        mDataItemListAdapter.add(
                                new Event("DataItem Changed", event.getDataItem().toString()));
                    } else if (event.getType() == DataEvent.TYPE_DELETED) {
                        mDataItemListAdapter.add(
                                new Event("DataItem Deleted", event.getDataItem().toString()));
                    }*/
                }
            }
        });
    }

    /**
     * A simple wrapper around Log.d that only logs when DEBUG is loggable for the tag.
     */
    private static void LOGD(final String tag, String message) {
        if (Log.isLoggable(tag, Log.DEBUG)) {
            Log.d(tag, message);
        }
    }

    /** Generates a DataItem based on an incrementing count. */
    /*
    private class DataItemGenerator implements Runnable {
        private int count = 0;
        @Override
        public void run() {
            PutDataMapRequest putDataMapRequest = PutDataMapRequest.create(COUNT_PATH);
            putDataMapRequest.getDataMap().putInt(COUNT_KEY, count++);
            PutDataRequest request = putDataMapRequest.asPutDataRequest();
            LOGD(TAG, "Generating DataItem: " + request);
            if (!mGoogleApiClient.isConnected()) {
                return;
            }
            Wearable.DataApi.putDataItem(mGoogleApiClient, request)
                    .setResultCallback(new ResultCallback<DataItemResult>() {
                        @Override
                        public void onResult(DataItemResult dataItemResult) {
                            if (!dataItemResult.getStatus().isSuccess()) {
                                Log.e(TAG, "ERROR: failed to putDataItem, status code: "
                                        + dataItemResult.getStatus().getStatusCode());
                            }
                        }
                    });
        }
    }*/
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.factories;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.format.DecodingFormat;
import org.apache.flink.table.connector.format.EncodingFormat;
import org.apache.flink.table.connector.sink.DynamicTableSink;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.connector.source.ScanTableSource;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.factories.FactoryUtil.TableFactoryHelper;
import javax.annotation.Nullable;
import java.util.HashSet;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import static org.apache.flink.table.factories.FactoryUtil.FORMAT;
import static org.apache.flink.table.factories.FactoryUtil.KEY_FORMAT;
import static org.apache.flink.table.factories.FactoryUtil.VALUE_FORMAT;
/**
* Test implementations for {@link DynamicTableSourceFactory} and {@link DynamicTableSinkFactory}.
*/
/**
 * Test implementations for {@link DynamicTableSourceFactory} and {@link DynamicTableSinkFactory}.
 *
 * <p>The produced source/sink mocks are inert (all runtime methods return
 * {@code null}); they only record the discovered options and formats so that
 * tests can compare them via {@code equals}.
 */
public final class TestDynamicTableFactory implements DynamicTableSourceFactory, DynamicTableSinkFactory {

    public static final String IDENTIFIER = "test-connector";

    public static final ConfigOption<String> TARGET = ConfigOptions
            .key("target")
            .stringType()
            .noDefaultValue();

    public static final ConfigOption<Long> BUFFER_SIZE = ConfigOptions
            .key("buffer-size")
            .longType()
            .defaultValue(100L);

    @Override
    public DynamicTableSource createDynamicTableSource(Context context) {
        final TableFactoryHelper factoryHelper = FactoryUtil.createTableFactoryHelper(this, context);
        // A dedicated key format is optional.
        final Optional<DecodingFormat<DeserializationSchema<RowData>>> keyDecodingFormat =
                factoryHelper.discoverOptionalDecodingFormat(
                        DeserializationFormatFactory.class,
                        KEY_FORMAT);
        // The value format may be declared under 'format'; otherwise fall back
        // to the mandatory 'value.format' key.
        final Optional<DecodingFormat<DeserializationSchema<RowData>>> mainDecodingFormat =
                factoryHelper.discoverOptionalDecodingFormat(
                        DeserializationFormatFactory.class,
                        FORMAT);
        final DecodingFormat<DeserializationSchema<RowData>> valueDecodingFormat =
                mainDecodingFormat.orElseGet(() ->
                        factoryHelper.discoverDecodingFormat(
                                DeserializationFormatFactory.class,
                                VALUE_FORMAT));
        factoryHelper.validate();
        return new DynamicTableSourceMock(
                factoryHelper.getOptions().get(TARGET),
                keyDecodingFormat.orElse(null),
                valueDecodingFormat);
    }

    @Override
    public DynamicTableSink createDynamicTableSink(Context context) {
        final TableFactoryHelper factoryHelper = FactoryUtil.createTableFactoryHelper(this, context);
        // A dedicated key format is optional.
        final Optional<EncodingFormat<SerializationSchema<RowData>>> keyEncodingFormat =
                factoryHelper.discoverOptionalEncodingFormat(
                        SerializationFormatFactory.class,
                        KEY_FORMAT);
        // 'format' takes precedence; otherwise 'value.format' must resolve.
        final Optional<EncodingFormat<SerializationSchema<RowData>>> mainEncodingFormat =
                factoryHelper.discoverOptionalEncodingFormat(
                        SerializationFormatFactory.class,
                        FORMAT);
        final EncodingFormat<SerializationSchema<RowData>> valueEncodingFormat =
                mainEncodingFormat.orElseGet(() ->
                        factoryHelper.discoverEncodingFormat(
                                SerializationFormatFactory.class,
                                VALUE_FORMAT));
        factoryHelper.validate();
        return new DynamicTableSinkMock(
                factoryHelper.getOptions().get(TARGET),
                factoryHelper.getOptions().get(BUFFER_SIZE),
                keyEncodingFormat.orElse(null),
                valueEncodingFormat);
    }

    @Override
    public String factoryIdentifier() {
        return IDENTIFIER;
    }

    @Override
    public Set<ConfigOption<?>> requiredOptions() {
        final Set<ConfigOption<?>> required = new HashSet<>();
        required.add(TARGET);
        return required;
    }

    @Override
    public Set<ConfigOption<?>> optionalOptions() {
        final Set<ConfigOption<?>> optional = new HashSet<>();
        optional.add(BUFFER_SIZE);
        optional.add(KEY_FORMAT);
        optional.add(FORMAT);
        optional.add(VALUE_FORMAT);
        return optional;
    }

    // --------------------------------------------------------------------------------------------
    // Table source
    // --------------------------------------------------------------------------------------------

    /**
     * {@link DynamicTableSource} for testing.
     */
    public static class DynamicTableSourceMock implements ScanTableSource {

        public final String target;
        public final @Nullable DecodingFormat<DeserializationSchema<RowData>> keyFormat;
        public final DecodingFormat<DeserializationSchema<RowData>> valueFormat;

        DynamicTableSourceMock(
                String target,
                @Nullable DecodingFormat<DeserializationSchema<RowData>> keyFormat,
                DecodingFormat<DeserializationSchema<RowData>> valueFormat) {
            this.target = target;
            this.keyFormat = keyFormat;
            this.valueFormat = valueFormat;
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return null;
        }

        @Override
        public ScanRuntimeProvider getScanRuntimeProvider(ScanContext runtimeProviderContext) {
            return null;
        }

        @Override
        public DynamicTableSource copy() {
            return null;
        }

        @Override
        public String asSummaryString() {
            return null;
        }

        @Override
        public boolean equals(Object o) {
            if (o == this) {
                return true;
            }
            if (o == null || o.getClass() != getClass()) {
                return false;
            }
            final DynamicTableSourceMock other = (DynamicTableSourceMock) o;
            return target.equals(other.target)
                    && Objects.equals(keyFormat, other.keyFormat)
                    && valueFormat.equals(other.valueFormat);
        }

        @Override
        public int hashCode() {
            return Objects.hash(target, keyFormat, valueFormat);
        }
    }

    // --------------------------------------------------------------------------------------------
    // Table sink
    // --------------------------------------------------------------------------------------------

    /**
     * {@link DynamicTableSink} for testing.
     */
    public static class DynamicTableSinkMock implements DynamicTableSink {

        public final String target;
        public final Long bufferSize;
        public final @Nullable EncodingFormat<SerializationSchema<RowData>> keyFormat;
        public final EncodingFormat<SerializationSchema<RowData>> valueFormat;

        DynamicTableSinkMock(
                String target,
                Long bufferSize,
                @Nullable EncodingFormat<SerializationSchema<RowData>> keyFormat,
                EncodingFormat<SerializationSchema<RowData>> valueFormat) {
            this.target = target;
            this.bufferSize = bufferSize;
            this.keyFormat = keyFormat;
            this.valueFormat = valueFormat;
        }

        @Override
        public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
            return null;
        }

        @Override
        public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {
            return null;
        }

        @Override
        public DynamicTableSink copy() {
            return null;
        }

        @Override
        public String asSummaryString() {
            return null;
        }

        @Override
        public boolean equals(Object o) {
            if (o == this) {
                return true;
            }
            if (o == null || o.getClass() != getClass()) {
                return false;
            }
            final DynamicTableSinkMock other = (DynamicTableSinkMock) o;
            return target.equals(other.target)
                    && bufferSize.equals(other.bufferSize)
                    && Objects.equals(keyFormat, other.keyFormat)
                    && valueFormat.equals(other.valueFormat);
        }

        @Override
        public int hashCode() {
            return Objects.hash(target, bufferSize, keyFormat, valueFormat);
        }
    }
}
| |
package org.khan.solver.cubesolver.puzzle.cube;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.khan.solver.cubesolver.Keywords;
import org.khan.solver.cubesolver.puzzle.PuzzlePiece;
import org.khan.solver.cubesolver.puzzle.piece.sides.PieceSide;
import org.khan.solver.cubesolver.puzzle.piece.sides.PieceSideKey;
import org.khan.solver.cubesolver.puzzle.piece.sides.SideNames;
/**
* This class represents a cube having all the required objects. A complete cube is represented
* by six puzzle pieces joined together. Each puzzle piece is assigned to a specific position in
* a cube. Class also provides function to get sides of different pieces which needed to be joined
* to make a complete cube.
*
* @author SDK
*
*/
public class Cube {

    // cube bottom face (assumed valid; getConnectedSides relies on it as anchor)
    private PuzzlePiece bottomface;
    // left face of cube
    private PuzzlePiece leftface;
    // right face of cube
    private PuzzlePiece rightface;
    // front face of cube
    private PuzzlePiece frontface;
    // top face of cube
    private PuzzlePiece topface;
    // back face of cube
    private PuzzlePiece backface;
    // map referencing same objects of cube to provide easy access to different faces of cube based on id
    private Map<String, PuzzlePiece> puzzlePieceMap;

    /**
     * Copy constructor: deep-copies every non-null face of the given cube.
     *
     * @param objectToCopy cube to duplicate
     */
    public Cube(Cube objectToCopy) {
        puzzlePieceMap = new HashMap<String, PuzzlePiece>();
        if (objectToCopy.bottomface != null)
            setBottomface(new PuzzlePiece(objectToCopy.bottomface));
        if (objectToCopy.leftface != null)
            setLeftface(new PuzzlePiece(objectToCopy.leftface));
        if (objectToCopy.rightface != null)
            setRightface(new PuzzlePiece(objectToCopy.rightface));
        if (objectToCopy.frontface != null)
            setFrontface(new PuzzlePiece(objectToCopy.frontface));
        if (objectToCopy.topface != null)
            setTopface(new PuzzlePiece(objectToCopy.topface));
        if (objectToCopy.backface != null)
            setBackface(new PuzzlePiece(objectToCopy.backface));
    }

    /**
     * Default constructor: creates an empty cube with no faces assigned.
     */
    public Cube() {
        bottomface = null;
        leftface = null;
        rightface = null;
        frontface = null;
        topface = null;
        backface = null;
        puzzlePieceMap = new HashMap<String, PuzzlePiece>();
    }

    /** @return the bottom face, or null if not set */
    public PuzzlePiece getBottomface() {
        return bottomface;
    }

    /** Sets the bottom face and indexes it by its unique id. */
    public void setBottomface(PuzzlePiece bottomface) {
        this.bottomface = bottomface;
        puzzlePieceMap.put(bottomface.getUniqueID(), bottomface);
    }

    /** @return the left face, or null if not set */
    public PuzzlePiece getLeftface() {
        return leftface;
    }

    /** Sets the left face and indexes it by its unique id. */
    public void setLeftface(PuzzlePiece leftface) {
        this.leftface = leftface;
        puzzlePieceMap.put(leftface.getUniqueID(), leftface);
    }

    /** @return the right face, or null if not set */
    public PuzzlePiece getRightface() {
        return rightface;
    }

    /** Sets the right face and indexes it by its unique id. */
    public void setRightface(PuzzlePiece rightface) {
        this.rightface = rightface;
        puzzlePieceMap.put(rightface.getUniqueID(), rightface);
    }

    /** @return the front face, or null if not set */
    public PuzzlePiece getFrontface() {
        return frontface;
    }

    /** Sets the front face and indexes it by its unique id. */
    public void setFrontface(PuzzlePiece frontface) {
        this.frontface = frontface;
        puzzlePieceMap.put(frontface.getUniqueID(), frontface);
    }

    /** @return the top face, or null if not set */
    public PuzzlePiece getTopface() {
        return topface;
    }

    /** Sets the top face and indexes it by its unique id. */
    public void setTopface(PuzzlePiece topface) {
        this.topface = topface;
        puzzlePieceMap.put(topface.getUniqueID(), topface);
    }

    /** @return the back face, or null if not set */
    public PuzzlePiece getBackface() {
        return backface;
    }

    /** Sets the back face and indexes it by its unique id. */
    public void setBackface(PuzzlePiece backface) {
        this.backface = backface;
        puzzlePieceMap.put(backface.getUniqueID(), backface);
    }

    /**
     * Looks up a face by its piece id.
     *
     * @param id unique piece id
     * @return the matching piece, or null if no face with that id is assigned
     */
    public PuzzlePiece getPieceByID(String id) {
        return puzzlePieceMap.get(id);
    }

    /**
     * This function will check which side is recently connected to the under observation piece and then will return
     * a map providing instructions that which other sides should also conform to the side of
     * connected piece to have smooth joining.
     *
     * Example: if Piece1 is connected to the Top side then it will check the left and right side of
     * the cube; if there are pieces on the left and right side then it will provide the respective
     * side of those pieces that must conform to the respective side of the recently connected
     * piece.
     *
     * Note: this function works assuming the cube has a valid bottom face.
     *
     * @param pieceToObserve puzzle piece around which sides will be observed
     * @param sideOfObservingPiece connected side of piece under observation
     * @param connectedSide the side of other piece which is connected to side of piece under observation
     * @param top flag to indicate if it's a top side; then we have to check all four sides of already connected pieces
     * @return map from piece sides to the side name each must conform to
     */
    public Map<PieceSide, String> getConnectedSides(PuzzlePiece pieceToObserve, SideNames sideOfObservingPiece, SideNames connectedSide, boolean top) {
        Map<PieceSide, String> requiredConnectedSidesMap = new HashMap<PieceSide, String>();
        switch (sideOfObservingPiece) {
        case TOP:
            // check left side but in anti-clock wise manner
            checkLeftSidePiece(requiredConnectedSidesMap, pieceToObserve, connectedSide, Keywords.ANTI_CLOCK_WISE);
            // check right side but in clock wise manner
            checkRightSidePiece(requiredConnectedSidesMap, pieceToObserve, connectedSide, Keywords.CLOCK_WISE);
            // if it's a top piece then check the last remaining side, where it needs to be joined
            if (top) {
                String sideToJoin = getMissingSide(requiredConnectedSidesMap, connectedSide);
                checkTopPieceSide(requiredConnectedSidesMap, getFrontface().getUniqueID(), SideNames.BOTTOM.name(), sideToJoin);
            }
            break;
        case BOTTOM:
            // check left side but in clock wise manner
            checkLeftSidePiece(requiredConnectedSidesMap, pieceToObserve, connectedSide, Keywords.CLOCK_WISE);
            // check right side but in anti-clock wise manner
            checkRightSidePiece(requiredConnectedSidesMap, pieceToObserve, connectedSide, Keywords.ANTI_CLOCK_WISE);
            if (top) {
                String sideToJoin = getMissingSide(requiredConnectedSidesMap, connectedSide);
                // (leftover debug statement that printed an empty line when the
                // back face was null has been removed)
                checkTopPieceSide(requiredConnectedSidesMap, getBackface().getUniqueID(), SideNames.TOP.name(), sideToJoin);
            }
            break;
        case LEFT:
            // check top side but in clock wise manner
            checkTopSidePiece(requiredConnectedSidesMap, pieceToObserve, connectedSide, Keywords.CLOCK_WISE);
            // check bottom side but in anti-clock wise manner
            checkBottomSidePiece(requiredConnectedSidesMap, pieceToObserve, connectedSide, Keywords.ANTI_CLOCK_WISE);
            if (top) {
                String sideToJoin = getMissingSide(requiredConnectedSidesMap, connectedSide);
                checkTopPieceSide(requiredConnectedSidesMap, getRightface().getUniqueID(), SideNames.RIGHT.name(), sideToJoin);
            }
            break;
        case RIGHT:
            // check top side but in anti-clock wise manner
            checkTopSidePiece(requiredConnectedSidesMap, pieceToObserve, connectedSide, Keywords.ANTI_CLOCK_WISE);
            // check bottom side but in clock wise manner
            checkBottomSidePiece(requiredConnectedSidesMap, pieceToObserve, connectedSide, Keywords.CLOCK_WISE);
            if (top) {
                String sideToJoin = getMissingSide(requiredConnectedSidesMap, connectedSide);
                checkTopPieceSide(requiredConnectedSidesMap, getLeftface().getUniqueID(), SideNames.LEFT.name(), sideToJoin);
            }
            break;
        default:
            break;
        }
        return requiredConnectedSidesMap;
    }

    /**
     * For a join to be valid, a piece must have conformity with the sides of
     * already joined pieces. This function checks that conformity and updates the map.
     *
     * @param requiredConnectedSidesMap map being built up with required joins
     * @param sideToCheck already-connected side whose neighbour must be checked
     * @param connectedSide side of the new piece that was just connected
     * @param direction Keywords.CLOCK_WISE or Keywords.ANTI_CLOCK_WISE
     */
    private void updateRequiredSidesMapForValidJoin(Map<PieceSide, String> requiredConnectedSidesMap, PieceSide sideToCheck, SideNames connectedSide, String direction) {
        // get the respective puzzle piece from side
        PieceSideKey sideKey = sideToCheck.getConnectedSideKey();
        PuzzlePiece puzzlePiece = puzzlePieceMap.get(sideKey.getPieceID());
        if (puzzlePiece == null)
            return;
        // get the direction to check the side of already connected piece.
        // BUGFIX: compare the direction string with equals() instead of == —
        // reference identity only worked by accident for interned constants.
        String nextPieceDirection;
        if (Keywords.CLOCK_WISE.equals(direction))
            nextPieceDirection = SideNames.getNextAdjacentSideClockWise(SideNames.findEnum(sideKey.getSideID()));
        else
            nextPieceDirection = SideNames.getNextAdjacentSideAntiClockWise(SideNames.findEnum(sideKey.getSideID()));
        // finally get the side of already connected piece to be matched
        PieceSide sideToMatch = puzzlePiece.getSideByID(nextPieceDirection);
        if (!sideToMatch.isConnected()) {
            if (Keywords.CLOCK_WISE.equals(direction)) {
                requiredConnectedSidesMap.put(sideToMatch, SideNames.getNextAdjacentSideAntiClockWise(connectedSide));
            } else {
                requiredConnectedSidesMap.put(sideToMatch, SideNames.getNextAdjacentSideClockWise(connectedSide));
            }
        }
    }

    /**
     * Check the left side of the piece under observation and record the sides that must also connect.
     *
     * @param requiredConnectedSidesMap map being built up with required joins
     * @param pieceToObserve piece under observation
     * @param connectedSide side of the new piece that was just connected
     * @param direction Keywords.CLOCK_WISE or Keywords.ANTI_CLOCK_WISE
     */
    public void checkLeftSidePiece(Map<PieceSide, String> requiredConnectedSidesMap, PuzzlePiece pieceToObserve, SideNames connectedSide, String direction) {
        if (pieceToObserve.getLeftSide().isConnected()) {
            updateRequiredSidesMapForValidJoin(requiredConnectedSidesMap, pieceToObserve.getLeftSide(), connectedSide, direction);
        }
    }

    /**
     * Check the right side of the piece under observation and record the sides that must also connect.
     *
     * @param requiredConnectedSidesMap map being built up with required joins
     * @param pieceToObserve piece under observation
     * @param connectedSide side of the new piece that was just connected
     * @param direction Keywords.CLOCK_WISE or Keywords.ANTI_CLOCK_WISE
     */
    public void checkRightSidePiece(Map<PieceSide, String> requiredConnectedSidesMap, PuzzlePiece pieceToObserve, SideNames connectedSide, String direction) {
        if (pieceToObserve.getRightSide().isConnected()) {
            updateRequiredSidesMapForValidJoin(requiredConnectedSidesMap, pieceToObserve.getRightSide(), connectedSide, direction);
        }
    }

    /**
     * Check the top side of the piece under observation and record the sides that must also connect.
     *
     * @param requiredConnectedSidesMap map being built up with required joins
     * @param pieceToObserve piece under observation
     * @param connectedSide side of the new piece that was just connected
     * @param direction Keywords.CLOCK_WISE or Keywords.ANTI_CLOCK_WISE
     */
    public void checkTopSidePiece(Map<PieceSide, String> requiredConnectedSidesMap, PuzzlePiece pieceToObserve, SideNames connectedSide, String direction) {
        if (pieceToObserve.getTopSide().isConnected()) {
            updateRequiredSidesMapForValidJoin(requiredConnectedSidesMap, pieceToObserve.getTopSide(), connectedSide, direction);
        }
    }

    /**
     * Check the bottom side of the piece under observation and record the sides that must also connect.
     *
     * @param requiredConnectedSidesMap map being built up with required joins
     * @param pieceToObserve piece under observation
     * @param connectedSide side of the new piece that was just connected
     * @param direction Keywords.CLOCK_WISE or Keywords.ANTI_CLOCK_WISE
     */
    public void checkBottomSidePiece(Map<PieceSide, String> requiredConnectedSidesMap, PuzzlePiece pieceToObserve, SideNames connectedSide, String direction) {
        if (pieceToObserve.getBottomSide().isConnected()) {
            updateRequiredSidesMapForValidJoin(requiredConnectedSidesMap, pieceToObserve.getBottomSide(), connectedSide, direction);
        }
    }

    /**
     * Add a piece to the cube based on the side of the bottom piece to which the new piece is joined.
     * If the piece is not joined to any side of the bottom piece, then it is the top piece.
     *
     * @param connectedSideName side of bottom piece to which it is connected
     * @param piece newly joined puzzle piece
     */
    public void addPiece(SideNames connectedSideName, PuzzlePiece piece) {
        switch (connectedSideName) {
        case TOP:
            setBackface(piece);
            break;
        case BOTTOM:
            setFrontface(piece);
            break;
        case LEFT:
            setLeftface(piece);
            break;
        case RIGHT:
            setRightface(piece);
            break;
        default:
            setTopface(piece);
            break;
        }
    }

    /**
     * Find the possible missing side for a puzzle piece: the one side name
     * that appears neither in the required-joins map nor as the connected side.
     *
     * @param requiredConnectedSidesMap map containing piece sides that must be joined
     * @param connectedSide side of piece which is already connected
     * @return name of the missing side, or null if all four sides are accounted for
     */
    private String getMissingSide(Map<PieceSide, String> requiredConnectedSidesMap, SideNames connectedSide) {
        Collection<String> coll = requiredConnectedSidesMap.values();
        List<String> list = new ArrayList<String>();
        list.addAll(coll);
        list.add(connectedSide.name());
        if (!list.contains(SideNames.TOP.name()))
            return SideNames.TOP.name();
        else if (!list.contains(SideNames.BOTTOM.name()))
            return SideNames.BOTTOM.name();
        else if (!list.contains(SideNames.LEFT.name()))
            return SideNames.LEFT.name();
        else if (!list.contains(SideNames.RIGHT.name()))
            return SideNames.RIGHT.name();
        else
            return null;
    }

    /**
     * Record that the given side of a cube piece must conform to a side of the top piece.
     *
     * @param requiredConnectedSidesMap map being built up with required joins
     * @param cubePieceID id of the already-placed cube piece
     * @param cubePieceSide side name of that piece to constrain
     * @param topPieceSide side name of the top piece it must conform to
     */
    private void checkTopPieceSide(Map<PieceSide, String> requiredConnectedSidesMap, String cubePieceID, String cubePieceSide, String topPieceSide) {
        PuzzlePiece piece = getPieceByID(cubePieceID);
        requiredConnectedSidesMap.put(piece.getSideByID(cubePieceSide), topPieceSide);
    }

    /**
     * Collect the free (unconnected) sides of every piece in the cube except the given one.
     *
     * @param pieceID id of the piece to exclude
     * @return all free sides of the remaining pieces
     */
    public List<PieceSide> getFreeSidesInCompleteCubeExceptThisPiece(String pieceID) {
        List<PieceSide> freeSides = new ArrayList<PieceSide>();
        for (String key : puzzlePieceMap.keySet()) {
            if (!key.equals(pieceID))
                freeSides.addAll(puzzlePieceMap.get(key).getFreeSides());
        }
        return freeSides;
    }
}
| |
package sjm.examples.query;
//import com.sun.java.swing.*;
//import com.sun.java.swing.border.*;
//import com.sun.java.swing.text.*;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.JSplitPane;
import javax.swing.JTextArea;
import javax.swing.KeyStroke;
import javax.swing.border.EmptyBorder;
import sjm.utensil.SwingUtensil;
/*
* Copyright (c) 1999 Steven J. Metsker. All Rights Reserved.
*
* Steve Metsker makes no representations or warranties about
* the fitness of this software for any particular purpose,
* including the implied warranty of merchantability.
*/
/**
* This is a simple user environment (UE) for queries
* written in the Jaql language. This UE applies Jaql
* queries to the ChipSource axiom source.
* <p>
* This class contains just the methods that create Swing
* components, and uses a "mediator" to control how the
* components interact.
*
* @author Steven J. Metsker
*
* @version 1.0
*
* @see JaqlMediator
*/
public class JaqlUe {
    protected JaqlMediator mediator;
    protected JButton goButton;
    protected JButton clearButton;
    protected JTextArea metadataArea;
    protected JTextArea queryArea;
    protected JTextArea resultArea;
    protected static int PREFERREDWIDTH = 600;

    /*
     * The panel for the "Go!" and "Clear" buttons.
     */
    protected JPanel buttonPanel() {
        JPanel p = new JPanel();
        p.setLayout(new BorderLayout());
        // BorderLayout constants are the same strings as the old literals.
        p.add(goButton(), BorderLayout.NORTH);
        p.add(clearButton(), BorderLayout.SOUTH);
        p.setBorder(new EmptyBorder(10, 6, 5, 6));
        return p;
    }

    /*
     * The "Clear" button. Lazily created; the mediator handles its clicks.
     */
    protected JButton clearButton() {
        if (clearButton == null) {
            clearButton = new JButton("Clear");
            clearButton.addActionListener(mediator());
            clearButton.setFont(SwingUtensil.ideFont());
        }
        return clearButton;
    }

    /*
     * The "Go!" button. This method also establishes "Ctrl-G"
     * as a shortcut for pressing the button.
     */
    protected JButton goButton() {
        if (goButton == null) {
            goButton = new JButton("Go!");
            goButton.addActionListener(mediator());
            goButton.setFont(SwingUtensil.ideFont());
            // ctrl-g keystroke:
            KeyStroke ctrlg = KeyStroke.getKeyStroke(
                KeyEvent.VK_G, InputEvent.CTRL_MASK);
            goButton.registerKeyboardAction(
                mediator(),
                ctrlg,
                JComponent.WHEN_IN_FOCUSED_WINDOW);
        }
        return goButton;
    }

    /*
     * The split pane that contains the query area and the
     * result area.
     */
    protected JSplitPane ioPane() {
        Dimension min = new Dimension(PREFERREDWIDTH, 80);
        Dimension pref = new Dimension(PREFERREDWIDTH, 180);
        JPanel q = SwingUtensil.textPanel(
            "Query", queryArea(), pref, min);
        JPanel r = SwingUtensil.textPanel(
            "Results", resultArea(), pref, min);
        JSplitPane jsp = new JSplitPane(
            JSplitPane.VERTICAL_SPLIT, false, q, r);
        jsp.setDividerSize(3);
        return jsp;
    }

    /**
     * Launch the interactive development environment.
     */
    public static void main(String[] args) {
        sjm.utensil.SwingUtensil.launch(
            new JaqlUe().mainPanel(), " Jaql and Chips");
    }

    /*
     * The main panel, which contains all components.
     */
    protected JPanel mainPanel() {
        JPanel p = new JPanel();
        p.setLayout(new BorderLayout());
        p.add(upperPanel(), BorderLayout.CENTER);
        p.add(metadataPanel(), BorderLayout.SOUTH);
        return p;
    }

    /*
     * The object that controls the component interactions.
     */
    protected JaqlMediator mediator() {
        if (mediator == null) {
            mediator = new JaqlMediator();
            mediator.initialize(
                goButton(),
                clearButton(),
                queryArea(),
                resultArea(),
                metadataArea());
        }
        return mediator;
    }

    /*
     * The metadata text area. All the query* methods are static,
     * so no ChipSource instance is needed (the old unused local
     * has been removed).
     */
    protected JTextArea metadataArea() {
        if (metadataArea == null) {
            metadataArea = SwingUtensil.ideTextArea();
            metadataArea.append(ChipSource.queryChip() + "\n");
            metadataArea.append(ChipSource.queryCustomer() + "\n");
            metadataArea.append(ChipSource.queryOrder() + "\n");
        }
        return metadataArea;
    }

    /*
     * The panel that contains the metadata text area.
     */
    protected JPanel metadataPanel() {
        return SwingUtensil.textPanel(
            "Metadata",
            metadataArea(),
            new Dimension(PREFERREDWIDTH, 120),
            new Dimension(PREFERREDWIDTH, 80));
    }

    /*
     * The input text area, pre-loaded with a sample query.
     */
    protected JTextArea queryArea() {
        if (queryArea == null) {
            queryArea = SwingUtensil.ideTextArea();
            queryArea.setText(
                "select ChipName, PricePerBag from Chip \n" +
                "where Oil != \"Sunflower\"");
        }
        return queryArea;
    }

    /*
     * The output text area.
     */
    protected JTextArea resultArea() {
        if (resultArea == null) {
            resultArea = SwingUtensil.ideTextArea();
        }
        return resultArea;
    }

    /*
     * The panel that contains the query area, the result area
     * and the buttons.
     */
    protected JPanel upperPanel() {
        JPanel p = new JPanel();
        p.setLayout(new BorderLayout());
        p.add(ioPane(), BorderLayout.CENTER);
        p.add(buttonPanel(), BorderLayout.EAST);
        return p;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.security.cert;
import java.security.AccessController;
import java.security.InvalidAlgorithmParameterException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.Provider;
import java.security.Security;
import java.util.Collection;
import org.apache.harmony.security.fortress.Engine;
/**
* This class provides the functionality to retrieve {@code Certificate}s and
* {@code CRL}s from a read-only repository. This repository may be very large
* and may store trusted as well as untrusted certificates.
*/
public class CertStore {
    // Store spi implementation service name
    private static final String SERVICE = "CertStore";

    // Used to access common engine functionality. Declared final because it is
    // also used as the synchronization lock in getInstance(); a non-final lock
    // object could be silently replaced.
    private static final Engine engine = new Engine(SERVICE);

    // Store default property name
    private static final String PROPERTYNAME = "certstore.type";

    // Default value of CertStore type. It returns if certpathbuild.type
    // property is not defined in java.security file
    private static final String DEFAULTPROPERTY = "LDAP";

    // Store used provider
    private final Provider provider;

    // Store CertStoreSpi implementation
    private final CertStoreSpi spiImpl;

    // Store used type
    private final String type;

    // Store used parameters
    private final CertStoreParameters certStoreParams;

    /**
     * Creates a new {@code CertStore} instance.
     *
     * @param storeSpi
     *            the implementation delegate.
     * @param provider
     *            the security provider.
     * @param type
     *            the certificate store type.
     * @param params
     *            the certificate store parameters (may be {@code null}).
     */
    protected CertStore(CertStoreSpi storeSpi, Provider provider, String type,
            CertStoreParameters params) {
        this.provider = provider;
        this.type = type;
        this.spiImpl = storeSpi;
        this.certStoreParams = params;
    }

    /**
     * Creates a new {@code CertStore} instance with the specified type and
     * initialized with the specified parameters.
     *
     * @param type
     *            the certificate store type.
     * @param params
     *            the certificate store parameters (may be {@code null}).
     * @return the new certificate store instance.
     * @throws NoSuchAlgorithmException
     *             if no provider can provide the specified certificate store
     *             type.
     * @throws InvalidAlgorithmParameterException
     *             if the specified parameters cannot be used to initialize this
     *             certificate store instance.
     * @throws NullPointerException if {@code type == null}
     */
    public static CertStore getInstance(String type, CertStoreParameters params)
            throws InvalidAlgorithmParameterException, NoSuchAlgorithmException {
        if (type == null) {
            throw new NullPointerException("type == null");
        }
        try {
            // The shared engine holds state across getInstance/spi accesses,
            // so the lookup and the constructor call must be atomic.
            synchronized (engine) {
                engine.getInstance(type, params);
                return new CertStore((CertStoreSpi) engine.spi, engine.provider,
                        type, params);
            }
        } catch (NoSuchAlgorithmException e) {
            // Engine reports bad parameters as the cause of a
            // NoSuchAlgorithmException; translate per the spec.
            Throwable th = e.getCause();
            if (th == null) {
                throw e;
            } else {
                throw new InvalidAlgorithmParameterException(e.getMessage(), th);
            }
        }
    }

    /**
     * Creates a new {@code CertStore} instance from the specified provider with
     * the specified type and initialized with the specified parameters.
     *
     * @param type
     *            the certificate store type.
     * @param params
     *            the certificate store parameters (may be {@code null}).
     * @param provider
     *            the name of the provider.
     * @return the new certificate store instance.
     * @throws NoSuchAlgorithmException
     *             if the specified provider cannot provide the requested
     *             certificate store type.
     * @throws NoSuchProviderException
     *             if no provider with the specified name can be found.
     * @throws InvalidAlgorithmParameterException
     *             if the specified parameters cannot be used to initialize this
     *             certificate store instance.
     * @throws IllegalArgumentException if {@code provider == null || provider.isEmpty()}
     * @throws NullPointerException
     *             if {@code type} is {@code null}.
     */
    public static CertStore getInstance(String type,
            CertStoreParameters params, String provider)
            throws InvalidAlgorithmParameterException,
            NoSuchAlgorithmException, NoSuchProviderException {
        if (provider == null || provider.isEmpty()) {
            throw new IllegalArgumentException("provider == null || provider.isEmpty()");
        }
        Provider impProvider = Security.getProvider(provider);
        if (impProvider == null) {
            throw new NoSuchProviderException(provider);
        }
        return getInstance(type, params, impProvider);
    }

    /**
     * Creates a new {@code CertStore} instance from the specified provider with
     * the specified type and initialized with the specified parameters.
     * @param type
     *            the certificate store type.
     * @param params
     *            the certificate store parameters (may be {@code null}).
     * @param provider
     *            the name of the provider.
     * @return the new certificate store instance.
     * @throws NoSuchAlgorithmException
     *             if the specified provider cannot provide the requested
     *             certificate store type.
     * @throws InvalidAlgorithmParameterException
     *             if the specified parameters cannot be used to initialize this
     *             certificate store instance.
     * @throws IllegalArgumentException if {@code provider == null}
     * @throws NullPointerException if {@code type == null}
     */
    public static CertStore getInstance(String type,
            CertStoreParameters params, Provider provider)
            throws NoSuchAlgorithmException, InvalidAlgorithmParameterException {
        if (provider == null) {
            throw new IllegalArgumentException("provider == null");
        }
        if (type == null) {
            throw new NullPointerException("type == null");
        }
        try {
            synchronized (engine) {
                engine.getInstance(type, provider, params);
                return new CertStore((CertStoreSpi) engine.spi, provider, type,
                        params);
            }
        } catch (NoSuchAlgorithmException e) {
            Throwable th = e.getCause();
            if (th == null) {
                throw e;
            } else {
                throw new InvalidAlgorithmParameterException(e.getMessage(), th);
            }
        }
    }

    /**
     * Returns the certificate store type.
     *
     * @return the certificate store type.
     */
    public final String getType() {
        return type;
    }

    /**
     * Returns the security provider.
     *
     * @return the security provider.
     */
    public final Provider getProvider() {
        return provider;
    }

    /**
     * Returns a copy of the certificate store parameters that were used to
     * initialize this instance.
     *
     * @return a copy of the certificate store parameters or {@code null} if
     *         none were specified.
     */
    public final CertStoreParameters getCertStoreParameters() {
        if (certStoreParams == null) {
            return null;
        } else {
            // Defensive copy: callers must not be able to mutate our params.
            return (CertStoreParameters) certStoreParams.clone();
        }
    }

    /**
     * Returns the list of {@code Certificate}s for the specified {@code
     * CertSelector} from this certificate store.
     *
     * @param selector
     *            the selector containing the criteria to search for
     *            certificates in this certificate store.
     * @return the list of {@code Certificate}s that match the criteria of the
     *         specified selector.
     * @throws CertStoreException
     *             if error(s) occur.
     */
    public final Collection<? extends Certificate> getCertificates(CertSelector selector)
            throws CertStoreException {
        return spiImpl.engineGetCertificates(selector);
    }

    /**
     * Returns the list of {@code CRL}s for the specified {@code CRLSelector}
     * from this certificate store.
     *
     * @param selector
     *            the selector containing the criteria to search for certificate
     *            revocation lists in this store.
     * @return the list of {@code CRL}s that match the criteria of the specified
     *         selector
     * @throws CertStoreException
     *             if error(s) occur.
     */
    public final Collection<? extends CRL> getCRLs(CRLSelector selector)
            throws CertStoreException {
        return spiImpl.engineGetCRLs(selector);
    }

    /**
     * Returns the default {@code CertStore} type from the <i>Security
     * Properties</i>.
     *
     * @return the default {@code CertStore} type from the <i>Security
     *         Properties</i>, or the string {@code "LDAP"} if it cannot be
     *         determined.
     */
    public static final String getDefaultType() {
        // Read the security property in a privileged block so callers without
        // the permission can still obtain the default type.
        String defaultType = AccessController
                .doPrivileged(new java.security.PrivilegedAction<String>() {
                    public String run() {
                        return Security.getProperty(PROPERTYNAME);
                    }
                });
        return (defaultType == null ? DEFAULTPROPERTY : defaultType);
    }
}
| |
/*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.guvnor.client.modeldriven.ui;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.drools.guvnor.client.common.DirtyableComposite;
import org.drools.guvnor.client.common.SmallLabel;
import org.drools.guvnor.client.common.ValueChanged;
import org.drools.guvnor.client.messages.Constants;
import org.drools.ide.common.client.modeldriven.SuggestionCompletionEngine;
import org.drools.ide.common.client.modeldriven.brl.DSLSentence;
import org.drools.ide.common.client.modeldriven.ui.ConstraintValueEditorHelper;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.ListBox;
import com.google.gwt.user.client.ui.TextBox;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwt.user.client.ui.Widget;
/**
* This displays a widget to edit a DSL sentence.
*/
public class DSLSentenceWidget extends RuleModellerWidget {
private static final String ENUM_TAG = "ENUM";
private static final String DATE_TAG = "DATE";
private static final String BOOLEAN_TAG = "BOOLEAN";
private final List<Widget> widgets;
private final DSLSentence sentence;
private final VerticalPanel layout;
private HorizontalPanel currentRow;
private boolean readOnly;
public DSLSentenceWidget(RuleModeller modeller, DSLSentence sentence) {
this(modeller, sentence, null);
}
public DSLSentenceWidget(RuleModeller modeller, DSLSentence sentence, Boolean readOnly) {
super (modeller);
widgets = new ArrayList<Widget>();
this.sentence = sentence;
if (readOnly == null){
this.readOnly = false;
}else{
this.readOnly = readOnly;
}
this.layout = new VerticalPanel();
this.currentRow = new HorizontalPanel();
this.layout.add( currentRow );
this.layout.setCellWidth( currentRow,
"100%" );
this.layout.setWidth( "100%" );
if (this.readOnly) {
this.layout.addStyleName("editor-disabled-widget");
}
init();
}
private void init() {
makeWidgets( this.sentence.sentence );
initWidget( this.layout );
}
/**
* This will take a DSL line item, and split it into widget thingamies for
* displaying. One day, if this is too complex, this will have to be done on
* the server side.
*/
public void makeWidgets(String dslLine) {
int startVariable = dslLine.indexOf( "{" );
List<Widget> lineWidgets = new ArrayList<Widget>();
boolean firstOneIsBracket = (dslLine.indexOf( "{" ) == 0);
String startLabel = "";
if ( startVariable > 0 ) {
startLabel = dslLine.substring( 0,
startVariable );
} else if ( !firstOneIsBracket ) {
// There are no curly brackets in the text.
// Just print it
startLabel = dslLine;
}
Widget label = getLabel( startLabel );
lineWidgets.add( label );
while ( startVariable > 0 || firstOneIsBracket ) {
firstOneIsBracket = false;
int endVariable = dslLine.indexOf( "}",
startVariable );
String currVariable = dslLine.substring( startVariable + 1,
endVariable );
Widget varWidget = processVariable( currVariable );
lineWidgets.add( varWidget );
// Parse out the next label between variables
startVariable = dslLine.indexOf( "{",
endVariable );
String lbl;
if ( startVariable > 0 ) {
lbl = dslLine.substring( endVariable + 1,
startVariable );
} else {
lbl = dslLine.substring( endVariable + 1,
dslLine.length() );
}
if ( lbl.indexOf( "\\n" ) > -1 ) {
String[] lines = lbl.split( "\\\\n" );
for ( int i = 0; i < lines.length; i++ ) {
lineWidgets.add( new NewLine() );
lineWidgets.add( getLabel( lines[i] ) );
}
} else {
Widget currLabel = getLabel( lbl );
lineWidgets.add( currLabel );
}
}
for ( Widget widg : lineWidgets ) {
addWidget( widg );
}
updateSentence();
}
class NewLine extends Widget {
}
public Widget processVariable(String currVariable) {
Widget result = null;
// Formats are: <varName>:ENUM:<Field.type>
// <varName>:DATE:<dateFormat>
// <varName>:BOOLEAN:[checked | unchecked] <-initial value
if ( currVariable.contains( ":" ) ) {
if ( currVariable.contains( ":" + ENUM_TAG + ":" ) ) {
result = getEnumDropdown( currVariable );
} else if ( currVariable.contains( ":" + DATE_TAG + ":" ) ) {
result = getDateSelector( currVariable );
} else if ( currVariable.contains( ":" + BOOLEAN_TAG + ":" ) ) {
result = getCheckbox( currVariable );
} else {
String regex = currVariable.substring( currVariable.indexOf( ":" ) + 1,
currVariable.length() );
result = getBox( currVariable,
regex );
}
} else {
result = getBox( currVariable,
"" );
}
return result;
}
public Widget getEnumDropdown(String variableDef) {
Widget resultWidget = new DSLDropDown( variableDef );
return resultWidget;
}
public Widget getBox(String variableDef,
String regex) {
int colonIndex = variableDef.indexOf( ":" );
if ( colonIndex > 0 ) {
variableDef = variableDef.substring( 0,
colonIndex );
}
FieldEditor currentBox = new FieldEditor();
currentBox.setVisibleLength( variableDef.length() + 1 );
currentBox.setText( variableDef );
currentBox.setRestriction( regex );
return currentBox;
}
public Widget getCheckbox(String variableDef) {
return new DSLCheckBox( variableDef );
}
public Widget getDateSelector(String variableDef) {
String[] parts = variableDef.split( ":" + DATE_TAG + ":" );
return new DSLDateSelector( parts[0],
parts[1] );
}
public Widget getLabel(String labelDef) {
Label label = new SmallLabel();
label.setText( labelDef );
return label;
}
private void addWidget(Widget currentBox) {
if ( currentBox instanceof NewLine ) {
currentRow = new HorizontalPanel();
layout.add( currentRow );
layout.setCellWidth( currentRow,
"100%" );
} else {
currentRow.add( currentBox );
}
widgets.add( currentBox );
}
/**
* This will go through the widgets and build up a sentence.
*/
protected void updateSentence() {
String newSentence = "";
for ( Iterator<Widget> iter = widgets.iterator(); iter.hasNext(); ) {
Widget wid = iter.next();
if ( wid instanceof Label ) {
newSentence = newSentence + ((Label) wid).getText();
} else if ( wid instanceof FieldEditor ) {
FieldEditor editor = (FieldEditor) wid;
String varString = editor.getText();
String restriction = editor.getRestriction();
if ( !restriction.equals( "" ) ) {
varString = varString + ":" + restriction;
}
newSentence = newSentence + "{" + varString + "}";
} else if ( wid instanceof DSLDropDown ) {
// Add the meta-data back to the field so that is shows up as a
// dropdown when refreshed from repo
DSLDropDown drop = (DSLDropDown) wid;
ListBox box = drop.getListBox();
String type = drop.getType();
String factAndField = drop.getFactAndField();
newSentence = newSentence + "{" + box.getValue( box.getSelectedIndex() ) + ":" + type + ":" + factAndField + "} ";
} else if ( wid instanceof DSLCheckBox ) {
DSLCheckBox check = (DSLCheckBox) wid;
String checkValue = check.getCheckedValue();
newSentence = newSentence + "{" + checkValue + ":" + check.getType() + ":" + checkValue + "} ";
} else if ( wid instanceof DSLDateSelector ) {
DSLDateSelector dateSel = (DSLDateSelector) wid;
String dateString = dateSel.getDateString();
String format = dateSel.getVisualFormat();
newSentence = newSentence + "{" + dateString + ":" + dateSel.getType() + ":" + format + "} ";
} else if ( wid instanceof NewLine ) {
newSentence = newSentence + "\\n";
}
}
this.setModified(true);
this.sentence.sentence = newSentence.trim();
}
class FieldEditor extends DirtyableComposite {
private TextBox box;
private HorizontalPanel panel = new HorizontalPanel();
private String oldValue = "";
private String regex = "";
private Constants constants = ((Constants) GWT.create( Constants.class ));
public FieldEditor() {
box = new TextBox();
// box.setStyleName( "dsl-field-TextBox" );
panel.add( new HTML( " " ) );
panel.add( box );
panel.add( new HTML( " " ) );
box.addChangeHandler(new ChangeHandler() {
public void onChange(ChangeEvent event) {
TextBox otherBox = (TextBox) event.getSource();
if (!regex.equals("") && !otherBox.getText().matches(regex)) {
Window.alert(constants.TheValue0IsNotValidForThisField(otherBox.getText()));
box.setText(oldValue);
} else {
oldValue = otherBox.getText();
updateSentence();
makeDirty();
}
}
});
initWidget( panel );
}
public void setText(String t) {
box.setText( t );
}
public void setVisibleLength(int l) {
box.setVisibleLength( l );
}
public String getText() {
return box.getText();
}
public void setRestriction(String regex) {
this.regex = regex;
}
public String getRestriction() {
return this.regex;
}
public boolean isValid() {
boolean result = true;
if ( !regex.equals( "" ) ) result = this.box.getText().matches( this.regex );
return result;
}
}
class DSLDropDown extends DirtyableComposite {
final SuggestionCompletionEngine completions = getModeller().getSuggestionCompletions();
ListBox resultWidget = null;
// Format for the dropdown def is <varName>:<type>:<Fact.field>
private String varName = "";
private String type = "";
private String factAndField = "";
public DSLDropDown(String variableDef) {
int firstIndex = variableDef.indexOf( ":" );
int lastIndex = variableDef.lastIndexOf( ":" );
varName = variableDef.substring( 0,
firstIndex );
type = variableDef.substring( firstIndex + 1,
lastIndex );
factAndField = variableDef.substring( lastIndex + 1,
variableDef.length() );
int dotIndex = factAndField.indexOf( "." );
String type = factAndField.substring( 0,
dotIndex );
String field = factAndField.substring( dotIndex + 1,
factAndField.length() );
String[] data = completions.getEnumValues( type,
field );
ListBox list = new ListBox();
if ( data != null ) {
int selected = -1;
for ( int i = 0; i < data.length; i++ ) {
String realValue = data[i];
String display = data[i];
if ( data[i].indexOf( '=' ) > -1 ) {
String[] vs = ConstraintValueEditorHelper.splitValue( data[i] );
realValue = vs[0];
display = vs[1];
}
if ( varName.equals( realValue ) ) {
selected = i;
}
list.addItem( display,
realValue );
}
if ( selected >= 0 ) list.setSelectedIndex( selected );
}
list.addChangeHandler(new ChangeHandler() {
public void onChange(ChangeEvent event) {
updateSentence();
makeDirty();
}
});
initWidget( list );
resultWidget = list;
}
public ListBox getListBox() {
return resultWidget;
}
public void setListBox(ListBox resultWidget) {
this.resultWidget = resultWidget;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getFactAndField() {
return factAndField;
}
public void setFactAndField(String factAndField) {
this.factAndField = factAndField;
}
}
class DSLCheckBox extends Composite {
ListBox resultWidget = null;
// Format for the dropdown def is <varName>:<type>:<Fact.field>
private String varName = "";
public DSLCheckBox(String variableDef) {
int firstIndex = variableDef.indexOf( ":" );
int lastIndex = variableDef.lastIndexOf( ":" );
varName = variableDef.substring( 0,
firstIndex );
String checkedUnchecked = variableDef.substring( lastIndex + 1,
variableDef.length() );
resultWidget = new ListBox();
resultWidget.addItem( "true" );
resultWidget.addItem( "false" );
if ( checkedUnchecked.equalsIgnoreCase( "true" ) ) {
resultWidget.setSelectedIndex( 0 );
} else {
resultWidget.setSelectedIndex( 1 );
}
resultWidget.addClickHandler(new ClickHandler() {
public void onClick(ClickEvent event) {
updateSentence();
}
});
resultWidget.setVisible( true );
initWidget( resultWidget );
}
public ListBox getListBox() {
return resultWidget;
}
public void setListBox(ListBox resultWidget) {
this.resultWidget = resultWidget;
}
public String getType() {
return BOOLEAN_TAG;
}
public String getVarName() {
return varName;
}
public void setVarName(String varName) {
this.varName = varName;
}
public String getCheckedValue() {
return this.resultWidget.getSelectedIndex() == 0 ? "true" : "false";
}
}
class DSLDateSelector extends DatePickerLabel {
public DSLDateSelector(String selectedDate,
String dateFormat) {
super( selectedDate,
dateFormat );
addValueChanged( new ValueChanged() {
public void valueChanged(String newValue) {
updateSentence();
}
} );
}
public String getType() {
return DATE_TAG;
}
}
@Override
public boolean isReadOnly() {
return this.readOnly;
}
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.TermFilter;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException;
/**
*
*/
public abstract class AbstractFieldMapper<T> implements FieldMapper<T>, Mapper {
// Default indexing options applied when a field mapping does not specify
// its own values; mirrored by the Builder's initial field values below.
public static class Defaults {
    public static final Field.Index INDEX = Field.Index.ANALYZED;
    public static final Field.Store STORE = Field.Store.NO;
    public static final Field.TermVector TERM_VECTOR = Field.TermVector.NO;
    public static final float BOOST = 1.0f;
    public static final boolean OMIT_NORMS = false;
    public static final boolean OMIT_TERM_FREQ_AND_POSITIONS = false;
}
// Variant of Builder that widens the protected setters to public, so external
// callers can configure the mapping; every override just delegates to super.
public abstract static class OpenBuilder<T extends Builder, Y extends AbstractFieldMapper> extends AbstractFieldMapper.Builder<T, Y> {
    protected OpenBuilder(String name) {
        super(name);
    }
    @Override
    public T index(Field.Index index) {
        return super.index(index);
    }
    @Override
    public T store(Field.Store store) {
        return super.store(store);
    }
    @Override
    public T termVector(Field.TermVector termVector) {
        return super.termVector(termVector);
    }
    @Override
    public T boost(float boost) {
        return super.boost(boost);
    }
    @Override
    public T omitNorms(boolean omitNorms) {
        return super.omitNorms(omitNorms);
    }
    @Override
    public T omitTermFreqAndPositions(boolean omitTermFreqAndPositions) {
        return super.omitTermFreqAndPositions(omitTermFreqAndPositions);
    }
    @Override
    public T indexName(String indexName) {
        return super.indexName(indexName);
    }
    @Override
    public T indexAnalyzer(NamedAnalyzer indexAnalyzer) {
        return super.indexAnalyzer(indexAnalyzer);
    }
    @Override
    public T searchAnalyzer(NamedAnalyzer searchAnalyzer) {
        return super.searchAnalyzer(searchAnalyzer);
    }
}
// Fluent builder for field mappers: collects indexing options (seeded from
// Defaults) and resolves the various field names against the mapping path.
// Setters return "builder" (the self-typed T) for chaining.
public abstract static class Builder<T extends Builder, Y extends AbstractFieldMapper> extends Mapper.Builder<T, Y> {
    protected Field.Index index = Defaults.INDEX;
    protected Field.Store store = Defaults.STORE;
    protected Field.TermVector termVector = Defaults.TERM_VECTOR;
    protected float boost = Defaults.BOOST;
    protected boolean omitNorms = Defaults.OMIT_NORMS;
    protected boolean omitTermFreqAndPositions = Defaults.OMIT_TERM_FREQ_AND_POSITIONS;
    protected String indexName;
    protected NamedAnalyzer indexAnalyzer;
    protected NamedAnalyzer searchAnalyzer;
    protected Boolean includeInAll;
    protected Builder(String name) {
        super(name);
    }
    protected T index(Field.Index index) {
        this.index = index;
        return builder;
    }
    protected T store(Field.Store store) {
        this.store = store;
        return builder;
    }
    protected T termVector(Field.TermVector termVector) {
        this.termVector = termVector;
        return builder;
    }
    protected T boost(float boost) {
        this.boost = boost;
        return builder;
    }
    protected T omitNorms(boolean omitNorms) {
        this.omitNorms = omitNorms;
        return builder;
    }
    protected T omitTermFreqAndPositions(boolean omitTermFreqAndPositions) {
        this.omitTermFreqAndPositions = omitTermFreqAndPositions;
        return builder;
    }
    protected T indexName(String indexName) {
        this.indexName = indexName;
        return builder;
    }
    protected T indexAnalyzer(NamedAnalyzer indexAnalyzer) {
        this.indexAnalyzer = indexAnalyzer;
        // Setting the index analyzer also defaults the search analyzer,
        // unless one was set explicitly first.
        if (this.searchAnalyzer == null) {
            this.searchAnalyzer = indexAnalyzer;
        }
        return builder;
    }
    protected T searchAnalyzer(NamedAnalyzer searchAnalyzer) {
        this.searchAnalyzer = searchAnalyzer;
        return builder;
    }
    protected T includeInAll(Boolean includeInAll) {
        this.includeInAll = includeInAll;
        return builder;
    }
    // Builds the full Names tuple; indexName falls back to the mapper name
    // when not set explicitly (same fallback as buildIndexName below).
    protected Names buildNames(BuilderContext context) {
        return new Names(name, buildIndexName(context), indexName == null ? name : indexName, buildFullName(context), context.path().sourcePath());
    }
    protected String buildIndexName(BuilderContext context) {
        String actualIndexName = indexName == null ? name : indexName;
        return context.path().pathAsText(actualIndexName);
    }
    protected String buildFullName(BuilderContext context) {
        return context.path().fullPathAsText(name);
    }
}
// Immutable mapping configuration captured at construction time; boost is the
// only mutable member. indexOptions is derived from omitTermFreqAndPositions.
protected final Names names;
protected final Field.Index index;
protected final Field.Store store;
protected final Field.TermVector termVector;
protected float boost;
protected final boolean omitNorms;
protected final boolean omitTermFreqAndPositions;
protected final FieldInfo.IndexOptions indexOptions;
protected final NamedAnalyzer indexAnalyzer;
protected final NamedAnalyzer searchAnalyzer;
// Captures the mapping configuration. Non-analyzed fields with no explicit
// analyzer fall back to the keyword analyzer (whole value as a single token).
protected AbstractFieldMapper(Names names, Field.Index index, Field.Store store, Field.TermVector termVector,
                              float boost, boolean omitNorms, boolean omitTermFreqAndPositions, NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer) {
    this.names = names;
    this.index = index;
    this.store = store;
    this.termVector = termVector;
    this.boost = boost;
    this.omitNorms = omitNorms;
    this.omitTermFreqAndPositions = omitTermFreqAndPositions;
    // DOCS_ONLY drops term frequencies and positions from the postings.
    this.indexOptions = omitTermFreqAndPositions ? FieldInfo.IndexOptions.DOCS_ONLY : FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
    if (indexAnalyzer == null && !index.isAnalyzed()) {
        this.indexAnalyzer = Lucene.KEYWORD_ANALYZER;
    } else {
        this.indexAnalyzer = indexAnalyzer;
    }
    if (searchAnalyzer == null && !index.isAnalyzed()) {
        this.searchAnalyzer = Lucene.KEYWORD_ANALYZER;
    } else {
        this.searchAnalyzer = searchAnalyzer;
    }
}
// --- Simple accessors over the immutable configuration above. ---
@Override
public String name() {
return names.name();
}
@Override
public Names names() {
return this.names;
}
@Override
public Field.Index index() {
return this.index;
}
@Override
public Field.Store store() {
return this.store;
}
@Override
public boolean stored() {
return store == Field.Store.YES;
}
@Override
public boolean indexed() {
return index != Field.Index.NO;
}
@Override
public boolean analyzed() {
return index == Field.Index.ANALYZED;
}
@Override
public Field.TermVector termVector() {
return this.termVector;
}
@Override
public float boost() {
return this.boost;
}
@Override
public boolean omitNorms() {
return this.omitNorms;
}
@Override
public boolean omitTermFreqAndPositions() {
return this.omitTermFreqAndPositions;
}
@Override
public Analyzer indexAnalyzer() {
return this.indexAnalyzer;
}
@Override
public Analyzer searchAnalyzer() {
return this.searchAnalyzer;
}
// Parses one value into a Lucene field via the subclass hook, applies the
// field-level options (norms, index options, boost unless the subclass sets
// its own), then adds it to the document if the listener does not veto it.
// Any failure is wrapped with the field's full name for context.
@Override
public void parse(ParseContext context) throws IOException {
try {
Fieldable field = parseCreateField(context);
// Subclass may legitimately produce no field for this value (e.g. null value).
if (field == null) {
return;
}
field.setOmitNorms(omitNorms);
field.setIndexOptions(indexOptions);
// Skip the generic boost when the subclass manages boost itself (customBoost()).
if (!customBoost()) {
field.setBoost(boost);
}
if (context.listener().beforeFieldAdded(this, field, context)) {
context.doc().add(field);
}
} catch (Exception e) {
throw new MapperParsingException("Failed to parse [" + names.fullName() + "]", e);
}
}
// Subclass hook: build the Lucene field for the current parse position, or
// return null to skip adding anything.
protected abstract Fieldable parseCreateField(ParseContext context) throws IOException;
/**
* Derived classes can override it to specify that boost value is set by derived classes.
*/
protected boolean customBoost() {
return false;
}
@Override
public void traverse(FieldMapperListener fieldMapperListener) {
fieldMapperListener.fieldMapper(this);
}
@Override
public void traverse(ObjectMapperListener objectMapperListener) {
// nothing to do here...
}
// Value surfaced to search responses; default is the string form of the field.
@Override
public Object valueForSearch(Fieldable field) {
return valueAsString(field);
}
// Normalizes an external value into its indexed form; identity by default,
// overridden by numeric/date mappers.
@Override
public String indexedValue(String value) {
return value;
}
// Returning null means "no special query-string term handling" for this field.
@Override
public Query queryStringTermQuery(Term term) {
return null;
}
@Override
public boolean useFieldQueryWithQueryString() {
return false;
}
@Override
public Query fieldQuery(String value, @Nullable QueryParseContext context) {
return new TermQuery(names().createIndexNameTerm(indexedValue(value)));
}
@Override
public Filter fieldFilter(String value, @Nullable QueryParseContext context) {
return new TermFilter(names().createIndexNameTerm(indexedValue(value)));
}
// String-typed minimum similarity is parsed to float; value is normalized
// through indexedValue(...) like the other term-building methods.
@Override
public Query fuzzyQuery(String value, String minSim, int prefixLength, int maxExpansions) {
return new FuzzyQuery(names().createIndexNameTerm(indexedValue(value)), Float.parseFloat(minSim), prefixLength, maxExpansions);
}
/**
 * Fuzzy query with a numeric minimum similarity.
 *
 * Fix: normalize {@code value} through {@code indexedValue(...)} — the
 * String-minSim overload and every other term-building method in this class
 * ({@code fieldQuery}, {@code fieldFilter}, {@code prefixQuery}, the range
 * methods) normalize, while this overload queried the raw value and could
 * miss terms for mappers that transform values before indexing.
 */
@Override
public Query fuzzyQuery(String value, double minSim, int prefixLength, int maxExpansions) {
    return new FuzzyQuery(names().createIndexNameTerm(indexedValue(value)), (float) minSim, prefixLength, maxExpansions);
}
// Prefix query over the normalized value; an explicit rewrite method (e.g.
// constant-score) is applied only when the caller supplies one.
@Override
public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
PrefixQuery query = new PrefixQuery(names().createIndexNameTerm(indexedValue(value)));
if (method != null) {
query.setRewriteMethod(method);
}
return query;
}
@Override
public Filter prefixFilter(String value, @Nullable QueryParseContext context) {
return new PrefixFilter(names().createIndexNameTerm(indexedValue(value)));
}
// Range query/filter: null bounds mean open-ended; non-null bounds are
// normalized through indexedValue(...).
@Override
public Query rangeQuery(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return new TermRangeQuery(names.indexName(),
lowerTerm == null ? null : indexedValue(lowerTerm),
upperTerm == null ? null : indexedValue(upperTerm),
includeLower, includeUpper);
}
@Override
public Filter rangeFilter(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return new TermRangeFilter(names.indexName(),
lowerTerm == null ? null : indexedValue(lowerTerm),
upperTerm == null ? null : indexedValue(upperTerm),
includeLower, includeUpper);
}
// Merges an updated mapping into this one. Incompatible settings (type,
// index, store, term_vector, analyzers) are reported as conflicts rather
// than applied; only boost is actually updated, and only when the merge is
// not a simulation.
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
if (!this.getClass().equals(mergeWith.getClass())) {
String mergedType = mergeWith.getClass().getSimpleName();
if (mergeWith instanceof AbstractFieldMapper) {
mergedType = ((AbstractFieldMapper) mergeWith).contentType();
}
mergeContext.addConflict("mapper [" + names.fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
// different types, return
return;
}
AbstractFieldMapper fieldMergeWith = (AbstractFieldMapper) mergeWith;
if (!this.index.equals(fieldMergeWith.index)) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index values");
}
if (!this.store.equals(fieldMergeWith.store)) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store values");
}
if (!this.termVector.equals(fieldMergeWith.termVector)) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different term_vector values");
}
// Analyzers may be null, so null-ness must match and, when both are set,
// the analyzer names must agree.
if (this.indexAnalyzer == null) {
if (fieldMergeWith.indexAnalyzer != null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index_analyzer");
}
} else if (fieldMergeWith.indexAnalyzer == null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index_analyzer");
} else if (!this.indexAnalyzer.name().equals(fieldMergeWith.indexAnalyzer.name())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index_analyzer");
}
if (this.searchAnalyzer == null) {
if (fieldMergeWith.searchAnalyzer != null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different search_analyzer");
}
} else if (fieldMergeWith.searchAnalyzer == null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different search_analyzer");
} else if (!this.searchAnalyzer.name().equals(fieldMergeWith.searchAnalyzer.name())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different search_analyzer");
}
if (!mergeContext.mergeFlags().simulate()) {
// apply changeable values
this.boost = fieldMergeWith.boost;
}
}
@Override
public FieldDataType fieldDataType() {
return FieldDataType.DefaultTypes.STRING;
}
// Serializes this mapper as { "<name>": { ...body... } }.
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(names.name());
doXContentBody(builder);
builder.endObject();
return builder;
}
// Emits only non-default settings. Internal ("_"-prefixed) and "default"
// analyzers are suppressed; when index and search analyzers share a name a
// single "analyzer" key is emitted instead of two.
protected void doXContentBody(XContentBuilder builder) throws IOException {
builder.field("type", contentType());
if (!names.name().equals(names.indexNameClean())) {
builder.field("index_name", names.indexNameClean());
}
if (boost != 1.0f) {
builder.field("boost", boost);
}
if (indexAnalyzer != null && searchAnalyzer != null && indexAnalyzer.name().equals(searchAnalyzer.name()) && !indexAnalyzer.name().startsWith("_") && !indexAnalyzer.name().equals("default")) {
// same analyzers, output it once
builder.field("analyzer", indexAnalyzer.name());
} else {
if (indexAnalyzer != null && !indexAnalyzer.name().startsWith("_") && !indexAnalyzer.name().equals("default")) {
builder.field("index_analyzer", indexAnalyzer.name());
}
if (searchAnalyzer != null && !searchAnalyzer.name().startsWith("_") && !searchAnalyzer.name().equals("default")) {
builder.field("search_analyzer", searchAnalyzer.name());
}
}
}
// The mapping "type" string for this mapper (e.g. "string", "long").
protected abstract String contentType();
@Override
public void close() {
// nothing to do here, sub classes to override if needed
}
}
| |
/**
*/
package de.uni_hildesheim.sse.ivml.impl;
import de.uni_hildesheim.sse.ivml.EqualityExpression;
import de.uni_hildesheim.sse.ivml.IvmlPackage;
import de.uni_hildesheim.sse.ivml.LogicalExpressionPart;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Logical Expression Part</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link de.uni_hildesheim.sse.ivml.impl.LogicalExpressionPartImpl#getOp <em>Op</em>}</li>
* <li>{@link de.uni_hildesheim.sse.ivml.impl.LogicalExpressionPartImpl#getEx <em>Ex</em>}</li>
* </ul>
*
* @generated
*/
// NOTE(review): EMF-generated class — hand edits are discarded on regeneration
// unless the corresponding @generated tag is changed to "@generated NOT".
public class LogicalExpressionPartImpl extends MinimalEObjectImpl.Container implements LogicalExpressionPart
{
/**
* The default value of the '{@link #getOp() <em>Op</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getOp()
* @generated
* @ordered
*/
protected static final String OP_EDEFAULT = null;
/**
* The cached value of the '{@link #getOp() <em>Op</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getOp()
* @generated
* @ordered
*/
protected String op = OP_EDEFAULT;
/**
* The cached value of the '{@link #getEx() <em>Ex</em>}' containment reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getEx()
* @generated
* @ordered
*/
protected EqualityExpression ex;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected LogicalExpressionPartImpl()
{
super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass()
{
return IvmlPackage.Literals.LOGICAL_EXPRESSION_PART;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String getOp()
{
return op;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void setOp(String newOp)
{
String oldOp = op;
op = newOp;
// Notify adapters only when someone is listening (standard EMF pattern).
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, IvmlPackage.LOGICAL_EXPRESSION_PART__OP, oldOp, op));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public EqualityExpression getEx()
{
return ex;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
// Low-level containment setter: swaps the reference and chains the SET
// notification without touching inverse references (callers manage those).
public NotificationChain basicSetEx(EqualityExpression newEx, NotificationChain msgs)
{
EqualityExpression oldEx = ex;
ex = newEx;
if (eNotificationRequired())
{
ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, IvmlPackage.LOGICAL_EXPRESSION_PART__EX, oldEx, newEx);
if (msgs == null) msgs = notification; else msgs.add(notification);
}
return msgs;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void setEx(EqualityExpression newEx)
{
// Public containment setter: detaches the old child, attaches the new one,
// then dispatches the accumulated notification chain in one go.
if (newEx != ex)
{
NotificationChain msgs = null;
if (ex != null)
msgs = ((InternalEObject)ex).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - IvmlPackage.LOGICAL_EXPRESSION_PART__EX, null, msgs);
if (newEx != null)
msgs = ((InternalEObject)newEx).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - IvmlPackage.LOGICAL_EXPRESSION_PART__EX, null, msgs);
msgs = basicSetEx(newEx, msgs);
if (msgs != null) msgs.dispatch();
}
else if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, IvmlPackage.LOGICAL_EXPRESSION_PART__EX, newEx, newEx));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs)
{
switch (featureID)
{
case IvmlPackage.LOGICAL_EXPRESSION_PART__EX:
return basicSetEx(null, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType)
{
switch (featureID)
{
case IvmlPackage.LOGICAL_EXPRESSION_PART__OP:
return getOp();
case IvmlPackage.LOGICAL_EXPRESSION_PART__EX:
return getEx();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eSet(int featureID, Object newValue)
{
switch (featureID)
{
case IvmlPackage.LOGICAL_EXPRESSION_PART__OP:
setOp((String)newValue);
return;
case IvmlPackage.LOGICAL_EXPRESSION_PART__EX:
setEx((EqualityExpression)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eUnset(int featureID)
{
switch (featureID)
{
case IvmlPackage.LOGICAL_EXPRESSION_PART__OP:
setOp(OP_EDEFAULT);
return;
case IvmlPackage.LOGICAL_EXPRESSION_PART__EX:
setEx((EqualityExpression)null);
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean eIsSet(int featureID)
{
switch (featureID)
{
case IvmlPackage.LOGICAL_EXPRESSION_PART__OP:
return OP_EDEFAULT == null ? op != null : !OP_EDEFAULT.equals(op);
case IvmlPackage.LOGICAL_EXPRESSION_PART__EX:
return ex != null;
}
return super.eIsSet(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String toString()
{
if (eIsProxy()) return super.toString();
StringBuilder result = new StringBuilder(super.toString());
result.append(" (op: ");
result.append(op);
result.append(')');
return result.toString();
}
} //LogicalExpressionPartImpl
| |
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2013 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.cmss;
import java.awt.BorderLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JFrame;
import javax.swing.JLayeredPane;
import javax.swing.JPanel;
import javax.swing.border.EmptyBorder;
/**
 * Modal-style frame that lets the user pick which web-application categories
 * to fingerprint. Pressing OK snapshots the selected category names (readable
 * via {@link #getWhatToFingerprint()}) and disposes the frame.
 */
public class WhatToFingerPrintFrame extends JFrame implements ActionListener {

    /** */
    private static final long serialVersionUID = 1L;

    private JPanel contentPane;

    // All category check boxes; iterated in this exact order to collect the
    // selected category names when OK is pressed.
    private ArrayList<javax.swing.JCheckBox> checkBoxesList =
            new ArrayList<javax.swing.JCheckBox>();

    // Category names that were selected when OK was last pressed.
    ArrayList<String> WhatToFingerprint = new ArrayList<String>();

    /** Create the frame. */
    public WhatToFingerPrintFrame() {
        setDefaultCloseOperation(JFrame.HIDE_ON_CLOSE);
        setBounds(100, 100, 768, 300);
        setResizable(false);
        contentPane = new JPanel();
        contentPane.setBorder(new EmptyBorder(5, 5, 5, 5));
        contentPane.setLayout(new BorderLayout(0, 0));
        setContentPane(contentPane);
        JLayeredPane layeredPane = new JLayeredPane();
        contentPane.add(layeredPane, BorderLayout.CENTER);

        // Check boxes are created in the original (visual) order so component
        // and focus-traversal order is unchanged.
        JCheckBox chckbxDatabasemanagers = addCategoryCheckBox(layeredPane, "database-managers", 18, 7, 122);
        JCheckBox chckbxDocumentationtools = addCategoryCheckBox(layeredPane, "documentation-tools", 566, 7, 137);
        JCheckBox chckbxNewCheckBox = addCategoryCheckBox(layeredPane, "widgets", 18, 56, 116);
        JCheckBox chckbxWebshops = addCategoryCheckBox(layeredPane, "web-shops", 18, 82, 116);
        JCheckBox chckbxNewCheckBox_1 = addCategoryCheckBox(layeredPane, "photo-galleries", 18, 107, 116);
        JCheckBox chckbxNewCheckBox_2 = addCategoryCheckBox(layeredPane, "wikis", 18, 133, 116);
        JCheckBox chckbxNewCheckBox_3 = addCategoryCheckBox(layeredPane, "hosting-panels", 18, 159, 116);
        JCheckBox chckbxNewCheckBox_4 = addCategoryCheckBox(layeredPane, "analytics", 18, 185, 116);
        JCheckBox chckbxNewCheckBox_5 = addCategoryCheckBox(layeredPane, "blogs", 142, 7, 116);
        JCheckBox chckbxNewCheckBox_6 = addCategoryCheckBox(layeredPane, "issue-trackers", 142, 30, 116);
        JCheckBox chckbxNewCheckBox_7 = addCategoryCheckBox(layeredPane, "video-players", 142, 56, 116);
        JCheckBox chckbxNewCheckBox_8 = addCategoryCheckBox(layeredPane, "comment-systems", 142, 82, 116);
        JCheckBox chckbxNewCheckBox_9 = addCategoryCheckBox(layeredPane, "captchas", 142, 107, 116);
        JCheckBox chckbxNewCheckBox_10 = addCategoryCheckBox(layeredPane, "font-scripts", 142, 133, 116);
        JCheckBox chckbxNewCheckBox_11 = addCategoryCheckBox(layeredPane, "miscellaneous", 142, 159, 116);
        JCheckBox chckbxNewCheckBox_12 = addCategoryCheckBox(layeredPane, "editors", 142, 185, 116);
        JCheckBox chckbxNewCheckBox_13 = addCategoryCheckBox(layeredPane, "lms", 276, 7, 114);
        JCheckBox chckbxNewCheckBox_14 = addCategoryCheckBox(layeredPane, "cache-tools", 276, 30, 114);
        JCheckBox chckbxNewCheckBox_15 = addCategoryCheckBox(layeredPane, "rich-text-editors", 276, 56, 114);
        JCheckBox chckbxNewCheckBox_16 = addCategoryCheckBox(layeredPane, "javascript-graphics", 276, 82, 122);
        JCheckBox chckbxNewCheckBox_17 = addCategoryCheckBox(layeredPane, "mobile-frameworks", 276, 107, 122);
        JCheckBox chckbxNewCheckBox_18 = addCategoryCheckBox(layeredPane, "programming-languages", 427, 185, 137);
        JCheckBox chckbxNewCheckBox_19 = addCategoryCheckBox(layeredPane, "operating-systems", 276, 159, 114);
        JCheckBox chckbxNewCheckBox_20 = addCategoryCheckBox(layeredPane, "search-engines", 276, 185, 114);
        JCheckBox chckbxNewCheckBox_21 = addCategoryCheckBox(layeredPane, "cdn", 427, 7, 137);
        JCheckBox chckbxNewCheckBox_22 = addCategoryCheckBox(layeredPane, "marketing-automation", 427, 30, 137);
        JCheckBox chckbxNewCheckBox_23 = addCategoryCheckBox(layeredPane, "web-server-extensions", 427, 56, 137);
        JCheckBox chckbxNewCheckBox_24 = addCategoryCheckBox(layeredPane, "maps", 427, 82, 137);
        JCheckBox chckbxNewCheckBox_25 = addCategoryCheckBox(layeredPane, "advertising-networks", 427, 107, 137);
        JCheckBox chckbxNewCheckBox_26 = addCategoryCheckBox(layeredPane, "network-devices", 427, 133, 137);
        JCheckBox chckbxNewCheckBox_27 = addCategoryCheckBox(layeredPane, "media-servers", 427, 159, 137);
        JCheckBox chckbxNewCheckBox_28 = addCategoryCheckBox(layeredPane, "webcams", 276, 133, 97);
        JCheckBox chckbxNewCheckBox_29 = addCategoryCheckBox(layeredPane, "printers", 18, 30, 116);

        JButton btnOk = new JButton("OK");
        btnOk.addActionListener(this);
        btnOk.setBounds(615, 217, 97, 23);
        layeredPane.add(btnOk);

        // Registration order determines the order of names returned by
        // getWhatToFingerprint(); kept identical to the original code.
        checkBoxesList.add(chckbxNewCheckBox_1);
        checkBoxesList.add(chckbxNewCheckBox_2);
        checkBoxesList.add(chckbxNewCheckBox_3);
        checkBoxesList.add(chckbxNewCheckBox_4);
        checkBoxesList.add(chckbxNewCheckBox_5);
        checkBoxesList.add(chckbxNewCheckBox_6);
        checkBoxesList.add(chckbxNewCheckBox_7);
        checkBoxesList.add(chckbxNewCheckBox_8);
        checkBoxesList.add(chckbxNewCheckBox_9);
        checkBoxesList.add(chckbxNewCheckBox_10);
        checkBoxesList.add(chckbxNewCheckBox_11);
        checkBoxesList.add(chckbxNewCheckBox_12);
        checkBoxesList.add(chckbxNewCheckBox_13);
        checkBoxesList.add(chckbxNewCheckBox_14);
        checkBoxesList.add(chckbxNewCheckBox_15);
        checkBoxesList.add(chckbxNewCheckBox_16);
        checkBoxesList.add(chckbxNewCheckBox_17);
        checkBoxesList.add(chckbxNewCheckBox_18);
        checkBoxesList.add(chckbxNewCheckBox_19);
        checkBoxesList.add(chckbxNewCheckBox_20);
        checkBoxesList.add(chckbxNewCheckBox_21);
        checkBoxesList.add(chckbxNewCheckBox_22);
        checkBoxesList.add(chckbxNewCheckBox_23);
        checkBoxesList.add(chckbxNewCheckBox_24);
        checkBoxesList.add(chckbxNewCheckBox_25);
        checkBoxesList.add(chckbxNewCheckBox_26);
        checkBoxesList.add(chckbxNewCheckBox_27);
        checkBoxesList.add(chckbxNewCheckBox_28);
        checkBoxesList.add(chckbxNewCheckBox_29);
        checkBoxesList.add(chckbxDatabasemanagers);
        checkBoxesList.add(chckbxDocumentationtools);
        checkBoxesList.add(chckbxWebshops);
        checkBoxesList.add(chckbxNewCheckBox);
    }

    // Creates a category check box with the standard 23px height at the given
    // position/width and adds it to the pane. Factors out the 3-line pattern
    // previously repeated 33 times.
    private JCheckBox addCategoryCheckBox(JLayeredPane pane, String text, int x, int y, int width) {
        JCheckBox checkBox = new JCheckBox(text);
        checkBox.setBounds(x, y, width, 23);
        pane.add(checkBox);
        return checkBox;
    }

    // Rebuilds the selection snapshot from the currently checked boxes.
    private void initWhatToFingerprint() {
        // Bug fix: clear before repopulating — previously, pressing OK more
        // than once accumulated duplicate category names. A leftover debug
        // System.out.println was also removed.
        WhatToFingerprint.clear();
        for (JCheckBox checkBox : checkBoxesList) {
            if (checkBox.isSelected()) {
                WhatToFingerprint.add(checkBox.getText());
            }
        }
    }

    /** @return the category names selected when OK was last pressed */
    public ArrayList<String> getWhatToFingerprint() {
        return WhatToFingerprint;
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        // OK pressed: capture the selection, then close the frame.
        initWhatToFingerprint();
        this.dispose();
    }
}
| |
/*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.rules.AddToRuleKey;
import com.facebook.buck.rules.AddsToRuleKey;
import com.facebook.buck.rules.BuildContext;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildableContext;
import com.facebook.buck.rules.CellPathResolver;
import com.facebook.buck.rules.DefaultBuildTargetSourcePath;
import com.facebook.buck.rules.ExplicitBuildTargetSourcePath;
import com.facebook.buck.rules.RulePipelineStateFactory;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.step.Step;
import com.facebook.buck.util.MoreCollectors;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedSet;
import java.nio.file.Path;
import java.util.Optional;
import java.util.function.Predicate;
import javax.annotation.Nullable;
/**
 * Produces the build steps that compile Java sources and package them (plus
 * resources and an optional manifest) into a jar, for both the library jar and
 * the source-ABI jar of a target. Also supplies dependency-file rule-key
 * support based on tracked class usage.
 */
public class JarBuildStepsFactory
implements AddsToRuleKey, RulePipelineStateFactory<JavacPipelineState> {
private final ProjectFilesystem projectFilesystem;
private final SourcePathRuleFinder ruleFinder;
@AddToRuleKey private final ConfiguredCompiler configuredCompiler;
@AddToRuleKey private final ImmutableSortedSet<SourcePath> srcs;
@AddToRuleKey private final ImmutableSortedSet<SourcePath> resources;
@AddToRuleKey(stringify = true)
private final Optional<Path> resourcesRoot;
@AddToRuleKey private final Optional<SourcePath> manifestFile;
@AddToRuleKey private final ImmutableList<String> postprocessClassesCommands;
// In the rule key via its contents; not read directly outside the dep-file predicate.
@SuppressWarnings("PMD.UnusedPrivateField")
@AddToRuleKey
private final ZipArchiveDependencySupplier abiClasspath;
private final boolean trackClassUsage;
private final ImmutableSortedSet<SourcePath> compileTimeClasspathSourcePaths;
@AddToRuleKey private final RemoveClassesPatternsMatcher classesToRemoveFromJar;
public JarBuildStepsFactory(
ProjectFilesystem projectFilesystem,
SourcePathRuleFinder ruleFinder,
ConfiguredCompiler configuredCompiler,
ImmutableSortedSet<SourcePath> srcs,
ImmutableSortedSet<SourcePath> resources,
Optional<Path> resourcesRoot,
Optional<SourcePath> manifestFile,
ImmutableList<String> postprocessClassesCommands,
ZipArchiveDependencySupplier abiClasspath,
boolean trackClassUsage,
ImmutableSortedSet<SourcePath> compileTimeClasspathSourcePaths,
RemoveClassesPatternsMatcher classesToRemoveFromJar) {
this.projectFilesystem = projectFilesystem;
this.ruleFinder = ruleFinder;
this.configuredCompiler = configuredCompiler;
this.srcs = srcs;
this.resources = resources;
this.resourcesRoot = resourcesRoot;
this.postprocessClassesCommands = postprocessClassesCommands;
this.manifestFile = manifestFile;
this.abiClasspath = abiClasspath;
this.trackClassUsage = trackClassUsage;
this.compileTimeClasspathSourcePaths = compileTimeClasspathSourcePaths;
this.classesToRemoveFromJar = classesToRemoveFromJar;
}
// A jar is produced iff there is anything to put in it.
public boolean producesJar() {
return !srcs.isEmpty() || !resources.isEmpty() || manifestFile.isPresent();
}
public ImmutableSortedSet<SourcePath> getSources() {
return srcs;
}
public ImmutableSortedSet<SourcePath> getResources() {
return resources;
}
// Null when this target produces no jar at all.
@Nullable
public SourcePath getSourcePathToOutput(BuildTarget buildTarget) {
return getOutputJarPath(buildTarget)
.map(path -> new ExplicitBuildTargetSourcePath(buildTarget, path))
.orElse(null);
}
@VisibleForTesting
public ImmutableSortedSet<SourcePath> getCompileTimeClasspathSourcePaths() {
return compileTimeClasspathSourcePaths;
}
// Dep-file keys only make sense when there are sources whose class usage is tracked.
public boolean useDependencyFileRuleKeys() {
return !srcs.isEmpty() && trackClassUsage;
}
public Predicate<SourcePath> getCoveredByDepFilePredicate(SourcePathResolver pathResolver) {
// a hash set is intentionally used to achieve constant time look-up
return abiClasspath.getArchiveMembers(pathResolver).collect(MoreCollectors.toImmutableSet())
::contains;
}
// Steps for the source-ABI jar: class-usage tracking is disabled, ABI
// generation is enabled, and postprocess commands are skipped (empty list).
public ImmutableList<Step> getBuildStepsForAbiJar(
BuildContext context, BuildableContext buildableContext, BuildTarget buildTarget) {
Preconditions.checkState(producesJar());
ImmutableList.Builder<Step> steps = ImmutableList.builder();
CompilerParameters compilerParameters =
CompilerParameters.builder()
.setClasspathEntriesSourcePaths(
compileTimeClasspathSourcePaths, context.getSourcePathResolver())
.setSourceFileSourcePaths(srcs, projectFilesystem, context.getSourcePathResolver())
.setStandardPaths(buildTarget, projectFilesystem)
.setShouldTrackClassUsage(false)
.setShouldGenerateAbiJar(true)
.build();
ResourcesParameters resourcesParameters = getResourcesParameters();
Optional<JarParameters> jarParameters =
getJarParameters(context, buildTarget, compilerParameters);
CompileToJarStepFactory compileToJarStepFactory = (CompileToJarStepFactory) configuredCompiler;
compileToJarStepFactory.createCompileToJarStep(
context,
buildTarget,
context.getSourcePathResolver(),
ruleFinder,
projectFilesystem,
compilerParameters,
resourcesParameters,
ImmutableList.of(),
jarParameters,
steps,
buildableContext);
return steps.build();
}
// Steps for the full library jar: honors trackClassUsage and postprocess
// commands, and appends the accumulate-class-names step for ABI computation.
public ImmutableList<Step> getBuildStepsForLibraryJar(
BuildContext context, BuildableContext buildableContext, BuildTarget buildTarget) {
ImmutableList.Builder<Step> steps = ImmutableList.builder();
CompilerParameters compilerParameters =
CompilerParameters.builder()
.setClasspathEntriesSourcePaths(
compileTimeClasspathSourcePaths, context.getSourcePathResolver())
.setSourceFileSourcePaths(srcs, projectFilesystem, context.getSourcePathResolver())
.setStandardPaths(buildTarget, projectFilesystem)
.setShouldTrackClassUsage(trackClassUsage)
.build();
ResourcesParameters resourcesParameters = getResourcesParameters();
Optional<JarParameters> jarParameters =
getJarParameters(context, buildTarget, compilerParameters);
CompileToJarStepFactory compileToJarStepFactory = (CompileToJarStepFactory) configuredCompiler;
compileToJarStepFactory.createCompileToJarStep(
context,
buildTarget,
context.getSourcePathResolver(),
ruleFinder,
projectFilesystem,
compilerParameters,
resourcesParameters,
postprocessClassesCommands,
jarParameters,
steps,
buildableContext);
JavaLibraryRules.addAccumulateClassNamesStep(
buildTarget,
projectFilesystem,
getSourcePathToOutput(buildTarget),
buildableContext,
context,
steps);
return steps.build();
}
protected ResourcesParameters getResourcesParameters() {
return ResourcesParameters.builder()
.setResources(this.resources)
.setResourcesRoot(this.resourcesRoot)
.build();
}
// Empty when no jar is produced; otherwise jars the compiler output directory,
// applying the manifest (if any) and the class-removal patterns.
protected Optional<JarParameters> getJarParameters(
BuildContext context, BuildTarget buildTarget, CompilerParameters compilerParameters) {
return getOutputJarPath(buildTarget)
.map(
output ->
JarParameters.builder()
.setEntriesToJar(ImmutableSortedSet.of(compilerParameters.getOutputDirectory()))
.setManifestFile(
manifestFile.map(context.getSourcePathResolver()::getAbsolutePath))
.setJarPath(output)
.setRemoveEntryPredicate(classesToRemoveFromJar)
.build());
}
// Reads the used-classes dep file written during the build and maps the
// entries back to source paths (preferring ABI jars where available).
public ImmutableList<SourcePath> getInputsAfterBuildingLocally(
BuildContext context, CellPathResolver cellPathResolver, BuildTarget buildTarget) {
Preconditions.checkState(useDependencyFileRuleKeys());
return DefaultClassUsageFileReader.loadFromFile(
projectFilesystem,
cellPathResolver,
projectFilesystem.getPathForRelativePath(getDepFileRelativePath(buildTarget)),
getDepOutputPathToAbiSourcePath(context.getSourcePathResolver()));
}
// Only ABI ("source-abi") and library flavors have a jar path; any other
// flavor reaching here is a programming error.
private Optional<Path> getOutputJarPath(BuildTarget buildTarget) {
if (!producesJar()) {
return Optional.empty();
}
if (HasJavaAbi.isSourceAbiTarget(buildTarget)) {
return Optional.of(CompilerParameters.getAbiJarPath(buildTarget, projectFilesystem));
} else if (HasJavaAbi.isLibraryTarget(buildTarget)) {
return Optional.of(DefaultJavaLibrary.getOutputJarPath(buildTarget, projectFilesystem));
} else {
throw new IllegalArgumentException();
}
}
private Path getDepFileRelativePath(BuildTarget buildTarget) {
return CompilerParameters.getOutputJarDirPath(buildTarget, projectFilesystem)
.resolve("used-classes.json");
}
// Maps each compile-time classpath entry's absolute path to the SourcePath
// that should represent it in dep-file keys: the rule's ABI jar when it has
// one, or the entry itself for CalculateAbi rules. Entries from other rule
// types are deliberately omitted.
private ImmutableMap<Path, SourcePath> getDepOutputPathToAbiSourcePath(
SourcePathResolver pathResolver) {
ImmutableMap.Builder<Path, SourcePath> pathToSourcePathMapBuilder = ImmutableMap.builder();
for (SourcePath sourcePath : compileTimeClasspathSourcePaths) {
BuildRule rule = ruleFinder.getRule(sourcePath).get();
Path path = pathResolver.getAbsolutePath(sourcePath);
if (rule instanceof HasJavaAbi) {
if (((HasJavaAbi) rule).getAbiJar().isPresent()) {
BuildTarget buildTarget = ((HasJavaAbi) rule).getAbiJar().get();
pathToSourcePathMapBuilder.put(path, new DefaultBuildTargetSourcePath(buildTarget));
}
} else if (rule instanceof CalculateAbi) {
pathToSourcePathMapBuilder.put(path, sourcePath);
}
}
return pathToSourcePathMapBuilder.build();
}
@Override
public JavacPipelineState newInstance() {
return new JavacPipelineState();
}
}
| |
package edu.cloudy.layout;
import edu.cloudy.geom.SWCPoint;
import edu.cloudy.geom.SWCRectangle;
import edu.cloudy.layout.overlaps.ForceDirectedOverlapRemoval;
import edu.cloudy.layout.overlaps.ForceDirectedUniformity;
import edu.cloudy.nlp.Word;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* @author spupyrev
* May 12, 2013
*/
public class SeamCarvingAlgo extends BaseLayoutAlgo
{
// Starting rectangles for the words; when null, an initial layout is computed in run().
private SWCRectangle[] initialWordPositions;

/**
 * @param initialWordPositions starting word rectangles, or null to let the
 *        algorithm compute an initial placement itself.
 */
public SeamCarvingAlgo(SWCRectangle[] initialWordPositions)
{
    super();
    this.initialWordPositions = initialWordPositions;
}

/** Creates the algorithm with no preset positions. */
public SeamCarvingAlgo()
{
    this(null);
}
/**
 * Runs seam carving: computes (or reuses) an initial placement, builds the zone
 * grid, removes seams to compact the layout, then post-processes the result to
 * resolve residual overlaps and even out spacing.
 */
@Override
protected void run()
{
    if (initialWordPositions == null)
        initialWordPositions = initialPlacement();

    // Compute the zone grid induced by the rectangle edges.
    Zone[][] zones = createZones(initialWordPositions);

    // Run seam carving. (Fix: the original first assigned
    // wordPositions = initialWordPositions, a dead store immediately
    // overwritten by the removeSeams result; it has been dropped.)
    wordPositions = removeSeams(zones, initialWordPositions);

    new ForceDirectedOverlapRemoval<SWCRectangle>().run(wordPositions);
    new ForceDirectedUniformity<SWCRectangle>().run(wordPositions);
}
/**
 * Computes a starting layout by running the MDS algorithm on the word graph and
 * collecting each word's rectangle, in word order.
 */
private SWCRectangle[] initialPlacement()
{
    LayoutResult mdsLayout = new MDSAlgo(false).layout(wordGraph);
    SWCRectangle[] positions = new SWCRectangle[words.length];
    for (int index = 0; index < words.length; index++)
        positions[index] = mdsLayout.getWordPosition(words[index]);
    return positions;
}
/**
 * Builds the n*m grid of zones whose cell boundaries are the distinct x and y
 * coordinates of the word rectangles' edges (first index runs along x).
 */
private Zone[][] createZones(SWCRectangle[] wordPositions)
{
    // Collect the distinct edge coordinates of all rectangles.
    Set<Double> xValues = new HashSet<Double>();
    Set<Double> yValues = new HashSet<Double>();
    for (SWCRectangle rect : wordPositions)
    {
        xValues.add(rect.getMinX());
        xValues.add(rect.getMaxX());
        yValues.add(rect.getMinY());
        yValues.add(rect.getMaxY());
    }

    // Sorted, duplicate-free boundary coordinates (the sets removed duplicates).
    List<Double> sortedX = new ArrayList<Double>(xValues);
    List<Double> sortedY = new ArrayList<Double>(yValues);
    Collections.sort(sortedX);
    Collections.sort(sortedY);

    // One zone per cell between consecutive boundary coordinates.
    int n = sortedX.size() - 1;
    int m = sortedY.size() - 1;
    Zone[][] zones = new Zone[n][m];
    for (int i = 0; i < n; i++)
    {
        for (int j = 0; j < m; j++)
        {
            double x0 = sortedX.get(i), x1 = sortedX.get(i + 1);
            double y0 = sortedY.get(j), y1 = sortedY.get(j + 1);
            zones[i][j] = new Zone(new SWCRectangle(x0, y0, x1 - x0, y1 - y0), i, j);
        }
    }
    return zones;
}
/**
 * Snaps together edge coordinates that differ by less than EPS so that nearby
 * rectangle edges share exact values: each coordinate is mapped to the
 * representative (first value, in sorted order) of its cluster.
 * TODO: unfortunately, this alignment may introduce overlaps.
 */
private void alignWords(SWCRectangle[] wordPositions)
{
    double EPS = 0.1;

    // Gather all edge coordinates of all rectangles.
    List<Double> xValues = new ArrayList<Double>();
    List<Double> yValues = new ArrayList<Double>();
    for (SWCRectangle rect : wordPositions)
    {
        xValues.add(rect.getMinX());
        xValues.add(rect.getMaxX());
        yValues.add(rect.getMinY());
        yValues.add(rect.getMaxY());
    }
    Collections.sort(xValues);
    Collections.sort(yValues);

    // indX maps every original x to its cluster representative; a new cluster
    // starts when a value exceeds the current representative by more than EPS.
    Map<Double, Double> indX = new HashMap<Double, Double>();
    int top = 0;
    for (int i = 0; i < xValues.size(); i++)
        if (xValues.get(i) > xValues.get(top) + EPS)
        {
            top++;
            xValues.set(top, xValues.get(i));
            indX.put(xValues.get(i), xValues.get(top));
        }
        else
        {
            indX.put(xValues.get(i), xValues.get(top));
        }

    // Move each rectangle to its snapped x.
    // NOTE(review): assumes rect.getX() equals rect.getMinX(); otherwise
    // indX.get() returns null and the unboxing throws an NPE — confirm
    // against SWCRectangle.
    for (SWCRectangle rect : wordPositions)
    {
        double oldX = rect.getX();
        double newX = indX.get(rect.getX());
        if (oldX != newX)
        {
            rect.setRect(newX, rect.getY(), rect.getWidth(), rect.getHeight());
        }
    }

    // Same clustering and snapping for y values.
    Map<Double, Double> indY = new HashMap<Double, Double>();
    top = 0;
    for (int i = 0; i < yValues.size(); i++)
        if (yValues.get(i) > yValues.get(top) + EPS)
        {
            top++;
            yValues.set(top, yValues.get(i));
            indY.put(yValues.get(i), yValues.get(top));
        }
        else
        {
            indY.put(yValues.get(i), yValues.get(top));
        }

    for (SWCRectangle rect : wordPositions)
    {
        double oldY = rect.getY();
        double newY = indY.get(rect.getY());
        if (oldY != newY)
        {
            rect.setRect(rect.getX(), newY, rect.getWidth(), rect.getHeight());
        }
    }
}
/**
 * Iteratively removes the cheapest horizontal or vertical seam until no seam of
 * at least the current minimum size remains (the threshold is relaxed when the
 * algorithm gets stuck) or MAX_ITERATIONS is reached.
 *
 * @return the compacted word rectangles; a new array, but it shares (and moves)
 *         the SWCRectangle instances of the input
 */
private SWCRectangle[] removeSeams(Zone[][] zones, SWCRectangle[] wordPositions)
{
    // Shallow copy: new array, same rectangle instances.
    SWCRectangle[] returnedPositions = Arrays.copyOf(wordPositions, wordPositions.length);

    // Width of the largest word; used to weight energies.
    double maxWordSize = computeMaxWordSizes(returnedPositions);
    // Scaling factor is used to normalize distances in the energy computation.
    double scalingFactor = computeScalingFactor(returnedPositions);

    int MAX_ITERATIONS = 500;
    int iter = 0;
    double minSeamSize = 10; // zones thinner than this get infinite seam cost
    while (iter++ < MAX_ITERATIONS)
    {
        // Periodically re-snap nearly-equal coordinates to keep the grid small.
        if (iter % 30 == 0)
            alignWords(returnedPositions);

        double[][] E = energy(zones, returnedPositions, maxWordSize, scalingFactor);

        // Find the cheapest seam of each orientation.
        List<Zone> horizontalSeam = new ArrayList<Zone>();
        List<Zone> verticalSeam = new ArrayList<Zone>();
        double horizontalSeamCost = findOptimalSeam(true, zones, returnedPositions, E, horizontalSeam, minSeamSize);
        double verticalSeamCost = findOptimalSeam(false, zones, returnedPositions, E, verticalSeam, minSeamSize);

        // No removable seam at the current minimum size: relax the threshold or stop.
        if (horizontalSeamCost >= Double.POSITIVE_INFINITY && verticalSeamCost >= Double.POSITIVE_INFINITY)
        {
            if (minSeamSize <= 0.5)
                break;
            minSeamSize /= 3.0;
            continue;
        }

        // Remove the cheaper of the two seams.
        if (horizontalSeamCost < verticalSeamCost)
        {
            removeHorizontalSeamByFullReconstruction(zones, returnedPositions, horizontalSeam);
        }
        else
        {
            removeVerticalSeamByFullReconstruction(zones, returnedPositions, verticalSeam);
        }

        // Rebuild the zone grid from scratch; not slower than an incremental
        // update, but much simpler.
        zones = createZones(returnedPositions);
    }
    return returnedPositions;
}
/**
 * Removes a horizontal seam: every rectangle whose top edge lies past the seam
 * (larger y than the seam zone in its column) is shifted by the seam's minimum
 * height, closing that much vertical space.
 */
private void removeHorizontalSeamByFullReconstruction(Zone[][] zones, SWCRectangle[] wordPositions, List<Zone> zonePath)
{
    // For each x column of the seam, remember the y of the seam zone there.
    Map<Double, Double> zoneY = new HashMap<Double, Double>();
    double minHeight = Double.POSITIVE_INFINITY;
    for (Zone z : zonePath)
    {
        minHeight = Math.min(minHeight, z.getRectangle().getHeight());
        zoneY.put(z.getRectangle().getMinX(), z.getRectangle().getMinY());
    }

    // Shift rectangles on the far side of the seam.
    // NOTE(review): assumes every rect.getMinX() is a key in zoneY (i.e. the
    // seam path crosses a zone at every rectangle's left edge); otherwise the
    // unboxing of zoneY.get() throws an NPE — confirm.
    for (SWCRectangle rect : wordPositions)
    {
        double removedZoneY = zoneY.get(rect.getMinX());
        if (removedZoneY < rect.getMinY())
            rect.setRect(rect.getMinX(), rect.getMinY() - minHeight, rect.getWidth(), rect.getHeight());
    }
}
/**
 * Removes a vertical seam: every rectangle whose left edge lies past the seam
 * (larger x than the seam zone in its row) is shifted by the seam's minimum
 * width, closing that much horizontal space.
 */
private void removeVerticalSeamByFullReconstruction(Zone[][] zones, SWCRectangle[] wordPositions, List<Zone> zonePath)
{
    // For each y row of the seam, remember the x of the seam zone there.
    Map<Double, Double> zoneX = new HashMap<Double, Double>();
    double minWidth = Double.POSITIVE_INFINITY;
    for (Zone z : zonePath)
    {
        minWidth = Math.min(minWidth, z.getRectangle().getWidth());
        zoneX.put(z.getRectangle().getMinY(), z.getRectangle().getMinX());
    }

    // Shift rectangles on the far side of the seam.
    // NOTE(review): assumes every rect.getMinY() is a key in zoneX; otherwise
    // the unboxing of zoneX.get() throws an NPE — confirm.
    for (SWCRectangle rect : wordPositions)
    {
        double removedZoneX = zoneX.get(rect.getMinY());
        if (removedZoneX < rect.getMinX())
            rect.setRect(rect.getMinX() - minWidth, rect.getMinY(), rect.getWidth(), rect.getHeight());
    }
}
/**
 * Debug helper: validates via asserts that zone indices match grid positions,
 * that zone extents are consistent along rows and columns, and that the cached
 * isOccupied flags agree with a fresh occupancy test.
 */
@SuppressWarnings("unused")
private void checkZoneConsistency(Zone[][] zones, SWCRectangle[] wordPositions)
{
    int rows = zones.length;
    int cols = zones[0].length;

    // 1) Each zone knows its own grid position.
    for (int r = 0; r < rows; r++)
    {
        for (int c = 0; c < cols; c++)
        {
            assert (zones[r][c].getIndexI() == r);
            assert (zones[r][c].getIndexJ() == c);
        }
    }

    // 2) Widths agree along each row band, heights along each column band.
    for (int r = 0; r < rows; r++)
    {
        for (int c = 0; c < cols; c++)
        {
            assert (c + 1 >= cols || zones[r][c].getWidth() == zones[r][c + 1].getWidth());
            assert (r + 1 >= rows || zones[r][c].getHeight() == zones[r + 1][c].getHeight());
        }
    }

    // 3) The cached occupancy flag matches the recomputed value.
    for (int r = 0; r < rows; r++)
    {
        for (int c = 0; c < cols; c++)
        {
            assert (zones[r][c].isOccupied == zoneIsOccupied(zones[r][c], wordPositions));
        }
    }
}
/**
 * A zone counts as occupied when some word rectangle contains its center point.
 */
private boolean zoneIsOccupied(Zone zone, SWCRectangle[] wordPositions)
{
    SWCRectangle zoneRect = zone.getRectangle();
    SWCPoint center = new SWCPoint(zoneRect.getCenterX(), zoneRect.getCenterY());
    for (int k = 0; k < wordPositions.length; k++)
    {
        if (wordPositions[k].contains(center))
            return true;
    }
    return false;
}
/** Returns the width of the widest word rectangle (a word's "size" is its width). */
private double computeMaxWordSizes(SWCRectangle[] wordPositions)
{
    double widest = 0;
    for (int k = 0; k < wordPositions.length; k++)
        widest = Math.max(widest, wordPositions[k].getWidth());
    return widest;
}
/**
 * Half of the larger dimension of the bounding box of all rectangles; used to
 * normalize distances when computing zone energies.
 */
private double computeScalingFactor(SWCRectangle[] wordPositions)
{
    double minX = Double.POSITIVE_INFINITY, minY = Double.POSITIVE_INFINITY;
    double maxX = Double.NEGATIVE_INFINITY, maxY = Double.NEGATIVE_INFINITY;
    for (SWCRectangle r : wordPositions)
    {
        minX = Math.min(minX, r.getMinX());
        minY = Math.min(minY, r.getMinY());
        maxX = Math.max(maxX, r.getMaxX());
        maxY = Math.max(maxY, r.getMaxY());
    }
    double width = maxX - minX;
    double height = maxY - minY;
    return Math.max(width, height) / 2;
}
/**
 * Dynamic program that finds the minimum-energy monotone seam through the zone
 * grid: top-to-bottom (over i) when {@code horizontal} is true, left-to-right
 * (over j) otherwise. Consecutive seam cells may shift sideways by at most dt.
 *
 * @param zonePath output parameter, filled with the zones of the optimal seam
 *        (in reverse traversal order)
 * @param minSeamSize zones thinner than this (height for horizontal seams,
 *        width for vertical ones) get infinite cost
 * @return the cost of the optimal seam, or POSITIVE_INFINITY when none exists
 */
private double findOptimalSeam(boolean horizontal, Zone[][] zones, SWCRectangle[] wordPositions, double[][] E, List<Zone> zonePath, double minSeamSize)
{
    int n = zones.length;
    int m = zones[0].length;
    double[][] Ec = new double[n][m]; // Ec[i][j] = min cost of a seam ending at zone (i, j)
    int[][] parent = new int[n][m];   // index in the previous row/column the optimum came from

    // Fill the table in seam direction (row-major for horizontal seams,
    // column-major for vertical) so predecessors are already computed.
    for (int cell = 0; cell < n * m; cell++)
    {
        int i, j;
        if (horizontal)
        {
            i = cell / m;
            j = cell % m;
        }
        else
        {
            i = cell % n;
            j = cell / n;
        }

        parent[i][j] = -1;
        Ec[i][j] = 0;

        // A seam may never pass through a zone covered by a word.
        if (zones[i][j].isOccupied)
        {
            Ec[i][j] = Double.POSITIVE_INFINITY;
            continue;
        }

        int dt = 5; // maximum sideways step between consecutive seam cells

        // Take the best reachable predecessor from the previous row (or column).
        if (horizontal && i - 1 >= 0)
        {
            Ec[i][j] = Double.POSITIVE_INFINITY;
            for (int t = -dt; t <= dt; t++)
                if (j + t >= 0 && j + t < m)
                    if (Ec[i][j] > Ec[i - 1][j + t])
                    {
                        Ec[i][j] = Ec[i - 1][j + t];
                        parent[i][j] = j + t;
                    }
        }
        else if (!horizontal && j - 1 >= 0)
        {
            Ec[i][j] = Double.POSITIVE_INFINITY;
            for (int t = -dt; t <= dt; t++)
                if (i + t >= 0 && i + t < n)
                    if (Ec[i][j] > Ec[i + t][j - 1])
                    {
                        Ec[i][j] = Ec[i + t][j - 1];
                        parent[i][j] = i + t;
                    }
        }

        // Add the zone's own energy, scaled by the zone's size in the carving
        // direction (original note: "make sure that it is beneficial to choose
        // larger zones"); zones below the minimum size are excluded outright.
        double size = (horizontal ? zones[i][j].getHeight() : zones[i][j].getWidth());
        if (size < minSeamSize)
            Ec[i][j] = Double.POSITIVE_INFINITY;
        else if (size < 1.0)
            Ec[i][j] += E[i][j] / size;
        else
            Ec[i][j] += E[i][j] * size;
    }

    // Pick the cheapest seam end on the last row (or column).
    int minIndex = -1;
    double minSum = Double.POSITIVE_INFINITY;
    if (horizontal)
    {
        for (int i = 0; i < m; i++)
        {
            if (minSum > Ec[n - 1][i])
            {
                minSum = Ec[n - 1][i];
                minIndex = i;
            }
        }
    }
    else
    {
        for (int i = 0; i < n; i++)
        {
            if (minSum > Ec[i][m - 1])
            {
                minSum = Ec[i][m - 1];
                minIndex = i;
            }
        }
    }
    if (minIndex == -1)
        return Double.POSITIVE_INFINITY;

    // Walk the parent links back to reconstruct the seam.
    int curI = n - 1;
    int curJ = m - 1;
    if (horizontal)
        curJ = minIndex;
    else
        curI = minIndex;
    while (curI >= 0 && curJ >= 0)
    {
        if (zones[curI][curJ].isOccupied)
            throw new RuntimeException("smth wrong with dp");
        zonePath.add(zones[curI][curJ]);
        if (horizontal)
        {
            curJ = parent[curI][curJ];
            curI--;
        }
        else
        {
            curI = parent[curI][curJ];
            curJ--;
        }
    }
    // The walk must terminate exactly at the first row/column.
    if (curI != -1 || curJ != -1)
        throw new RuntimeException("smth wrong with dp");
    return minSum;
}
/**
 * Computes the energy of every zone in the grid.
 */
private double[][] energy(Zone[][] zones, SWCRectangle[] wordPositions, double maxWordSize, double scalingFactor)
{
    double[][] table = new double[zones.length][];
    for (int i = 0; i < zones.length; i++)
    {
        table[i] = new double[zones[i].length];
        for (int j = 0; j < zones[i].length; j++)
            table[i][j] = energy(zones[i][j], wordPositions, maxWordSize, scalingFactor);
    }
    return table;
}
/**
 * Energy of a single zone: a sum of Gaussian bumps centered at each word
 * rectangle, weighted by relative word width. Zones covered by a word have
 * infinite energy so seams never pass through words; the flag is cached lazily.
 */
private double energy(Zone zone, SWCRectangle[] wordPositions, double maxWordSize, double scalingFactor)
{
    if (zone.isOccupied)
        return Double.POSITIVE_INFINITY;

    double cx = zone.getRectangle().getCenterX();
    double cy = zone.getRectangle().getCenterY();
    double result = 0;
    for (SWCRectangle rect : wordPositions)
    {
        // Lazily mark the zone as occupied when a word covers its center.
        if (rect.contains(cx, cy))
        {
            zone.isOccupied = true;
            return Double.POSITIVE_INFINITY;
        }
        double wordSize = rect.getWidth();
        double diffX = (cx - rect.getCenterX()) / scalingFactor;
        double diffY = (cy - rect.getCenterY()) / scalingFactor;
        double exponent = (diffX * diffX + diffY * diffY) / 2;
        // Bug fix: the original wrote (1.0 / 2 * Math.PI), which by operator
        // precedence is pi/2, not the intended Gaussian normalization 1/(2*pi).
        // Only a uniform scale of all energies changes, so relative seam costs
        // (and hence the chosen seams) are unaffected.
        // fastexp is a cheap approximation of Math.exp (much faster).
        result += (wordSize / maxWordSize) * (1.0 / (2 * Math.PI)) * fastexp(-exponent);
    }
    assert (result < Double.POSITIVE_INFINITY);
    return result;
}
/**
 * Fast approximation of Math.exp via bit manipulation of the IEEE-754 double
 * layout. NOTE(review): the constants match a Schraudolph-style exponential
 * approximation — the result is approximate, not exact; confirm the accuracy
 * is acceptable for callers.
 */
public static double fastexp(double val)
{
    long bits = (long) (1512775 * val + 1072632447);
    return Double.longBitsToDouble(bits << 32);
}
/**
 * A rectangular cell of the zone grid, addressed by its (i, j) grid indices.
 * isOccupied is read and toggled directly by the outer class when a word
 * rectangle covers the cell's center.
 */
private class Zone
{
    boolean isOccupied = false;

    private final SWCRectangle bounds;
    private final int row;
    private final int column;

    public Zone(SWCRectangle rect, int i, int j)
    {
        bounds = rect;
        row = i;
        column = j;
    }

    public SWCRectangle getRectangle()
    {
        return bounds;
    }

    public double getHeight()
    {
        return bounds.getHeight();
    }

    public double getWidth()
    {
        return bounds.getWidth();
    }

    public int getIndexI()
    {
        return row;
    }

    public int getIndexJ()
    {
        return column;
    }
}
}
| |
package fr.tvbarthel.lib.blurdialogfragment;
import android.app.Activity;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.drawable.BitmapDrawable;
import android.os.AsyncTask;
import android.os.Build;
import android.support.v7.app.ActionBar;
import android.support.v7.app.ActionBarActivity;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
import android.view.WindowManager;
import android.widget.FrameLayout;
import android.widget.ImageView;
/**
* Encapsulate the whole behaviour to provide a blur effect on a DialogFragment.
* <p/>
* All the screen behind the dialog will be blurred except the action bar.
* <p/>
* Simply linked all methods to the matching lifecycle ones.
*/
public class BlurDialogEngine {
/**
 * Tag used for LogCat output.
 */
private static final String TAG = BlurDialogEngine.class.getSimpleName();

/**
 * Since the image is going to be blurred, we don't care about resolution.
 * Down scale factor to reduce blurring time and memory allocation.
 */
private static final float BLUR_DOWN_SCALE_FACTOR = 4.0f;

/**
 * Radius used to blur the background.
 */
private static final int BLUR_RADIUS = 8;

/**
 * Image view used to display the blurred background.
 */
private ImageView mBlurredBackgroundView;

/**
 * Layout params used to add the blurred background to the window.
 */
private FrameLayout.LayoutParams mBlurredBackgroundLayoutParams;

/**
 * Task used to capture the screen and blur it off the UI thread.
 */
private BlurAsyncTask mBluringTask;

/**
 * Used to enable or disable debug mode.
 * NOTE(review): field name contains a typo ("Debud"); kept as-is in this
 * documentation-only pass.
 */
private boolean mDebudEnable = false;

/**
 * Factor used to down scale the background. High quality isn't necessary
 * since the background will be blurred.
 */
private float mDownScaleFactor = BLUR_DOWN_SCALE_FACTOR;

/**
 * Radius used for the fast blur algorithm.
 */
private int mBlurRadius = BLUR_RADIUS;

/**
 * Holding activity; cleared in onDestroy to avoid leaking it.
 */
private Activity mHoldingActivity;

/**
 * Constructor.
 *
 * @param holdingActivity activity which holds the DialogFragment.
 */
public BlurDialogEngine(Activity holdingActivity) {
    mHoldingActivity = holdingActivity;
}
/**
 * Resume the engine: starts the capture-and-blur task when no blurred
 * background exists yet, or when the fragment instance was retained across a
 * configuration change.
 *
 * @param retainedInstance use getRetainInstance().
 */
public void onResume(boolean retainedInstance) {
    if (mBlurredBackgroundView == null || retainedInstance) {
        mBluringTask = new BlurAsyncTask();
        mBluringTask.execute();
    }
}
/**
 * Must be linked to the original lifecycle (dialog dismissal).
 * Hides and releases the blurred background and cancels any in-flight blurring task.
 */
public void onDismiss() {
    // Remove blurred background and clear memory; the view can be null if the
    // dialog is dismissed before the blur effect processing ends.
    if (mBlurredBackgroundView != null) {
        mBlurredBackgroundView.setVisibility(View.GONE);
        mBlurredBackgroundView = null;
    }
    // Cancel the async task. Bug fix: the task is also null when onDismiss is
    // called before onResume ever started it, so guard against an NPE instead
    // of calling cancel() unconditionally.
    if (mBluringTask != null) {
        mBluringTask.cancel(true);
        mBluringTask = null;
    }
}
/**
 * Must be linked to the original lifecycle. Releases the activity reference to
 * avoid leaking it.
 */
public void onDestroy() {
    mHoldingActivity = null;
}
/**
 * Enable / disable debug mode.
 * <p/>
 * When enabled, timing/parameter logs go to LogCat and the blurring time is
 * drawn directly on the blurred screen.
 *
 * @param enable true to display debug information.
 */
public void debug(boolean enable) {
    mDebudEnable = enable;
}
/**
 * Apply a custom down scale factor.
 * <p/>
 * By default the down scale factor is
 * {@link BlurDialogEngine#BLUR_DOWN_SCALE_FACTOR}.
 * <p/>
 * A higher factor increases blurring speed but reduces final rendering quality.
 *
 * @param factor customized down scale factor; values below 1.0 are clamped to
 *               1.0 (no down scale applied).
 */
public void setDownScaleFactor(float factor) {
    mDownScaleFactor = Math.max(1.0f, factor);
}
/**
 * Apply a custom blur radius.
 * <p/>
 * By default the blur radius is {@link BlurDialogEngine#BLUR_RADIUS}.
 *
 * @param radius custom radius used to blur; negative values are clamped to 0.
 */
public void setBlurRadius(int radius) {
    mBlurRadius = Math.max(0, radius);
}
/**
 * Blur the given bitmap and wrap it in an image view ready to be attached to
 * the activity window. The area covered by the action bar and status bar (top)
 * and the navigation bar (bottom) is excluded from the capture so the blur
 * never draws over them.
 *
 * @param bkg  bitmap of the background (decor view capture).
 * @param view background view the bitmap was captured from (used for sizing).
 */
private void blur(Bitmap bkg, View view) {
    long startMs = System.currentTimeMillis();

    // Layout params for the blurred ImageView: fill its parent.
    mBlurredBackgroundLayoutParams = new FrameLayout.LayoutParams(
            FrameLayout.LayoutParams.MATCH_PARENT,
            FrameLayout.LayoutParams.MATCH_PARENT
    );

    // Overlay used to build the scaled preview and blur the background.
    Bitmap overlay = null;

    // Evaluate the top offset due to the action bar (support or framework flavor).
    int actionBarHeight = 0;
    try {
        if (mHoldingActivity instanceof ActionBarActivity) {
            ActionBar supportActionBar
                    = ((ActionBarActivity) mHoldingActivity).getSupportActionBar();
            if (supportActionBar != null) {
                actionBarHeight = supportActionBar.getHeight();
            }
        } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            android.app.ActionBar actionBar = mHoldingActivity.getActionBar();
            if (actionBar != null) {
                actionBarHeight = actionBar.getHeight();
            }
        }
    } catch (NoClassDefFoundError e) {
        // Support library not on the classpath: fall back to the framework action bar.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            android.app.ActionBar actionBar = mHoldingActivity.getActionBar();
            if (actionBar != null) {
                actionBarHeight = actionBar.getHeight();
            }
        }
    }

    // Evaluate the top offset due to the status bar (zero in fullscreen mode).
    int statusBarHeight = 0;
    if ((mHoldingActivity.getWindow().getAttributes().flags
            & WindowManager.LayoutParams.FLAG_FULLSCREEN) == 0) {
        // Not in fullscreen mode.
        statusBarHeight = getStatusBarHeight();
    }

    final int topOffset = actionBarHeight + statusBarHeight;
    final int bottomOffset = getNavigationBarOffset();

    // Source boundaries exclude actionBar/statusBar pixels at the top and the
    // navigation bar at the bottom.
    Rect srcRect = new Rect(
            0,
            actionBarHeight + statusBarHeight,
            bkg.getWidth(),
            bkg.getHeight() - bottomOffset
    );

    // Down-scaled destination bitmap; the same offsets are applied so the
    // overlay keeps the aspect ratio of the area that will be rendered.
    overlay = Bitmap.createBitmap((int) ((view.getWidth()) / mDownScaleFactor),
            (int) ((view.getMeasuredHeight() - topOffset - bottomOffset) / mDownScaleFactor),
            Bitmap.Config.RGB_565);

    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB
            || mHoldingActivity instanceof ActionBarActivity) {
        // Push the blurred view below the action bar so we never draw on top of it.
        mBlurredBackgroundLayoutParams.setMargins(
                0,
                actionBarHeight,
                0,
                0
        );
        mBlurredBackgroundLayoutParams.gravity = Gravity.TOP;
    }

    // Scale and draw the background view onto the overlay with bitmap filtering.
    Canvas canvas = new Canvas(overlay);
    Paint paint = new Paint();
    paint.setFlags(Paint.FILTER_BITMAP_FLAG);
    final RectF destRect = new RectF(0, 0, overlay.getWidth(), overlay.getHeight());
    canvas.drawBitmap(bkg, srcRect, destRect, paint);

    // Apply the fast blur on the down-scaled overlay.
    overlay = FastBlurHelper.doBlur(overlay, mBlurRadius, false);

    if (mDebudEnable) {
        String blurTime = (System.currentTimeMillis() - startMs) + " ms";
        // Log parameters and timing.
        Log.d(TAG, "Radius : " + mBlurRadius);
        Log.d(TAG, "Down Scale Factor : " + mDownScaleFactor);
        Log.d(TAG, "Blurred achieved in : " + blurTime);
        Log.d(TAG, "Allocation : " + bkg.getRowBytes() + "ko (screen capture) + "
                + overlay.getRowBytes() + "ko (FastBlur)");
        // Also draw the blurring time directly on the blurred bitmap.
        Rect bounds = new Rect();
        Canvas canvas1 = new Canvas(overlay);
        paint.setColor(Color.BLACK);
        paint.setAntiAlias(true);
        paint.setTextSize(20.0f);
        paint.getTextBounds(blurTime, 0, blurTime.length(), bounds);
        canvas1.drawText(blurTime, 2, bounds.height(), paint);
    }

    // Wrap the final bitmap in an ImageView; it is attached in onPostExecute.
    mBlurredBackgroundView = new ImageView(mHoldingActivity);
    mBlurredBackgroundView.setImageDrawable(new BitmapDrawable(mHoldingActivity.getResources(), overlay));
}
/**
 * Retrieve the status bar height in px, read from the platform's internal
 * "status_bar_height" dimension resource.
 *
 * @return status bar height in px, or 0 when the resource is not found.
 */
private int getStatusBarHeight() {
    int result = 0;
    int resourceId = mHoldingActivity.getResources().getIdentifier("status_bar_height", "dimen", "android");
    if (resourceId > 0) {
        result = mHoldingActivity.getResources().getDimensionPixelSize(resourceId);
    }
    return result;
}
/**
 * Retrieve the bottom offset introduced by the navigation bar, read from the
 * platform's internal "navigation_bar_height" dimension resource. Only applies
 * on Lollipop in portrait orientation; otherwise 0.
 *
 * @return bottom offset due to the navigation bar, in px.
 */
private int getNavigationBarOffset() {
    Resources resources = mHoldingActivity.getResources();
    boolean lollipopPortrait = Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP
            && resources.getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT;
    if (!lollipopPortrait) {
        return 0;
    }
    int resourceId = resources.getIdentifier("navigation_bar_height", "dimen", "android");
    return resourceId > 0 ? resources.getDimensionPixelSize(resourceId) : 0;
}
/**
 * Async task used to process the blur off the UI thread: onPreExecute captures
 * the decor view (must run on the UI thread), doInBackground blurs it, and
 * onPostExecute attaches the blurred ImageView to the activity's window.
 */
public class BlurAsyncTask extends AsyncTask<Void, Void, Void> {

    private Bitmap mBackground;

    private View mBackgroundView;

    @Override
    protected void onPreExecute() {
        super.onPreExecute();
        mBackgroundView = mHoldingActivity.getWindow().getDecorView();

        // Retrieve the background view; must be done on the UI thread since
        // only the original thread that created a view hierarchy can touch its views.
        Rect rect = new Rect();
        mBackgroundView.getWindowVisibleDisplayFrame(rect);
        mBackgroundView.destroyDrawingCache();
        mBackgroundView.setDrawingCacheEnabled(true);
        mBackgroundView.buildDrawingCache(true);
        mBackground = mBackgroundView.getDrawingCache(true);

        /**
         * After rotation, the DecorView has no height and no width, therefore
         * getDrawingCache() returns null. That's why we have to force measure and layout.
         */
        if (mBackground == null) {
            mBackgroundView.measure(
                    View.MeasureSpec.makeMeasureSpec(rect.width(), View.MeasureSpec.EXACTLY),
                    View.MeasureSpec.makeMeasureSpec(rect.height(), View.MeasureSpec.EXACTLY)
            );
            mBackgroundView.layout(0, 0, mBackgroundView.getMeasuredWidth(),
                    mBackgroundView.getMeasuredHeight());
            mBackgroundView.destroyDrawingCache();
            mBackgroundView.setDrawingCacheEnabled(true);
            mBackgroundView.buildDrawingCache(true);
            mBackground = mBackgroundView.getDrawingCache(true);
        }
    }

    @Override
    protected Void doInBackground(Void... params) {
        // Process the blur.
        blur(mBackground, mBackgroundView);

        // Clear intermediate memory.
        mBackground.recycle();
        mBackgroundView.destroyDrawingCache();
        mBackgroundView.setDrawingCacheEnabled(false);
        return null;
    }

    @Override
    protected void onPostExecute(Void aVoid) {
        super.onPostExecute(aVoid);
        // Attach the blurred background on top of the activity content.
        mHoldingActivity.getWindow().addContentView(
                mBlurredBackgroundView,
                mBlurredBackgroundLayoutParams
        );
        mBackgroundView = null;
        mBackground = null;
    }
}
}
| |
/*
* This file is part of p455w0rd's Library.
* Copyright (c) 2016, p455w0rd (aka TheRealp455w0rd), All rights reserved
* unless
* otherwise stated.
*
* p455w0rd's Library is free software: you can redistribute it and/or modify
* it under the terms of the MIT License.
*
* p455w0rd's Library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* MIT License for more details.
*
* You should have received a copy of the MIT License
* along with p455w0rd's Library. If not, see
* <https://opensource.org/licenses/MIT>.
*/
package p455w0rdslib.util;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.google.common.collect.BiMap;
import com.google.common.collect.ListMultimap;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.Gui;
import net.minecraft.client.gui.GuiScreen;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.client.model.ModelBox;
import net.minecraft.client.model.ModelRenderer;
import net.minecraft.client.model.TexturedQuad;
import net.minecraft.client.renderer.RenderItem;
import net.minecraft.client.renderer.entity.RenderLivingBase;
import net.minecraft.client.renderer.entity.RenderPlayer;
import net.minecraft.client.renderer.entity.layers.LayerRenderer;
import net.minecraft.client.resources.IResourcePack;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.ai.EntityAIAttackRangedBow;
import net.minecraft.entity.monster.EntityEnderman;
import net.minecraft.entity.monster.EntitySkeleton;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.Slot;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.datasync.DataParameter;
import net.minecraft.network.datasync.EntityDataManager;
import net.minecraft.util.DamageSource;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.IntHashMap;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.Vec3d;
import net.minecraft.world.World;
import net.minecraft.world.biome.Biome;
import net.minecraftforge.common.capabilities.CapabilityDispatcher;
import net.minecraftforge.fml.common.IWorldGenerator;
import net.minecraftforge.fml.common.ModContainer;
import net.minecraftforge.fml.common.registry.EntityRegistry;
import net.minecraftforge.fml.common.registry.EntityRegistry.EntityRegistration;
import net.minecraftforge.fml.common.registry.GameRegistry;
import net.minecraftforge.fml.common.registry.RegistryDelegate;
import net.minecraftforge.fml.relauncher.ReflectionHelper;
/**
* This class is has methods which use ATs and/or Reflection<br>
* to make dealing with private/private final mc methods<br>
* easier.
*
* @author p455w0rd
*
*/
public class MCPrivateUtils {
/** Adds a resource pack to Minecraft's private defaultResourcePacks list via reflection. */
public static void addResourcePack(IResourcePack pack) {
    List<Object> packList = ReflectionHelper.getPrivateValue(Minecraft.class, Minecraft.getMinecraft(), ReflectionUtils.determineSRG("defaultResourcePacks"));
    packList.add(pack);
    // Write the list back so the mutation is reflected even if the field were replaced.
    ReflectionHelper.setPrivateValue(Minecraft.class, Minecraft.getMinecraft(), packList, ReflectionUtils.determineSRG("defaultResourcePacks"));
}

/** Reads RenderLivingBase's private layerRenderers list. */
public static List<LayerRenderer<? extends EntityLivingBase>> getLayerRenderers(RenderLivingBase<?> renderPlayer) {
    return ReflectionHelper.getPrivateValue(RenderLivingBase.class, renderPlayer, ReflectionUtils.determineSRG("layerRenderers"));
}

/** Replaces RenderLivingBase's private layerRenderers list on the given player renderer. */
public static void setLayerRenderers(List<LayerRenderer<EntityPlayer>> layers, RenderPlayer renderPlayer) {
    ReflectionHelper.setPrivateValue(RenderLivingBase.class, renderPlayer, layers, ReflectionUtils.determineSRG("layerRenderers"));
}

/** Reads ModelRenderer's private textureOffsetX field. */
public static int getTextureOffsetX(ModelRenderer modelRenderer) {
    return ReflectionHelper.getPrivateValue(ModelRenderer.class, modelRenderer, ReflectionUtils.determineSRG("textureOffsetX"));
}

/** Reads ModelRenderer's private textureOffsetY field. */
public static int getTextureOffsetY(ModelRenderer modelRenderer) {
    return ReflectionHelper.getPrivateValue(ModelRenderer.class, modelRenderer, ReflectionUtils.determineSRG("textureOffsetY"));
}

/** Reads ModelBox's private quadList array. */
public static TexturedQuad[] getQuadList(ModelBox modelBox) {
    return ReflectionHelper.getPrivateValue(ModelBox.class, modelBox, ReflectionUtils.determineSRG("quadList"));
}

/** Reads EntityEnderman's private SCREAMING data parameter. */
public static DataParameter<Boolean> getEndermanScreaming(EntityEnderman enderman) {
    return ReflectionHelper.getPrivateValue(EntityEnderman.class, enderman, ReflectionUtils.determineSRG("SCREAMING"));
}

/**
 * Sets the enderman's screaming flag through its private data manager,
 * marking the parameter dirty so the change is synced.
 */
public static void setEndermanScreaming(EntityEnderman enderman, boolean isScreaming) {
    EntityDataManager dm = ReflectionHelper.getPrivateValue(Entity.class, enderman, ReflectionUtils.determineSRG("dataManager"));
    DataParameter<Boolean> screaming = getEndermanScreaming(enderman);
    dm.set(screaming, Boolean.valueOf(isScreaming));
    dm.setDirty(screaming);
    // Write the data manager back (same instance; kept for symmetry with the getter).
    ReflectionHelper.setPrivateValue(Entity.class, enderman, dm, ReflectionUtils.determineSRG("dataManager"));
}
/** Reads RenderItem's private zLevel field. */
public static float getRenderItemZLevel(RenderItem ri) {
    return ReflectionHelper.getPrivateValue(RenderItem.class, ri, ReflectionUtils.determineZLevelSRG("zLevel", RenderItem.class));
}

/** Writes RenderItem's private zLevel field. */
public static void setRenderItemZLevel(RenderItem ri, float zLevel) {
    ReflectionHelper.setPrivateValue(RenderItem.class, ri, zLevel, ReflectionUtils.determineZLevelSRG("zLevel", RenderItem.class));
}

/** Reads Gui's private zLevel field. */
public static float getGuiZLevel(Gui gui) {
    return ReflectionHelper.getPrivateValue(Gui.class, gui, ReflectionUtils.determineZLevelSRG("zLevel", Gui.class));
}

/** Writes Gui's private zLevel field. */
public static void setGuiZLevel(Gui gui, float zLevel) {
    ReflectionHelper.setPrivateValue(Gui.class, gui, zLevel, ReflectionUtils.determineZLevelSRG("zLevel", Gui.class));
}

/** Reads GuiScreen's private itemRender (RenderItem) field. */
public static RenderItem getGuiScreenRenderItem(GuiScreen gui) {
    return ReflectionHelper.getPrivateValue(GuiScreen.class, gui, ReflectionUtils.determineSRG("itemRender"));
}

/** Writes the zLevel of the screen's private RenderItem. */
public static void setGuiScreenRendererZLevel(GuiScreen gui, float zLevel) {
    ReflectionHelper.setPrivateValue(RenderItem.class, getGuiScreenRenderItem(gui), zLevel, ReflectionUtils.determineZLevelSRG("zLevel", RenderItem.class));
}
public static boolean getGuiDragSplitting(GuiContainer gui) {
return ReflectionHelper.getPrivateValue(GuiContainer.class, gui, ReflectionUtils.determineSRG("dragSplitting"));
}
public static Set<Slot> getGuiDragSplittingSlots(GuiContainer gui) {
return ReflectionHelper.getPrivateValue(GuiContainer.class, gui, ReflectionUtils.determineSRG("dragSplittingSlots"));
}
public static int getGuiDragSplittingLimit(GuiContainer gui) {
return ReflectionHelper.getPrivateValue(GuiContainer.class, gui, ReflectionUtils.determineSRG("dragSplittingLimit"));
}
public static Slot getGuiClickedSlot(GuiContainer gui) {
return ReflectionHelper.getPrivateValue(GuiContainer.class, gui, ReflectionUtils.determineSRG("clickedSlot"));
}
public static ItemStack getGuiDraggedStack(GuiContainer gui) {
return ReflectionHelper.getPrivateValue(GuiContainer.class, gui, ReflectionUtils.determineSRG("draggedStack"));
}
public static boolean getGuiIsRightMouseClick(GuiContainer gui) {
return ReflectionHelper.getPrivateValue(GuiContainer.class, gui, ReflectionUtils.determineSRG("isRightMouseClick"));
}
/** Reflectively reads the private {@code dragSplittingRemnant} field of the given {@code GuiContainer}. */
public static int getGuiDragSplittingRemnant(GuiContainer gui) {
return ReflectionHelper.getPrivateValue(GuiContainer.class, gui, ReflectionUtils.determineSRG("dragSplittingRemnant"));
}
/** Reflectively writes the private {@code dragSplittingRemnant} field of the given {@code GuiContainer}. */
public static void setGuiDragSplittingRemnant(GuiContainer gui, int amount) {
ReflectionHelper.setPrivateValue(GuiContainer.class, gui, amount, ReflectionUtils.determineSRG("dragSplittingRemnant"));
}
/** Reflectively reads the private {@code capabilities} field of the given {@code ItemStack}. Uses the plain field name (no SRG lookup) — presumably a Forge-added field; confirm against the mappings. */
public static CapabilityDispatcher getItemStackCapabilities(ItemStack stack) {
return ReflectionHelper.getPrivateValue(ItemStack.class, stack, "capabilities");
}
/** Reflectively writes the private {@code capabilities} field of the given {@code ItemStack} (plain name, no SRG lookup). */
public static void setItemStackCapabilities(ItemStack stack, CapabilityDispatcher dispatcher) {
ReflectionHelper.setPrivateValue(ItemStack.class, stack, dispatcher, "capabilities");
}
/** Reflectively reads the private {@code capNBT} field of the given {@code ItemStack} (plain name, no SRG lookup). */
public static NBTTagCompound getItemStackCapNBT(ItemStack stack) {
return ReflectionHelper.getPrivateValue(ItemStack.class, stack, "capNBT");
}
/** Reflectively writes the private {@code item} field of the given {@code ItemStack}. */
public static void setItemStackItem(ItemStack stack, Item item) {
ReflectionHelper.setPrivateValue(ItemStack.class, stack, item, ReflectionUtils.determineSRG("item"));
}
/** Reflectively writes the private {@code delegate} field of the given {@code ItemStack} (plain name, no SRG lookup). */
public static void setItemStackDelegate(ItemStack stack, RegistryDelegate<Item> delegate) {
ReflectionHelper.setPrivateValue(ItemStack.class, stack, delegate, "delegate");
}
/** Reflectively reads the private {@code xSize} field of the given {@code GuiContainer}. */
public static int getGuiContainerXSize(GuiContainer gui) {
return ReflectionHelper.getPrivateValue(GuiContainer.class, gui, ReflectionUtils.determineSRG("xSize"));
}
/** Reflectively reads the private {@code ySize} field of the given {@code GuiContainer}. */
public static int getGuiContainerYSize(GuiContainer gui) {
return ReflectionHelper.getPrivateValue(GuiContainer.class, gui, ReflectionUtils.determineSRG("ySize"));
}
/** Reflectively reads the private {@code rainfall} field of the given {@code Biome}. */
public static float getRainfall(Biome biome) {
return ReflectionHelper.getPrivateValue(Biome.class, biome, ReflectionUtils.determineSRG("rainfall"));
}
/** Reflectively writes the private {@code rainfall} field of the given {@code Biome}. */
public static void setRainfall(Biome biome, float rainfall) {
ReflectionHelper.setPrivateValue(Biome.class, biome, rainfall, ReflectionUtils.determineSRG("rainfall"));
}
/** Reflectively reads the private {@code enableRain} flag of the given {@code Biome}. */
public static boolean isRainEnabled(Biome biome) {
return ReflectionHelper.getPrivateValue(Biome.class, biome, ReflectionUtils.determineSRG("enableRain"));
}
/** Reflectively writes the private {@code enableRain} flag of the given {@code Biome}. */
public static void setRainEnabled(Biome biome, boolean enableRain) {
ReflectionHelper.setPrivateValue(Biome.class, biome, enableRain, ReflectionUtils.determineSRG("enableRain"));
}
/** Reflectively reads Forge's private {@code entityClassRegistrations} map from the {@code EntityRegistry} singleton. Returns the live map, not a copy. */
public static BiMap<Class<? extends Entity>, EntityRegistration> getEntityClassRegistry() {
return ReflectionHelper.getPrivateValue(EntityRegistry.class, EntityRegistry.instance(), "entityClassRegistrations");
}
/** Reflectively reads Forge's private {@code entityNames} map from the {@code EntityRegistry} singleton. */
public static Map<String, ModContainer> getEntityNameRegistry() {
return ReflectionHelper.getPrivateValue(EntityRegistry.class, EntityRegistry.instance(), "entityNames");
}
/** Reflectively reads Forge's private {@code entityRegistrations} multimap from the {@code EntityRegistry} singleton. */
public static ListMultimap<ModContainer, EntityRegistration> getEntityRegistration() {
return ReflectionHelper.getPrivateValue(EntityRegistry.class, EntityRegistry.instance(), "entityRegistrations");
}
/** Reflectively writes the private {@code lastDamageSource} field of the given {@code EntityLivingBase}. */
public static void setLastDamageSource(EntityLivingBase entity, DamageSource source) {
ReflectionHelper.setPrivateValue(EntityLivingBase.class, entity, source, ReflectionUtils.determineSRG("lastDamageSource"));
}
/** Reflectively writes the private {@code lastDamageStamp} field of the given {@code EntityLivingBase}. */
public static void setLastDamageStamp(EntityLivingBase entity, long time) {
ReflectionHelper.setPrivateValue(EntityLivingBase.class, entity, time, ReflectionUtils.determineSRG("lastDamageStamp"));
}
/** Reflectively reads the private {@code unloadedEntityList} of the given {@code World}. Returns the live list, not a copy. */
public static List<Entity> getUnloadedEntityList(World world) {
return ReflectionHelper.getPrivateValue(World.class, world, ReflectionUtils.determineSRG("unloadedEntityList"));
}
/** Reflectively reads the private {@code lastPortalPos} field of the given {@code Entity}. */
public static BlockPos getEntityLastPortalPos(Entity entity) {
return ReflectionHelper.getPrivateValue(Entity.class, entity, ReflectionUtils.determineSRG("lastPortalPos"));
}
/** Reflectively writes the private {@code lastPortalPos} field of the given {@code Entity}. */
public static void setEntityLastPortalPos(Entity entity, BlockPos pos) {
ReflectionHelper.setPrivateValue(Entity.class, entity, pos, ReflectionUtils.determineSRG("lastPortalPos"));
}
/** Reflectively reads the private {@code lastPortalVec} field of the given {@code Entity}. */
public static Vec3d getEntityLastPortalVec(Entity entity) {
return ReflectionHelper.getPrivateValue(Entity.class, entity, ReflectionUtils.determineSRG("lastPortalVec"));
}
/** Reflectively writes the private {@code lastPortalVec} field of the given {@code Entity}. */
public static void setEntityLastPortalVec(Entity entity, Vec3d vector) {
ReflectionHelper.setPrivateValue(Entity.class, entity, vector, ReflectionUtils.determineSRG("lastPortalVec"));
}
/** Reflectively reads the private {@code teleportDirection} field of the given {@code Entity}. */
public static EnumFacing getEntityTeleportDirection(Entity entity) {
return ReflectionHelper.getPrivateValue(Entity.class, entity, ReflectionUtils.determineSRG("teleportDirection"));
}
/** Reflectively writes the private {@code teleportDirection} field of the given {@code Entity}. */
public static void setEntityTeleportDirection(Entity entity, EnumFacing direction) {
ReflectionHelper.setPrivateValue(Entity.class, entity, direction, ReflectionUtils.determineSRG("teleportDirection"));
}
/** Reflectively reads the private {@code entitiesById} map of the given {@code World}. Returns the live map, not a copy. */
public static IntHashMap<Entity> getEntitiesForWorld(World world) {
return ReflectionHelper.getPrivateValue(World.class, world, ReflectionUtils.determineSRG("entitiesById"));
}
/** Reflectively replaces the private {@code aiArrowAttack} task of the given {@code EntitySkeleton}. */
public static void setSkeletonBowAI(EntitySkeleton skeleton, EntityAIAttackRangedBow ai) {
ReflectionHelper.setPrivateValue(EntitySkeleton.class, skeleton, ai, ReflectionUtils.determineSRG("aiArrowAttack"));
}
/** Reflectively reads Forge's private static {@code worldGenerators} set from {@code GameRegistry} (null instance: static field). */
public static Set<IWorldGenerator> getWorldGenerators() {
return ReflectionHelper.getPrivateValue(GameRegistry.class, null, "worldGenerators");
}
/** Reflectively reads Forge's private static {@code worldGeneratorIndex} map from {@code GameRegistry} (null instance: static field). */
public static Map<IWorldGenerator, Integer> getWorldGeneratorIndexList() {
return ReflectionHelper.getPrivateValue(GameRegistry.class, null, "worldGeneratorIndex");
}
}
| |
package dataMapper.diagram.part;
import org.eclipse.core.runtime.Platform;
import org.eclipse.emf.ecore.EAnnotation;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.gmf.runtime.notation.Diagram;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.gmf.tooling.runtime.structure.DiagramStructure;
/**
* This registry is used to determine which type of visual object should be
* created for the corresponding Diagram, Node, ChildNode or Link represented
* by a domain model object.
*
* @generated
*/
public class DataMapperVisualIDRegistry {
/**
 * Platform debug option key; when set to {@code true}, view types that cannot
 * be parsed as visual-ID numbers are logged in {@link #getVisualID(String)}.
 *
 * @generated
 */
private static final String DEBUG_KEY = "org.wso2.developerstudio.visualdatamapper.diagram/debug/visualID"; //$NON-NLS-1$
/**
 * Returns the visual ID for the given view: the root edit part's ID for the
 * Diagram view itself, otherwise the view type parsed as a number
 * ({@code -1} when unrecognized).
 *
 * @generated
 */
public static int getVisualID(View view) {
if (view instanceof Diagram) {
if (dataMapper.diagram.edit.parts.DataMapperRootEditPart.MODEL_ID
.equals(view.getType())) {
return dataMapper.diagram.edit.parts.DataMapperRootEditPart.VISUAL_ID;
} else {
return -1;
}
}
return dataMapper.diagram.part.DataMapperVisualIDRegistry
.getVisualID(view.getType());
}
/**
 * Returns the model ID for the given view: walks up the container chain to
 * the diagram, letting a "Shortcut" annotation's "modelID" detail take
 * precedence over the containing diagram's type.
 *
 * @generated
 */
public static String getModelID(View view) {
View diagram = view.getDiagram();
while (view != diagram) {
EAnnotation annotation = view.getEAnnotation("Shortcut"); //$NON-NLS-1$
if (annotation != null) {
return (String) annotation.getDetails().get("modelID"); //$NON-NLS-1$
}
view = (View) view.eContainer();
}
return diagram != null ? diagram.getType() : null;
}
/**
 * Parses a view type string as a visual-ID number; returns {@code -1} on
 * failure, logging an error only when the DEBUG_KEY platform option is on.
 *
 * @generated
 */
public static int getVisualID(String type) {
try {
return Integer.parseInt(type);
} catch (NumberFormatException e) {
if (Boolean.TRUE.toString().equalsIgnoreCase(
Platform.getDebugOption(DEBUG_KEY))) {
dataMapper.diagram.part.DataMapperDiagramEditorPlugin
.getInstance().logError(
"Unable to parse view type as a visualID number: "
+ type);
}
}
return -1;
}
/**
 * Inverse of {@link #getVisualID(String)}: view types are the decimal string
 * form of the visual ID.
 *
 * @generated
 */
public static String getType(int visualID) {
return Integer.toString(visualID);
}
/**
 * Returns the diagram visual ID when the domain element is a DataMapperRoot
 * accepted by {@link #isDiagram}; {@code -1} otherwise.
 *
 * @generated
 */
public static int getDiagramVisualID(EObject domainElement) {
if (domainElement == null) {
return -1;
}
if (dataMapper.DataMapperPackage.eINSTANCE.getDataMapperRoot()
.isSuperTypeOf(domainElement.eClass())
&& isDiagram((dataMapper.DataMapperRoot) domainElement)) {
return dataMapper.diagram.edit.parts.DataMapperRootEditPart.VISUAL_ID;
}
return -1;
}
/**
 * Maps a (container view, domain element) pair to the visual ID of the node
 * edit part that should represent the element inside that container, or
 * {@code -1} when the element has no representation there.
 *
 * @generated
 */
public static int getNodeVisualID(View containerView, EObject domainElement) {
if (domainElement == null) {
return -1;
}
String containerModelID = dataMapper.diagram.part.DataMapperVisualIDRegistry
.getModelID(containerView);
if (!dataMapper.diagram.edit.parts.DataMapperRootEditPart.MODEL_ID
.equals(containerModelID)) {
return -1;
}
int containerVisualID;
// NOTE(review): containerModelID was already verified to equal MODEL_ID by
// the early return above, so this condition is always true and the else
// branch below is unreachable (generated-code artifact; harmless).
if (dataMapper.diagram.edit.parts.DataMapperRootEditPart.MODEL_ID
.equals(containerModelID)) {
containerVisualID = dataMapper.diagram.part.DataMapperVisualIDRegistry
.getVisualID(containerView);
} else {
if (containerView instanceof Diagram) {
containerVisualID = dataMapper.diagram.edit.parts.DataMapperRootEditPart.VISUAL_ID;
} else {
return -1;
}
}
// Dispatch on the container's visual ID; each case tests the element's
// EClass against the metamodel to pick the child edit part.
switch (containerVisualID) {
case dataMapper.diagram.edit.parts.DataMapperRootEditPart.VISUAL_ID :
if (dataMapper.DataMapperPackage.eINSTANCE
.getDataMapperDiagram().isSuperTypeOf(
domainElement.eClass())) {
return dataMapper.diagram.edit.parts.DataMapperDiagramEditPart.VISUAL_ID;
}
break;
case dataMapper.diagram.edit.parts.DataMapperDiagramEditPart.VISUAL_ID :
if (dataMapper.DataMapperPackage.eINSTANCE.getInput()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.InputEditPart.VISUAL_ID;
}
if (dataMapper.DataMapperPackage.eINSTANCE.getOutput()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.OutputEditPart.VISUAL_ID;
}
if (dataMapper.DataMapperPackage.eINSTANCE.getOperations()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.OperationsEditPart.VISUAL_ID;
}
break;
case dataMapper.diagram.edit.parts.InputEditPart.VISUAL_ID :
if (dataMapper.DataMapperPackage.eINSTANCE.getTreeNode()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.TreeNodeEditPart.VISUAL_ID;
}
break;
case dataMapper.diagram.edit.parts.TreeNodeEditPart.VISUAL_ID :
if (dataMapper.DataMapperPackage.eINSTANCE.getTreeNode()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.TreeNode2EditPart.VISUAL_ID;
}
if (dataMapper.DataMapperPackage.eINSTANCE.getAttribute()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.AttributeEditPart.VISUAL_ID;
}
if (dataMapper.DataMapperPackage.eINSTANCE.getElement()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.ElementEditPart.VISUAL_ID;
}
break;
case dataMapper.diagram.edit.parts.TreeNode2EditPart.VISUAL_ID :
if (dataMapper.DataMapperPackage.eINSTANCE.getTreeNode()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.TreeNode2EditPart.VISUAL_ID;
}
if (dataMapper.DataMapperPackage.eINSTANCE.getAttribute()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.AttributeEditPart.VISUAL_ID;
}
if (dataMapper.DataMapperPackage.eINSTANCE.getElement()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.ElementEditPart.VISUAL_ID;
}
break;
case dataMapper.diagram.edit.parts.AttributeEditPart.VISUAL_ID :
if (dataMapper.DataMapperPackage.eINSTANCE.getInNode()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.InNodeEditPart.VISUAL_ID;
}
if (dataMapper.DataMapperPackage.eINSTANCE.getOutNode()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.OutNodeEditPart.VISUAL_ID;
}
break;
case dataMapper.diagram.edit.parts.ElementEditPart.VISUAL_ID :
if (dataMapper.DataMapperPackage.eINSTANCE.getInNode()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.InNode2EditPart.VISUAL_ID;
}
if (dataMapper.DataMapperPackage.eINSTANCE.getOutNode()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.OutNode2EditPart.VISUAL_ID;
}
break;
case dataMapper.diagram.edit.parts.OutputEditPart.VISUAL_ID :
if (dataMapper.DataMapperPackage.eINSTANCE.getTreeNode()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.TreeNode3EditPart.VISUAL_ID;
}
break;
case dataMapper.diagram.edit.parts.TreeNode3EditPart.VISUAL_ID :
if (dataMapper.DataMapperPackage.eINSTANCE.getTreeNode()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.TreeNode2EditPart.VISUAL_ID;
}
if (dataMapper.DataMapperPackage.eINSTANCE.getAttribute()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.AttributeEditPart.VISUAL_ID;
}
if (dataMapper.DataMapperPackage.eINSTANCE.getElement()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.ElementEditPart.VISUAL_ID;
}
break;
case dataMapper.diagram.edit.parts.OperationsEditPart.VISUAL_ID :
if (dataMapper.DataMapperPackage.eINSTANCE.getConcat()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.ConcatEditPart.VISUAL_ID;
}
break;
case dataMapper.diagram.edit.parts.ConcatEditPart.VISUAL_ID :
if (dataMapper.DataMapperPackage.eINSTANCE.getInNode()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.InNode3EditPart.VISUAL_ID;
}
if (dataMapper.DataMapperPackage.eINSTANCE.getOutNode()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.OutNode3EditPart.VISUAL_ID;
}
break;
}
return -1;
}
/**
 * Returns whether a node with the given visual ID may be created inside the
 * given container view. Mirrors the containment table of
 * {@link #getNodeVisualID(View, EObject)} but keyed by candidate ID.
 *
 * @generated
 */
public static boolean canCreateNode(View containerView, int nodeVisualID) {
String containerModelID = dataMapper.diagram.part.DataMapperVisualIDRegistry
.getModelID(containerView);
if (!dataMapper.diagram.edit.parts.DataMapperRootEditPart.MODEL_ID
.equals(containerModelID)) {
return false;
}
int containerVisualID;
// NOTE(review): same generated-code artifact as in getNodeVisualID — the
// condition is always true here and the else branch is unreachable.
if (dataMapper.diagram.edit.parts.DataMapperRootEditPart.MODEL_ID
.equals(containerModelID)) {
containerVisualID = dataMapper.diagram.part.DataMapperVisualIDRegistry
.getVisualID(containerView);
} else {
if (containerView instanceof Diagram) {
containerVisualID = dataMapper.diagram.edit.parts.DataMapperRootEditPart.VISUAL_ID;
} else {
return false;
}
}
switch (containerVisualID) {
case dataMapper.diagram.edit.parts.DataMapperRootEditPart.VISUAL_ID :
if (dataMapper.diagram.edit.parts.DataMapperDiagramEditPart.VISUAL_ID == nodeVisualID) {
return true;
}
break;
case dataMapper.diagram.edit.parts.DataMapperDiagramEditPart.VISUAL_ID :
if (dataMapper.diagram.edit.parts.InputEditPart.VISUAL_ID == nodeVisualID) {
return true;
}
if (dataMapper.diagram.edit.parts.OutputEditPart.VISUAL_ID == nodeVisualID) {
return true;
}
if (dataMapper.diagram.edit.parts.OperationsEditPart.VISUAL_ID == nodeVisualID) {
return true;
}
break;
case dataMapper.diagram.edit.parts.InputEditPart.VISUAL_ID :
if (dataMapper.diagram.edit.parts.TreeNodeEditPart.VISUAL_ID == nodeVisualID) {
return true;
}
break;
case dataMapper.diagram.edit.parts.TreeNodeEditPart.VISUAL_ID :
if (dataMapper.diagram.edit.parts.TreeNode2EditPart.VISUAL_ID == nodeVisualID) {
return true;
}
if (dataMapper.diagram.edit.parts.AttributeEditPart.VISUAL_ID == nodeVisualID) {
return true;
}
if (dataMapper.diagram.edit.parts.ElementEditPart.VISUAL_ID == nodeVisualID) {
return true;
}
break;
case dataMapper.diagram.edit.parts.TreeNode2EditPart.VISUAL_ID :
if (dataMapper.diagram.edit.parts.TreeNode2EditPart.VISUAL_ID == nodeVisualID) {
return true;
}
if (dataMapper.diagram.edit.parts.AttributeEditPart.VISUAL_ID == nodeVisualID) {
return true;
}
if (dataMapper.diagram.edit.parts.ElementEditPart.VISUAL_ID == nodeVisualID) {
return true;
}
break;
case dataMapper.diagram.edit.parts.AttributeEditPart.VISUAL_ID :
if (dataMapper.diagram.edit.parts.InNodeEditPart.VISUAL_ID == nodeVisualID) {
return true;
}
if (dataMapper.diagram.edit.parts.OutNodeEditPart.VISUAL_ID == nodeVisualID) {
return true;
}
break;
case dataMapper.diagram.edit.parts.ElementEditPart.VISUAL_ID :
if (dataMapper.diagram.edit.parts.InNode2EditPart.VISUAL_ID == nodeVisualID) {
return true;
}
if (dataMapper.diagram.edit.parts.OutNode2EditPart.VISUAL_ID == nodeVisualID) {
return true;
}
break;
case dataMapper.diagram.edit.parts.OutputEditPart.VISUAL_ID :
if (dataMapper.diagram.edit.parts.TreeNode3EditPart.VISUAL_ID == nodeVisualID) {
return true;
}
break;
case dataMapper.diagram.edit.parts.TreeNode3EditPart.VISUAL_ID :
if (dataMapper.diagram.edit.parts.TreeNode2EditPart.VISUAL_ID == nodeVisualID) {
return true;
}
if (dataMapper.diagram.edit.parts.AttributeEditPart.VISUAL_ID == nodeVisualID) {
return true;
}
if (dataMapper.diagram.edit.parts.ElementEditPart.VISUAL_ID == nodeVisualID) {
return true;
}
break;
case dataMapper.diagram.edit.parts.OperationsEditPart.VISUAL_ID :
if (dataMapper.diagram.edit.parts.ConcatEditPart.VISUAL_ID == nodeVisualID) {
return true;
}
break;
case dataMapper.diagram.edit.parts.ConcatEditPart.VISUAL_ID :
if (dataMapper.diagram.edit.parts.InNode3EditPart.VISUAL_ID == nodeVisualID) {
return true;
}
if (dataMapper.diagram.edit.parts.OutNode3EditPart.VISUAL_ID == nodeVisualID) {
return true;
}
break;
}
return false;
}
/**
 * Returns the visual ID of the link edit part for the given domain element
 * (only DataMapperLink is supported); {@code -1} otherwise.
 *
 * @generated
 */
public static int getLinkWithClassVisualID(EObject domainElement) {
if (domainElement == null) {
return -1;
}
if (dataMapper.DataMapperPackage.eINSTANCE.getDataMapperLink()
.isSuperTypeOf(domainElement.eClass())) {
return dataMapper.diagram.edit.parts.DataMapperLinkEditPart.VISUAL_ID;
}
return -1;
}
/**
 * User can change implementation of this method to handle some specific
 * situations not covered by default logic.
 *
 * @generated
 */
private static boolean isDiagram(dataMapper.DataMapperRoot element) {
return true;
}
/**
 * Returns whether the candidate visual ID matches the one computed by
 * {@link #getNodeVisualID(View, EObject)}; {@code -1} is always rejected.
 *
 * @generated
 */
public static boolean checkNodeVisualID(View containerView,
EObject domainElement, int candidate) {
if (candidate == -1) {
//unrecognized id is always bad
return false;
}
int basic = getNodeVisualID(containerView, domainElement);
return basic == candidate;
}
/**
 * This diagram defines no compartments, so no visual ID is a compartment.
 *
 * @generated
 */
public static boolean isCompartmentVisualID(int visualID) {
return false;
}
/**
 * Returns whether the given visual ID is a semantic leaf (only the In/Out
 * node edit parts are); the root diagram is explicitly not a leaf.
 *
 * @generated
 */
public static boolean isSemanticLeafVisualID(int visualID) {
switch (visualID) {
case dataMapper.diagram.edit.parts.DataMapperRootEditPart.VISUAL_ID :
return false;
case dataMapper.diagram.edit.parts.InNodeEditPart.VISUAL_ID :
case dataMapper.diagram.edit.parts.OutNodeEditPart.VISUAL_ID :
case dataMapper.diagram.edit.parts.InNode2EditPart.VISUAL_ID :
case dataMapper.diagram.edit.parts.OutNode2EditPart.VISUAL_ID :
case dataMapper.diagram.edit.parts.InNode3EditPart.VISUAL_ID :
case dataMapper.diagram.edit.parts.OutNode3EditPart.VISUAL_ID :
return true;
default :
break;
}
return false;
}
/**
 * Singleton {@link DiagramStructure} facade delegating every query to the
 * static methods of this registry.
 *
 * @generated
 */
public static final DiagramStructure TYPED_INSTANCE = new DiagramStructure() {
/**
 * @generated
 */
@Override
public int getVisualID(View view) {
return dataMapper.diagram.part.DataMapperVisualIDRegistry
.getVisualID(view);
}
/**
 * @generated
 */
@Override
public String getModelID(View view) {
return dataMapper.diagram.part.DataMapperVisualIDRegistry
.getModelID(view);
}
/**
 * @generated
 */
@Override
public int getNodeVisualID(View containerView, EObject domainElement) {
return dataMapper.diagram.part.DataMapperVisualIDRegistry
.getNodeVisualID(containerView, domainElement);
}
/**
 * @generated
 */
@Override
public boolean checkNodeVisualID(View containerView,
EObject domainElement, int candidate) {
return dataMapper.diagram.part.DataMapperVisualIDRegistry
.checkNodeVisualID(containerView, domainElement, candidate);
}
/**
 * @generated
 */
@Override
public boolean isCompartmentVisualID(int visualID) {
return dataMapper.diagram.part.DataMapperVisualIDRegistry
.isCompartmentVisualID(visualID);
}
/**
 * @generated
 */
@Override
public boolean isSemanticLeafVisualID(int visualID) {
return dataMapper.diagram.part.DataMapperVisualIDRegistry
.isSemanticLeafVisualID(visualID);
}
};
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math4.analysis.function;
import org.apache.commons.math4.analysis.ParametricUnivariateFunction;
import org.apache.commons.math4.analysis.differentiation.DerivativeStructure;
import org.apache.commons.math4.analysis.differentiation.UnivariateDifferentiableFunction;
import org.apache.commons.math4.exception.DimensionMismatchException;
import org.apache.commons.math4.exception.NotStrictlyPositiveException;
import org.apache.commons.math4.exception.NullArgumentException;
import org.apache.commons.math4.util.FastMath;
/**
 * <a href="http://en.wikipedia.org/wiki/Generalised_logistic_function">
 * Generalised logistic</a> function:
 * {@code a + (k - a) / (1 + q exp(b (m - x)))^(1/n)}.
 *
 * @since 3.0
 */
public class Logistic implements UnivariateDifferentiableFunction {
    /** Lower asymptote. */
    private final double a;
    /** Upper asymptote. */
    private final double k;
    /** Growth rate. */
    private final double b;
    /** Parameter that affects near which asymptote maximum growth occurs. */
    private final double oneOverN;
    /** Parameter that affects the position of the curve along the ordinate axis. */
    private final double q;
    /** Abscissa of maximum growth. */
    private final double m;

    /**
     * Builds a generalised logistic function with the given parameters.
     *
     * @param k If {@code b > 0}, value of the function for x going towards +&infin;.
     * If {@code b < 0}, value of the function for x going towards -&infin;.
     * @param m Abscissa of maximum growth.
     * @param b Growth rate.
     * @param q Parameter that affects the position of the curve along the
     * ordinate axis.
     * @param a If {@code b > 0}, value of the function for x going towards -&infin;.
     * If {@code b < 0}, value of the function for x going towards +&infin;.
     * @param n Parameter that affects near which asymptote the maximum
     * growth occurs.
     * @throws NotStrictlyPositiveException if {@code n <= 0}.
     */
    public Logistic(double k,
                    double m,
                    double b,
                    double q,
                    double a,
                    double n)
        throws NotStrictlyPositiveException {
        if (n <= 0) {
            throw new NotStrictlyPositiveException(n);
        }

        this.a = a;
        this.b = b;
        this.k = k;
        this.m = m;
        this.q = q;
        // Only the reciprocal of n is ever needed by the evaluation formula.
        this.oneOverN = 1 / n;
    }

    /** {@inheritDoc} */
    @Override
    public double value(double x) {
        final double mMinusX = m - x;
        return value(mMinusX, k, b, q, a, oneOverN);
    }

    /**
     * Parametric function where the input array contains the parameters of
     * the {@link Logistic#Logistic(double,double,double,double,double,double)
     * logistic function}, ordered as follows:
     * <ul>
     *  <li>k</li>
     *  <li>m</li>
     *  <li>b</li>
     *  <li>q</li>
     *  <li>a</li>
     *  <li>n</li>
     * </ul>
     */
    public static class Parametric implements ParametricUnivariateFunction {
        /**
         * Computes the value of the sigmoid at {@code x}.
         *
         * @param x Value for which the function must be computed.
         * @param param Values for {@code k}, {@code m}, {@code b}, {@code q},
         * {@code a} and {@code n}.
         * @return the value of the function.
         * @throws NullArgumentException if {@code param} is {@code null}.
         * @throws DimensionMismatchException if the size of {@code param} is
         * not 6.
         * @throws NotStrictlyPositiveException if {@code param[5] <= 0}.
         */
        @Override
        public double value(double x, double ... param)
            throws NullArgumentException,
                   DimensionMismatchException,
                   NotStrictlyPositiveException {
            validateParameters(param);
            final double mMinusX = param[1] - x;
            final double invN = 1 / param[5];
            return Logistic.value(mMinusX, param[0],
                                  param[2], param[3],
                                  param[4], invN);
        }

        /**
         * Computes the value of the gradient at {@code x}.
         * The components of the gradient vector are the partial
         * derivatives of the function with respect to each of the
         * <em>parameters</em>.
         *
         * @param x Value at which the gradient must be computed.
         * @param param Values for {@code k}, {@code m}, {@code b}, {@code q},
         * {@code a} and {@code n}.
         * @return the gradient vector at {@code x}.
         * @throws NullArgumentException if {@code param} is {@code null}.
         * @throws DimensionMismatchException if the size of {@code param} is
         * not 6.
         * @throws NotStrictlyPositiveException if {@code param[5] <= 0}.
         */
        @Override
        public double[] gradient(double x, double ... param)
            throws NullArgumentException,
                   DimensionMismatchException,
                   NotStrictlyPositiveException {
            validateParameters(param);

            final double growth = param[2];               // b
            final double shift = param[3];                // q
            final double diff = param[1] - x;             // m - x
            final double invN = 1 / param[5];             // 1 / n
            final double expTerm = FastMath.exp(growth * diff);
            final double qExp = shift * expTerm;
            final double denom = qExp + 1;                // 1 + q exp(b (m - x))
            // Shared factors of the m, b, q and n partial derivatives.
            final double factor1 = (param[0] - param[4]) * invN / FastMath.pow(denom, invN);
            final double factor2 = -factor1 / denom;

            return new double[] {
                Logistic.value(diff, 1, growth, shift, 0, invN), // dF/dk
                factor2 * growth * qExp,                         // dF/dm
                factor2 * diff * qExp,                           // dF/db
                factor2 * expTerm,                               // dF/dq
                Logistic.value(diff, 0, growth, shift, 1, invN), // dF/da
                factor1 * FastMath.log(denom) * invN             // dF/dn
            };
        }

        /**
         * Validates parameters to ensure they are appropriate for the evaluation of
         * the {@link #value(double,double[])} and {@link #gradient(double,double[])}
         * methods.
         *
         * @param param Values for {@code k}, {@code m}, {@code b}, {@code q},
         * {@code a} and {@code n}.
         * @throws NullArgumentException if {@code param} is {@code null}.
         * @throws DimensionMismatchException if the size of {@code param} is
         * not 6.
         * @throws NotStrictlyPositiveException if {@code param[5] <= 0}.
         */
        private void validateParameters(double[] param)
            throws NullArgumentException,
                   DimensionMismatchException,
                   NotStrictlyPositiveException {
            if (param == null) {
                throw new NullArgumentException();
            }
            if (param.length != 6) {
                throw new DimensionMismatchException(param.length, 6);
            }
            if (param[5] <= 0) {
                throw new NotStrictlyPositiveException(param[5]);
            }
        }
    }

    /**
     * Evaluates the generalised logistic formula.
     *
     * @param mMinusX {@code m - x}.
     * @param k {@code k}.
     * @param b {@code b}.
     * @param q {@code q}.
     * @param a {@code a}.
     * @param oneOverN {@code 1 / n}.
     * @return the value of the function.
     */
    private static double value(double mMinusX,
                                double k,
                                double b,
                                double q,
                                double a,
                                double oneOverN) {
        final double denom = FastMath.pow(1 + q * FastMath.exp(b * mMinusX), oneOverN);
        return a + (k - a) / denom;
    }

    /** {@inheritDoc}
     * @since 3.1
     */
    @Override
    public DerivativeStructure value(final DerivativeStructure t) {
        // Same formula as value(double), carried through the DS algebra.
        final DerivativeStructure mMinusX = t.negate().add(m);
        final DerivativeStructure denom = mMinusX.multiply(b).exp().multiply(q).add(1).pow(oneOverN);
        return denom.reciprocal().multiply(k - a).add(a);
    }
}
| |
/*
* Copyright (c) 2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.client.account;
import com.google.gson.Gson;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.util.UUID;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import net.runelite.client.config.ConfigManager;
import net.runelite.client.eventbus.EventBus;
import net.runelite.client.eventbus.Subscribe;
import net.runelite.client.events.SessionClose;
import net.runelite.client.events.SessionOpen;
import net.runelite.client.util.LinkBrowser;
import net.runelite.client.ws.WSClient;
import net.runelite.http.api.account.AccountClient;
import net.runelite.http.api.account.OAuthResponse;
import net.runelite.http.api.ws.messages.LoginResponse;
import okhttp3.OkHttpClient;
@Singleton
@Slf4j
public class SessionManager
{
@Getter
private AccountSession accountSession;
private final EventBus eventBus;
private final ConfigManager configManager;
private final WSClient wsClient;
private final File sessionFile;
private final AccountClient accountClient;
@Inject
private SessionManager(
@Named("sessionfile") File sessionfile,
ConfigManager configManager,
EventBus eventBus,
WSClient wsClient,
OkHttpClient okHttpClient)
{
this.configManager = configManager;
this.eventBus = eventBus;
this.wsClient = wsClient;
this.sessionFile = sessionfile;
this.accountClient = new AccountClient(okHttpClient);
eventBus.register(this);
}
public void loadSession()
{
if (!sessionFile.exists())
{
log.info("No session file exists");
return;
}
AccountSession session;
try (FileInputStream in = new FileInputStream(sessionFile))
{
session = new Gson().fromJson(new InputStreamReader(in, StandardCharsets.UTF_8), AccountSession.class);
log.debug("Loaded session for {}", session.getUsername());
}
catch (Exception ex)
{
log.warn("Unable to load session file", ex);
return;
}
// Check if session is still valid
accountClient.setUuid(session.getUuid());
if (!accountClient.sessionCheck())
{
log.debug("Loaded session {} is invalid", session.getUuid());
return;
}
openSession(session, false);
}
private void saveSession()
{
if (accountSession == null)
{
return;
}
try (Writer fw = new OutputStreamWriter(new FileOutputStream(sessionFile), StandardCharsets.UTF_8))
{
new Gson().toJson(accountSession, fw);
log.debug("Saved session to {}", sessionFile);
}
catch (IOException ex)
{
log.warn("Unable to save session file", ex);
}
}
private void deleteSession()
{
sessionFile.delete();
}
/**
* Set the given session as the active session and open a socket to the
* server with the given session
*
* @param session session
*/
private void openSession(AccountSession session, boolean openSocket)
{
// Change session on the websocket
if (openSocket)
{
wsClient.changeSession(session.getUuid());
}
accountSession = session;
if (session.getUsername() != null)
{
// Initialize config for new session
// If the session isn't logged in yet, don't switch to the new config
configManager.switchSession(session);
}
eventBus.post(new SessionOpen());
}
private void closeSession()
{
wsClient.changeSession(null);
if (accountSession == null)
{
return;
}
log.debug("Logging out of account {}", accountSession.getUsername());
accountClient.setUuid(accountSession.getUuid());
try
{
accountClient.logout();
}
catch (IOException ex)
{
log.warn("Unable to logout of session", ex);
}
accountSession = null; // No more account
// Restore config
configManager.switchSession(null);
eventBus.post(new SessionClose());
}
public void login()
{
// If a session is already open, use that id. Otherwise generate a new id.
UUID uuid = wsClient.getSessionId() != null ? wsClient.getSessionId() : UUID.randomUUID();
accountClient.setUuid(uuid);
final OAuthResponse login;
try
{
login = accountClient.login();
}
catch (IOException ex)
{
log.warn("Unable to get oauth url", ex);
return;
}
// Create new session
openSession(new AccountSession(login.getUid(), Instant.now()), true);
// Navigate to login link
LinkBrowser.browse(login.getOauthUrl());
}
/**
 * Event handler invoked when the server confirms a login. Records the
 * username on the current session, re-opens the session so the config
 * switches over (openSession only switches config once a username is
 * set), and persists the session to disk.
 *
 * @param loginResponse the login response carrying the username
 */
@Subscribe
public void onLoginResponse(LoginResponse loginResponse)
{
	log.debug("Now logged in as {}", loginResponse.getUsername());
	AccountSession session = getAccountSession();
	session.setUsername(loginResponse.getUsername());
	// Open session, again, now that we have a username
	// This triggers onSessionOpen
	// The socket is already opened here anyway so we pass true for openSocket
	openSession(session, true);
	// Save session to disk
	saveSession();
}
/**
 * Log out: close the active session and remove the saved session file
 * so it is not restored on the next start.
 */
public void logout()
{
	closeSession();
	deleteSession();
}
}
| |
package edu.washington.escience.myria.operator;
import java.io.BufferedReader;
import java.io.DataInput;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Scanner;
import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import edu.washington.escience.myria.DbException;
import edu.washington.escience.myria.Schema;
import edu.washington.escience.myria.Type;
import edu.washington.escience.myria.storage.TupleBatch;
import edu.washington.escience.myria.storage.TupleBatchBuffer;
import edu.washington.escience.myria.storage.TupleUtils;
import edu.washington.escience.myria.util.MyriaUtils;
/**
* Parse NChilada file formats. See <a
* href="http://librarian.phys.washington.edu/astro/index.php/Research:NChilada_File_Format">NChilada wiki</a>
*
* @author leelee
*
*/
public class NChiladaFileScan extends LeafOperator {
  /** Required for Java serialization. */
  private static final long serialVersionUID = 1L;
  /** IOrder attribute that exists in all three types of particles. */
  private static final String IORD = "iord";
  /** Den attribute that exists in all three types of particles. */
  private static final String DEN = "den";
  /** Mass attribute that exists in all three types of particles. */
  private static final String MASS = "mass";
  /** Position x attribute that exists in all three types of particles. */
  private static final String POS_X = "x";
  /** Position y attribute that exists in all three types of particles. */
  private static final String POS_Y = "y";
  /** Position z attribute that exists in all three types of particles. */
  private static final String POS_Z = "z";
  /** Position attribute file name. */
  private static final String POS_FILE_NAME = "pos";
  /** Pot attribute that exists in all three types of particles. */
  private static final String POT = "pot";
  /** Smoothlength attribute that exists in all three types of particles. */
  private static final String SMOOTHLENGTH = "smoothlength";
  /** Soft attribute that exists in all three types of particles. */
  private static final String SOFT = "soft";
  /** Velocity x attribute that exists in all three types of particles. */
  private static final String VEL_X = "vx";
  /** Velocity y attribute that exists in all three types of particles. */
  private static final String VEL_Y = "vy";
  /** Velocity z attribute that exists in all three types of particles. */
  private static final String VEL_Z = "vz";
  /** Velocity attribute file name. */
  private static final String VEL_FILE_NAME = "vel";
  /** Gas iOrder attribute that only exists in star particles. */
  private static final String IGASORD = "igasord";
  /** Massform attribute that only exists in star particles. */
  private static final String MASSFORM = "massform";
  /** Tform attribute that only exists in star particles. */
  private static final String TFORM = "tform";
  /** ESNRate attribute that only exists in gas and star particles. */
  private static final String ESN_RATE = "ESNRate";
  /** FeMassFrac attribute that only exists in gas and star particles. */
  private static final String FE_MASS_FRAC = "FeMassFrac";
  /** OxMassFrac attribute that only exists in gas and star particles. */
  private static final String OX_MASS_FRAC = "OxMassFrac";
  /** Metals attribute that only exists in gas and star particles. */
  private static final String METALS = "metals";
  /** FeMassFracDot attribute that only exists in gas particles. */
  private static final String FE_MASS_FRACDOT = "FeMassFracDot";
  /** GasDensity attribute that only exists in gas particles. */
  private static final String GAS_DENSITY = "GasDensity";
  /** HI attribute that only exists in gas particles. */
  private static final String H_I = "HI";
  /** HeI attribute that only exists in gas particles. */
  private static final String HE_I = "HeI";
  /** HeII attribute that only exists in gas particles. */
  private static final String HE_I_I = "HeII";
  /** Metalsdot attribute that only exists in gas particles. */
  private static final String METALSDOT = "Metalsdot";
  /** OxMassFracdot attribute that only exists in gas particles. */
  private static final String OX_MASS_FRACDOT = "OxMassFracdot";
  /** Coolontime attribute that only exists in gas particles. */
  private static final String COOLONTIME = "coolontime";
  /** Temperature attribute that only exists in gas particles. */
  private static final String TEMPERATURE = "temperature";
  /** The star directory name. */
  private static final String STAR_DIR = "/star";
  /** The dark directory name. */
  private static final String DARK_DIR = "/dark";
  /** The gas directory name. */
  private static final String GAS_DIR = "/gas";
  /** The column types for NChilada schema. */
  private static final List<Type> NCHILADA_COLUMN_TYPES =
      ImmutableList.of(
          Type.INT_TYPE,
          Type.INT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.FLOAT_TYPE,
          Type.INT_TYPE,
          Type.STRING_TYPE);
  /** The column names for NChilada schema. */
  private static final List<String> NCHILADA_COLUMN_NAMES =
      ImmutableList.of(
          IORD,
          IGASORD,
          ESN_RATE,
          FE_MASS_FRAC,
          FE_MASS_FRACDOT,
          GAS_DENSITY,
          H_I,
          HE_I,
          HE_I_I,
          METALSDOT,
          OX_MASS_FRAC,
          OX_MASS_FRACDOT,
          COOLONTIME,
          DEN,
          MASS,
          METALS,
          POS_X,
          POS_Y,
          POS_Z,
          POT,
          SMOOTHLENGTH,
          SOFT,
          VEL_X,
          VEL_Y,
          VEL_Z,
          MASSFORM,
          TFORM,
          TEMPERATURE,
          "grp",
          "type");
  /** Schema for all NChilada files. */
  private static final Schema NCHILADA_SCHEMA =
      new Schema(NCHILADA_COLUMN_TYPES, NCHILADA_COLUMN_NAMES);
  /** The magic number that indicates the file format is NChilada. */
  private static final int NCHILADA_FORMAT = 1062053;
  /** The number of dimension that vel and pos in NChilada file format should have. */
  private static final int VEL_POS_DIM = 3;
  /** The code that indicates the data type of the file is float. */
  private static final int FLOAT_CODE = 9;
  /** The code that indicates the data type of the file is int. */
  private static final int INT_CODE = 5;
  /** Holds the tuples that are ready for release. */
  private transient TupleBatchBuffer buffer;
  /** The full path of the directory that contains star, dark and gas directories. */
  private String particleDirectoryPath;
  /** The full path of the file that contains groupNumber in the order of gas, dark, star. */
  private String groupFilePath;
  /** The group input stream. */
  private InputStream groupInputStream;
  /** Contains matching from file name to DataInput object for star particles attributes. */
  private Map<String, DataInput> starAttributeFilesToDataInput;
  /** Contains matching from file name to DataInput object for gas particles attributes. */
  private Map<String, DataInput> gasAttributeFilesToDataInput;
  /** Contains matching from file name to DataInput object for dark particles attributes. */
  private Map<String, DataInput> darkAttributeFilesToDataInput;
  /** The number of star particle records remaining; -1 until discovered from the file headers. */
  private int numStar;
  /** The number of gas particle records remaining; -1 until discovered from the file headers. */
  private int numGas;
  /** The number of dark particle records remaining; -1 until discovered from the file headers. */
  private int numDark;
  /** Scanner used to parse the group number file. */
  private transient Scanner groupScanner;
  /** Which line of the file the scanner is currently on. */
  private int lineNumber;
  /**
   * Represents different types of particle.
   */
  private enum ParticleType {
    /** There are three types of particles. */
    DARK,
    GAS,
    STAR
  }
  /**
   * Construct a new NChiladaFileScanObject.
   *
   * @param particleDirectoryPath The full path of the directory that contains gas, star, dark directories.
   * @param groupFilePath The full path of the file that contains groupNumber in the order of gas, dark, star.
   */
  public NChiladaFileScan(final String particleDirectoryPath, final String groupFilePath) {
    Objects.requireNonNull(particleDirectoryPath);
    Objects.requireNonNull(groupFilePath);
    this.particleDirectoryPath = particleDirectoryPath;
    this.groupFilePath = groupFilePath;
  }
  /**
   * Construct a new NChiladaFileScanObject. This constructor is only meant to be called from test.
   *
   * @param groupInputStream The InputStream object for group.
   * @param gasAttributeFilesToDataInput A mapping from gas attribute file names to their respective DataInput object.
   * @param starAttributeFilesToDataInput A mapping from star attribute file names to their respective DataInput object.
   * @param darkAttributeFilesToDataInput A mapping from dark attribute file names to their respective DataInput object.
   */
  @SuppressWarnings("unused")
  // used via reflection in the tests
  private NChiladaFileScan(
      final InputStream groupInputStream,
      final Map<String, DataInput> gasAttributeFilesToDataInput,
      final Map<String, DataInput> starAttributeFilesToDataInput,
      final Map<String, DataInput> darkAttributeFilesToDataInput) {
    Objects.requireNonNull(groupInputStream);
    Objects.requireNonNull(gasAttributeFilesToDataInput);
    Objects.requireNonNull(starAttributeFilesToDataInput);
    Objects.requireNonNull(darkAttributeFilesToDataInput);
    this.darkAttributeFilesToDataInput = darkAttributeFilesToDataInput;
    this.gasAttributeFilesToDataInput = gasAttributeFilesToDataInput;
    this.starAttributeFilesToDataInput = starAttributeFilesToDataInput;
    this.groupInputStream = groupInputStream;
  }
  @Override
  protected TupleBatch fetchNextReady() throws Exception {
    // Fill the buffer from each particle type in turn (gas, dark, star); each call
    // stops once the buffer holds a full batch, so later calls are no-ops until drained.
    processFile(ParticleType.GAS);
    processFile(ParticleType.DARK);
    processFile(ParticleType.STAR);
    return buffer.popAny();
  }
  /**
   * Create InputStream object for the given group file path.
   *
   * @param groupFilePath The file path to create InputStream object.
   * @return the group InputStream object.
   * @throws DbException The DbException.
   */
  private InputStream getGroupFileStream(final String groupFilePath) throws DbException {
    InputStream groupInputStreamLocal;
    try {
      Configuration conf = new Configuration();
      FileSystem fs = FileSystem.get(MyriaUtils.normalizeS3Uri(URI.create(groupFilePath)), conf);
      Path rootPath = new Path(groupFilePath);
      groupInputStreamLocal = fs.open(rootPath);
    } catch (IOException | URISyntaxException e) {
      throw new DbException(e);
    }
    return groupInputStreamLocal;
  }
  /**
   * Create DataInput object for every file in the directory.
   *
   * @param path The directory path.
   * @return a mapping from filename to DataInput object.
   * @throws DbException The DbException.
   */
  private Map<String, DataInput> getFilesToDataInput(final String path) throws DbException {
    Configuration conf = new Configuration();
    FileSystem fs;
    Map<String, DataInput> map = new HashMap<>();
    try {
      fs = FileSystem.get(MyriaUtils.normalizeS3Uri(URI.create(path)), conf);
      Path rootPath = new Path(path + File.separator);
      FileStatus[] statii = fs.listStatus(rootPath);
      if (statii == null || statii.length == 0) {
        throw new FileNotFoundException(path);
      }
      for (FileStatus status : statii) {
        Path p = status.getPath();
        // Keep only the last path component as the attribute file name.
        String[] pNameTokens = p.getName().split(Pattern.quote(File.separator));
        String fileName = pNameTokens[pNameTokens.length - 1];
        DataInput dataInputStream = fs.open(p);
        map.put(fileName, dataInputStream);
      }
    } catch (IOException | URISyntaxException e) {
      throw new DbException(e);
    }
    return map;
  }
  /**
   * Look up the remaining-row counter for the given particle type.
   *
   * @param pType The type of the particles.
   * @return the number of rows remaining for that type (-1 if not yet known).
   * @throws DbException if the type is unrecognized.
   */
  private int getNumRows(final ParticleType pType) throws DbException {
    switch (pType) {
      case GAS:
        return numGas;
      case DARK:
        return numDark;
      case STAR:
        return numStar;
      default:
        throw new DbException("Invalid pType: " + pType);
    }
  }
  /**
   * Update the remaining-row counter for the given particle type.
   *
   * @param pType The type of the particles.
   * @param numRows the new row count.
   * @throws DbException if the type is unrecognized.
   */
  private void setNumRows(final ParticleType pType, final int numRows) throws DbException {
    switch (pType) {
      case GAS:
        numGas = numRows;
        break;
      case DARK:
        numDark = numRows;
        break;
      case STAR:
        numStar = numRows;
        break;
      default:
        throw new DbException("Invalid pType: " + pType);
    }
  }
  /**
   * Look up the file-name-to-DataInput mapping for the given particle type.
   *
   * @param pType The type of the particles.
   * @return the attribute-file mapping for that type.
   * @throws DbException if the type is unrecognized.
   */
  private Map<String, DataInput> getDataInputMap(final ParticleType pType) throws DbException {
    switch (pType) {
      case GAS:
        return gasAttributeFilesToDataInput;
      case DARK:
        return darkAttributeFilesToDataInput;
      case STAR:
        return starAttributeFilesToDataInput;
      default:
        throw new DbException("Invalid pType: " + pType);
    }
  }
  @Override
  protected final void init(final ImmutableMap<String, Object> execEnvVars) throws DbException {
    // -1 marks the counts as "unknown until read from the file headers".
    numDark = -1;
    numGas = -1;
    numStar = -1;
    if (darkAttributeFilesToDataInput == null) {
      darkAttributeFilesToDataInput = getFilesToDataInput(particleDirectoryPath + DARK_DIR);
    }
    if (gasAttributeFilesToDataInput == null) {
      gasAttributeFilesToDataInput = getFilesToDataInput(particleDirectoryPath + GAS_DIR);
    }
    if (starAttributeFilesToDataInput == null) {
      starAttributeFilesToDataInput = getFilesToDataInput(particleDirectoryPath + STAR_DIR);
    }
    Preconditions.checkArgument(
        darkAttributeFilesToDataInput != null, "darkAttributeFilesToDataInput has not been set");
    Preconditions.checkArgument(
        starAttributeFilesToDataInput != null, "starAttributeFilesToDataInput has not been set");
    Preconditions.checkArgument(
        gasAttributeFilesToDataInput != null, "gasAttributeFilesToDataInput has not been set");
    buffer = new TupleBatchBuffer(getSchema());
    initBasedOnParticleType(ParticleType.GAS);
    initBasedOnParticleType(ParticleType.DARK);
    initBasedOnParticleType(ParticleType.STAR);
    if (groupInputStream == null) {
      groupInputStream = getGroupFileStream(groupFilePath);
    }
    Preconditions.checkArgument(
        groupInputStream != null, "FileScan group input stream has not been set.");
    groupScanner = new Scanner(new BufferedReader(new InputStreamReader(groupInputStream)));
    // The first token of the group file is the total particle count; it must agree
    // with the sum of the counts read from the attribute file headers.
    int numGroup = groupScanner.nextInt();
    int numTot = numGas + numDark + numStar;
    if (numGroup != numTot) {
      throw new DbException(
          "Number of group is different from the number of particles. numGroup: "
              + numGroup
              + " num particles: "
              + numTot);
    }
    lineNumber = 0;
  }
  /**
   * Validate the header of every attribute file for the given particle type and record the number
   * of particles of that type.
   *
   * @param pType The type of the particles.
   * @throws DbException The DbException.
   */
  private void initBasedOnParticleType(final ParticleType pType) throws DbException {
    int numRows = getNumRows(pType);
    Map<String, DataInput> fileNameToDataInput = getDataInputMap(pType);
    try {
      for (Map.Entry<String, DataInput> entry : fileNameToDataInput.entrySet()) {
        String fileName = entry.getKey();
        DataInput dataInputStream = entry.getValue();
        // Read header of the file. (magic, time, iHighWord, nbodies, ndim, code)
        Preconditions.checkArgument(
            dataInputStream.readInt() == NCHILADA_FORMAT,
            fileName + " is not in NChilada format."); // Read and verify magic.
        // Time.
        dataInputStream.readDouble();
        // IHighWord.
        dataInputStream.readInt();
        // Nbodies.
        int nbodies = dataInputStream.readInt();
        // Ndim.
        int ndim = dataInputStream.readInt();
        if (fileName.equals(POS_FILE_NAME) || fileName.equals(VEL_FILE_NAME)) {
          Preconditions.checkArgument(
              ndim == VEL_POS_DIM,
              fileName + " should have " + VEL_POS_DIM + " dimensions instead of " + ndim + ".");
        }
        if (numRows == -1) {
          // First file seen for this type establishes the particle count.
          numRows = nbodies;
          setNumRows(pType, numRows);
        } else {
          // Every other file of the same type must agree with it.
          Preconditions.checkArgument(
              numRows == nbodies,
              "The files do not have the same number of rows. numRows: "
                  + numRows
                  + " nbodies: "
                  + nbodies
                  + " fileName: "
                  + fileName);
        }
        // Code.
        int code = dataInputStream.readInt();
        Preconditions.checkArgument(
            code == FLOAT_CODE || code == INT_CODE,
            "This code format: " + code + " is not being expected.");
        // After the header, there is the maximum and minimum value in the file, both in the same data type as the rest
        // of the file.
        if (code == FLOAT_CODE) {
          // Max value.
          dataInputStream.readFloat();
          // Min value.
          dataInputStream.readFloat();
        } else {
          // Max value.
          dataInputStream.readInt();
          // Min value.
          dataInputStream.readInt();
        }
      }
    } catch (IOException e) {
      // FileNotFoundException is an IOException, so a single catch covers both.
      throw new DbException(e);
    }
  }
  /**
   * Constructs tuples of particles. Attributes of star particle: den, pos, pot, vel, iord, mass, OxMassFrac, soft,
   * smoothlength, tform, ESNRate, massform, metals, igasord, FeMassFrac. Attributes of gas particle: HI, HeI, den, pos,
   * pot , vel, HeII, iord, mass, OxMassFrac, soft, OxMassFracdot, gas smoothlength, FeMassFracdot, ESNRate, Metalsdot,
   * GasDensity, metals, temperature, FeMassFrac, coolontime. Attributes of dark particle: den, pos, pot, vel, iord,
   * mass, soft, smoothlength.
   *
   * @param pType The particle type.
   * @throws DbException The DbException.
   */
  private void processFile(final ParticleType pType) throws DbException {
    int numRows = getNumRows(pType);
    Map<String, DataInput> fileNameToDataInput = getDataInputMap(pType);
    // TODO(leelee): Put 0 for now to replace null values.
    while (numRows > 0 && buffer.numTuples() < buffer.getBatchSize()) {
      lineNumber++;
      int column = 0;
      // -2 to exclude grp, and type.
      for (int i = 0; i < NCHILADA_COLUMN_NAMES.size() - 2; i++) {
        String columnNames = NCHILADA_COLUMN_NAMES.get(i);
        DataInput dataInputStream = fileNameToDataInput.get(columnNames);
        Type type = NCHILADA_COLUMN_TYPES.get(i);
        try {
          if (type.equals(Type.FLOAT_TYPE)) {
            // x/y/z and vx/vy/vz are stored interleaved in the single "pos"/"vel"
            // files, so consecutive reads from the same stream yield x, y, z in order.
            if (columnNames.equals(POS_X)
                || columnNames.equals(POS_Y)
                || columnNames.equals(POS_Z)) {
              dataInputStream = fileNameToDataInput.get(POS_FILE_NAME);
              Preconditions.checkArgument(
                  dataInputStream != null, "Cannot find dataInputStream for " + POS_FILE_NAME);
            } else if (columnNames.equals(VEL_X)
                || columnNames.equals(VEL_Y)
                || columnNames.equals(VEL_Z)) {
              dataInputStream = fileNameToDataInput.get(VEL_FILE_NAME);
              Preconditions.checkArgument(
                  dataInputStream != null, "Cannot find dataInputStream for " + VEL_FILE_NAME);
            }
            if (dataInputStream != null) {
              buffer.putFloat(column++, dataInputStream.readFloat());
            } else {
              // Attribute file absent for this particle type: pad with 0.
              buffer.putFloat(column++, 0);
            }
          } else {
            if (dataInputStream != null) {
              buffer.putInt(column++, dataInputStream.readInt());
            } else {
              // Attribute file absent for this particle type: pad with 0.
              buffer.putInt(column++, 0);
            }
          }
        } catch (IOException e) {
          throw new DbException(e);
        }
      }
      buffer.putInt(column++, groupScanner.nextInt());
      buffer.putString(column++, pType.toString().toLowerCase());
      // The group file must have exactly one value per line.
      final String groupRest = groupScanner.nextLine().trim();
      if (groupRest.length() > 0) {
        throw new DbException(
            "groupFile: Unexpected output at the end of line " + lineNumber + ": " + groupRest);
      }
      numRows--;
    }
    // Update number of particles according to the type.
    setNumRows(pType, numRows);
  }
  @Override
  protected Schema generateSchema() {
    return NCHILADA_SCHEMA;
  }
  @Override
  protected final void cleanup() throws DbException {
    if (groupScanner != null) {
      // Scanner.close() also closes the underlying group input stream.
      groupScanner.close();
    }
    groupScanner = null;
    // Guard against init() having failed before the buffer was created.
    if (buffer != null) {
      while (buffer.numTuples() > 0) {
        buffer.popAny();
      }
    }
  }
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.identitymanagement.model;
import java.io.Serializable;
/**
* <p>
* Contains the response to a successful <a>ListPolicies</a> request.
* </p>
*/
/**
 * <p>
 * Contains the response to a successful <a>ListPolicies</a> request.
 * </p>
 */
public class ListPoliciesResult implements Serializable, Cloneable {

    /** The policies returned by the call; lazily created, never exposed as null. */
    private com.amazonaws.internal.SdkInternalList<Policy> policies;

    /**
     * Whether the result list was truncated. When <code>true</code>, issue a
     * follow-up request passing <code>Marker</code> to retrieve more items.
     * IAM may return fewer than <code>MaxItems</code> results even when more
     * are available, so always check this flag.
     */
    private Boolean isTruncated;

    /** Pagination marker; present only when <code>IsTruncated</code> is true. */
    private String marker;

    /**
     * Returns the list of policies, creating an empty list on first access.
     *
     * @return the list of policies (never <code>null</code>)
     */
    public java.util.List<Policy> getPolicies() {
        com.amazonaws.internal.SdkInternalList<Policy> current = policies;
        if (current == null) {
            current = new com.amazonaws.internal.SdkInternalList<Policy>();
            policies = current;
        }
        return current;
    }

    /**
     * Replaces the list of policies with a copy of the given collection.
     *
     * @param policies the policies to store, or <code>null</code> to clear
     */
    public void setPolicies(java.util.Collection<Policy> policies) {
        this.policies = (policies == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<Policy>(policies);
    }

    /**
     * Appends the given policies to the existing list (if any). Use
     * {@link #setPolicies(java.util.Collection)} or
     * {@link #withPolicies(java.util.Collection)} to replace the list instead.
     *
     * @param policies the policies to append
     * @return this object, for call chaining
     */
    public ListPoliciesResult withPolicies(Policy... policies) {
        if (this.policies == null) {
            this.policies = new com.amazonaws.internal.SdkInternalList<Policy>(
                    policies.length);
        }
        java.util.Collections.addAll(this.policies, policies);
        return this;
    }

    /**
     * Replaces the list of policies and returns this object.
     *
     * @param policies the policies to store
     * @return this object, for call chaining
     */
    public ListPoliciesResult withPolicies(java.util.Collection<Policy> policies) {
        setPolicies(policies);
        return this;
    }

    /**
     * Sets the truncation flag.
     *
     * @param isTruncated whether more results are available via pagination
     */
    public void setIsTruncated(Boolean isTruncated) {
        this.isTruncated = isTruncated;
    }

    /**
     * Returns the truncation flag.
     *
     * @return whether more results are available via pagination
     */
    public Boolean getIsTruncated() {
        return isTruncated;
    }

    /**
     * Sets the truncation flag and returns this object.
     *
     * @param isTruncated whether more results are available via pagination
     * @return this object, for call chaining
     */
    public ListPoliciesResult withIsTruncated(Boolean isTruncated) {
        setIsTruncated(isTruncated);
        return this;
    }

    /**
     * Returns the truncation flag.
     *
     * @return whether more results are available via pagination
     */
    public Boolean isTruncated() {
        return isTruncated;
    }

    /**
     * Sets the pagination marker.
     *
     * @param marker the value to pass as <code>Marker</code> in a subsequent request
     */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * Returns the pagination marker.
     *
     * @return the value to pass as <code>Marker</code> in a subsequent request
     */
    public String getMarker() {
        return marker;
    }

    /**
     * Sets the pagination marker and returns this object.
     *
     * @param marker the value to pass as <code>Marker</code> in a subsequent request
     * @return this object, for call chaining
     */
    public ListPoliciesResult withMarker(String marker) {
        setMarker(marker);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getPolicies() != null) {
            sb.append("Policies: ").append(getPolicies()).append(",");
        }
        if (getIsTruncated() != null) {
            sb.append("IsTruncated: ").append(getIsTruncated()).append(",");
        }
        if (getMarker() != null) {
            sb.append("Marker: ").append(getMarker());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListPoliciesResult)) {
            return false;
        }
        ListPoliciesResult that = (ListPoliciesResult) obj;
        return nullSafeEquals(that.getPolicies(), getPolicies())
                && nullSafeEquals(that.getIsTruncated(), getIsTruncated())
                && nullSafeEquals(that.getMarker(), getMarker());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + nullSafeHash(getPolicies());
        hashCode = prime * hashCode + nullSafeHash(getIsTruncated());
        hashCode = prime * hashCode + nullSafeHash(getMarker());
        return hashCode;
    }

    /** Null-tolerant equality used by {@link #equals(Object)}. */
    private static boolean nullSafeEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    /** Null-tolerant hash used by {@link #hashCode()}. */
    private static int nullSafeHash(Object o) {
        return (o == null) ? 0 : o.hashCode();
    }

    @Override
    public ListPoliciesResult clone() {
        try {
            return (ListPoliciesResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!",
                    e);
        }
    }
}
| |
/*
* Copyright 2012 gitblit.com.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gitblit.wicket.pages;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.apache.wicket.Component;
import org.apache.wicket.PageParameters;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.CheckBox;
import org.apache.wicket.markup.html.form.ListMultipleChoice;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.panel.Fragment;
import org.apache.wicket.markup.repeater.Item;
import org.apache.wicket.markup.repeater.data.DataView;
import org.apache.wicket.markup.repeater.data.ListDataProvider;
import org.apache.wicket.model.Model;
import org.eclipse.jgit.lib.Constants;
import com.gitblit.Constants.SearchType;
import com.gitblit.Keys;
import com.gitblit.models.RepositoryModel;
import com.gitblit.models.SearchResult;
import com.gitblit.models.UserModel;
import com.gitblit.utils.ArrayUtils;
import com.gitblit.utils.StringUtils;
import com.gitblit.wicket.GitBlitWebSession;
import com.gitblit.wicket.SessionlessForm;
import com.gitblit.wicket.StringChoiceRenderer;
import com.gitblit.wicket.WicketUtils;
import com.gitblit.wicket.panels.LinkPanel;
import com.gitblit.wicket.panels.PagerPanel;
/**
 * Page that performs Lucene-backed searches across the repositories the
 * current user can access, and renders paged results (commit and blob hits).
 * Query state is mirrored into bookmarkable page parameters so a search can
 * be shared or bookmarked.
 */
public class LuceneSearchPage extends RootPage {

    /** Bookmarkable entry point without parameters: renders an empty search form. */
    public LuceneSearchPage() {
        super();
        setup(null);
    }

    /** Entry point with query/repositories/paging encoded in the page parameters. */
    public LuceneSearchPage(PageParameters params) {
        super(params);
        setup(params);
    }

    /**
     * Builds the search form, executes the query (if any) and adds the result
     * components. {@code params} may be null (fresh page, no search yet).
     */
    private void setup(PageParameters params) {
        setupPage("", "");
        // default values
        ArrayList<String> repositories = new ArrayList<String>();
        String query = "";
        int page = 1;
        int pageSize = app().settings().getInteger(Keys.web.itemsPerPage, 50);
        // display user-accessible selections: only repositories that have
        // commits AND at least one Lucene-indexed branch are searchable
        UserModel user = GitBlitWebSession.get().getUser();
        List<String> availableRepositories = new ArrayList<String>();
        for (RepositoryModel model : app().repositories().getRepositoryModels(user)) {
            if (model.hasCommits && !ArrayUtils.isEmpty(model.indexedBranches)) {
                availableRepositories.add(model.name);
            }
        }
        if (params != null) {
            // a single repository may arrive via the standard repository parameter
            String repository = WicketUtils.getRepositoryName(params);
            if (!StringUtils.isEmpty(repository)) {
                repositories.add(repository);
            }
            page = WicketUtils.getPage(params);
            // multiple repositories arrive as one flattened string value
            if (params.containsKey("repositories")) {
                String value = params.getString("repositories", "");
                List<String> list = StringUtils.getStringsFromValue(value);
                repositories.addAll(list);
            }
            // "allrepos" selects every searchable repository
            if (params.containsKey("allrepos")) {
                repositories.addAll(availableRepositories);
            }
            if (params.containsKey("query")) {
                // explicit Lucene query string
                query = params.getString("query", "");
            } else {
                // legacy search parameters: synthesize a Lucene query from the
                // search string and the search type (commit/author/...)
                String value = WicketUtils.getSearchString(params);
                String type = WicketUtils.getSearchType(params);
                com.gitblit.Constants.SearchType searchType = com.gitblit.Constants.SearchType.forName(type);
                if (!StringUtils.isEmpty(value)) {
                    if (searchType == SearchType.COMMIT) {
                        query = "type:" + searchType.name().toLowerCase() + " AND \"" + value + "\"";
                    } else {
                        query = searchType.name().toLowerCase() + ":\"" + value + "\"";
                    }
                }
            }
        }
        // warn when indexing is on but nothing is indexed; error when the
        // Lucene integration is disabled entirely
        boolean luceneEnabled = app().settings().getBoolean(Keys.web.allowLuceneIndexing, true);
        if (luceneEnabled) {
            if (availableRepositories.size() == 0) {
                info(getString("gb.noIndexedRepositoriesWarning"));
            }
        } else {
            error(getString("gb.luceneDisabled"));
        }
        // enforce user-accessible repository selections: drops any name the
        // user cannot access (e.g. injected via URL), de-duplicates while
        // preserving selection order
        Set<String> uniqueRepositories = new LinkedHashSet<String>();
        for (String selectedRepository : repositories) {
            if (availableRepositories.contains(selectedRepository)) {
                uniqueRepositories.add(selectedRepository);
            }
        }
        ArrayList<String> searchRepositories = new ArrayList<String>(uniqueRepositories);
        // search form (sessionless so the resulting URL stays bookmarkable)
        final Model<String> queryModel = new Model<String>(query);
        final Model<ArrayList<String>> repositoriesModel = new Model<ArrayList<String>>(searchRepositories);
        final Model<Boolean> allreposModel = new Model<Boolean>(params != null && params.containsKey("allrepos"));
        SessionlessForm<Void> form = new SessionlessForm<Void>("searchForm", getClass()) {

            private static final long serialVersionUID = 1L;

            @Override
            public void onSubmit() {
                String q = queryModel.getObject();
                if (StringUtils.isEmpty(q)) {
                    error(getString("gb.undefinedQueryWarning"));
                    return;
                }
                if (repositoriesModel.getObject().size() == 0 && !allreposModel.getObject()) {
                    error(getString("gb.noSelectedRepositoriesWarning"));
                    return;
                }
                // redirect to this page with the search encoded in the URL
                PageParameters params = new PageParameters();
                params.put("repositories", StringUtils.flattenStrings(repositoriesModel.getObject()));
                params.put("query", queryModel.getObject());
                params.put("allrepos", allreposModel.getObject());
                LuceneSearchPage page = new LuceneSearchPage(params);
                setResponsePage(page);
            }
        };
        ListMultipleChoice<String> selections = new ListMultipleChoice<String>("repositories",
                repositoriesModel, availableRepositories, new StringChoiceRenderer());
        selections.setMaxRows(8);
        // all inputs are disabled when Lucene indexing is turned off
        form.add(selections.setEnabled(luceneEnabled));
        form.add(new TextField<String>("query", queryModel).setEnabled(luceneEnabled));
        form.add(new CheckBox("allrepos", allreposModel));
        add(form.setEnabled(luceneEnabled));
        // execute search
        final List<SearchResult> results = new ArrayList<SearchResult>();
        if (!ArrayUtils.isEmpty(searchRepositories) && !StringUtils.isEmpty(query)) {
            results.addAll(app().repositories().search(query, page, pageSize, searchRepositories));
        }
        // results header: "no hits", hidden (no search executed), or a
        // hit-range summary built from the first/last hit ids
        if (results.size() == 0) {
            if (!ArrayUtils.isEmpty(searchRepositories) && !StringUtils.isEmpty(query)) {
                add(new Label("resultsHeader", query).setRenderBodyOnly(true));
                add(new Label("resultsCount", getString("gb.noHits")).setRenderBodyOnly(true));
            } else {
                add(new Label("resultsHeader").setVisible(false));
                add(new Label("resultsCount").setVisible(false));
            }
        } else {
            add(new Label("resultsHeader", query).setRenderBodyOnly(true));
            add(new Label("resultsCount", MessageFormat.format(getString("gb.queryResults"),
                    results.get(0).hitId, results.get(results.size() - 1).hitId, results.get(0).totalHits)).
                    setRenderBodyOnly(true));
        }
        // search results view
        ListDataProvider<SearchResult> resultsDp = new ListDataProvider<SearchResult>(results);
        final DataView<SearchResult> resultsView = new DataView<SearchResult>("searchResults", resultsDp) {

            private static final long serialVersionUID = 1L;

            @Override
            public void populateItem(final Item<SearchResult> item) {
                final SearchResult sr = item.getModelObject();
                switch(sr.type) {
                case commit: {
                    // commit hit: link to the commit page and list its tags
                    Label icon = WicketUtils.newIcon("type", "icon-refresh");
                    WicketUtils.setHtmlTooltip(icon, "commit");
                    item.add(icon);
                    item.add(new LinkPanel("summary", null, sr.summary, CommitPage.class, WicketUtils.newObjectParameter(sr.repository, sr.commitId)));
                    // show tags
                    Fragment fragment = new Fragment("tags", "tagsPanel", LuceneSearchPage.this);
                    List<String> tags = sr.tags;
                    if (tags == null) {
                        tags = new ArrayList<String>();
                    }
                    ListDataProvider<String> tagsDp = new ListDataProvider<String>(tags);
                    // NOTE(review): the populateItem parameter below shadows the
                    // outer "item" — intentional here, but easy to misread
                    final DataView<String> tagsView = new DataView<String>("tag", tagsDp) {

                        private static final long serialVersionUID = 1L;

                        @Override
                        public void populateItem(final Item<String> item) {
                            String tag = item.getModelObject();
                            Component c = new LinkPanel("tagLink", null, tag, TagPage.class,
                                    WicketUtils.newObjectParameter(sr.repository, Constants.R_TAGS + tag));
                            WicketUtils.setCssClass(c, "tagRef");
                            item.add(c);
                        }
                    };
                    fragment.add(tagsView);
                    item.add(fragment);
                    break;
                }
                case blob: {
                    // blob hit: link to the file at the matched branch/path
                    Label icon = WicketUtils.newIcon("type", "icon-file");
                    WicketUtils.setHtmlTooltip(icon, "blob");
                    item.add(icon);
                    item.add(new LinkPanel("summary", null, sr.path, BlobPage.class, WicketUtils.newPathParameter(sr.repository, sr.branch, sr.path)));
                    item.add(new Label("tags").setVisible(false));
                    break;
                }
                }
                // the fragment is pre-highlighted markup, so HTML escaping is off
                item.add(new Label("fragment", sr.fragment).setEscapeModelStrings(false).setVisible(!StringUtils.isEmpty(sr.fragment)));
                item.add(new LinkPanel("repository", null, sr.repository, SummaryPage.class, WicketUtils.newRepositoryParameter(sr.repository)));
                if (StringUtils.isEmpty(sr.branch)) {
                    // NOTE(review): renders the literal text "null" when the hit has
                    // no branch — presumably a placeholder; confirm this is intended
                    item.add(new Label("branch", "null"));
                } else {
                    item.add(new LinkPanel("branch", "branch", StringUtils.getRelativePath(Constants.R_HEADS, sr.branch), LogPage.class, WicketUtils.newObjectParameter(sr.repository, sr.branch)));
                }
                item.add(new Label("author", sr.author));
                item.add(WicketUtils.createDatestampLabel("date", sr.date, getTimeZone(), getTimeUtils()));
            }
        };
        add(resultsView.setVisible(results.size() > 0));
        // pager links re-run the same query/repositories at a different page
        PageParameters pagerParams = new PageParameters();
        pagerParams.put("repositories", StringUtils.flattenStrings(repositoriesModel.getObject()));
        pagerParams.put("query", queryModel.getObject());
        boolean showPager = false;
        int totalPages = 0;
        if (results.size() > 0) {
            // ceiling division of totalHits by pageSize
            totalPages = (results.get(0).totalHits / pageSize) + (results.get(0).totalHits % pageSize > 0 ? 1 : 0);
            showPager = results.get(0).totalHits > pageSize;
        }
        add(new PagerPanel("topPager", page, totalPages, LuceneSearchPage.class, pagerParams).setVisible(showPager));
        add(new PagerPanel("bottomPager", page, totalPages, LuceneSearchPage.class, pagerParams).setVisible(showPager));
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.refactoring.move.makeFunctionTopLevel;
import com.google.common.collect.Lists;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.usageView.UsageInfo;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.jetbrains.python.PyBundle;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.codeInsight.controlflow.ScopeOwner;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.refactoring.PyRefactoringUtil;
import org.jetbrains.annotations.NotNull;
import java.util.*;
import static com.jetbrains.python.psi.PyUtil.as;
/**
* @author Mikhail Golubev
*/
/**
 * "Make function top-level" refactoring processor specialized for methods:
 * moves a method out of its class, turning each attribute read through the
 * "self" parameter into a new function parameter, and rewrites every call
 * site to pass the corresponding attribute values.
 *
 * @author Mikhail Golubev
 */
public class PyMakeMethodTopLevelProcessor extends PyBaseMakeFunctionTopLevelProcessor {

  // Attribute name -> chosen parameter name; insertion order defines the
  // order of the new parameters of the top-level function.
  private final LinkedHashMap<String, String> myAttributeToParameterName = new LinkedHashMap<>();
  // Attribute name -> all references to that attribute inside the method body.
  private final MultiMap<String, PyReferenceExpression> myAttributeReferences = MultiMap.create();
  // Every read of the "self" parameter discovered during analysis.
  private final Set<PsiElement> myReadsOfSelfParam = new HashSet<>();

  public PyMakeMethodTopLevelProcessor(@NotNull PyFunction targetFunction, @NotNull String destination) {
    super(targetFunction, destination);
  }

  @NotNull
  @Override
  protected String getRefactoringName() {
    return PyBundle.message("refactoring.make.method.top.level.dialog.title");
  }

  /**
   * Rewrites both the method body (self.attr reads become parameter reads)
   * and every usage of the method found by the rename/usage search.
   */
  @Override
  protected void updateUsages(@NotNull Collection<String> newParamNames, UsageInfo @NotNull [] usages) {
    // Field usages
    for (String attrName : myAttributeReferences.keySet()) {
      final Collection<PyReferenceExpression> reads = myAttributeReferences.get(attrName);
      final String paramName = myAttributeToParameterName.get(attrName);
      if (!attrName.equals(paramName)) {
        // the parameter was renamed to avoid a clash: replace "self.attr" wholesale
        for (PyReferenceExpression read : reads) {
          read.replace(myGenerator.createExpressionFromText(LanguageLevel.forElement(read), paramName));
        }
      }
      else {
        // same name: just strip the "self." qualifier
        for (PyReferenceExpression read : reads) {
          removeQualifier(read);
        }
      }
    }
    // Function usages
    final Collection<String> attrNames = myAttributeToParameterName.keySet();
    for (UsageInfo usage : usages) {
      final PsiElement usageElem = usage.getElement();
      if (usageElem == null) {
        continue;
      }
      if (usageElem instanceof PyReferenceExpression) {
        final PyExpression qualifier = ((PyReferenceExpression)usageElem).getQualifier();
        final PyCallExpression callExpr = as(usageElem.getParent(), PyCallExpression.class);
        if (qualifier != null && callExpr != null && callExpr.getArgumentList() != null) {
          PyExpression instanceExpr = qualifier;
          final PyArgumentList argumentList = callExpr.getArgumentList();
          // Class.method(instance) -> method(instance.attr)
          if (resolvesToClass(qualifier)) {
            final PyExpression[] arguments = argumentList.getArguments();
            if (arguments.length > 0) {
              // the explicit instance argument becomes the qualifier source
              instanceExpr = arguments[0];
              instanceExpr.delete();
            }
            else {
              // It's not clear how to handle usages like Class.method(), since there is no suitable instance
              instanceExpr = null;
            }
          }
          if (instanceExpr != null) {
            // module.inst.method() -> method(module.inst.foo, module.inst.bar)
            if (isPureReferenceExpression(instanceExpr)) {
              // recursive call inside the method
              if (myReadsOfSelfParam.contains(instanceExpr)) {
                addArguments(argumentList, newParamNames);
              }
              else {
                final String instanceExprText = instanceExpr.getText();
                addArguments(argumentList, ContainerUtil.map(attrNames, attribute -> instanceExprText + "." + attribute));
              }
            }
            // Class().method() -> method(Class().foo)
            else if (newParamNames.size() == 1) {
              addArguments(argumentList, Collections.singleton(instanceExpr.getText() + "." + ContainerUtil.getFirstItem(attrNames)));
            }
            // Class().method() -> inst = Class(); method(inst.foo, inst.bar)
            else if (!newParamNames.isEmpty()) {
              // introduce a temporary so the instance expression is evaluated once
              final PyStatement anchor = PsiTreeUtil.getParentOfType(callExpr, PyStatement.class);
              //noinspection ConstantConditions
              final String className = StringUtil.notNullize(myFunction.getContainingClass().getName(), PyNames.OBJECT);
              final String targetName = PyRefactoringUtil.selectUniqueNameFromType(className, usageElem);
              final String assignmentText = targetName + " = " + instanceExpr.getText();
              final PyAssignmentStatement assignment = myGenerator.createFromText(LanguageLevel.forElement(callExpr),
                                                                                  PyAssignmentStatement.class,
                                                                                  assignmentText);
              //noinspection ConstantConditions
              anchor.getParent().addBefore(assignment, anchor);
              addArguments(argumentList, ContainerUtil.map(attrNames, attribute -> targetName + "." + attribute));
            }
          }
        }
        // Will replace/invalidate entire expression
        removeQualifier((PyReferenceExpression)usageElem);
      }
    }
  }

  /** Returns true when the qualifier resolves to the class that declares the method. */
  private boolean resolvesToClass(@NotNull PyExpression qualifier) {
    for (PsiElement element : PyUtil.multiResolveTopPriority(qualifier, myResolveContext)) {
      if (element == myFunction.getContainingClass()) {
        return true;
      }
    }
    return false;
  }

  /**
   * Returns true for a plain, possibly-dotted name like "a.b.c" — i.e. an
   * expression that is safe to duplicate textually without side effects.
   */
  private static boolean isPureReferenceExpression(@NotNull PyExpression expr) {
    if (!(expr instanceof PyReferenceExpression)) {
      return false;
    }
    final PyExpression qualifier = ((PyReferenceExpression)expr).getQualifier();
    return qualifier == null || isPureReferenceExpression(qualifier);
  }

  /**
   * Strips the qualifier from a reference ("self.foo" -> "foo") by replacing
   * it with a fresh unqualified reference; returns the replacement element.
   */
  @NotNull
  private PyReferenceExpression removeQualifier(@NotNull PyReferenceExpression expr) {
    if (!expr.isQualified()) {
      return expr;
    }
    final PyExpression newExpression = myGenerator.createExpressionFromText(LanguageLevel.forElement(expr), expr.getLastChild().getText());
    return (PyReferenceExpression)expr.replace(newExpression);
  }

  /**
   * Builds the top-level function: a copy of the method with the "self"
   * parameter removed and one parameter appended per escaping attribute.
   */
  @NotNull
  @Override
  protected PyFunction createNewFunction(@NotNull Collection<String> newParams) {
    final PyFunction copied = (PyFunction)myFunction.copy();
    final PyParameter[] params = copied.getParameterList().getParameters();
    if (params.length > 0) {
      // drop the implicit instance parameter (self/cls)
      params[0].delete();
    }
    addParameters(copied.getParameterList(), newParams);
    return copied;
  }

  /**
   * Analyses the method body, validating that the refactoring is applicable,
   * and collects the attribute names that must become parameters. Also fills
   * {@link #myReadsOfSelfParam}, {@link #myAttributeReferences} and
   * {@link #myAttributeToParameterName}.
   *
   * @throws IncorrectOperationException when the method uses constructs that
   *                                     cannot be converted (nonlocal writes,
   *                                     attribute writes, private attributes,
   *                                     calls of other methods, or any other
   *                                     "special" usage of "self")
   */
  @NotNull
  @Override
  protected List<String> collectNewParameterNames() {
    final Set<String> attributeNames = new LinkedHashSet<>();
    for (ScopeOwner owner : PsiTreeUtil.collectElementsOfType(myFunction, ScopeOwner.class)) {
      final AnalysisResult result = analyseScope(owner);
      if (!result.nonlocalWritesToEnclosingScope.isEmpty()) {
        throw new IncorrectOperationException(PyBundle.message("refactoring.make.function.top.level.error.nonlocal.writes"));
      }
      if (!result.readsOfSelfParametersFromEnclosingScope.isEmpty()) {
        throw new IncorrectOperationException(PyBundle.message("refactoring.make.function.top.level.error.self.reads"));
      }
      if (!result.readsFromEnclosingScope.isEmpty()) {
        throw new IncorrectOperationException(PyBundle.message("refactoring.make.function.top.level.error.outer.scope.reads"));
      }
      if (!result.writesToSelfParameter.isEmpty()) {
        throw new IncorrectOperationException(PyBundle.message("refactoring.make.function.top.level.error.special.usage.of.self"));
      }
      myReadsOfSelfParam.addAll(result.readsOfSelfParameter);
      for (PsiElement usage : result.readsOfSelfParameter) {
        if (usage.getParent() instanceof PyTargetExpression) {
          // "self.attr = ..." — cannot be expressed via parameters
          throw new IncorrectOperationException(PyBundle.message("refactoring.make.function.top.level.error.attribute.writes"));
        }
        final PyReferenceExpression parentReference = as(usage.getParent(), PyReferenceExpression.class);
        if (parentReference != null) {
          final String attrName = parentReference.getName();
          if (attrName != null && PyUtil.isClassPrivateName(attrName)) {
            throw new IncorrectOperationException(PyBundle.message("refactoring.make.function.top.level.error.private.attributes"));
          }
          if (parentReference.getParent() instanceof PyCallExpression) {
            if (!(Comparing.equal(myFunction.getName(), parentReference.getName()))) {
              throw new IncorrectOperationException(PyBundle.message("refactoring.make.function.top.level.error.method.calls"));
            }
            else {
              // do not add method itself to its parameters
              continue;
            }
          }
          attributeNames.add(attrName);
          myAttributeReferences.putValue(attrName, parentReference);
        }
        else {
          throw new IncorrectOperationException(PyBundle.message("refactoring.make.function.top.level.error.special.usage.of.self"));
        }
      }
    }
    // pick collision-free parameter names at the destination
    for (String name : attributeNames) {
      final Collection<PyReferenceExpression> reads = myAttributeReferences.get(name);
      final PsiElement anchor = ContainerUtil.getFirstItem(reads);
      //noinspection ConstantConditions
      if (!PyRefactoringUtil.isValidNewName(name, anchor)) {
        final String indexedName = PyRefactoringUtil.appendNumberUntilValid(name, anchor, PyRefactoringUtil::isValidNewName);
        myAttributeToParameterName.put(name, indexedName);
      }
      else {
        myAttributeToParameterName.put(name, name);
      }
    }
    return Lists.newArrayList(myAttributeToParameterName.values());
  }
}
| |
package hotelbuddy;
import com.licel.jcardsim.base.Simulator;
import common.CryptographyMock;
import common.TestHelper;
import javacard.framework.AID;
import org.junit.Assert;
import org.junit.Test;
/**
* Created by Johannes on 29.07.2015.
*/
/**
 * Tests for the Bonus applet: adding, reading and resetting bonus points,
 * plus the error paths (bad length, invalid values, overflow). Each test runs
 * against a fresh jcardsim simulator with a mocked cryptography applet.
 *
 * Created by Johannes on 29.07.2015.
 */
public class BonusTest
{
    private static final byte[] CryptographyAIDBytes = "Cryptography".getBytes();
    private static final byte[] BonusAIDBytes = "Bonus".getBytes();
    private static final AID CryptographyAID = new AID(CryptographyAIDBytes, (short) 0, (byte) CryptographyAIDBytes.length);
    private static final AID BonusAID = new AID(BonusAIDBytes, (short) 0, (byte) BonusAIDBytes.length);

    // APDU class/instruction bytes understood by the Bonus applet.
    private static final byte CLA_BONUS = (byte) 0x42;
    private static final byte INS_ADD_POINTS = (byte) 0xB0;
    private static final byte INS_GET_POINTS = (byte) 0xB1;
    private static final byte INS_RESET = (byte) 0xF0;

    /**
     * Installs the mock cryptography applet and the Bonus applet into a fresh
     * simulator, selects the Bonus applet, and logs ATR/selection diagnostics.
     */
    private static Simulator createSimulatorWithBonusApplet()
    {
        Simulator sim = new Simulator();
        sim.installApplet(CryptographyAID, CryptographyMock.class);
        sim.installApplet(BonusAID, Bonus.class);

        System.out.println("Getting ATR...");
        byte[] atr = sim.getATR();
        System.out.println(new String(atr));
        System.out.println(TestHelper.ToHexString(atr));

        System.out.println("\nSelecting Applet...");
        boolean isAppletSelected = sim.selectApplet(BonusAID);
        System.out.println(isAppletSelected);
        return sim;
    }

    /**
     * Sends an "add bonus points" command carrying the given (mock-encrypted)
     * two-byte amount and returns the raw response APDU.
     */
    private static byte[] addBonusPoints(Simulator sim, byte[] message)
    {
        // the mock needs to know the plaintext length it should "decrypt"
        CryptographyMock.DataLength = (short) message.length;
        return TestHelper.ExecuteCommand(sim, CLA_BONUS, INS_ADD_POINTS, message, (byte) 0x00);
    }

    /** Reads the current bonus points (two-byte value) back from the card. */
    private static byte[] getBonusPoints(Simulator sim)
    {
        return TestHelper.ExecuteCommand(sim, CLA_BONUS, INS_GET_POINTS, new byte[0], (byte) 0x02);
    }

    /** Sends the reset command that clears the stored bonus points. */
    private static byte[] resetBonusPoints(Simulator sim)
    {
        return TestHelper.ExecuteCommand(sim, CLA_BONUS, INS_RESET, new byte[0], (byte) 0x00);
    }

    @Test
    public void Test_RegisterBonus_Valid()
    {
        Simulator sim = createSimulatorWithBonusApplet();

        // store 0x7530 bonus points
        System.out.println("\nSetting bonus points");
        byte[] setMessage = { (byte) 0x75, (byte) 0x30 };
        byte[] answer = addBonusPoints(sim, setMessage);
        TestHelper.EnsureStatusBytesNoError(answer);
        Assert.assertTrue(CryptographyMock.decryptWasCalled());

        // reading them back must return the same (mock-encrypted) value
        byte[] expectedAnswer = { (byte) 0x75, (byte) 0x30 };
        System.out.println("\nGetting bonus points from card");
        answer = getBonusPoints(sim);
        TestHelper.EnsureStatusBytesNoError(answer);
        Assert.assertTrue(CryptographyMock.encryptWasCalled());
        TestHelper.compareWithoutStatusBytes(expectedAnswer, answer, expectedAnswer.length);

        CryptographyMock.reset();
    }

    @Test
    public void Test_RegisterBonus_Error_WrongMessageLength()
    {
        Simulator sim = createSimulatorWithBonusApplet();

        // one byte instead of the required two
        System.out.println("\nSetting bonus points");
        byte[] setMessage = { (byte) 0xFF };
        byte[] answer = addBonusPoints(sim, setMessage);

        // Error: wrong length (SW 0x6700)
        TestHelper.EnsureStatusBytes(answer, new byte[]{(byte) 0x67, (byte) 0x00});
        Assert.assertTrue(CryptographyMock.decryptWasCalled());

        CryptographyMock.reset();
    }

    @Test
    public void Test_RegisterBonus_Error_AddingNegativePoints()
    {
        Simulator sim = createSimulatorWithBonusApplet();

        // 0xFFFF is negative when interpreted as a signed short
        System.out.println("\nSetting bonus points");
        byte[] setMessage = { (byte) 0xFF, (byte) 0xFF };
        byte[] answer = addBonusPoints(sim, setMessage);

        // Error: data invalid (SW 0x6984)
        TestHelper.EnsureStatusBytes(answer, new byte[]{(byte) 0x69, (byte) 0x84});
        Assert.assertTrue(CryptographyMock.decryptWasCalled());

        CryptographyMock.reset();
    }

    @Test
    public void Test_RegisterBonus_Error_AddingNoPoints()
    {
        Simulator sim = createSimulatorWithBonusApplet();

        // adding zero points is rejected
        System.out.println("\nSetting bonus points");
        byte[] setMessage = { (byte) 0x00, (byte) 0x00 };
        byte[] answer = addBonusPoints(sim, setMessage);

        // Error: data invalid (SW 0x6984)
        TestHelper.EnsureStatusBytes(answer, new byte[]{(byte) 0x69, (byte) 0x84});
        Assert.assertTrue(CryptographyMock.decryptWasCalled());

        CryptographyMock.reset();
    }

    @Test
    public void Test_RegisterBonus_Error_BonusOverflow()
    {
        Simulator sim = createSimulatorWithBonusApplet();

        System.out.println("\nSetting bonus points");
        byte[] setMessage = { (byte) 0x75, (byte) 0x00 };
        byte[] answer = addBonusPoints(sim, setMessage);
        TestHelper.EnsureStatusBytesNoError(answer);
        Assert.assertTrue(CryptographyMock.decryptWasCalled());

        System.out.println("\nSetting bonus points which causes overflow");
        // sending same value again which causes overflow
        answer = addBonusPoints(sim, setMessage);

        // Error: conditions not satisfied (SW 0x6985)
        TestHelper.EnsureStatusBytes(answer, new byte[]{(byte) 0x69, (byte) 0x85});
        Assert.assertTrue(CryptographyMock.decryptWasCalled());

        CryptographyMock.reset();
    }

    @Test
    public void Test_Reset()
    {
        Simulator sim = createSimulatorWithBonusApplet();

        System.out.println("\nSetting bonus points");
        byte[] setMessage = { (byte) 0x75, (byte) 0x30 };
        byte[] answer = addBonusPoints(sim, setMessage);
        TestHelper.EnsureStatusBytesNoError(answer);
        Assert.assertTrue(CryptographyMock.decryptWasCalled());

        System.out.println("\nReset the bonus points");
        answer = resetBonusPoints(sim);
        TestHelper.EnsureStatusBytesNoError(answer);
        Assert.assertTrue(CryptographyMock.decryptWasCalled());

        // after a reset the stored value must be zero
        byte[] expectedAnswer = { (byte) 0x00, (byte) 0x00 };
        System.out.println("\nGetting bonus points from card");
        answer = getBonusPoints(sim);
        TestHelper.EnsureStatusBytesNoError(answer);
        Assert.assertTrue(CryptographyMock.encryptWasCalled());
        TestHelper.compareWithoutStatusBytes(expectedAnswer, answer, expectedAnswer.length);

        CryptographyMock.reset();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query.scan;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.io.CharSource;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.guava.MergeSequence;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.query.DefaultGenericQueryMetricsFactory;
import org.apache.druid.query.Druids;
import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QueryRunnerFactory;
import org.apache.druid.query.QueryRunnerTestHelper;
import org.apache.druid.query.TableDataSource;
import org.apache.druid.query.context.ResponseContext;
import org.apache.druid.query.spec.LegacySegmentSpec;
import org.apache.druid.query.spec.QuerySegmentSpec;
import org.apache.druid.segment.IncrementalIndexSegment;
import org.apache.druid.segment.Segment;
import org.apache.druid.segment.TestIndex;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.timeline.SegmentId;
import org.apache.druid.timeline.partition.NoneShardSpec;
import org.joda.time.Interval;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
*
*/
@RunWith(Parameterized.class)
public class MultiSegmentScanQueryTest
{
  // Tool chest shared by every test: default scan-query config plus the
  // default (no-op) query-metrics factory.
  private static final ScanQueryQueryToolChest TOOL_CHEST = new ScanQueryQueryToolChest(
      new ScanQueryConfig(),
      DefaultGenericQueryMetricsFactory.instance()
  );

  // Factory used to create one QueryRunner per test segment.
  private static final QueryRunnerFactory<ScanResultValue, ScanQuery> FACTORY = new ScanQueryRunnerFactory(
      TOOL_CHEST,
      new ScanQueryEngine(),
      new ScanQueryConfig()
  );

  // Two-day interval covering both test segments (2011-01-12 and 2011-01-13).
  private static final Interval I_0112_0114 = Intervals.of("2011-01-12/2011-01-14");
  public static final QuerySegmentSpec I_0112_0114_SPEC = new LegacySegmentSpec(I_0112_0114);
// time modified version of druid.sample.numeric.tsv
public static final String[] V_0112 = {
"2011-01-12T00:00:00.000Z\tspot\tautomotive\t1000\t10000.0\t10000.0\t100000\tpreferred\tapreferred\t100.000000",
"2011-01-12T01:00:00.000Z\tspot\tbusiness\t1100\t11000.0\t11000.0\t110000\tpreferred\tbpreferred\t100.000000",
"2011-01-12T02:00:00.000Z\tspot\tentertainment\t1200\t12000.0\t12000.0\t120000\tpreferred\tepreferred\t100.000000",
"2011-01-12T03:00:00.000Z\tspot\thealth\t1300\t13000.0\t13000.0\t130000\tpreferred\thpreferred\t100.000000",
"2011-01-12T04:00:00.000Z\tspot\tmezzanine\t1400\t14000.0\t14000.0\t140000\tpreferred\tmpreferred\t100.000000",
"2011-01-12T05:00:00.000Z\tspot\tnews\t1500\t15000.0\t15000.0\t150000\tpreferred\tnpreferred\t100.000000",
"2011-01-12T06:00:00.000Z\tspot\tpremium\t1600\t16000.0\t16000.0\t160000\tpreferred\tppreferred\t100.000000",
"2011-01-12T07:00:00.000Z\tspot\ttechnology\t1700\t17000.0\t17000.0\t170000\tpreferred\ttpreferred\t100.000000",
"2011-01-12T08:00:00.000Z\tspot\ttravel\t1800\t18000.0\t18000.0\t180000\tpreferred\ttpreferred\t100.000000",
"2011-01-12T09:00:00.000Z\ttotal_market\tmezzanine\t1400\t14000.0\t14000.0\t140000\tpreferred\tmpreferred\t1000.000000",
"2011-01-12T10:00:00.000Z\ttotal_market\tpremium\t1600\t16000.0\t16000.0\t160000\tpreferred\tppreferred\t1000.000000",
"2011-01-12T11:00:00.000Z\tupfront\tmezzanine\t1400\t14000.0\t14000.0\t140000\tpreferred\tmpreferred\t800.000000\tvalue",
"2011-01-12T12:00:00.000Z\tupfront\tpremium\t1600\t16000.0\t16000.0\t160000\tpreferred\tppreferred\t800.000000\tvalue"
};
public static final String[] V_0113 = {
"2011-01-13T00:00:00.000Z\tspot\tautomotive\t1000\t10000.0\t10000.0\t100000\tpreferred\tapreferred\t94.874713",
"2011-01-13T01:00:00.000Z\tspot\tbusiness\t1100\t11000.0\t11000.0\t110000\tpreferred\tbpreferred\t103.629399",
"2011-01-13T02:00:00.000Z\tspot\tentertainment\t1200\t12000.0\t12000.0\t120000\tpreferred\tepreferred\t110.087299",
"2011-01-13T03:00:00.000Z\tspot\thealth\t1300\t13000.0\t13000.0\t130000\tpreferred\thpreferred\t114.947403",
"2011-01-13T04:00:00.000Z\tspot\tmezzanine\t1400\t14000.0\t14000.0\t140000\tpreferred\tmpreferred\t104.465767",
"2011-01-13T05:00:00.000Z\tspot\tnews\t1500\t15000.0\t15000.0\t150000\tpreferred\tnpreferred\t102.851683",
"2011-01-13T06:00:00.000Z\tspot\tpremium\t1600\t16000.0\t16000.0\t160000\tpreferred\tppreferred\t108.863011",
"2011-01-13T07:00:00.000Z\tspot\ttechnology\t1700\t17000.0\t17000.0\t170000\tpreferred\ttpreferred\t111.356672",
"2011-01-13T08:00:00.000Z\tspot\ttravel\t1800\t18000.0\t18000.0\t180000\tpreferred\ttpreferred\t106.236928",
"2011-01-13T09:00:00.000Z\ttotal_market\tmezzanine\t1400\t14000.0\t14000.0\t140000\tpreferred\tmpreferred\t1040.945505",
"2011-01-13T10:00:00.000Z\ttotal_market\tpremium\t1600\t16000.0\t16000.0\t160000\tpreferred\tppreferred\t1689.012875",
"2011-01-13T11:00:00.000Z\tupfront\tmezzanine\t1400\t14000.0\t14000.0\t140000\tpreferred\tmpreferred\t826.060182\tvalue",
"2011-01-13T12:00:00.000Z\tupfront\tpremium\t1600\t16000.0\t16000.0\t160000\tpreferred\tppreferred\t1564.617729\tvalue"
};
  // One incremental-index segment per day of sample data; created in setup(),
  // released in clear().
  private static Segment segment0;
  private static Segment segment1;
@BeforeClass
public static void setup() throws IOException
{
CharSource v_0112 = CharSource.wrap(StringUtils.join(V_0112, "\n"));
CharSource v_0113 = CharSource.wrap(StringUtils.join(V_0113, "\n"));
IncrementalIndex index0 = TestIndex.loadIncrementalIndex(newIndex("2011-01-12T00:00:00.000Z"), v_0112);
IncrementalIndex index1 = TestIndex.loadIncrementalIndex(newIndex("2011-01-13T00:00:00.000Z"), v_0113);
segment0 = new IncrementalIndexSegment(index0, makeIdentifier(index0, "v1"));
segment1 = new IncrementalIndexSegment(index1, makeIdentifier(index1, "v1"));
}
  /** Builds a segment id spanning the given index's data interval. */
  private static SegmentId makeIdentifier(IncrementalIndex index, String version)
  {
    return makeIdentifier(index.getInterval(), version);
  }
  /** Builds a non-sharded segment id for the shared test datasource. */
  private static SegmentId makeIdentifier(Interval interval, String version)
  {
    return SegmentId.of(QueryRunnerTestHelper.DATA_SOURCE, interval, version, NoneShardSpec.instance());
  }
  /** Creates an empty on-heap index with the default 10000-row capacity. */
  private static IncrementalIndex newIndex(String minTimeStamp)
  {
    return newIndex(minTimeStamp, 10000);
  }
private static IncrementalIndex newIndex(String minTimeStamp, int maxRowCount)
{
final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
.withMinTimestamp(DateTimes.of(minTimeStamp).getMillis())
.withQueryGranularity(Granularities.HOUR)
.withMetrics(TestIndex.METRIC_AGGS)
.build();
return new IncrementalIndex.Builder()
.setIndexSchema(schema)
.setMaxRowCount(maxRowCount)
.buildOnheap();
}
/** Releases both in-memory segments after all tests in the class have run. */
@AfterClass
public static void clear()
{
    IOUtils.closeQuietly(segment0);
    IOUtils.closeQuietly(segment1);
}
/**
 * Test parameters: the cartesian product of scan limits and batch sizes.
 * A limit of 0 is treated as "no limit" by the assertions in the tests below.
 */
@Parameterized.Parameters(name = "limit={0},batchSize={1}")
public static Iterable<Object[]> constructorFeeder()
{
    return QueryRunnerTestHelper.cartesian(
        Arrays.asList(0, 1, 3, 7, 10, 20, 1000),
        Arrays.asList(0, 1, 3, 6, 7, 10, 123, 2000)
    );
}
// Row limit under test; 0 is treated as "no limit".
private final int limit;
// Scan batch size under test.
private final int batchSize;
/**
 * @param limit scan query limit, injected by the Parameterized runner
 * @param batchSize scan query batch size, injected by the Parameterized runner
 */
public MultiSegmentScanQueryTest(int limit, int batchSize)
{
    this.limit = limit;
    this.batchSize = batchSize;
}
/**
 * Starts a scan-query builder over the two-day test interval using this
 * run's limit and batch size, with an empty column list and legacy mode off.
 */
private Druids.ScanQueryBuilder newBuilder()
{
    return Druids.newScanQueryBuilder()
        .dataSource(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE))
        .intervals(I_0112_0114_SPEC)
        .batchSize(batchSize)
        .columns(Collections.emptyList())
        .legacy(false)
        .limit(limit);
}
/**
 * Runs the scan query through merged per-segment runners and verifies the
 * total number of returned events honours the limit.
 */
@Test
public void testMergeRunnersWithLimit()
{
    ScanQuery query = newBuilder().build();
    List<ScanResultValue> results = FACTORY
        .mergeRunners(
            Execs.directExecutor(),
            ImmutableList.of(FACTORY.createRunner(segment0), FACTORY.createRunner(segment1))
        )
        .run(QueryPlus.wrap(query))
        .toList();
    int totalCount = 0;
    for (ScanResultValue result : results) {
        totalCount += ((List) result.getEvents()).size();
    }
    int totalRows = V_0112.length + V_0113.length;
    // A limit of 0 means "no limit".
    int expectedCount = limit != 0 ? Math.min(limit, totalRows) : totalRows;
    // JUnit's assertEquals contract is (expected, actual); the original had the
    // arguments swapped, which produces misleading failure messages. The
    // leftover debug System.out.println was also removed.
    Assert.assertEquals(expectedCount, totalCount);
}
/**
 * Runs the scan query through the tool chest's result merger, simulating
 * results coming back from two historicals (one per segment), and verifies
 * the total number of returned events honours the limit.
 */
@Test
public void testMergeResultsWithLimit()
{
    QueryRunner<ScanResultValue> runner = TOOL_CHEST.mergeResults(
        new QueryRunner<ScanResultValue>()
        {
            @Override
            public Sequence<ScanResultValue> run(
                QueryPlus<ScanResultValue> queryPlus,
                ResponseContext responseContext
            )
            {
                // simulate results back from 2 historicals
                List<Sequence<ScanResultValue>> sequences = Lists.newArrayListWithExpectedSize(2);
                sequences.add(FACTORY.createRunner(segment0).run(queryPlus));
                sequences.add(FACTORY.createRunner(segment1).run(queryPlus));
                return new MergeSequence<>(
                    queryPlus.getQuery().getResultOrdering(),
                    Sequences.simple(sequences)
                );
            }
        }
    );
    ScanQuery query = newBuilder().build();
    List<ScanResultValue> results = runner.run(QueryPlus.wrap(query)).toList();
    int totalCount = 0;
    for (ScanResultValue result : results) {
        totalCount += ((List) result.getEvents()).size();
    }
    int totalRows = V_0112.length + V_0113.length;
    // A limit of 0 means "no limit".
    int expectedCount = limit != 0 ? Math.min(limit, totalRows) : totalRows;
    // JUnit's assertEquals contract is (expected, actual); the original had the
    // arguments swapped, which produces misleading failure messages.
    Assert.assertEquals(expectedCount, totalCount);
}
}
| |
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006 The Sakai Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package uk.ac.cam.caret.sakai.rwiki.service.api;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.sakaiproject.entity.api.ContextObserver;
import org.sakaiproject.entity.api.Entity;
import org.sakaiproject.entity.api.EntityProducer;
import org.sakaiproject.entity.api.EntityTransferrer;
import org.sakaiproject.entity.api.Reference;
import uk.ac.cam.caret.sakai.rwiki.service.api.dao.ObjectProxy;
import uk.ac.cam.caret.sakai.rwiki.service.api.model.RWikiCurrentObject;
import uk.ac.cam.caret.sakai.rwiki.service.api.model.RWikiHistoryObject;
import uk.ac.cam.caret.sakai.rwiki.service.api.model.RWikiObject;
import uk.ac.cam.caret.sakai.rwiki.service.api.model.RWikiPermissions;
import uk.ac.cam.caret.sakai.rwiki.service.exception.PermissionException;
import uk.ac.cam.caret.sakai.rwiki.service.exception.VersionException;
// FIXME: Service
/**
 * Service for reading, writing and versioning RWiki pages, and for exposing
 * them to Sakai as entities (via {@link EntityProducer}/{@link EntityTransferrer}).
 */
public interface RWikiObjectService extends EntityProducer, EntityTransferrer, ContextObserver
{
    /** The type string for this application: should not change over time as it may be stored in various parts of persistent entities. */
    static final String APPLICATION_ID = RWikiObjectService.class.getName();
    /** This string starts the references to resources in this service. */
    static final String REFERENCE_ROOT = Entity.SEPARATOR + "wiki";
    /** Label used in references to resources in this service. */
    static final String REFERENCE_LABEL = "wiki";
    /** Name of the event when creating a resource. */
    public static final String EVENT_RESOURCE_ADD = "wiki.new";
    /** Name of the event when reading a resource. */
    public static final String EVENT_RESOURCE_READ = "wiki.read";
    /** Name of the event when writing a resource. */
    public static final String EVENT_RESOURCE_WRITE = "wiki.revise";
    /** Name of the event when removing a resource. */
    public static final String EVENT_RESOURCE_REMOVE = "wiki.delete";
    /** Event name for a "small change" request ("wiki.smallchange.request"). */
    public static final String SMALL_CHANGE_IN_THREAD = "wiki.smallchange.request";
    /**
     * Gets the current version of the named page.
     *
     * @param name page name
     * @param realm page space (realm) the page lives in
     * @return the current revision of the page
     * @throws PermissionException if the caller may not read the page
     */
    RWikiCurrentObject getRWikiObject(String name, String realm)
            throws PermissionException;
    /**
     * Gets the current version of the named page, creating it from a named
     * template if it does not exist.
     *
     * @param name page name
     * @param realm
     *        the page space the page is in, used to localise and globalise the
     *        name
     * @param ignore reference object (NOTE(review): semantics not evident from
     *        this interface — confirm against the implementation)
     * @param templateName name of the template page used if the page is absent
     * @return the current revision of the page
     * @throws PermissionException if the caller may not read the page
     */
    RWikiCurrentObject getRWikiObject(String name, String realm,
            RWikiObject ignore, String templateName) throws PermissionException;
    /**
     * Gets the current object corresponding to the given reference object.
     *
     * @param reference
     *        the reference object
     * @return the current revision for the referenced page
     */
    RWikiCurrentObject getRWikiObject(RWikiObject reference);
    /**
     * Searches current page versions.
     *
     * @param criteria search criteria
     * @param realm page space to search in
     * @return a list of matching current objects
     * @throws PermissionException if the caller may not search the realm
     */
    List search(String criteria, String realm) throws PermissionException;
    /**
     * Updates the named page, also setting its permissions.
     *
     * @param name page name
     * @param realm page space
     * @param version expected version timestamp (optimistic-locking check)
     * @param content new page content
     * @param permissions new page permissions
     * @throws PermissionException if the caller may not update the page
     * @throws VersionException if the page changed since {@code version}
     */
    void update(String name, String realm, Date version, String content,
            RWikiPermissions permissions) throws PermissionException,
            VersionException;
    /**
     * Updates the named page's content, leaving permissions unchanged.
     *
     * @param name page name
     * @param realm page space
     * @param version expected version timestamp (optimistic-locking check)
     * @param content new page content
     * @throws PermissionException if the caller may not update the page
     * @throws VersionException if the page changed since {@code version}
     */
    void update(String name, String realm, Date version, String content)
            throws PermissionException, VersionException;
    /**
     * Updates only the named page's permissions.
     *
     * @param name page name
     * @param realm page space
     * @param version expected version timestamp (optimistic-locking check)
     * @param permissions new page permissions
     * @throws PermissionException if the caller may not administer the page
     * @throws VersionException if the page changed since {@code version}
     */
    void update(String name, String realm, Date version,
            RWikiPermissions permissions) throws PermissionException,
            VersionException;
    /**
     * Does the page exist?
     *
     * @param name
     *        A possibly non-globalised page name
     * @param space
     *        Default space to globalise to
     * @return {@code true} if the page exists
     */
    boolean exists(String name, String space);
    // SAK-2519
    /**
     * A list of pages that have changed since the given date (current versions).
     *
     * @param since cut-off date
     * @param realm page space
     * @return a list containing RWikiCurrentObjects
     */
    List findChangedSince(Date since, String realm);
    /**
     * Finds pages that reference the given page name.
     *
     * @param name page name being referenced
     * @return a non-null list of page names, not RWikiObjects
     */
    List findReferencingPages(String name);
    /**
     * Reverts the current revision to a named revision; this creates a new
     * revision rather than discarding history.
     *
     * @param name page name
     * @param realm page space
     * @param version expected version timestamp (optimistic-locking check)
     * @param revision revision number to revert to
     */
    void revert(String name, String realm, Date version, int revision);
    /**
     * Gets a previous version of a page.
     *
     * @param refernceObject
     *        the RWiki object whose rwikiobjectid field will be used to locate
     *        the revision
     * @param revision revision number to fetch
     * @return the requested historical revision
     */
    RWikiHistoryObject getRWikiHistoryObject(RWikiObject refernceObject,
            int revision);
    /**
     * Gets a list of all previous versions as RWikiHistoryObjects.
     *
     * @param reference object identifying the page
     * @return a list of RWikiHistoryObjects
     */
    List findRWikiHistoryObjects(RWikiObject reference);
    /**
     * Finds the history objects sorted in reverse order.
     *
     * @param rwo object identifying the page
     * @return a list of RWikiHistoryObjects, newest first
     */
    List findRWikiHistoryObjectsInReverse(RWikiObject rwo);
    /**
     * Gets the list of subpages of the supplied page, sorted alphabetically.
     *
     * @param globalParentPageName
     *        is the page on which we want to find sub pages. THIS IS A GLOBAL
     *        NAME. DONT CONFUSE WITH A LOCAL NAME
     * @return a list of pages sorted by name alphabetically.
     */
    List findRWikiSubPages(String globalParentPageName);
    /**
     * Creates a new comment on the page.
     *
     * @param name page name
     * @param realm page space
     * @param version expected version timestamp (optimistic-locking check)
     * @param content comment content
     * @throws PermissionException if the caller may not comment
     * @throws VersionException if the page changed since {@code version}
     */
    void updateNewComment(String name, String realm, Date version,
            String content) throws PermissionException, VersionException;
    /**
     * Creates a list proxy based on the list and object proxy.
     *
     * @param commentsList backing list
     * @param lop proxy applied to each element
     * @return the proxied list
     */
    List createListProxy(List commentsList, ObjectProxy lop);
    /**
     * Creates a new RWiki current object according to the implementation.
     *
     * @return a new, unsaved current object
     */
    RWikiObject createNewRWikiCurrentObject();
    /**
     * Creates a new RWiki permissions bean.
     *
     * @return a new permissions bean
     */
    RWikiPermissions createNewRWikiPermissionsImpl();
    /**
     * Fetches the entity based on the RWikiObject.
     *
     * @param rwo page object
     * @return the corresponding Sakai entity
     */
    Entity getEntity(RWikiObject rwo);
    /**
     * Fetches the Reference object from the entity manager based on the
     * RWikiObject.
     *
     * @param rwo page object
     * @return the corresponding Sakai reference
     */
    Reference getReference(RWikiObject rwo);
    /**
     * A map containing EntityHandlers for the service; each entity handler
     * handles a subtype.
     *
     * @return map of handlers keyed by subtype
     */
    Map getHandlers();
    /**
     * Finds all the changes at or below the given base path since the time
     * specified.
     *
     * @param time
     *        the time after which to consider changes
     * @param basepath
     *        the base path
     * @return a list of RWikiCurrentObjects
     */
    List findAllChangedSince(Date time, String basepath);
    /**
     * Checks for read permission.
     *
     * @param rwo page object
     * @return {@code true} if the current user may read the page
     */
    boolean checkRead(RWikiObject rwo);
    /**
     * Checks for update permission.
     *
     * @param rwo page object
     * @return {@code true} if the current user may update the page
     */
    boolean checkUpdate(RWikiObject rwo);
    /**
     * Checks for admin permission.
     *
     * @param rwo page object
     * @return {@code true} if the current user may administer the page
     */
    boolean checkAdmin(RWikiObject rwo);
    /**
     * Finds all page names in the database.
     *
     * @return a list of page names
     */
    List findAllPageNames();
    /**
     * Generates a valid entity reference from the page name.
     *
     * @param pageName page name
     * @return the entity reference string
     */
    String createReference(String pageName);
    /**
     * Gets a component page-link renderer.
     *
     * @param pageSpace page space the links are rendered in
     * @param withBreadCrumb whether the renderer includes breadcrumb state
     * @return the renderer
     */
    PageLinkRenderer getComponentPageLinkRender(String pageSpace, boolean withBreadCrumb);
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed;
import java.io.Externalizable;
import java.nio.ByteBuffer;
import java.util.Collection;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.util.tostring.GridToStringBuilder;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
import org.jetbrains.annotations.Nullable;
/**
* Transaction completion message.
*/
public class GridDistributedTxFinishRequest extends GridDistributedBaseMessage {
    /** Fixed serial version: message layout is versioned via the direct-marshalling protocol instead. */
    private static final long serialVersionUID = 0L;
    /** Future ID. */
    private IgniteUuid futId;
    /** Thread ID. */
    private long threadId;
    /** Commit version. */
    private GridCacheVersion commitVer;
    /** Invalidate flag. */
    private boolean invalidate;
    /** Commit flag. */
    private boolean commit;
    /** Sync commit flag. */
    @Deprecated
    private boolean syncCommit;
    /** Sync rollback flag. */
    @Deprecated
    private boolean syncRollback;
    /** Min version used as base for completed versions. */
    private GridCacheVersion baseVer;
    /** Expected txSize. */
    private int txSize;
    /** System transaction flag. */
    private boolean sys;
    /** IO policy. */
    private byte plc;
    /**
     * Empty constructor required by {@link Externalizable}.
     */
    public GridDistributedTxFinishRequest() {
        /* No-op. */
    }
    /**
     * @param xidVer Transaction ID.
     * @param futId Future ID.
     * @param commitVer Commit version (may be {@code null}).
     * @param threadId Thread ID.
     * @param commit Commit flag.
     * @param invalidate Invalidate flag.
     * @param sys System transaction flag.
     * @param plc IO policy.
     * @param syncCommit Sync commit flag.
     * @param syncRollback Sync rollback flag.
     * @param baseVer Base version.
     * @param committedVers Committed versions.
     * @param rolledbackVers Rolled back versions.
     * @param txSize Expected transaction size.
     * @param addDepInfo Deployment info flag.
     */
    public GridDistributedTxFinishRequest(
        GridCacheVersion xidVer,
        IgniteUuid futId,
        @Nullable GridCacheVersion commitVer,
        long threadId,
        boolean commit,
        boolean invalidate,
        boolean sys,
        byte plc,
        boolean syncCommit,
        boolean syncRollback,
        GridCacheVersion baseVer,
        Collection<GridCacheVersion> committedVers,
        Collection<GridCacheVersion> rolledbackVers,
        int txSize,
        boolean addDepInfo
    ) {
        super(xidVer, 0, addDepInfo);
        assert xidVer != null;
        this.futId = futId;
        this.commitVer = commitVer;
        this.threadId = threadId;
        this.commit = commit;
        this.invalidate = invalidate;
        this.sys = sys;
        this.plc = plc;
        this.syncCommit = syncCommit;
        this.syncRollback = syncRollback;
        this.baseVer = baseVer;
        this.txSize = txSize;
        // Completed-version collections are stored by the base message.
        completedVersions(committedVers, rolledbackVers);
    }
    /**
     * @return System transaction flag.
     */
    public boolean system() {
        return sys;
    }
    /**
     * @return IO policy.
     */
    public byte policy() {
        return plc;
    }
    /**
     * @return Future ID.
     */
    public IgniteUuid futureId() {
        return futId;
    }
    /**
     * @return Thread ID.
     */
    public long threadId() {
        return threadId;
    }
    /**
     * @return Commit version.
     */
    public GridCacheVersion commitVersion() {
        return commitVer;
    }
    /**
     * @return Commit flag.
     */
    public boolean commit() {
        return commit;
    }
    /**
     * @return Invalidate flag.
     */
    public boolean isInvalidate() {
        return invalidate;
    }
    /**
     * @return Sync commit flag.
     */
    public boolean syncCommit() {
        return syncCommit;
    }
    /**
     * @param syncCommit Sync commit flag.
     */
    public void syncCommit(boolean syncCommit) {
        this.syncCommit = syncCommit;
    }
    /**
     * @return Sync rollback flag.
     */
    public boolean syncRollback() {
        return syncRollback;
    }
    /**
     * @return Base version.
     */
    public GridCacheVersion baseVersion() {
        return baseVer;
    }
    /**
     * @return Expected tx size.
     */
    public int txSize() {
        return txSize;
    }
    /**
     * @return {@code True} if reply is required (the sync flag matching the commit/rollback outcome).
     */
    public boolean replyRequired() {
        return commit ? syncCommit : syncRollback;
    }
    /** {@inheritDoc} */
    @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
        writer.setBuffer(buf);
        if (!super.writeTo(buf, writer))
            return false;
        if (!writer.isHeaderWritten()) {
            if (!writer.writeHeader(directType(), fieldsCount()))
                return false;
            writer.onHeaderWritten();
        }
        // Deliberate fall-through: writing resumes at the state where the
        // previous (partial) write left off, and each case continues into the
        // next one. States 0-6 are written by the superclass. Field order here
        // is the wire format and must match readFrom() exactly.
        switch (writer.state()) {
            case 7:
                if (!writer.writeMessage("baseVer", baseVer))
                    return false;
                writer.incrementState();
            case 8:
                if (!writer.writeBoolean("commit", commit))
                    return false;
                writer.incrementState();
            case 9:
                if (!writer.writeMessage("commitVer", commitVer))
                    return false;
                writer.incrementState();
            case 10:
                if (!writer.writeIgniteUuid("futId", futId))
                    return false;
                writer.incrementState();
            case 11:
                if (!writer.writeBoolean("invalidate", invalidate))
                    return false;
                writer.incrementState();
            case 12:
                if (!writer.writeByte("plc", plc))
                    return false;
                writer.incrementState();
            case 13:
                if (!writer.writeBoolean("syncCommit", syncCommit))
                    return false;
                writer.incrementState();
            case 14:
                if (!writer.writeBoolean("syncRollback", syncRollback))
                    return false;
                writer.incrementState();
            case 15:
                if (!writer.writeBoolean("sys", sys))
                    return false;
                writer.incrementState();
            case 16:
                if (!writer.writeLong("threadId", threadId))
                    return false;
                writer.incrementState();
            case 17:
                if (!writer.writeInt("txSize", txSize))
                    return false;
                writer.incrementState();
        }
        return true;
    }
    /** {@inheritDoc} */
    @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
        reader.setBuffer(buf);
        if (!reader.beforeMessageRead())
            return false;
        if (!super.readFrom(buf, reader))
            return false;
        // Deliberate fall-through, mirroring writeTo(): reading resumes at the
        // state where the previous (partial) read left off.
        switch (reader.state()) {
            case 7:
                baseVer = reader.readMessage("baseVer");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 8:
                commit = reader.readBoolean("commit");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 9:
                commitVer = reader.readMessage("commitVer");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 10:
                futId = reader.readIgniteUuid("futId");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 11:
                invalidate = reader.readBoolean("invalidate");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 12:
                plc = reader.readByte("plc");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 13:
                syncCommit = reader.readBoolean("syncCommit");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 14:
                syncRollback = reader.readBoolean("syncRollback");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 15:
                sys = reader.readBoolean("sys");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 16:
                threadId = reader.readLong("threadId");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 17:
                txSize = reader.readInt("txSize");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
        }
        return reader.afterMessageRead(GridDistributedTxFinishRequest.class);
    }
    /** {@inheritDoc} */
    @Override public byte directType() {
        return 23;
    }
    /** {@inheritDoc} */
    @Override public byte fieldsCount() {
        return 18;
    }
    /** {@inheritDoc} */
    @Override public String toString() {
        return GridToStringBuilder.toString(GridDistributedTxFinishRequest.class, this,
            "super", super.toString());
    }
}
| |
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.scheduler;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import co.aikar.timings.TimingsManager;
import org.spongepowered.api.plugin.PluginContainer;
import org.spongepowered.api.scheduler.Task;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.event.tracking.CauseTracker;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
/**
 * Base class for Sponge schedulers. Holds the map of pending/running tasks
 * and implements the per-tick processing loop; subclasses decide how the
 * task runnable is actually executed (sync vs. async) and what a timestamp
 * means (real time vs. ticks).
 */
abstract class SchedulerBase {
    // The simple queue of all pending (and running) ScheduledTasks
    private final Map<UUID, ScheduledTask> taskMap = Maps.newConcurrentMap();
    // NOTE(review): incremented without synchronization in nextName();
    // concurrent schedulers could mint duplicate names — confirm callers.
    private long sequenceNumber = 0L;
    // Format: "<plugin>-S-<n>" for synchronous, "<plugin>-A-<n>" for async.
    private final String taskNameFmt;
    protected SchedulerBase(ScheduledTask.TaskSynchronicity type) {
        this.taskNameFmt = "%s-" + (type == ScheduledTask.TaskSynchronicity.SYNCHRONOUS ? "S" : "A") + "-%d";
    }
    /** Generates the next auto-assigned task name for the given plugin. */
    protected String nextName(PluginContainer plugin) {
        return String.format(this.taskNameFmt, plugin.getId(), this.sequenceNumber++);
    }
    /**
     * Gets the timestamp to update the timestamp of a task. This method is task
     * sensitive to support different timestamp types i.e. real time and ticks.
     *
     * <p>Subtracting the result of this method from a previously obtained
     * result should become a representation of the time that has passed
     * between those calls.</p>
     *
     * @param task The task
     * @return Timestamp for the task
     */
    protected long getTimestamp(ScheduledTask task) {
        return System.nanoTime();
    }
    /**
     * Adds the task to the task map, will attempt to process the task on the
     * next call to {@link #runTick}.
     *
     * @param task The task to add
     */
    protected void addTask(ScheduledTask task) {
        task.setTimestamp(this.getTimestamp(task));
        this.taskMap.put(task.getUniqueId(), task);
    }
    /**
     * Removes the task from the task map.
     *
     * @param task The task to remove
     */
    protected void removeTask(ScheduledTask task) {
        this.taskMap.remove(task.getUniqueId());
    }
    /** Looks up a task by its unique id, if still scheduled. */
    protected Optional<Task> getTask(UUID id) {
        return Optional.<Task>ofNullable(this.taskMap.get(id));
    }
    /**
     * Snapshots the currently scheduled tasks.
     *
     * <p>NOTE(review): synchronizing on a ConcurrentMap does not block other
     * accessors of the map, so this snapshot may still interleave with
     * concurrent put/remove — confirm whether the lock is needed at all.</p>
     */
    protected Set<Task> getScheduledTasks() {
        synchronized (this.taskMap) {
            return Sets.<Task>newHashSet(this.taskMap.values());
        }
    }
    /**
     * Process all tasks in the map.
     */
    protected final void runTick() {
        this.preTick();
        TimingsManager.PLUGIN_SCHEDULER_HANDLER.startTimingIfSync();
        try {
            this.taskMap.values().forEach(this::processTask);
            this.postTick();
        } finally {
            // Always give subclasses their cleanup hook, even if a task threw.
            this.finallyPostTick();
        }
        TimingsManager.PLUGIN_SCHEDULER_HANDLER.stopTimingIfSync();
    }
    /**
     * Fired when the scheduler begins to tick, before any tasks are processed.
     */
    protected void preTick() {
    }
    /**
     * Fired when the scheduler has processed all tasks.
     */
    protected void postTick() {
    }
    /**
     * Fired after tasks have attempted to be processed, in a finally block to
     * guarantee execution regardless of any error when processing a task.
     */
    protected void finallyPostTick() {
    }
    /**
     * Processes the task: removes cancelled tasks, and starts a task once the
     * elapsed time since its timestamp crosses its delay (WAITING) or period
     * (RUNNING) threshold.
     *
     * @param task The task to process
     */
    protected void processTask(ScheduledTask task) {
        // If the task is now slated to be cancelled, we just remove it as if it
        // no longer exists.
        if (task.getState() == ScheduledTask.ScheduledTaskState.CANCELED) {
            this.removeTask(task);
            return;
        }
        long threshold = Long.MAX_VALUE;
        // Figure out if we start a delayed Task after threshold ticks or, start
        // it after the interval (period) of the repeating task parameter.
        if (task.getState() == ScheduledTask.ScheduledTaskState.WAITING) {
            threshold = task.offset;
        } else if (task.getState() == ScheduledTask.ScheduledTaskState.RUNNING) {
            threshold = task.period;
        }
        // This moment is 'now'
        long now = this.getTimestamp(task);
        // So, if the current time minus the timestamp of the task is greater
        // than the delay to wait before starting the task, then start the task.
        // Repeating tasks get a reset-timestamp each time they are set RUNNING
        // If the task has a period of 0 (zero) this task will not repeat, and
        // is removed after we start it.
        if (threshold <= (now - task.getTimestamp())) {
            task.setState(ScheduledTask.ScheduledTaskState.SWITCHING);
            task.setTimestamp(this.getTimestamp(task));
            startTask(task);
            // If task is one time shot, remove it from the map.
            if (task.period == 0L) {
                this.removeTask(task);
            }
        }
    }
    /**
     * Begin the execution of a task. Exceptions are caught and logged.
     *
     * @param task The task to start
     */
    protected void startTask(final ScheduledTask task) {
        this.executeTaskRunnable(task, () -> {
            task.setState(ScheduledTask.ScheduledTaskState.RUNNING);
            // Sync tasks run on the main thread: attribute cause-tracking to
            // the owning plugin for the duration of the consumer call.
            if (!task.isAsynchronous()) {
                CauseTracker.getInstance().getCurrentContext().activeContainer(task.getOwner());
            }
            task.getTimingsHandler().startTimingIfSync();
            try {
                task.getConsumer().accept(task);
            } catch (Throwable t) {
                // A failing task must not kill the scheduler; log and continue.
                SpongeImpl.getLogger().error("The Scheduler tried to run the task {} owned by {}, but an error occured.", task.getName(),
                        task.getOwner(), t);
            }
            task.getTimingsHandler().stopTimingIfSync();
            if (!task.isAsynchronous()) {
                CauseTracker.getInstance().getCurrentContext().activeContainer(null);
            }
        });
    }
    /**
     * Actually run the runnable that will begin the task
     *
     * @param task The task being started
     * @param runnable The runnable to run
     */
    protected abstract void executeTaskRunnable(ScheduledTask task, Runnable runnable);
}
| |
package org.wiztools.restclient.ui.reqgo;
import com.jidesoft.swing.AutoCompletion;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Toolkit;
import java.awt.event.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import javax.swing.*;
import org.wiztools.restclient.ui.RESTUserInterface;
import org.wiztools.restclient.ui.RESTView;
import org.wiztools.restclient.ui.UIUtil;
/**
*
* @author subwiz
*/
/**
 * Panel combining the URL address bar (an editable, auto-completing combo box
 * with most-recently-used history) and the Go/Stop button.
 *
 * @author subwiz
 */
public class ReqUrlGoPanelImpl extends JPanel implements ReqUrlGoPanel {
    @Inject private RESTUserInterface rest_ui;
    private ImageIcon icon_go = UIUtil.getIconFromClasspath("org/wiztools/restclient/go.png");
    private ImageIcon icon_stop = UIUtil.getIconFromClasspath("org/wiztools/restclient/stop.png");
    private static final String TEXT_GO = "Go!";
    private static final String TEXT_STOP = "Stop!";
    // Editable combo: the selected item is the current URL, the item list is the MRU history.
    private final JComboBox jcb_url = new JComboBox();
    // The button's icon doubles as the run-state flag (see isIdle()/isRunning()).
    private final JButton jb_request = new JButton(icon_go);
    private final List<ActionListener> listeners = new ArrayList<ActionListener>();
    /** Builds the UI; called by the DI container after field injection. */
    @PostConstruct
    protected void init() {
        { // Keystroke for focusing on the address bar (Ctrl+L / Cmd+L):
            final KeyStroke ks = KeyStroke.getKeyStroke(KeyEvent.VK_L,
                    Toolkit.getDefaultToolkit().getMenuShortcutKeyMask());
            final String actionName = "org.wiztools.restclient:ADDRESS_FOCUS";
            jcb_url.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW)
                    .put(ks, actionName);
            jcb_url.getActionMap().put(actionName, new AbstractAction() {
                @Override
                public void actionPerformed(ActionEvent ae) {
                    jcb_url.requestFocus();
                }
            });
        }
        // Layout follows:
        // West:
        JLabel jl_url = new JLabel("URL: ");
        jl_url.setLabelFor(jcb_url);
        jl_url.setDisplayedMnemonic('u');
        // NOTE(review): duplicated below after the East section — harmless but redundant.
        rest_ui.getFrame().getRootPane().setDefaultButton(jb_request);
        setLayout(new BorderLayout(RESTView.BORDER_WIDTH, 0));
        add(jl_url, BorderLayout.WEST);
        // Center: select-all on focus so typing replaces the current URL.
        jcb_url.setToolTipText("URL");
        jcb_url.setEditable(true);
        jcb_url.getEditor().getEditorComponent().addFocusListener(new FocusAdapter() {
            @Override
            public void focusGained(FocusEvent e) {
                ((JTextField) jcb_url.getEditor().getEditorComponent()).selectAll();
            }
        });
        AutoCompletion ac = new AutoCompletion(jcb_url);
        ac.setStrict(false);
        ac.setStrictCompletion(false);
        add(jcb_url, BorderLayout.CENTER);
        // East: the Go/Stop button; Enter anywhere triggers it (default button).
        jb_request.setToolTipText(TEXT_GO);
        rest_ui.getFrame().getRootPane().setDefaultButton(jb_request);
        jb_request.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent event) {
                jb_requestActionPerformed();
            }
        });
        add(jb_request, BorderLayout.EAST);
    }
    @Override
    public void addActionListener(ActionListener listener) {
        listeners.add(listener);
    }
    /** GO when showing the go icon, CANCEL when showing the stop icon. */
    public ACTION_TYPE getActionType() {
        if(jb_request.getIcon() == icon_go){
            return ACTION_TYPE.GO;
        }
        else {
            return ACTION_TYPE.CANCEL;
        }
    }
    /**
     * Maintains the MRU history (selected URL moves to the top, list capped at
     * 20 entries) and then notifies all registered listeners.
     */
    private void jb_requestActionPerformed() {
        final Object item = jcb_url.getSelectedItem();
        final int count = jcb_url.getItemCount();
        // Raw LinkedList used only for the contains() check below.
        final LinkedList l = new LinkedList();
        for(int i=0; i<count; i++){
            l.add(jcb_url.getItemAt(i));
        }
        if(l.contains(item)){ // Item already present
            // Remove and add to bring it to the top
            // l.remove(item);
            // l.addFirst(item);
            // System.out.println("Removing and inserting at top");
            jcb_url.removeItem(item);
            jcb_url.insertItemAt(item, 0);
        }
        else{ // Add new item
            if(((String)item).trim().length() != 0 ) {
                // The total number of items should not exceed 20
                if(count > 19){
                    // Remove last item to give place
                    // to new one
                    //l.removeLast();
                    jcb_url.removeItemAt(count - 1);
                }
                //l.addFirst(item);
                jcb_url.insertItemAt(item, 0);
            }
        }
        // make the selected item is the item we want
        jcb_url.setSelectedItem(item);
        // Listeners are invoked with a null event; they must not dereference it.
        for(ActionListener listener: listeners) {
            listener.actionPerformed(null);
        }
    }
    @Override
    public String getUrlString() {
        return (String) jcb_url.getSelectedItem();
    }
    @Override
    public void setUrlString(String url) {
        jcb_url.setSelectedItem(url);
    }
    /** Returns {@code true} only for a well-formed URL with the https protocol. */
    @Override
    public boolean isSslUrl() {
        try {
            URL url = new URL((String) jcb_url.getSelectedItem());
            if(url.getProtocol().equalsIgnoreCase("https")) {
                return true;
            }
        }
        catch(MalformedURLException ex) {
            // Deliberately swallowed: an unparsable URL is simply "not SSL".
            // return default value!
        }
        return false;
    }
    /** Forwards focus to the URL combo box. */
    @Override
    public void requestFocus() {
        super.requestFocus();
        jcb_url.requestFocus();
    }
    /** Switches the button to its "Stop!" appearance while a request runs. */
    @Override
    public void setAsRunning() {
        jb_request.setIcon(icon_stop);
        jb_request.setToolTipText(TEXT_STOP);
    }
    /** Restores the button to its "Go!" appearance. */
    @Override
    public void setAsIdle() {
        jb_request.setIcon(icon_go);
        jb_request.setToolTipText(TEXT_GO);
    }
    @Override
    public boolean isIdle() {
        return jb_request.getIcon() == icon_go;
    }
    @Override
    public boolean isRunning() {
        return jb_request.getIcon() == icon_stop;
    }
    @Override
    public Component getComponent() {
        return this;
    }
    /** Clears the current URL selection (history items are retained). */
    @Override
    public void clear() {
        jcb_url.setSelectedItem(null);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.internal;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Query;
import org.elasticsearch.action.search.SearchShardTask;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.collapse.CollapseContext;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rescore.RescoreContext;
import org.elasticsearch.search.sort.SortAndFormats;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.util.List;
import java.util.Map;
/**
 * A {@link SearchContext} decorator that forwards every call, unchanged, to a
 * wrapped delegate context. Subclasses override only the methods whose
 * behavior they need to alter; every other operation transparently falls
 * through to the delegate {@code in}. Note that {@link #doClose()} closes the
 * delegate, so wrapping a context transfers close responsibility here.
 */
public abstract class FilteredSearchContext extends SearchContext {

    // The wrapped context that receives every forwarded call.
    private final SearchContext in;

    /**
     * Creates a filtered view over the given delegate context.
     */
    public FilteredSearchContext(SearchContext in) {
        this.in = in;
    }

    @Override
    public boolean hasStoredFields() {
        return in.hasStoredFields();
    }

    @Override
    public boolean hasStoredFieldsContext() {
        return in.hasStoredFieldsContext();
    }

    @Override
    public boolean storedFieldsRequested() {
        return in.storedFieldsRequested();
    }

    @Override
    public StoredFieldsContext storedFieldsContext() {
        return in.storedFieldsContext();
    }

    @Override
    public SearchContext storedFieldsContext(StoredFieldsContext storedFieldsContext) {
        return in.storedFieldsContext(storedFieldsContext);
    }

    @Override
    protected void doClose() {
        // Closing the wrapper closes the delegate as well.
        in.doClose();
    }

    @Override
    public void preProcess(boolean rewrite) {
        in.preProcess(rewrite);
    }

    @Override
    public Query buildFilteredQuery(Query query) {
        return in.buildFilteredQuery(query);
    }

    @Override
    public ShardSearchContextId id() {
        return in.id();
    }

    @Override
    public String source() {
        return in.source();
    }

    @Override
    public ShardSearchRequest request() {
        return in.request();
    }

    @Override
    public SearchType searchType() {
        return in.searchType();
    }

    @Override
    public SearchShardTarget shardTarget() {
        return in.shardTarget();
    }

    @Override
    public int numberOfShards() {
        return in.numberOfShards();
    }

    @Override
    public float queryBoost() {
        return in.queryBoost();
    }

    @Override
    public ScrollContext scrollContext() {
        return in.scrollContext();
    }

    @Override
    public SearchContextAggregations aggregations() {
        return in.aggregations();
    }

    @Override
    public SearchContext aggregations(SearchContextAggregations aggregations) {
        return in.aggregations(aggregations);
    }

    @Override
    public SearchHighlightContext highlight() {
        return in.highlight();
    }

    @Override
    public void highlight(SearchHighlightContext highlight) {
        in.highlight(highlight);
    }

    @Override
    public InnerHitsContext innerHits() {
        return in.innerHits();
    }

    @Override
    public SuggestionSearchContext suggest() {
        return in.suggest();
    }

    @Override
    public void suggest(SuggestionSearchContext suggest) {
        in.suggest(suggest);
    }

    @Override
    public List<RescoreContext> rescore() {
        return in.rescore();
    }

    @Override
    public boolean hasScriptFields() {
        return in.hasScriptFields();
    }

    @Override
    public ScriptFieldsContext scriptFields() {
        return in.scriptFields();
    }

    @Override
    public boolean sourceRequested() {
        return in.sourceRequested();
    }

    @Override
    public boolean hasFetchSourceContext() {
        return in.hasFetchSourceContext();
    }

    @Override
    public FetchSourceContext fetchSourceContext() {
        return in.fetchSourceContext();
    }

    @Override
    public SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext) {
        return in.fetchSourceContext(fetchSourceContext);
    }

    @Override
    public ContextIndexSearcher searcher() {
        return in.searcher();
    }

    @Override
    public IndexShard indexShard() {
        return in.indexShard();
    }

    @Override
    public MapperService mapperService() {
        return in.mapperService();
    }

    @Override
    public SimilarityService similarityService() {
        return in.similarityService();
    }

    @Override
    public BigArrays bigArrays() {
        return in.bigArrays();
    }

    @Override
    public BitsetFilterCache bitsetFilterCache() {
        return in.bitsetFilterCache();
    }

    @Override
    public TimeValue timeout() {
        return in.timeout();
    }

    @Override
    public void timeout(TimeValue timeout) {
        in.timeout(timeout);
    }

    @Override
    public int terminateAfter() {
        return in.terminateAfter();
    }

    @Override
    public void terminateAfter(int terminateAfter) {
        in.terminateAfter(terminateAfter);
    }

    @Override
    public boolean lowLevelCancellation() {
        return in.lowLevelCancellation();
    }

    @Override
    public SearchContext minimumScore(float minimumScore) {
        return in.minimumScore(minimumScore);
    }

    @Override
    public Float minimumScore() {
        return in.minimumScore();
    }

    @Override
    public SearchContext sort(SortAndFormats sort) {
        return in.sort(sort);
    }

    @Override
    public SortAndFormats sort() {
        return in.sort();
    }

    @Override
    public SearchContext trackScores(boolean trackScores) {
        return in.trackScores(trackScores);
    }

    @Override
    public boolean trackScores() {
        return in.trackScores();
    }

    @Override
    public SearchContext trackTotalHitsUpTo(int trackTotalHitsUpTo) {
        return in.trackTotalHitsUpTo(trackTotalHitsUpTo);
    }

    @Override
    public int trackTotalHitsUpTo() {
        return in.trackTotalHitsUpTo();
    }

    @Override
    public SearchContext searchAfter(FieldDoc searchAfter) {
        return in.searchAfter(searchAfter);
    }

    @Override
    public FieldDoc searchAfter() {
        return in.searchAfter();
    }

    @Override
    public SearchContext parsedPostFilter(ParsedQuery postFilter) {
        return in.parsedPostFilter(postFilter);
    }

    @Override
    public ParsedQuery parsedPostFilter() {
        return in.parsedPostFilter();
    }

    @Override
    public Query aliasFilter() {
        return in.aliasFilter();
    }

    @Override
    public SearchContext parsedQuery(ParsedQuery query) {
        return in.parsedQuery(query);
    }

    @Override
    public ParsedQuery parsedQuery() {
        return in.parsedQuery();
    }

    @Override
    public Query query() {
        return in.query();
    }

    @Override
    public int from() {
        return in.from();
    }

    @Override
    public SearchContext from(int from) {
        return in.from(from);
    }

    @Override
    public int size() {
        return in.size();
    }

    @Override
    public SearchContext size(int size) {
        return in.size(size);
    }

    @Override
    public boolean explain() {
        return in.explain();
    }

    @Override
    public void explain(boolean explain) {
        in.explain(explain);
    }

    @Override
    public List<String> groupStats() {
        return in.groupStats();
    }

    @Override
    public void groupStats(List<String> groupStats) {
        in.groupStats(groupStats);
    }

    @Override
    public boolean version() {
        return in.version();
    }

    @Override
    public void version(boolean version) {
        in.version(version);
    }

    @Override
    public boolean seqNoAndPrimaryTerm() {
        return in.seqNoAndPrimaryTerm();
    }

    @Override
    public void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) {
        in.seqNoAndPrimaryTerm(seqNoAndPrimaryTerm);
    }

    @Override
    public int[] docIdsToLoad() {
        return in.docIdsToLoad();
    }

    @Override
    public int docIdsToLoadFrom() {
        return in.docIdsToLoadFrom();
    }

    @Override
    public int docIdsToLoadSize() {
        return in.docIdsToLoadSize();
    }

    @Override
    public SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int docsIdsToLoadSize) {
        return in.docIdsToLoad(docIdsToLoad, docsIdsToLoadFrom, docsIdsToLoadSize);
    }

    @Override
    public DfsSearchResult dfsResult() {
        return in.dfsResult();
    }

    @Override
    public QuerySearchResult queryResult() {
        return in.queryResult();
    }

    @Override
    public FetchSearchResult fetchResult() {
        return in.fetchResult();
    }

    @Override
    public FetchPhase fetchPhase() {
        return in.fetchPhase();
    }

    @Override
    public MappedFieldType fieldType(String name) {
        return in.fieldType(name);
    }

    @Override
    public ObjectMapper getObjectMapper(String name) {
        return in.getObjectMapper(name);
    }

    @Override
    public long getRelativeTimeInMillis() {
        return in.getRelativeTimeInMillis();
    }

    @Override
    public void addSearchExt(SearchExtBuilder searchExtBuilder) {
        in.addSearchExt(searchExtBuilder);
    }

    @Override
    public SearchExtBuilder getSearchExt(String name) {
        return in.getSearchExt(name);
    }

    @Override
    public Profilers getProfilers() {
        return in.getProfilers();
    }

    @Override
    public Map<Class<?>, Collector> queryCollectors() { return in.queryCollectors();}

    @Override
    public QueryShardContext getQueryShardContext() {
        return in.getQueryShardContext();
    }

    @Override
    public void setTask(SearchShardTask task) {
        in.setTask(task);
    }

    @Override
    public SearchShardTask getTask() {
        return in.getTask();
    }

    @Override
    public boolean isCancelled() {
        return in.isCancelled();
    }

    @Override
    public SearchContext collapse(CollapseContext collapse) {
        return in.collapse(collapse);
    }

    @Override
    public CollapseContext collapse() {
        return in.collapse();
    }

    @Override
    public void addRescore(RescoreContext rescore) {
        in.addRescore(rescore);
    }

    @Override
    public ReaderContext readerContext() {
        return in.readerContext();
    }
}
| |
/*
* Copyright 2010 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.reteoo.builder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.drools.core.common.BaseNode;
import org.drools.core.common.BetaConstraints;
import org.drools.core.common.EmptyBetaConstraints;
import org.drools.core.common.RuleBasePartitionId;
import org.drools.core.definitions.rule.impl.RuleImpl;
import org.drools.core.reteoo.AlphaNode;
import org.drools.core.reteoo.BetaNode;
import org.drools.core.reteoo.EntryPointNode;
import org.drools.core.reteoo.NodeTypeEnums;
import org.drools.core.reteoo.ObjectTypeNode;
import org.drools.core.rule.AbstractCompositeConstraint;
import org.drools.core.rule.Declaration;
import org.drools.core.rule.GroupElement;
import org.drools.core.rule.IntervalProviderConstraint;
import org.drools.core.rule.Pattern;
import org.drools.core.rule.RuleConditionElement;
import org.drools.core.spi.AlphaNodeFieldConstraint;
import org.drools.core.spi.BetaNodeFieldConstraint;
import org.drools.core.spi.ObjectType;
import org.drools.core.time.Interval;
import org.drools.core.time.TemporalDependencyMatrix;
import org.drools.core.time.TimeUtils;
import org.kie.api.definition.rule.Rule;
/**
* Utility functions for reteoo build
*/
/**
 * Utility functions for reteoo build
 */
public class BuildUtils {

    // Maps a rule-condition-element class to the component builder that knows
    // how to translate it into rete network nodes.
    private final Map<Class< ? >, ReteooComponentBuilder> componentBuilders = new HashMap<Class< ? >, ReteooComponentBuilder>();

    /**
     * Adds the given builder for the given target to the builders map
     */
    public void addBuilder(final Class< ? > target,
                           final ReteooComponentBuilder builder) {
        this.componentBuilders.put( target,
                                    builder );
    }

    /**
     * Returns a builder for the given target from the builders map
     */
    public ReteooComponentBuilder getBuilderFor(final RuleConditionElement target) {
        return getBuilderFor( target.getClass() );
    }

    /**
     * Looks up a builder for the exact class first; if none is registered,
     * recursively walks up the superclass chain. Returns null when no builder
     * is found anywhere along the hierarchy.
     */
    public ReteooComponentBuilder getBuilderFor(final Class cls) {
        ReteooComponentBuilder builder = this.componentBuilders.get( cls );
        return builder != null || cls.getSuperclass() == null ? builder : getBuilderFor(cls.getSuperclass());
    }

    /**
     * Attaches a node into the network. If a node already exists that could
     * substitute, it is used instead.
     *
     * @param context
     *      The current build context
     * @param candidate
     *      The node to attach.
     *
     * @return the actual attached node that may be the one given as parameter
     *         or eventually one that was already in the cache if sharing is enabled
     */
    public <T extends BaseNode> T attachNode(BuildContext context, T candidate) {
        BaseNode node = null;
        RuleBasePartitionId partition = null;
        if ( candidate.getType() == NodeTypeEnums.EntryPointNode ) {
            // entry point nodes are always shared
            node = context.getKnowledgeBase().getRete().getEntryPointNode( ((EntryPointNode) candidate).getEntryPoint() );
            // all EntryPointNodes belong to the main partition
            partition = RuleBasePartitionId.MAIN_PARTITION;
        } else if ( candidate.getType() == NodeTypeEnums.ObjectTypeNode ) {
            // object type nodes are always shared
            Map<ObjectType, ObjectTypeNode> map = context.getKnowledgeBase().getRete().getObjectTypeNodes( context.getCurrentEntryPoint() );
            if ( map != null ) {
                ObjectTypeNode otn = map.get( ((ObjectTypeNode) candidate).getObjectType() );
                if ( otn != null ) {
                    // adjusting expiration offset
                    otn.mergeExpirationOffset( (ObjectTypeNode) candidate );
                    node = otn;
                }
            }
            // all ObjectTypeNodes belong to the main partition
            partition = RuleBasePartitionId.MAIN_PARTITION;
        } else if ( isSharingEnabledForNode( context,
                                             candidate ) ) {
            // other node types are shared only when the configuration allows it:
            // look for an equivalent node among the existing sinks of the source
            if ( (context.getTupleSource() != null) && NodeTypeEnums.isLeftTupleSink( candidate ) ) {
                node = context.getTupleSource().getSinkPropagator().getMatchingNode( candidate );
            } else if ( (context.getObjectSource() != null) && NodeTypeEnums.isObjectSink( candidate ) ) {
                node = context.getObjectSource().getObjectSinkPropagator().getMatchingNode( candidate );
            } else {
                throw new RuntimeException( "This is a bug on node sharing verification. Please report to development team." );
            }
        }
        // even if a matching node was found, sharing may still be vetoed
        // (e.g. subnetwork nodes crossing agenda groups)
        if ( node != null && !areNodesCompatibleForSharing(context, node) ) {
            node = null;
        }
        if ( node == null ) {
            // only attach() if it is a new node
            node = candidate;
            // new node, so it must be labeled
            if ( partition == null ) {
                // if it does not have a predefined partition label
                if ( context.getPartitionId() == null ) {
                    // if no label in current context, create one
                    context.setPartitionId( context.getKnowledgeBase().createNewPartitionId() );
                }
                partition = context.getPartitionId();
            }
            // set the node with the actual partition label
            node.setPartitionId( context, partition );
            node.attach(context);
            // adds the node to the context list to track all added nodes
            context.getNodes().add( node );
        } else {
            // shared node found
            mergeNodes(node, candidate);
            // undo previous id assignment
            context.releaseId( candidate );
            if ( partition == null && context.getPartitionId() == null ) {
                partition = node.getPartitionId();
                // if no label in current context, create one
                context.setPartitionId( partition );
            }
        }
        node.addAssociation( context, context.getRule() );
        return (T)node;
    }

    /**
     * Merges constraint metadata from a duplicate node into the shared node:
     * the package names of the duplicate's alpha/beta constraints are added to
     * the corresponding constraints of the retained node, position by position.
     */
    private void mergeNodes(BaseNode node, BaseNode duplicate) {
        if (node instanceof AlphaNode) {
            AlphaNodeFieldConstraint alphaConstraint = ((AlphaNode) node).getConstraint();
            alphaConstraint.addPackageNames(((AlphaNode) duplicate).getConstraint().getPackageNames());
        } else if (node instanceof BetaNode) {
            BetaNodeFieldConstraint[] betaConstraints = ((BetaNode) node).getConstraints();
            int i = 0;
            for (BetaNodeFieldConstraint betaConstraint : betaConstraints) {
                // constraints of node and duplicate are positionally aligned
                betaConstraint.addPackageNames(((BetaNode) duplicate).getConstraints()[i].getPackageNames());
                i++;
            }
        }
    }

    /**
     * Utility function to check if sharing is enabled for nodes of the given class
     */
    private boolean isSharingEnabledForNode(BuildContext context, BaseNode node) {
        if ( NodeTypeEnums.isLeftTupleSource( node )) {
            return context.getKnowledgeBase().getConfiguration().isShareBetaNodes();
        } else if ( NodeTypeEnums.isObjectSource( node ) ) {
            return context.getKnowledgeBase().getConfiguration().isShareAlphaNodes();
        }
        return false;
    }

    /**
     * Returns false when the existing node must NOT be shared with the rule
     * currently being built. Currently the only veto is for right-input
     * adapter nodes (subnetworks) whose associated rules belong to a different
     * agenda-group than the current rule.
     */
    // NOTE: "RightInputAdaterNode" is the constant's actual (misspelled) name
    // in NodeTypeEnums — do not "fix" the spelling here.
    private boolean areNodesCompatibleForSharing(BuildContext context, BaseNode node) {
        if ( node.getType() == NodeTypeEnums.RightInputAdaterNode ) {
            // avoid subnetworks sharing when they belong to 2 different agenda-groups
            String agendaGroup = context.getRule().getAgendaGroup();
            for (Rule associatedRule : node.getAssociatedRules()) {
                if (!agendaGroup.equals( (( RuleImpl ) associatedRule).getAgendaGroup() )) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Creates and returns a BetaConstraints object for the given list of constraints
     *
     * @param context the current build context
     * @param list the list of constraints
     */
    public BetaConstraints createBetaNodeConstraint(final BuildContext context,
                                                    final List<BetaNodeFieldConstraint> list,
                                                    final boolean disableIndexing) {
        BetaConstraints constraints;
        // specialized implementations exist for 0..4 constraints; anything
        // larger falls back to the default implementation
        switch ( list.size() ) {
            case 0 :
                constraints = EmptyBetaConstraints.getInstance();
                break;
            case 1 :
                constraints = BetaNodeConstraintFactory.Factory.get().createSingleBetaConstraints( list.get( 0 ),
                                                                                                   context.getKnowledgeBase().getConfiguration(),
                                                                                                   disableIndexing );
                break;
            case 2 :
                constraints = BetaNodeConstraintFactory.Factory.get().createDoubleBetaConstraints( list.toArray( new BetaNodeFieldConstraint[list.size()] ),
                                                                                                   context.getKnowledgeBase().getConfiguration(),
                                                                                                   disableIndexing );
                break;
            case 3 :
                constraints = BetaNodeConstraintFactory.Factory.get().createTripleBetaConstraints( list.toArray( new BetaNodeFieldConstraint[list.size()] ),
                                                                                                   context.getKnowledgeBase().getConfiguration(),
                                                                                                   disableIndexing );
                break;
            case 4 :
                constraints = BetaNodeConstraintFactory.Factory.get().createQuadroupleBetaConstraints( list.toArray( new BetaNodeFieldConstraint[list.size()] ),
                                                                                                       context.getKnowledgeBase().getConfiguration(),
                                                                                                       disableIndexing );
                break;
            default :
                constraints = BetaNodeConstraintFactory.Factory.get().createDefaultBetaConstraints( list.toArray( new BetaNodeFieldConstraint[list.size()] ),
                                                                                                    context.getKnowledgeBase().getConfiguration(),
                                                                                                    disableIndexing );
        }
        return constraints;
    }

    /**
     * Calculates the temporal distance between all event patterns in the given
     * subrule.
     *
     * @param groupElement the root element of a subrule being added to the rulebase
     * @return the temporal dependency matrix for the subrule's event patterns,
     *         or null when the subrule contains no event patterns
     */
    public TemporalDependencyMatrix calculateTemporalDistance(GroupElement groupElement) {
        // find the events
        List<Pattern> events = new ArrayList<Pattern>();
        selectAllEventPatterns( events,
                                groupElement );
        final int size = events.size();
        if ( size >= 1 ) {
            // create the matrix, initialized with [0,0] on the diagonal and
            // the unconstrained interval [MIN,MAX] everywhere else
            Interval[][] source = new Interval[size][];
            for ( int row = 0; row < size; row++ ) {
                source[row] = new Interval[size];
                for ( int col = 0; col < size; col++ ) {
                    if ( row == col ) {
                        source[row][col] = new Interval( 0,
                                                         0 );
                    } else {
                        source[row][col] = new Interval( Interval.MIN,
                                                         Interval.MAX );
                    }
                }
            }
            Interval[][] result;
            if ( size > 1 ) {
                List<Declaration> declarations = new ArrayList<>();
                int eventIndex = 0;
                // populate the matrix
                for ( Pattern event : events ) {
                    // references to other events are always backward references, so we can build the list as we go
                    declarations.add( event.getDeclaration() );
                    Map<Declaration, Interval> temporal = new HashMap<>();
                    gatherTemporalRelationships( event.getConstraints(),
                                                 temporal );
                    // intersects default values with the actual constrained intervals
                    for ( Map.Entry<Declaration, Interval> entry : temporal.entrySet() ) {
                        int targetIndex = declarations.indexOf( entry.getKey() );
                        Interval interval = entry.getValue();
                        source[targetIndex][eventIndex].intersect( interval );
                        // the symmetric cell gets the negated/reversed interval,
                        // taking care not to negate the MIN/MAX sentinels into overflow
                        Interval reverse = new Interval( interval.getUpperBound() == Long.MAX_VALUE ? Long.MIN_VALUE : -interval.getUpperBound(),
                                                         interval.getLowerBound() == Long.MIN_VALUE ? Long.MAX_VALUE : -interval.getLowerBound() );
                        source[eventIndex][targetIndex].intersect( reverse );
                    }
                    eventIndex++;
                }
                result = TimeUtils.calculateTemporalDistance( source );
            } else {
                result = source;
            }
            return new TemporalDependencyMatrix( result, events );
        }
        return null;
    }

    /**
     * Collects, into the given map, the temporal interval each constraint in
     * the list imposes against the (backward-referenced) event declaration it
     * targets. Composite constraints are unpacked recursively. When several
     * constraints target the same declaration their intervals are intersected.
     */
    private void gatherTemporalRelationships(List< ? > constraints,
                                             Map<Declaration, Interval> temporal) {
        for ( Object obj : constraints ) {
            if ( obj instanceof IntervalProviderConstraint) {
                IntervalProviderConstraint constr = (IntervalProviderConstraint) obj;
                if ( constr.isTemporal() ) {
                    // if a constraint already exists, calculate the intersection
                    Declaration[] decs = constr.getRequiredDeclarations();
                    // only calculate relationships to other event patterns
                    if( decs.length > 0 && decs[0].isPatternDeclaration() && decs[0].getPattern().getObjectType().isEvent() ) {
                        Declaration target = decs[0];
                        Interval interval = temporal.get( target );
                        if ( interval == null ) {
                            interval = constr.getInterval();
                            temporal.put( target,
                                          interval );
                        } else {
                            interval.intersect( constr.getInterval() );
                        }
                    }
                }
            } else if ( obj instanceof AbstractCompositeConstraint ) {
                gatherTemporalRelationships( Arrays.asList( ((AbstractCompositeConstraint) obj).getBetaConstraints() ),
                                             temporal );
            }
        }
    }

    /**
     * Recursively collects every Pattern in the condition-element tree whose
     * object type is an event, in depth-first order.
     */
    private void selectAllEventPatterns(List<Pattern> events,
                                        RuleConditionElement rce) {
        if ( rce instanceof Pattern ) {
            Pattern p = (Pattern) rce;
            if ( p.getObjectType().isEvent() ) {
                events.add( p );
            }
        }
        for ( RuleConditionElement child : rce.getNestedElements() ) {
            selectAllEventPatterns( events,
                                    child );
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.yawp.commons.utils;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.JarURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Vector;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
/**
* @author David Blevins
* @version $Rev$ $Date$
*/
public class ResourceFinder {
private final URL[] urls;
private final String path;
private final ClassLoader classLoader;
private final List<String> resourcesNotLoaded = new ArrayList<String>();
/** Searches from the root of the context class loader's classpath. */
public ResourceFinder(URL... urls) {
    this(null, Thread.currentThread().getContextClassLoader(), urls);
}
/** Searches under {@code path} using the context class loader. */
public ResourceFinder(String path) {
    this(path, Thread.currentThread().getContextClassLoader(), null);
}
/** Searches under {@code path}, restricted to the given urls if any. */
public ResourceFinder(String path, URL... urls) {
    this(path, Thread.currentThread().getContextClassLoader(), urls);
}
/** Searches under {@code path} using the given class loader. */
public ResourceFinder(String path, ClassLoader classLoader) {
    this(path, classLoader, null);
}
/**
 * Primary constructor.
 *
 * @param path        base path prepended to every uri; normalized to either
 *                    "" or a string ending in "/"
 * @param classLoader loader used for lookups; falls back to the thread's
 *                    context class loader when null
 * @param urls        optional explicit search roots; non-directory, non-jar
 *                    URLs are rewritten to "jar:...!/" form
 */
public ResourceFinder(String path, ClassLoader classLoader, URL... urls) {
    if (path == null){
        path = "";
    } else if (path.length() > 0 && !path.endsWith("/")) {
        path += "/";
    }
    this.path = path;
    if (classLoader == null) {
        classLoader = Thread.currentThread().getContextClassLoader();
    }
    this.classLoader = classLoader;
    // NOTE(review): this loop rewrites entries of the caller-supplied array
    // in place — callers share the mutation; confirm this is intended.
    for (int i = 0; urls != null && i < urls.length; i++) {
        URL url = urls[i];
        if (url == null || isDirectory(url) || url.getProtocol().equals("jar")) {
            continue;
        }
        try {
            urls[i] = new URL("jar", "", -1, url.toString() + "!/");
        } catch (MalformedURLException e) {
            // intentionally ignored: a URL that cannot be wrapped as a jar
            // URL is simply left as-is
        }
    }
    this.urls = (urls == null || urls.length == 0)? null : urls;
}
/**
 * A URL denotes a directory when its file component ends with a slash.
 * (An empty file component is not a directory.)
 */
private static boolean isDirectory(URL url) {
    return url.getFile().endsWith("/");
}
/**
 * Returns a list of resources that could not be loaded in the last invoked findAvailable* or
 * mapAvailable* methods.
 * <p/>
 * The list will only contain entries of resources that match the requirements
 * of the last invoked findAvailable* or mapAvailable* methods, but were unable to be
 * loaded and included in their results.
 * <p/>
 * The list returned is unmodifiable and the results of this method will change
 * after each invocation of a findAvailable* or mapAvailable* methods.
 * <p/>
 * This method is not thread safe.
 */
public List<String> getResourcesNotLoaded() {
    // Deliberately a live (unmodifiable) view, not a snapshot: subsequent
    // findAvailable*/mapAvailable* calls are reflected in the returned list.
    return Collections.unmodifiableList(resourcesNotLoaded);
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
//
// Find
//
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
/**
 * Resolves a single resource located at {@code path + uri}.
 *
 * @param uri the resource location relative to this finder's base path
 * @return the resolved resource URL, never null
 * @throws IOException when the resource cannot be located
 */
public URL find(String uri) throws IOException {
    final String fullUri = path + uri;
    final URL resource = getResource(fullUri);
    if (resource != null) {
        return resource;
    }
    throw new IOException("Could not find resource '" + fullUri + "'");
}
/**
 * Resolves every URL the class loader can find for {@code path + uri}.
 *
 * @param uri the resource location relative to this finder's base path
 * @return all matching URLs, in class-loader order; empty when none match
 * @throws IOException if the class loader fails while enumerating resources
 */
public List<URL> findAll(String uri) throws IOException {
    String fullUri = path + uri;
    // Collections.list drains the enumeration into a properly typed list;
    // the previous version built a raw ArrayList (unchecked warning).
    return Collections.list(getResources(fullUri));
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
//
// Find String
//
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
/**
 * Reads the contents of the URL as a {@link String} and returns it.
 *
 * @param uri the resource location relative to this finder's base path
 * @return the stringified content of the resource
 * @throws IOException if no resource exists at the uri or it cannot be read
 * @see ClassLoader#getResource(String)
 */
public String findString(String uri) throws IOException {
    final String fullUri = path + uri;
    final URL resource = getResource(fullUri);
    if (resource != null) {
        return readContents(resource);
    }
    throw new IOException("Could not find a resource in : " + fullUri);
}
/**
 * Reads the contents of every URL found for the uri and returns them as a
 * list of strings, in class-loader order.
 *
 * @param uri the resource location relative to this finder's base path
 * @return the content of each matching resource URL
 * @throws IOException if any of the found URLs are unable to be read
 */
public List<String> findAllStrings(String uri) throws IOException {
    final List<String> strings = new ArrayList<String>();
    for (Enumeration<URL> resources = getResources(path + uri); resources.hasMoreElements();) {
        strings.add(readContents(resources.nextElement()));
    }
    return strings;
}
/**
 * Reads the contents of every URL found for the uri, skipping any that cannot
 * be read. Skipped URLs are recorded in {@code resourcesNotLoaded} (which is
 * cleared first).
 *
 * @param uri the resource location relative to this finder's base path
 * @return the content of each readable resource URL
 * @throws IOException if classLoader.getResources throws an exception
 */
public List<String> findAvailableStrings(String uri) throws IOException {
    resourcesNotLoaded.clear();
    final List<String> strings = new ArrayList<String>();
    final Enumeration<URL> resources = getResources(path + uri);
    while (resources.hasMoreElements()) {
        final URL url = resources.nextElement();
        try {
            strings.add(readContents(url));
        } catch (IOException unreadable) {
            // best-effort semantics: record the failure and move on
            resourcesNotLoaded.add(url.toExternalForm());
        }
    }
    return strings;
}
/**
 * Reads the contents of all non-directory URLs immediately under the specified
 * location and returns them in a map keyed by the file name.
 * <p/>
 * Any URLs that cannot be read will cause an exception to be thrown.
 * <p/>
 * Example classpath:
 * <p/>
 * META-INF/serializables/one
 * META-INF/serializables/two
 * META-INF/serializables/three
 * META-INF/serializables/four/foo.txt
 * <p/>
 * ResourceFinder finder = new ResourceFinder("META-INF/");
 * Map map = finder.mapAvailableStrings("serializables");
 * map.contains("one"); // true
 * map.contains("two"); // true
 * map.contains("three"); // true
 * map.contains("four"); // false
 *
 * @param uri the directory location relative to this finder's base path
 * @return a map of file name to resource content
 * @throws IOException if any of the urls cannot be read
 */
public Map<String, String> mapAllStrings(String uri) throws IOException {
    Map<String, String> strings = new HashMap<String, String>();
    // typed for-each replaces the raw Iterator/Map.Entry casts of the
    // previous version (unchecked warnings, needless verbosity)
    for (Map.Entry<String, URL> entry : getResourcesMap(uri).entrySet()) {
        strings.put(entry.getKey(), readContents(entry.getValue()));
    }
    return strings;
}
/**
 * Reads the contents of all non-directory URLs immediately under the specified
 * location and returns them in a map keyed by the file name.
 * <p/>
 * Individual URLs that cannot be read are skipped and added to the
 * list of 'resourcesNotLoaded' (which is cleared first).
 * <p/>
 * Example classpath:
 * <p/>
 * META-INF/serializables/one
 * META-INF/serializables/two # not readable
 * META-INF/serializables/three
 * META-INF/serializables/four/foo.txt
 * <p/>
 * ResourceFinder finder = new ResourceFinder("META-INF/");
 * Map map = finder.mapAvailableStrings("serializables");
 * map.contains("one"); // true
 * map.contains("two"); // false
 * map.contains("three"); // true
 * map.contains("four"); // false
 *
 * @param uri the directory location relative to this finder's base path
 * @return a map of file name to resource content for each readable resource
 * @throws IOException if classLoader.getResources throws an exception
 */
public Map<String, String> mapAvailableStrings(String uri) throws IOException {
    resourcesNotLoaded.clear();
    Map<String, String> strings = new HashMap<String, String>();
    // typed for-each replaces the raw Iterator/Map.Entry casts of the
    // previous version (unchecked warnings, needless verbosity)
    for (Map.Entry<String, URL> entry : getResourcesMap(uri).entrySet()) {
        URL url = entry.getValue();
        try {
            strings.put(entry.getKey(), readContents(url));
        } catch (IOException notAvailable) {
            // best-effort semantics: record the failure and move on
            resourcesNotLoaded.add(url.toExternalForm());
        }
    }
    return strings;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
//
// Find Class
//
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
/**
 * Executes {@link #findString(String)} and treats the resource's content as a
 * fully qualified class name, which is then loaded and returned.
 *
 * @param uri the resource location relative to this finder's base path
 * @return the loaded class
 * @throws IOException if the resource cannot be found or read
 * @throws ClassNotFoundException if the named class cannot be loaded
 */
public Class findClass(String uri) throws IOException, ClassNotFoundException {
    return classLoader.loadClass(findString(uri));
}
/**
 * Executes findAllStrings and treats each string as the name of a class to
 * load and return.
 * <p/>
 * Any URL or class that cannot be loaded will cause an exception to be thrown.
 *
 * @param uri the resource location relative to this finder's base path
 * @return the loaded classes, in resource order
 * @throws IOException if any resource cannot be read
 * @throws ClassNotFoundException if any named class cannot be loaded
 */
public List<Class> findAllClasses(String uri) throws IOException, ClassNotFoundException {
    final List<Class> classes = new ArrayList<Class>();
    for (String className : findAllStrings(uri)) {
        classes.add(classLoader.loadClass(className));
    }
    return classes;
}
/**
 * Executes findAvailableStrings and treats each string as the name of a class
 * to load and return.
 * <p/>
 * Any class that cannot be loaded will be skipped and placed in the
 * 'resourcesNotLoaded' collection.
 *
 * @param uri the resource location relative to this finder's base path
 * @return the classes that could be loaded
 * @throws IOException if classLoader.getResources throws an exception
 */
public List<Class> findAvailableClasses(String uri) throws IOException {
    resourcesNotLoaded.clear();
    final List<Class> classes = new ArrayList<Class>();
    for (String className : findAvailableStrings(uri)) {
        try {
            classes.add(classLoader.loadClass(className));
        } catch (Exception notLoadable) {
            // best-effort semantics: record the failure and move on
            resourcesNotLoaded.add(className);
        }
    }
    return classes;
}
/**
 * Executes mapAllStrings assuming the value of each entry in the
 * map is the name of a class that should be loaded.
 * <p/>
 * Any class that cannot be loaded will cause an exception to be thrown.
 * <p/>
 * Example classpath:
 * <p/>
 * META-INF/xmlparsers/xerces
 * META-INF/xmlparsers/crimson
 * <p/>
 * ResourceFinder finder = new ResourceFinder("META-INF/");
 * Map map = finder.mapAvailableStrings("xmlparsers");
 * map.contains("xerces"); // true
 * map.contains("crimson"); // true
 * Class xercesClass = map.get("xerces");
 * Class crimsonClass = map.get("crimson");
 *
 * @param uri the directory location relative to this finder's base path
 * @return a map of file name to loaded class
 * @throws IOException if any resource cannot be read
 * @throws ClassNotFoundException if any named class cannot be loaded
 */
public Map<String, Class> mapAllClasses(String uri) throws IOException, ClassNotFoundException {
    Map<String, Class> classes = new HashMap<String, Class>();
    // typed for-each replaces the raw Iterator/Map.Entry casts of the
    // previous version (unchecked warnings, needless verbosity)
    for (Map.Entry<String, String> entry : mapAllStrings(uri).entrySet()) {
        classes.put(entry.getKey(), classLoader.loadClass(entry.getValue()));
    }
    return classes;
}
/**
 * Executes mapAvailableStrings assuming the value of each entry in the
 * map is the name of a class that should be loaded.
 * <p/>
 * Any class that cannot be loaded is skipped and its name is recorded in the
 * 'resourcesNotLoaded' collection (which is cleared on entry).
 * <p/>
 * Example classpath:
 * <p/>
 * META-INF/xmlparsers/xerces
 * META-INF/xmlparsers/crimson
 * <p/>
 * ResourceFinder finder = new ResourceFinder("META-INF/");
 * Map map = finder.mapAvailableClasses("xmlparsers");
 * map.contains("xerces"); // true
 * map.contains("crimson"); // true
 * Class xercesClass = map.get("xerces");
 * Class crimsonClass = map.get("crimson");
 *
 * @param uri resource directory, relative to this finder's path
 * @return map of resource file name to loaded class, for classes that loaded
 * @throws IOException if classLoader.getResources throws an exception
 */
public Map<String, Class> mapAvailableClasses(String uri) throws IOException {
    resourcesNotLoaded.clear();
    Map<String, Class> classes = new HashMap<String, Class>();
    Map<String, String> map = mapAvailableStrings(uri);
    // Typed for-each replaces the original raw Iterator and unchecked casts.
    for (Map.Entry<String, String> entry : map.entrySet()) {
        String className = entry.getValue();
        try {
            Class clazz = classLoader.loadClass(className);
            classes.put(entry.getKey(), clazz);
        } catch (Exception notAvailable) {
            // Deliberate best-effort: record the failure and keep going.
            resourcesNotLoaded.add(className);
        }
    }
    return classes;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
//
// Find Implementation
//
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
/**
 * Assumes the class specified points to a file in the classpath that contains
 * the name of a class that implements or is a subclass of the specified class.
 * <p/>
 * Any class that cannot be loaded will cause an exception to be thrown.
 * <p/>
 * Example classpath:
 * <p/>
 * META-INF/java.io.InputStream # contains the classname org.acme.AcmeInputStream
 * META-INF/java.io.OutputStream
 * <p/>
 * ResourceFinder finder = new ResourceFinder("META-INF/");
 * Class clazz = finder.findImplementation(java.io.InputStream.class);
 * clazz.getName(); // returns "org.acme.AcmeInputStream"
 *
 * @param interfase a superclass or interface
 * @return the loaded implementation class
 * @throws IOException if the URL cannot be read
 * @throws ClassNotFoundException if the class found is not loadable
 * @throws ClassCastException if the class found is not assignable to the specified superclass or interface
 */
public Class findImplementation(Class interfase) throws IOException, ClassNotFoundException {
    Class impl = classLoader.loadClass(findString(interfase.getName()));
    if (interfase.isAssignableFrom(impl)) {
        return impl;
    }
    throw new ClassCastException("Class not of type: " + interfase.getName());
}
/**
 * Assumes the class specified points to a file in the classpath that contains
 * the name of a class that implements or is a subclass of the specified class.
 * <p/>
 * Any class that cannot be loaded or assigned to the specified interface will
 * cause an exception to be thrown.
 * <p/>
 * Example classpath:
 * <p/>
 * META-INF/java.io.InputStream # contains the classname org.acme.AcmeInputStream
 * META-INF/java.io.InputStream # contains the classname org.widget.NeatoInputStream
 * META-INF/java.io.InputStream # contains the classname com.foo.BarInputStream
 * <p/>
 * ResourceFinder finder = new ResourceFinder("META-INF/");
 * List classes = finder.findAllImplementations(java.io.InputStream.class);
 * classes.contains("org.acme.AcmeInputStream"); // true
 * classes.contains("org.widget.NeatoInputStream"); // true
 * classes.contains("com.foo.BarInputStream"); // true
 *
 * @param interfase a superclass or interface
 * @return all loaded implementation classes, in discovery order
 * @throws IOException if the URL cannot be read
 * @throws ClassNotFoundException if the class found is not loadable
 * @throws ClassCastException if the class found is not assignable to the specified superclass or interface
 */
public List<Class> findAllImplementations(Class interfase) throws IOException, ClassNotFoundException {
    List<Class> found = new ArrayList<Class>();
    for (String name : findAllStrings(interfase.getName())) {
        Class candidate = classLoader.loadClass(name);
        if (!interfase.isAssignableFrom(candidate)) {
            throw new ClassCastException("Class not of type: " + interfase.getName());
        }
        found.add(candidate);
    }
    return found;
}
/**
 * Assumes the class specified points to a file in the classpath that contains
 * the name of a class that implements or is a subclass of the specified class.
 * <p/>
 * Any class that cannot be loaded, or that is not assignable to the specified
 * class, is skipped and recorded in the 'resourcesNotLoaded' collection
 * (which is cleared on entry).
 * <p/>
 * Example classpath:
 * <p/>
 * META-INF/java.io.InputStream # contains the classname org.acme.AcmeInputStream
 * META-INF/java.io.InputStream # contains the classname org.widget.NeatoInputStream
 * META-INF/java.io.InputStream # contains the classname com.foo.BarInputStream
 * <p/>
 * ResourceFinder finder = new ResourceFinder("META-INF/");
 * List classes = finder.findAvailableImplementations(java.io.InputStream.class);
 * classes.contains("org.acme.AcmeInputStream"); // true
 * classes.contains("org.widget.NeatoInputStream"); // true
 * classes.contains("com.foo.BarInputStream"); // true
 *
 * @param interfase a superclass or interface
 * @return the assignable classes that loaded successfully
 * @throws IOException if classLoader.getResources throws an exception
 */
public List<Class> findAvailableImplementations(Class interfase) throws IOException {
    resourcesNotLoaded.clear();
    List<Class> found = new ArrayList<Class>();
    for (String className : findAvailableStrings(interfase.getName())) {
        try {
            Class candidate = classLoader.loadClass(className);
            if (!interfase.isAssignableFrom(candidate)) {
                // Loadable but wrong type: treated the same as unloadable.
                resourcesNotLoaded.add(className);
            } else {
                found.add(candidate);
            }
        } catch (Exception notAvailable) {
            resourcesNotLoaded.add(className);
        }
    }
    return found;
}
/**
 * Assumes the class specified points to a directory in the classpath that holds files
 * containing the name of a class that implements or is a subclass of the specified class.
 * <p/>
 * Any class that cannot be loaded or assigned to the specified interface will
 * cause an exception to be thrown.
 * <p/>
 * Example classpath:
 * <p/>
 * META-INF/java.net.URLStreamHandler/jar
 * META-INF/java.net.URLStreamHandler/file
 * META-INF/java.net.URLStreamHandler/http
 * <p/>
 * ResourceFinder finder = new ResourceFinder("META-INF/");
 * Map map = finder.mapAllImplementations(java.net.URLStreamHandler.class);
 * Class jarUrlHandler = map.get("jar");
 * Class fileUrlHandler = map.get("file");
 * Class httpUrlHandler = map.get("http");
 *
 * @param interfase a superclass or interface
 * @return map of resource file name to loaded implementation class
 * @throws IOException if the URL cannot be read
 * @throws ClassNotFoundException if the class found is not loadable
 * @throws ClassCastException if the class found is not assignable to the specified superclass or interface
 */
public Map<String, Class> mapAllImplementations(Class interfase) throws IOException, ClassNotFoundException {
    Map<String, Class> implementations = new HashMap<String, Class>();
    Map<String, String> map = mapAllStrings(interfase.getName());
    // Typed for-each replaces the original raw Iterator and unchecked casts.
    for (Map.Entry<String, String> entry : map.entrySet()) {
        Class impl = classLoader.loadClass(entry.getValue());
        if (!interfase.isAssignableFrom(impl)) {
            throw new ClassCastException("Class not of type: " + interfase.getName());
        }
        implementations.put(entry.getKey(), impl);
    }
    return implementations;
}
/**
 * Assumes the class specified points to a directory in the classpath that holds files
 * containing the name of a class that implements or is a subclass of the specified class.
 * <p/>
 * Any class that cannot be loaded, or that is not assignable to the specified
 * class, is skipped and recorded in the 'resourcesNotLoaded' collection
 * (which is cleared on entry).
 * <p/>
 * Example classpath:
 * <p/>
 * META-INF/java.net.URLStreamHandler/jar
 * META-INF/java.net.URLStreamHandler/file
 * META-INF/java.net.URLStreamHandler/http
 * <p/>
 * ResourceFinder finder = new ResourceFinder("META-INF/");
 * Map map = finder.mapAvailableImplementations(java.net.URLStreamHandler.class);
 * Class jarUrlHandler = map.get("jar");
 * Class fileUrlHandler = map.get("file");
 * Class httpUrlHandler = map.get("http");
 *
 * @param interfase a superclass or interface
 * @return map of resource file name to implementation class, for classes that loaded
 * @throws IOException if classLoader.getResources throws an exception
 */
public Map<String, Class> mapAvailableImplementations(Class interfase) throws IOException {
    resourcesNotLoaded.clear();
    Map<String, Class> implementations = new HashMap<String, Class>();
    Map<String, String> map = mapAvailableStrings(interfase.getName());
    // Typed for-each replaces the original raw Iterator and unchecked casts.
    for (Map.Entry<String, String> entry : map.entrySet()) {
        String className = entry.getValue();
        try {
            Class impl = classLoader.loadClass(className);
            if (interfase.isAssignableFrom(impl)) {
                implementations.put(entry.getKey(), impl);
            } else {
                // Loadable but wrong type: treated the same as unloadable.
                resourcesNotLoaded.add(className);
            }
        } catch (Exception notAvailable) {
            resourcesNotLoaded.add(className);
        }
    }
    return implementations;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
//
// Find Properties
//
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
/**
 * Finds the corresponding resource and reads it in as a properties file.
 * <p/>
 * Example classpath:
 * <p/>
 * META-INF/widget.properties
 * <p/>
 * ResourceFinder finder = new ResourceFinder("META-INF/");
 * Properties widgetProps = finder.findProperties("widget.properties");
 *
 * @param uri resource name, relative to this finder's path
 * @return the parsed properties
 * @throws IOException if the resource is missing, cannot be read, or is not in properties file format
 */
public Properties findProperties(String uri) throws IOException {
    String fulluri = path + uri;
    URL resource = getResource(fulluri);
    if (resource != null) {
        return loadProperties(resource);
    }
    throw new IOException("Could not find resource: " + fulluri);
}
/**
 * Finds the corresponding resources and reads them in as properties files.
 * <p/>
 * Any URL that cannot be read in as a properties file will cause an exception to be thrown.
 * <p/>
 * Example classpath:
 * <p/>
 * META-INF/app.properties
 * META-INF/app.properties
 * META-INF/app.properties
 * <p/>
 * ResourceFinder finder = new ResourceFinder("META-INF/");
 * List<Properties> appProps = finder.findAllProperties("app.properties");
 *
 * @param uri resource name, relative to this finder's path
 * @return one Properties object per matching resource
 * @throws IOException if a URL cannot be read or is not in properties file format
 */
public List<Properties> findAllProperties(String uri) throws IOException {
    List<Properties> found = new ArrayList<Properties>();
    Enumeration<URL> resources = getResources(path + uri);
    while (resources.hasMoreElements()) {
        found.add(loadProperties(resources.nextElement()));
    }
    return found;
}
/**
 * Finds the corresponding resources and reads them in as properties files.
 * <p/>
 * Any URL that cannot be read in as a properties file is skipped and its
 * external form recorded in the 'resourcesNotLoaded' collection (which is
 * cleared on entry).
 * <p/>
 * Example classpath:
 * <p/>
 * META-INF/app.properties
 * META-INF/app.properties
 * META-INF/app.properties
 * <p/>
 * ResourceFinder finder = new ResourceFinder("META-INF/");
 * List<Properties> appProps = finder.findAvailableProperties("app.properties");
 *
 * @param uri resource name, relative to this finder's path
 * @return one Properties object per readable matching resource
 * @throws IOException if classLoader.getResources throws an exception
 */
public List<Properties> findAvailableProperties(String uri) throws IOException {
    resourcesNotLoaded.clear();
    List<Properties> found = new ArrayList<Properties>();
    Enumeration<URL> resources = getResources(path + uri);
    while (resources.hasMoreElements()) {
        URL url = resources.nextElement();
        try {
            found.add(loadProperties(url));
        } catch (Exception notAvailable) {
            // Deliberate best-effort: record the failure and keep going.
            resourcesNotLoaded.add(url.toExternalForm());
        }
    }
    return found;
}
/**
 * Finds the corresponding resources and reads them in as properties files.
 * <p/>
 * Any URL that cannot be read in as a properties file will cause an exception to be thrown.
 * <p/>
 * Example classpath:
 * <p/>
 * META-INF/jdbcDrivers/oracle.properties
 * META-INF/jdbcDrivers/mysql.props
 * META-INF/jdbcDrivers/derby
 * <p/>
 * ResourceFinder finder = new ResourceFinder("META-INF/");
 * Map<String, Properties> driversMap = finder.mapAllProperties("jdbcDrivers");
 * Properties oracleProps = driversMap.get("oracle.properties");
 * Properties mysqlProps = driversMap.get("mysql.props");
 * Properties derbyProps = driversMap.get("derby");
 *
 * @param uri resource directory, relative to this finder's path
 * @return map of resource file name to parsed properties
 * @throws IOException if a URL cannot be read or is not in properties file format
 */
public Map<String, Properties> mapAllProperties(String uri) throws IOException {
    Map<String, Properties> propertiesMap = new HashMap<String, Properties>();
    Map<String, URL> map = getResourcesMap(uri);
    // Typed for-each replaces the original raw Iterator and unchecked casts.
    for (Map.Entry<String, URL> entry : map.entrySet()) {
        Properties properties = loadProperties(entry.getValue());
        propertiesMap.put(entry.getKey(), properties);
    }
    return propertiesMap;
}
/**
 * Finds the corresponding resources and reads them in as properties files.
 * <p/>
 * Any URL that cannot be read in as a properties file is skipped and its
 * external form recorded in the 'resourcesNotLoaded' collection (which is
 * cleared on entry).
 * <p/>
 * Example classpath:
 * <p/>
 * META-INF/jdbcDrivers/oracle.properties
 * META-INF/jdbcDrivers/mysql.props
 * META-INF/jdbcDrivers/derby
 * <p/>
 * ResourceFinder finder = new ResourceFinder("META-INF/");
 * Map<String, Properties> driversMap = finder.mapAvailableProperties("jdbcDrivers");
 * Properties oracleProps = driversMap.get("oracle.properties");
 * Properties mysqlProps = driversMap.get("mysql.props");
 * Properties derbyProps = driversMap.get("derby");
 *
 * @param uri resource directory, relative to this finder's path
 * @return map of resource file name to parsed properties, for readable resources
 * @throws IOException if classLoader.getResources throws an exception
 */
public Map<String, Properties> mapAvailableProperties(String uri) throws IOException {
    resourcesNotLoaded.clear();
    Map<String, Properties> propertiesMap = new HashMap<String, Properties>();
    Map<String, URL> map = getResourcesMap(uri);
    // Typed for-each replaces the original raw Iterator and unchecked casts.
    for (Map.Entry<String, URL> entry : map.entrySet()) {
        URL url = entry.getValue();
        try {
            Properties properties = loadProperties(url);
            propertiesMap.put(entry.getKey(), properties);
        } catch (Exception notAvailable) {
            // Deliberate best-effort: record the failure and keep going.
            resourcesNotLoaded.add(url.toExternalForm());
        }
    }
    return propertiesMap;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
//
// Map Resources
//
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
/**
 * Maps every direct child of the given resource directory (across all search
 * locations) from file name to its URL. Only "jar" and "file" locations are
 * inspected; unreadable locations are skipped silently (best-effort).
 *
 * @param uri resource directory, relative to this finder's path
 * @return map of child resource name to its URL
 * @throws IOException if the resource enumeration itself fails
 */
public Map<String, URL> getResourcesMap(String uri) throws IOException {
    String basePath = path + uri;
    if (!basePath.endsWith("/")) {
        basePath += "/";
    }
    Map<String, URL> resources = new HashMap<String, URL>();
    Enumeration<URL> urls = getResources(basePath);
    while (urls.hasMoreElements()) {
        URL location = urls.nextElement();
        String protocol = location.getProtocol();
        try {
            if ("jar".equals(protocol)) {
                readJarEntries(location, basePath, resources);
            } else if ("file".equals(protocol)) {
                readDirectoryEntries(location, resources);
            }
        } catch (Exception e) {
            // Deliberate best-effort: an unreadable location is simply skipped.
        }
    }
    return resources;
}
/**
 * Adds every non-directory child of the directory behind a "file:" URL to the
 * given map, keyed by simple file name.
 *
 * @param location a "file:" URL pointing at a directory
 * @param resources map to fill with name-to-URL entries
 * @throws MalformedURLException if a child file cannot be converted to a URL
 */
private static void readDirectoryEntries(URL location, Map<String, URL> resources) throws MalformedURLException {
    // URLDecoder.decode(String) uses the platform default charset; kept as-is
    // to preserve the existing decoding behavior.
    File dir = new File(URLDecoder.decode(location.getPath()));
    if (!dir.isDirectory()) {
        return;
    }
    File[] files = dir.listFiles();
    // listFiles() returns null on I/O error or if the directory disappears
    // between the isDirectory() check and the listing; guard against the NPE.
    if (files == null) {
        return;
    }
    for (File file : files) {
        if (!file.isDirectory()) {
            resources.put(file.getName(), file.toURI().toURL());
        }
    }
}
/**
 * Adds every direct (non-directory, non-nested) child of basePath inside the
 * jar behind a "jar:" URL to the given map, keyed by the name relative to
 * basePath.
 *
 * @param location a "jar:" URL whose entries should be listed
 * @param basePath the directory prefix inside the jar to match
 * @param resources map to fill with name-to-URL entries
 * @throws IOException if the jar file cannot be opened
 */
private static void readJarEntries(URL location, String basePath, Map<String, URL> resources) throws IOException {
    JarURLConnection conn = (JarURLConnection) location.openConnection();
    JarFile jarfile = conn.getJarFile();
    Enumeration<JarEntry> entries = jarfile.entries();
    while (entries != null && entries.hasMoreElements()) {
        JarEntry entry = entries.nextElement();
        String name = entry.getName();
        // Only entries strictly under basePath are of interest.
        if (entry.isDirectory() || !name.startsWith(basePath) || name.length() == basePath.length()) {
            continue;
        }
        String child = name.substring(basePath.length());
        if (child.contains("/")) {
            // Nested more than one level below basePath: skip.
            continue;
        }
        // Resolve the child name relative to the jar URL itself.
        resources.put(child, new URL(location, child));
    }
}
/**
 * Opens the resource and parses it with Properties.load.
 *
 * @param resource URL of a properties-format resource
 * @return the parsed properties
 * @throws IOException if the stream cannot be opened or parsed
 */
private Properties loadProperties(URL resource) throws IOException {
    InputStream in = resource.openStream();
    BufferedInputStream reader = null;
    try {
        reader = new BufferedInputStream(in);
        Properties properties = new Properties();
        properties.load(reader);
        return properties;
    } finally {
        // Close only the outermost stream actually created: closing the
        // BufferedInputStream also closes the wrapped stream. The original
        // closed 'in' and then 'reader' in one try block, double-closing the
        // stream and (silently) NPE-ing if the wrapping had failed.
        try {
            if (reader != null) {
                reader.close();
            } else {
                in.close();
            }
        } catch (Exception e) {
            // Ignore close failures; the payload was already read (or the
            // original exception is already propagating).
        }
    }
}
/**
 * Reads the resource fully into a string and trims surrounding whitespace.
 *
 * @param resource URL of the resource to read
 * @return the trimmed contents
 * @throws IOException if the stream cannot be opened or read
 */
private String readContents(URL resource) throws IOException {
    StringBuilder contents = new StringBuilder();
    InputStream in = resource.openStream();
    BufferedInputStream reader = null;
    try {
        reader = new BufferedInputStream(in);
        // Each byte is widened directly to a char, i.e. the content is
        // effectively decoded as ISO-8859-1 — preserved from the original.
        for (int b = reader.read(); b != -1; b = reader.read()) {
            contents.append((char) b);
        }
        return contents.toString().trim();
    } finally {
        try {
            in.close();
            reader.close();
        } catch (Exception e) {
            // Ignore close failures.
        }
    }
}
/**
 * Resolves a single resource either through the class loader (no explicit
 * search URLs configured) or by probing the configured URL list.
 *
 * @param fullUri full resource name including this finder's path prefix
 * @return the resource URL, or null if not found
 */
private URL getResource(String fullUri) {
    return (urls == null) ? classLoader.getResource(fullUri) : findResource(fullUri, urls);
}
/**
 * Enumerates all matching resources either through the class loader (no
 * explicit search URLs configured) or by probing each configured URL.
 *
 * @param fulluri full resource name including this finder's path prefix
 * @return an enumeration of matching resource URLs (possibly empty)
 * @throws IOException if classLoader.getResources fails
 */
private Enumeration<URL> getResources(String fulluri) throws IOException {
    if (urls == null) {
        return classLoader.getResources(fulluri);
    }
    // Typed Vector replaces the original raw 'new Vector()' (unchecked warning).
    Vector<URL> resources = new Vector<URL>();
    for (URL url : urls) {
        URL resource = findResource(fulluri, url);
        if (resource != null) {
            resources.add(resource);
        }
    }
    return resources.elements();
}
/**
 * Emulates ClassLoader.getResource() against an explicit list of search URLs,
 * handling "jar:" and "file:" URLs specially and falling back to probing a
 * connection for any other protocol.
 * <p/>
 * Side effect: entries in {@code search} that prove unusable (unreadable jar,
 * malformed jar URL) are set to null so later calls skip them.
 *
 * @param resourceName resource name, relative to each search URL
 * @param search base URLs to probe; elements may be nulled out as a side effect
 * @return a URL pointing at the resource, or null if it was not found
 */
private URL findResource(String resourceName, URL... search) {
for (int i = 0; i < search.length; i++) {
URL currentUrl = search[i];
if (currentUrl == null) {
continue;
}
try {
String protocol = currentUrl.getProtocol();
if (protocol.equals("jar")) {
/*
* If the connection for currentUrl or resURL is
* used, getJarFile() will throw an exception if the
* entry doesn't exist.
*/
URL jarURL = ((JarURLConnection) currentUrl.openConnection()).getJarFileURL();
JarFile jarFile;
JarURLConnection juc;
// First open verifies the jar itself is readable before any lookup.
try {
juc = (JarURLConnection) new URL("jar", "", jarURL.toExternalForm() + "!/").openConnection();
jarFile = juc.getJarFile();
} catch (IOException e) {
// Don't look for this jar file again
search[i] = null;
throw e;
}
try {
juc = (JarURLConnection) new URL("jar", "", jarURL.toExternalForm() + "!/").openConnection();
jarFile = juc.getJarFile();
String entryName;
// A URL ending in "!/" points at the jar root; otherwise the text
// after "!/" is a directory prefix inside the jar to prepend.
if (currentUrl.getFile().endsWith("!/")) {
entryName = resourceName;
} else {
String file = currentUrl.getFile();
int sepIdx = file.lastIndexOf("!/");
if (sepIdx == -1) {
// Invalid URL, don't look here again
search[i] = null;
continue;
}
sepIdx += 2;
StringBuilder sb = new StringBuilder(file.length() - sepIdx + resourceName.length());
sb.append(file.substring(sepIdx));
sb.append(resourceName);
entryName = sb.toString();
}
// A request for "META-INF/" is answered with the manifest when one
// exists, mirroring class loader behavior.
if (entryName.equals("META-INF/") && jarFile.getEntry("META-INF/MANIFEST.MF") != null) {
return targetURL(currentUrl, "META-INF/MANIFEST.MF");
}
if (jarFile.getEntry(entryName) != null) {
return targetURL(currentUrl, resourceName);
}
} finally {
// Only close the jar when caching is off; cached JarFiles are shared.
if (!juc.getUseCaches()) {
try {
jarFile.close();
} catch (Exception e) {
}
}
}
} else if (protocol.equals("file")) {
String baseFile = currentUrl.getFile();
String host = currentUrl.getHost();
int hostLength = 0;
if (host != null) {
hostLength = host.length();
}
StringBuilder buf = new StringBuilder(2 + hostLength + baseFile.length() + resourceName.length());
if (hostLength > 0) {
buf.append("//").append(host);
}
// baseFile always ends with '/'
buf.append(baseFile);
String fixedResName = resourceName;
// Do not create a UNC path, i.e. \\host
while (fixedResName.startsWith("/") || fixedResName.startsWith("\\")) {
fixedResName = fixedResName.substring(1);
}
buf.append(fixedResName);
String filename = buf.toString();
// Check both the raw and the URL-decoded form of the path.
File file = new File(filename);
File file2 = new File(URLDecoder.decode(filename));
if (file.exists() || file2.exists()) {
return targetURL(currentUrl, fixedResName);
}
} else {
// Generic protocol: probe by opening (and immediately closing) a stream.
URL resourceURL = targetURL(currentUrl, resourceName);
URLConnection urlConnection = resourceURL.openConnection();
try {
urlConnection.getInputStream().close();
} catch (SecurityException e) {
return null;
}
// HTTP can return a stream on a non-existent file
// So check for the return code;
if (!resourceURL.getProtocol().equals("http")) {
return resourceURL;
}
int code = ((HttpURLConnection) urlConnection).getResponseCode();
if (code >= 200 && code < 300) {
return resourceURL;
}
}
} catch (MalformedURLException e) {
// Keep iterating through the URL list
} catch (IOException e) {
// Unreadable location: try the next search URL.
} catch (SecurityException e) {
// Access denied for this location: try the next search URL.
}
}
return null;
}
/**
 * Builds a URL that shares base's protocol, host, and port but appends name
 * to base's file part.
 *
 * @param base URL supplying protocol, host, port, and file prefix
 * @param name suffix appended to base's file part
 * @return the combined URL
 * @throws MalformedURLException if the combined URL is invalid
 */
private URL targetURL(URL base, String name) throws MalformedURLException {
    String file = base.getFile() + name;
    return new URL(base.getProtocol(), base.getHost(), base.getPort(), file, null);
}
}
| |
/*
* Copyright (c) 2018, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.mb.integration.tests.amqp.functional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.wso2.carbon.andes.event.stub.core.TopicRolePermission;
import org.wso2.carbon.andes.event.stub.service.AndesEventAdminServiceEventAdminException;
import org.wso2.carbon.andes.stub.AndesAdminServiceBrokerManagerAdminException;
import org.wso2.carbon.authenticator.stub.LoginAuthenticationExceptionException;
import org.wso2.carbon.authenticator.stub.LogoutAuthenticationExceptionException;
import org.wso2.carbon.automation.engine.FrameworkConstants;
import org.wso2.carbon.automation.engine.context.AutomationContext;
import org.wso2.carbon.automation.engine.context.TestUserMode;
import org.wso2.carbon.automation.engine.context.beans.User;
import org.wso2.carbon.integration.common.admin.client.UserManagementClient;
import org.wso2.carbon.integration.common.utils.LoginLogoutClient;
import org.wso2.carbon.integration.common.utils.exceptions.AutomationUtilException;
import org.wso2.carbon.user.mgt.stub.UserAdminUserAdminException;
import org.wso2.carbon.user.mgt.stub.types.carbon.FlaggedName;
import org.wso2.mb.integration.common.clients.AndesClient;
import org.wso2.mb.integration.common.clients.configurations.AndesJMSConsumerClientConfiguration;
import org.wso2.mb.integration.common.clients.configurations.AndesJMSPublisherClientConfiguration;
import org.wso2.mb.integration.common.clients.exceptions.AndesClientConfigurationException;
import org.wso2.mb.integration.common.clients.exceptions.AndesClientException;
import org.wso2.mb.integration.common.clients.operations.clients.TopicAdminClient;
import org.wso2.mb.integration.common.clients.operations.utils.AndesClientConstants;
import org.wso2.mb.integration.common.clients.operations.utils.AndesClientUtils;
import org.wso2.mb.integration.common.clients.operations.utils.ExchangeType;
import org.wso2.mb.integration.common.utils.backend.MBIntegrationBaseTest;
import org.xml.sax.SAXException;
import javax.jms.JMSException;
import javax.naming.NamingException;
import javax.xml.stream.XMLStreamException;
import javax.xml.xpath.XPathExpressionException;
import java.io.IOException;
import java.net.URISyntaxException;
import java.rmi.RemoteException;
/**
* This class contains the test cases related to user authorization and topics
*/
public class TopicUserAuthorizationTestCase extends MBIntegrationBaseTest {
/**
 * The logger used to log information, warnings, errors, etc.
 */
private static final Logger log = LoggerFactory.getLogger(TopicUserAuthorizationTestCase.class);
/**
 * Permission path for creating a topic
 */
private static final String ADD_TOPIC_PERMISSION = "/permission/admin/manage/topic/add";
/**
 * Roles for the test case scenarios: a role that may create topics, a role
 * without the create permission, and a role with no permissions at all.
 */
private static final String CREATE_PUB_SUB_TOPIC_ROLE = "create_pub_sub_topic_role";
private static final String PUB_SUB_TOPIC_ROLE = "pub_sub_topic_role";
private static final String NO_PERMISSION_TOPIC_ROLE = "no_permission_topic_role";
/**
 * Prefix for internal roles for topics
 */
private static final String TOPIC_PREFIX = "T_";
/**
 * Admin service client used to create/delete roles and to manage the
 * user-role assignments for each test scenario.
 */
private UserManagementClient userManagementClient;
/**
 * Runs before each test method: strips the admin role from the test users and
 * (re-)creates the roles each scenario needs, with their users and permissions.
 *
 * @throws Exception if initialization or any user-management call fails
 */
@BeforeMethod(alwaysRun = true)
public void initialize() throws Exception {
    super.init(TestUserMode.SUPER_TENANT_ADMIN);
    String[] usersAllowedToCreate = new String[]{"authUser1", "authUser2"};
    String[] usersAllowedToPubSub = new String[]{"authUser3", "authUser4"};
    String[] usersWithoutPermissions = new String[]{"authUser5"};
    String[] everyTestUser =
            new String[]{"authUser1", "authUser2", "authUser3", "authUser4", "authUser5"};
    // Log into user management as the admin user.
    userManagementClient = new UserManagementClient(backendURL, "admin", "admin");
    // Strip admin privileges from every test user.
    userManagementClient.updateUserListOfRole(FrameworkConstants.ADMIN_ROLE, null, everyTestUser);
    // Create the scenario roles (with their users) if they do not already exist.
    userManagementClient.addRole(CREATE_PUB_SUB_TOPIC_ROLE, usersAllowedToCreate,
            new String[]{ADD_TOPIC_PERMISSION});
    userManagementClient.addRole(PUB_SUB_TOPIC_ROLE, usersAllowedToPubSub, new String[]{});
    userManagementClient.addRole(NO_PERMISSION_TOPIC_ROLE, usersWithoutPermissions, new String[]{});
}
/**
 * Cleans up the test case effects: the roles created in initialize() and the
 * internal topic-specific roles are deleted.
 *
 * @throws RemoteException if the admin service call fails
 * @throws UserAdminUserAdminException if a role cannot be deleted or listed
 */
@AfterMethod(alwaysRun = true)
public void cleanUp() throws RemoteException, UserAdminUserAdminException {
// Deleting roles of the users used in the test case
userManagementClient.deleteRole(CREATE_PUB_SUB_TOPIC_ROLE);
userManagementClient.deleteRole(PUB_SUB_TOPIC_ROLE);
userManagementClient.deleteRole(NO_PERMISSION_TOPIC_ROLE);
// Deleting internal roles specific to topics.
// NOTE(review): contains() rather than startsWith() is used, so any role
// whose name merely contains "T_" anywhere is also deleted — confirm intended.
FlaggedName[] allRoles = userManagementClient.getAllRolesNames("*", 10);
for (FlaggedName allRole : allRoles) {
if (allRole.getItemName().contains(TOPIC_PREFIX)) {
userManagementClient.deleteRole(allRole.getItemName());
}
}
}
/**
 * User creates a topic and then publishes and consumes messages.
 * "authUser1" belongs to the role holding the add-topic permission, so the
 * whole create/publish/subscribe cycle is expected to succeed.
 *
 * @throws IOException
 * @throws UserAdminUserAdminException
 * @throws XPathExpressionException
 * @throws NamingException
 * @throws JMSException
 * @throws AndesClientException
 * @throws AndesClientConfigurationException
 */
@Test(groups = {"wso2.mb", "topic"})
public void performTopicPermissionTestCase()
throws IOException, UserAdminUserAdminException, XPathExpressionException,
NamingException, JMSException, AndesClientException,
AndesClientConfigurationException {
// Creates "authTopic1", publishes to it, and consumes from it as authUser1.
this.createPublishAndSubscribeFromUser("authUser1", "authTopic1");
}
/**
 * User1 and User2 exist in the same role where create topic permission is assigned.
 * User1 creates a topic and then publishes and consumes messages.
 * User2 tries to publish and consume messages but is unable to succeed:
 * the test expects a JMSException with a "Permission denied" message on the
 * second call, since topic-level publish/subscribe rights are not granted
 * to the role automatically.
 *
 * @throws AndesClientConfigurationException
 * @throws NamingException
 * @throws IOException
 * @throws XPathExpressionException
 * @throws AndesClientException
 * @throws JMSException expected — authUser2 lacks publish/consume rights
 */
@Test(groups = {"wso2.mb", "topic"}, expectedExceptions = JMSException.class,
expectedExceptionsMessageRegExp = ".*Permission denied.*")
public void performTopicPermissionSameRoleUsersWithNoPublishOrConsume()
throws AndesClientConfigurationException, NamingException, IOException,
XPathExpressionException, AndesClientException, JMSException {
// authUser1 creates "authTopic2" and uses it successfully.
this.createPublishAndSubscribeFromUser("authUser1", "authTopic2");
// authUser2 (same role, but no per-topic grant) is expected to fail here.
this.createPublishAndSubscribeFromUser("authUser2", "authTopic2");
}
/**
 * User1 and User2 exist in the same role where create topic permission is assigned.
 * User1 creates a topic and then publishes and consumes messages.
 * Publish and consume permissions are then added to the role in which User1
 * exists, after which User2 publishes and consumes successfully.
 *
 * @throws AndesClientConfigurationException
 * @throws NamingException
 * @throws IOException
 * @throws XPathExpressionException
 * @throws AndesClientException
 * @throws JMSException
 * @throws UserAdminUserAdminException
 * @throws LoginAuthenticationExceptionException
 * @throws AndesEventAdminServiceEventAdminException
 * @throws XMLStreamException
 * @throws LogoutAuthenticationExceptionException
 * @throws URISyntaxException
 * @throws SAXException
 * @throws AndesAdminServiceBrokerManagerAdminException
 */
@Test(groups = {"wso2.mb", "topic"})
public void performTopicPermissionSameRoleUsersWithPublishOrConsume()
        throws AndesClientConfigurationException, NamingException, IOException,
        XPathExpressionException, AndesClientException, JMSException,
        UserAdminUserAdminException, LoginAuthenticationExceptionException,
        AndesEventAdminServiceEventAdminException, XMLStreamException,
        LogoutAuthenticationExceptionException, URISyntaxException, SAXException,
        AndesAdminServiceBrokerManagerAdminException, AutomationUtilException {
    this.createPublishAndSubscribeFromUser("authUser1", "authTopic3");
    // Grant publish/subscribe on 'authTopic3' to the creator's role so that
    // authUser2 (same role) can also use the topic.
    TopicRolePermission permission = new TopicRolePermission();
    permission.setRoleName(CREATE_PUB_SUB_TOPIC_ROLE);
    permission.setAllowedToPublish(true);
    permission.setAllowedToSubscribe(true);
    this.updateTopicRoleConsumePublishPermission("authTopic3", permission);
    log.info("Consumer and publish permissions updated for " + CREATE_PUB_SUB_TOPIC_ROLE);
    // With the grant in place, authUser2 succeeds on the same topic.
    this.createPublishAndSubscribeFromUser("authUser2", "authTopic3");
}
/**
 * User1 and User2 exist in the same role where create topic permission is assigned.
 * Admin(UI) creates a topic and then publishes and consumes messages.
 * Add publish and consume permissions to the role in which User1 and User2 exist.
 * User1 and User2 try to publish and consume messages. Both succeed.
 *
 * @throws AndesClientConfigurationException
 * @throws NamingException
 * @throws IOException
 * @throws XPathExpressionException
 * @throws AndesClientException
 * @throws JMSException
 * @throws UserAdminUserAdminException
 * @throws LoginAuthenticationExceptionException
 * @throws AndesEventAdminServiceEventAdminException
 * @throws XMLStreamException
 * @throws LogoutAuthenticationExceptionException
 * @throws URISyntaxException
 * @throws SAXException
 * @throws AndesAdminServiceBrokerManagerAdminException
 */
@Test(groups = {"wso2.mb", "topic"})
public void performTopicPermissionSameRoleUsersWithAdminCreated()
throws AndesClientConfigurationException, NamingException, IOException,
XPathExpressionException, AndesClientException, JMSException,
UserAdminUserAdminException, LoginAuthenticationExceptionException,
AndesEventAdminServiceEventAdminException, XMLStreamException,
LogoutAuthenticationExceptionException, URISyntaxException, SAXException,
AndesAdminServiceBrokerManagerAdminException, AutomationUtilException {
// "superAdmin" refers to the admin
this.createPublishAndSubscribeFromUser("superAdmin", "authTopic8");
// Adding publish subscribe permissions of 'authTopic8' to 'create_pub_sub_topic_role' role.
TopicRolePermission topicRolePermission = new TopicRolePermission();
topicRolePermission.setRoleName(CREATE_PUB_SUB_TOPIC_ROLE);
topicRolePermission.setAllowedToSubscribe(true);
topicRolePermission.setAllowedToPublish(true);
this.updateTopicRoleConsumePublishPermission("authTopic8", topicRolePermission);
log.info("Consumer and publish permissions updated for " + CREATE_PUB_SUB_TOPIC_ROLE);
// Both role members can now use the admin-created topic.
this.createPublishAndSubscribeFromUser("authUser1", "authTopic8");
this.createPublishAndSubscribeFromUser("authUser2", "authTopic8");
}
/**
 * Admin subscribes to a topic, receives the expected message count, and
 * unsubscribes. The admin then deletes the topic, and User1 creates a topic
 * with the same name.
 *
 * Expected result: User1 successfully creates, subscribes to, and
 * unsubscribes from the recreated topic.
 *
 * @throws AndesClientConfigurationException
 * @throws NamingException
 * @throws JMSException
 * @throws AndesClientException
 * @throws XPathExpressionException
 * @throws IOException
 */
@Test(groups = {"wso2.mb", "topic"})
public void performTopicPermissionWithAdminCreateAndUnscribe()
        throws AndesClientConfigurationException, NamingException, JMSException, AndesClientException,
        XPathExpressionException, IOException, AutomationUtilException, AndesEventAdminServiceEventAdminException {
    // "superAdmin" refers to the admin
    createPublishSubscribeAndUnsubscribeFromUser("superAdmin", "authTopic10");
    // Remove the admin-created topic so authUser1 can recreate it under the same name.
    LoginLogoutClient adminLoginClient = new LoginLogoutClient(automationContext);
    String adminSessionCookie = adminLoginClient.login();
    TopicAdminClient adminTopicClient = new TopicAdminClient(backendURL, adminSessionCookie);
    adminTopicClient.removeTopic("authTopic10");
    // authUser1 subscribes using the same topic name previously created,
    // unsubscribed from, and deleted by the admin.
    createPublishSubscribeAndUnsubscribeFromUser("authUser1", "authTopic10");
}
/**
 * User1 is in Role1, which has topic-creation permission.
 * User5 is in Role2, which has no topic-creation permission.
 * User1 creates a topic and then publishes and consumes messages.
 * User5 tries to publish and consume messages on the same topic and must be
 * rejected with a JMS "Permission denied" error (asserted via the
 * expectedExceptions/expectedExceptionsMessageRegExp attributes below).
 *
 * @throws IOException
 * @throws UserAdminUserAdminException
 * @throws XPathExpressionException
 * @throws NamingException
 * @throws JMSException
 * @throws AndesClientException
 * @throws AndesClientConfigurationException
 */
@Test(groups = {"wso2.mb", "topic"}, expectedExceptions = JMSException.class,
expectedExceptionsMessageRegExp = ".*Permission denied.*")
public void performTopicPermissionDifferentRoleUsersWithNoPermissions()
throws IOException, UserAdminUserAdminException, XPathExpressionException,
NamingException, JMSException, AndesClientException,
AndesClientConfigurationException {
// authUser1 holds topic-creation rights; this call creates 'authTopic4' and succeeds.
this.createPublishAndSubscribeFromUser("authUser1", "authTopic4");
// authUser5 has no permissions on 'authTopic4'; this call is expected to throw.
this.createPublishAndSubscribeFromUser("authUser5", "authTopic4");
}
/**
 * User1 starts in a role that has topic-creation permission, creates a topic,
 * and publishes/consumes on it. User1 is then removed from that role (and from
 * the topic's internal role). A second publish/consume attempt by User1 must
 * fail with a JMS "Permission denied" error.
 *
 * NOTE(review): the internal role string below uses a lowercase topic name
 * ("Internal/T_authtopic5") while the topic itself is "authTopic5" — confirm
 * this matches the broker's internal-role naming.
 *
 * @throws AndesClientConfigurationException
 * @throws NamingException
 * @throws IOException
 * @throws XPathExpressionException
 * @throws AndesClientException
 * @throws JMSException
 * @throws UserAdminUserAdminException
 */
@Test(groups = {"wso2.mb", "topic"}, expectedExceptions = JMSException.class,
      expectedExceptionsMessageRegExp = ".*Permission denied.*")
public void performTopicPermissionSameUserRemovedFromRole()
        throws AndesClientConfigurationException, NamingException, IOException,
        XPathExpressionException, AndesClientException, JMSException,
        UserAdminUserAdminException {
    createPublishAndSubscribeFromUser("authUser1", "authTopic5");
    // Strip authUser1 of the creator role and the topic's internal role.
    String[] rolesToAdd = new String[]{NO_PERMISSION_TOPIC_ROLE};
    String[] rolesToRemove = new String[]{CREATE_PUB_SUB_TOPIC_ROLE, "Internal/T_authtopic5"};
    userManagementClient.addRemoveRolesOfUser("authUser1", rolesToAdd, rolesToRemove);
    log.info("Removing authUser1 from " + CREATE_PUB_SUB_TOPIC_ROLE + " and Internal/T_authtopic5");
    // The second attempt must now be rejected with "Permission denied".
    createPublishAndSubscribeFromUser("authUser1", "authTopic5");
}
/**
 * User1 and User2 share a role that carries topic-creation permission.
 * User1 creates a topic and publishes/consumes on it. The admin then grants
 * publish and consume permissions on the topic to the shared role, after which
 * User1 is removed from the role. User2, still in the role, must succeed in
 * publishing and consuming.
 *
 * @throws AndesClientConfigurationException
 * @throws NamingException
 * @throws IOException
 * @throws XPathExpressionException
 * @throws AndesClientException
 * @throws JMSException
 * @throws UserAdminUserAdminException
 * @throws LoginAuthenticationExceptionException
 * @throws AndesEventAdminServiceEventAdminException
 * @throws XMLStreamException
 * @throws LogoutAuthenticationExceptionException
 * @throws URISyntaxException
 * @throws SAXException
 * @throws AndesAdminServiceBrokerManagerAdminException
 */
@Test(groups = {"wso2.mb", "topic"})
public void performTopicPermissionSameRoleAssignedPermissions()
        throws AndesClientConfigurationException, NamingException, IOException,
        XPathExpressionException, AndesClientException, JMSException,
        UserAdminUserAdminException, LoginAuthenticationExceptionException,
        AndesEventAdminServiceEventAdminException, XMLStreamException,
        LogoutAuthenticationExceptionException, URISyntaxException, SAXException,
        AndesAdminServiceBrokerManagerAdminException, AutomationUtilException {
    createPublishAndSubscribeFromUser("authUser1", "authTopic6");
    // Grant publish/subscribe rights on 'authTopic6' to the shared role.
    TopicRolePermission permission = new TopicRolePermission();
    permission.setRoleName(CREATE_PUB_SUB_TOPIC_ROLE);
    permission.setAllowedToPublish(true);
    permission.setAllowedToSubscribe(true);
    updateTopicRoleConsumePublishPermission("authTopic6", permission);
    log.info("Consumer and publish permissions updated for " + CREATE_PUB_SUB_TOPIC_ROLE);
    // Drop authUser1 from the creator role and the topic's internal role.
    String[] rolesToAdd = new String[]{NO_PERMISSION_TOPIC_ROLE};
    String[] rolesToRemove = new String[]{CREATE_PUB_SUB_TOPIC_ROLE, "Internal/T_authtopic6"};
    userManagementClient.addRemoveRolesOfUser("authUser1", rolesToAdd, rolesToRemove);
    // authUser2 remains in the role and must still be able to publish/consume.
    createPublishAndSubscribeFromUser("authUser2", "authTopic6");
}
/**
 * User3 is in Role2, which has no topic-creation permission.
 * The admin creates a topic and publishes/consumes on it, then grants publish
 * and consume permissions on the topic to Role2. User3 must then succeed in
 * publishing and consuming.
 *
 * @throws AndesClientConfigurationException
 * @throws NamingException
 * @throws IOException
 * @throws XPathExpressionException
 * @throws AndesClientException
 * @throws JMSException
 * @throws UserAdminUserAdminException
 * @throws LoginAuthenticationExceptionException
 * @throws AndesEventAdminServiceEventAdminException
 * @throws XMLStreamException
 * @throws LogoutAuthenticationExceptionException
 * @throws URISyntaxException
 * @throws SAXException
 * @throws AndesAdminServiceBrokerManagerAdminException
 */
@Test(groups = {"wso2.mb", "topic"})
public void performTopicPermissionDifferentRolesAssignedPermissions()
        throws AndesClientConfigurationException, NamingException, IOException,
        XPathExpressionException, AndesClientException, JMSException,
        UserAdminUserAdminException, LoginAuthenticationExceptionException,
        AndesEventAdminServiceEventAdminException, XMLStreamException,
        LogoutAuthenticationExceptionException, URISyntaxException, SAXException,
        AndesAdminServiceBrokerManagerAdminException, AutomationUtilException {
    createPublishAndSubscribeFromUser("superAdmin", "authTopic7");
    // Grant publish/subscribe rights on 'authTopic7' to Role2 (pub_sub_topic_role).
    TopicRolePermission permission = new TopicRolePermission();
    permission.setRoleName(PUB_SUB_TOPIC_ROLE);
    permission.setAllowedToPublish(true);
    permission.setAllowedToSubscribe(true);
    updateTopicRoleConsumePublishPermission("authTopic7", permission);
    log.info("Consumer and publish permissions updated for " + PUB_SUB_TOPIC_ROLE);
    // authUser3, now covered by the role grant, must be able to publish/consume.
    createPublishAndSubscribeFromUser("authUser3", "authTopic7");
}
/**
 * User1 is in Role1 (has topic-creation permission); User3 is in Role2 (does
 * not). The admin creates a topic, publishes/consumes on it, and grants publish
 * and consume permissions on the topic to Role2 only. User1, whose role was NOT
 * granted permissions on this topic, must fail with a JMS "Permission denied"
 * error.
 *
 * @throws AndesClientConfigurationException
 * @throws NamingException
 * @throws IOException
 * @throws XPathExpressionException
 * @throws AndesClientException
 * @throws JMSException
 * @throws UserAdminUserAdminException
 * @throws LoginAuthenticationExceptionException
 * @throws AndesEventAdminServiceEventAdminException
 * @throws XMLStreamException
 * @throws LogoutAuthenticationExceptionException
 * @throws URISyntaxException
 * @throws SAXException
 * @throws AndesAdminServiceBrokerManagerAdminException
 */
@Test(groups = {"wso2.mb", "topic"}, expectedExceptions = JMSException.class,
      expectedExceptionsMessageRegExp = ".*Permission denied.*")
public void performTopicPermissionDifferentRolesNoPermissions()
        throws AndesClientConfigurationException, NamingException, IOException,
        XPathExpressionException, AndesClientException, JMSException,
        UserAdminUserAdminException, LoginAuthenticationExceptionException,
        AndesEventAdminServiceEventAdminException, XMLStreamException,
        LogoutAuthenticationExceptionException, URISyntaxException, SAXException,
        AndesAdminServiceBrokerManagerAdminException, AutomationUtilException {
    createPublishAndSubscribeFromUser("superAdmin", "authTopic9");
    // Grant publish/subscribe rights on 'authTopic9' to Role2 (pub_sub_topic_role) only.
    TopicRolePermission permission = new TopicRolePermission();
    permission.setRoleName(PUB_SUB_TOPIC_ROLE);
    permission.setAllowedToPublish(true);
    permission.setAllowedToSubscribe(true);
    updateTopicRoleConsumePublishPermission("authTopic9", permission);
    log.info("Consumer and publish permissions updated for " + PUB_SUB_TOPIC_ROLE);
    // authUser1's role was not granted access; this call is expected to throw.
    createPublishAndSubscribeFromUser("authUser1", "authTopic9");
}
/**
 * Deletes the topics created by this test class (authTopic1 through authTopic9;
 * authTopic10 is removed inside its own test case). The admin session used for
 * the deletions is always released, even if a removal fails.
 *
 * @throws XPathExpressionException
 * @throws LoginAuthenticationExceptionException
 * @throws IOException
 * @throws XMLStreamException
 * @throws URISyntaxException
 * @throws SAXException
 * @throws AndesEventAdminServiceEventAdminException
 * @throws LogoutAuthenticationExceptionException
 */
@AfterClass()
public void cleanUpTopics()
        throws XPathExpressionException, LoginAuthenticationExceptionException, IOException,
        XMLStreamException, URISyntaxException, SAXException,
        AndesEventAdminServiceEventAdminException,
        LogoutAuthenticationExceptionException, AutomationUtilException {
    LoginLogoutClient loginLogoutClientForUser = new LoginLogoutClient(this.automationContext);
    String sessionCookie = loginLogoutClientForUser.login();
    try {
        TopicAdminClient topicAdminClient =
                new TopicAdminClient(this.backendURL, sessionCookie);
        // Remove authTopic1 .. authTopic9 (same names the tests above created).
        for (int topicIndex = 1; topicIndex <= 9; topicIndex++) {
            topicAdminClient.removeTopic("authTopic" + topicIndex);
        }
    } finally {
        // Previously logout() was skipped when a removal threw; always release
        // the admin session.
        loginLogoutClientForUser.logout();
    }
}
/**
 * Creates a JMS topic consumer and publisher authenticated as the given user
 * and verifies the round trip: the publisher sends 10 messages and the consumer
 * must receive all 10. Credentials are resolved for {@code userKey} from
 * automation.xml; a permission failure surfaces as a JMSException, which the
 * negative tests in this class rely on.
 *
 * @param userKey The user key mentioned in the automation.xml for a specific user.
 * @param destinationName The destination name of the topic.
 * @throws XPathExpressionException
 * @throws org.wso2.mb.integration.common.clients.exceptions.AndesClientConfigurationException
 * @throws IOException
 * @throws javax.jms.JMSException
 * @throws org.wso2.mb.integration.common.clients.exceptions.AndesClientException
 * @throws javax.naming.NamingException
 */
private void createPublishAndSubscribeFromUser(String userKey, String destinationName)
        throws XPathExpressionException, AndesClientConfigurationException, IOException,
        JMSException, AndesClientException, NamingException {
    long sendCount = 10L;
    long expectedCount = 10L;
    // Resolve the tenant user (username/password) for this userKey from automation.xml.
    AutomationContext userAutomationContext =
            new AutomationContext("MB", "mb001", FrameworkConstants.SUPER_TENANT_KEY, userKey);
    User contextUser = userAutomationContext.getContextTenant().getContextUser();
    // Creating a consumer client configuration
    AndesJMSConsumerClientConfiguration
            consumerConfig =
            new AndesJMSConsumerClientConfiguration( getAMQPPort(),
                    contextUser.getUserNameWithoutDomain(), contextUser.getPassword(),
                    ExchangeType.TOPIC, destinationName);
    consumerConfig.setMaximumMessagesToReceived(expectedCount);
    consumerConfig.setAsync(false);
    // Creating a publisher client configuration
    AndesJMSPublisherClientConfiguration publisherConfig =
            new AndesJMSPublisherClientConfiguration( getAMQPPort(),
                    contextUser.getUserNameWithoutDomain(), contextUser.getPassword(),
                    ExchangeType.TOPIC, destinationName);
    publisherConfig.setNumberOfMessagesToSend(sendCount);
    // Creating clients. The consumer is started before the publisher —
    // presumably because a non-durable topic subscriber only receives messages
    // sent while it is subscribed; do not reorder without confirming.
    AndesClient consumerClient = new AndesClient(consumerConfig, true);
    consumerClient.startClient();
    AndesClient publisherClient = new AndesClient(publisherConfig, true);
    publisherClient.startClient();
    AndesClientUtils
            .waitForMessagesAndShutdown(consumerClient, AndesClientConstants.DEFAULT_RUN_TIME);
    // Evaluating: all sent messages must have been delivered to the consumer.
    Assert.assertEquals(publisherClient.getSentMessageCount(), sendCount, "Message sending " +
            "failed for user : " + contextUser.getUserNameWithoutDomain());
    Assert.assertEquals(consumerClient.getReceivedMessageCount(), expectedCount, "Message " +
            "receiving failed for user : " + contextUser.getUserNameWithoutDomain());
}
/**
 * Creates a JMS topic consumer and publisher authenticated as the given user,
 * verifies that all 10 published messages are received, and configures the
 * subscriber to unsubscribe after the expected message count is reached (the
 * only difference from {@code createPublishAndSubscribeFromUser}).
 *
 * @param userKey The user key mentioned in the automation.xml for a specific user.
 * @param destinationName The destination name of the topic.
 * @throws XPathExpressionException
 * @throws org.wso2.mb.integration.common.clients.exceptions.AndesClientConfigurationException
 * @throws IOException
 * @throws javax.jms.JMSException
 * @throws org.wso2.mb.integration.common.clients.exceptions.AndesClientException
 * @throws javax.naming.NamingException
 */
private void createPublishSubscribeAndUnsubscribeFromUser(String userKey, String destinationName)
        throws XPathExpressionException, AndesClientConfigurationException, IOException,
        JMSException, AndesClientException, NamingException {
    long sendCount = 10L;
    long expectedCount = 10L;
    // Resolve the tenant user (username/password) for this userKey from automation.xml.
    AutomationContext userAutomationContext =
            new AutomationContext("MB", "mb001", FrameworkConstants.SUPER_TENANT_KEY, userKey);
    User contextUser = userAutomationContext.getContextTenant().getContextUser();
    // Creating a consumer client configuration
    AndesJMSConsumerClientConfiguration
            consumerConfig =
            new AndesJMSConsumerClientConfiguration( getAMQPPort(),
                    contextUser.getUserNameWithoutDomain(), contextUser.getPassword(),
                    ExchangeType.TOPIC, destinationName);
    consumerConfig.setMaximumMessagesToReceived(expectedCount);
    // Unsubscribe once the expected number of messages has been received.
    consumerConfig.setUnSubscribeAfterEachMessageCount(expectedCount);
    consumerConfig.setAsync(false);
    // Creating a publisher client configuration
    AndesJMSPublisherClientConfiguration publisherConfig =
            new AndesJMSPublisherClientConfiguration( getAMQPPort(),
                    contextUser.getUserNameWithoutDomain(), contextUser.getPassword(),
                    ExchangeType.TOPIC, destinationName);
    publisherConfig.setNumberOfMessagesToSend(sendCount);
    // Creating clients. The consumer is started before the publisher —
    // presumably because a non-durable topic subscriber only receives messages
    // sent while it is subscribed; do not reorder without confirming.
    AndesClient consumerClient = new AndesClient(consumerConfig, true);
    consumerClient.startClient();
    AndesClient publisherClient = new AndesClient(publisherConfig, true);
    publisherClient.startClient();
    AndesClientUtils
            .waitForMessagesAndShutdown(consumerClient, AndesClientConstants.DEFAULT_RUN_TIME);
    // Evaluating: all sent messages must have been delivered to the consumer.
    Assert.assertEquals(publisherClient.getSentMessageCount(), sendCount, "Message sending " +
            "failed for user : " + contextUser.getUserNameWithoutDomain());
    Assert.assertEquals(consumerClient.getReceivedMessageCount(), expectedCount, "Message " +
            "receiving failed for user : " + contextUser.getUserNameWithoutDomain());
}
/**
 * Grants/updates consume and publish permissions on a topic for the role named
 * inside {@code permissions}, using a fresh admin session that is logged out
 * when the update completes.
 *
 * @param topicName   The topic name
 * @param permissions New permissions for the role. can be publish, consume.
 * @throws XPathExpressionException
 * @throws IOException
 * @throws URISyntaxException
 * @throws SAXException
 * @throws XMLStreamException
 * @throws LoginAuthenticationExceptionException
 * @throws AndesAdminServiceBrokerManagerAdminException
 * @throws LogoutAuthenticationExceptionException
 * @throws UserAdminUserAdminException
 */
public void updateTopicRoleConsumePublishPermission(String topicName,
                                                    TopicRolePermission permissions)
        throws XPathExpressionException, IOException, URISyntaxException, SAXException,
        XMLStreamException, LoginAuthenticationExceptionException,
        AndesAdminServiceBrokerManagerAdminException,
        LogoutAuthenticationExceptionException,
        UserAdminUserAdminException,
        AndesEventAdminServiceEventAdminException, AutomationUtilException {
    LoginLogoutClient adminLoginClient = new LoginLogoutClient(automationContext);
    String adminSessionCookie = adminLoginClient.login();
    TopicAdminClient adminTopicClient = new TopicAdminClient(backendURL, adminSessionCookie);
    adminTopicClient.updatePermissionForTopic(topicName, permissions);
    adminLoginClient.logout();
}
}
| |
/**
*/
package uk.co.autotrader.tingle.model.v2.mingle;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
/**
* <!-- begin-user-doc --> A representation of the model object '
* <em><b>Identifyable</b></em>'. <!-- end-user-doc -->
* <p>
* The following features are supported:
* <ul>
* <li>{@link uk.co.autotrader.tingle.model.v2.mingle.Identifyable#getId <em>Id
* </em>}</li>
* <li>{@link uk.co.autotrader.tingle.model.v2.mingle.Identifyable#getName <em>
* Name</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class Identifyable extends MinimalEObjectImpl.Container implements EObject
{
    /**
     * The default value of the '{@link #getId() <em>Id</em>}' attribute. <!--
     * begin-user-doc --> <!-- end-user-doc -->
     *
     * @see #getId()
     * @generated
     * @ordered
     */
    protected static final int ID_EDEFAULT = 0;

    /**
     * The default value of the '{@link #getName() <em>Name</em>}' attribute.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     *
     * @see #getName()
     * @generated
     * @ordered
     */
    protected static final String NAME_EDEFAULT = null;

    /**
     * The cached value of the '{@link #getId() <em>Id</em>}' attribute. <!--
     * begin-user-doc --> <!-- end-user-doc -->
     *
     * @see #getId()
     * @generated
     * @ordered
     */
    protected int id = ID_EDEFAULT;

    /**
     * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     *
     * @see #getName()
     * @generated
     * @ordered
     */
    protected String name = NAME_EDEFAULT;

    /**
     * Creates a new Identifyable with default feature values.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     *
     * @generated
     */
    protected Identifyable() {
        super();
    }

    /**
     * Reflectively reads the feature identified by {@code featureID}; unknown
     * feature IDs are delegated to the superclass.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public Object eGet(final int featureID, final boolean resolve, final boolean coreType) {
        switch (featureID) {
        case MinglePackage.IDENTIFYABLE__ID:
            return getId();
        case MinglePackage.IDENTIFYABLE__NAME:
            return getName();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * Reports whether the feature identified by {@code featureID} differs from
     * its default value; unknown feature IDs are delegated to the superclass.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public boolean eIsSet(final int featureID) {
        switch (featureID) {
        case MinglePackage.IDENTIFYABLE__ID:
            return id != ID_EDEFAULT;
        case MinglePackage.IDENTIFYABLE__NAME:
            return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
        }
        return super.eIsSet(featureID);
    }

    /**
     * Reflectively sets the feature identified by {@code featureID}; unknown
     * feature IDs are delegated to the superclass.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public void eSet(final int featureID, final Object newValue) {
        switch (featureID) {
        case MinglePackage.IDENTIFYABLE__ID:
            setId((Integer) newValue);
            return;
        case MinglePackage.IDENTIFYABLE__NAME:
            setName((String) newValue);
            return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * Returns the static EMF metaclass for this model object.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return MinglePackage.Literals.IDENTIFYABLE;
    }

    /**
     * Resets the feature identified by {@code featureID} to its default value;
     * unknown feature IDs are delegated to the superclass.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public void eUnset(final int featureID) {
        switch (featureID) {
        case MinglePackage.IDENTIFYABLE__ID:
            setId(ID_EDEFAULT);
            return;
        case MinglePackage.IDENTIFYABLE__NAME:
            setName(NAME_EDEFAULT);
            return;
        }
        super.eUnset(featureID);
    }

    /**
     * Returns the value of the '<em><b>Id</b></em>' attribute. <!--
     * begin-user-doc --> <!-- end-user-doc -->
     *
     * @return the value of the '<em>Id</em>' attribute.
     * @see #setId(int)
     * @generated
     */
    public int getId() {
        return id;
    }

    /**
     * Returns the value of the '<em><b>Name</b></em>' attribute. <!--
     * begin-user-doc --> <!-- end-user-doc -->
     *
     * @return the value of the '<em>Name</em>' attribute.
     * @see #setName(String)
     * @generated
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the value of the '
     * {@link uk.co.autotrader.tingle.model.v2.mingle.Identifyable#getId
     * <em>Id</em>}' attribute and notifies registered adapters of the change.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     *
     * @param newId
     *            the new value of the '<em>Id</em>' attribute.
     * @see #getId()
     * @generated
     */
    public void setId(final int newId) {
        final int oldId = id;
        id = newId;
        if (eNotificationRequired()) {
            eNotify(new ENotificationImpl(this, Notification.SET, MinglePackage.IDENTIFYABLE__ID, oldId, id));
        }
    }

    /**
     * Sets the value of the '
     * {@link uk.co.autotrader.tingle.model.v2.mingle.Identifyable#getName
     * <em>Name</em>}' attribute and notifies registered adapters of the change.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     *
     * @param newName
     *            the new value of the '<em>Name</em>' attribute.
     * @see #getName()
     * @generated
     */
    public void setName(final String newName) {
        final String oldName = name;
        name = newName;
        if (eNotificationRequired()) {
            eNotify(new ENotificationImpl(this, Notification.SET, MinglePackage.IDENTIFYABLE__NAME, oldName, name));
        }
    }

    /**
     * Returns a debug representation including the id and name features; proxy
     * instances fall back to the superclass representation.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) {
            return super.toString();
        }
        final StringBuffer result = new StringBuffer(super.toString());
        result.append(" (id: ");
        result.append(id);
        result.append(", name: ");
        result.append(name);
        result.append(')');
        return result.toString();
    }
} // Identifyable
| |
package edu.berkeley.nlp.util;
import java.util.Arrays;
import edu.berkeley.nlp.math.ArrayMath;
/**
 * Static helpers for deep-cloning, filling, copying, combining, and printing
 * primitive arrays of up to four dimensions. All methods preserve {@code null}
 * sub-arrays rather than throwing.
 */
public class ArrayUtil {
    /** Deep-clones a 2-d boolean array; null rows are kept as null. */
    public static boolean[][] clone(boolean[][] a) {
        boolean[][] res = new boolean[a.length][];
        for (int i = 0; i < a.length; i++) {
            if (a[i] != null) res[i] = a[i].clone();
        }
        return res;
    }

    /** Deep-clones a 3-d boolean array; null sub-arrays are kept as null. */
    public static boolean[][][] clone(boolean[][][] a) {
        boolean[][][] res = new boolean[a.length][][];
        for (int i = 0; i < a.length; i++) {
            if (a[i] != null) res[i] = clone(a[i]);
        }
        return res;
    }

    /**
     * Deep-clones a 4-d boolean array; null sub-arrays are kept as null.
     * (Fix: previously a null element caused a NullPointerException,
     * inconsistent with the 2-d/3-d overloads.)
     */
    public static boolean[][][][] clone(boolean[][][][] a) {
        boolean[][][][] res = new boolean[a.length][][][];
        for (int i = 0; i < a.length; i++) {
            if (a[i] != null) res[i] = clone(a[i]);
        }
        return res;
    }

    /** Deep-clones a 2-d int array; null rows are kept as null. */
    public static int[][] clone(int[][] a) {
        int[][] res = new int[a.length][];
        for (int i = 0; i < a.length; i++) {
            if (a[i] != null) res[i] = a[i].clone();
        }
        return res;
    }

    /** Deep-clones a 2-d double array; null rows are kept as null. */
    public static double[][] clone(double[][] a) {
        double[][] res = new double[a.length][];
        for (int i = 0; i < a.length; i++) {
            if (a[i] != null) res[i] = a[i].clone();
        }
        return res;
    }

    /** Deep-clones a 3-d double array; null sub-arrays are kept as null. */
    public static double[][][] clone(double[][][] a) {
        double[][][] res = new double[a.length][][];
        for (int i = 0; i < a.length; i++) {
            if (a[i] != null) res[i] = clone(a[i]);
        }
        return res;
    }

    /**
     * Deep-clones a 4-d double array; null sub-arrays are kept as null.
     * (Fix: previously a null element caused a NullPointerException,
     * inconsistent with the 2-d/3-d overloads.)
     */
    public static double[][][][] clone(double[][][][] a) {
        double[][][][] res = new double[a.length][][][];
        for (int i = 0; i < a.length; i++) {
            if (a[i] != null) res[i] = clone(a[i]);
        }
        return res;
    }

    /** Fills every element of a 2-d float array with {@code val}. */
    public static void fill(float[][] a, float val) {
        for (float[] row : a) {
            Arrays.fill(row, val);
        }
    }

    /** Fills every element of a 3-d float array with {@code val}. */
    public static void fill(float[][][] a, float val) {
        for (float[][] plane : a) {
            fill(plane, val);
        }
    }

    /** Fills every element of a 2-d int array with {@code val}. */
    public static void fill(int[][] a, int val) {
        for (int[] row : a) {
            Arrays.fill(row, val);
        }
    }

    /** Fills every element of a 3-d int array with {@code val}. */
    public static void fill(int[][][] a, int val) {
        for (int[][] plane : a) {
            fill(plane, val);
        }
    }

    /** Fills every element of a 2-d double array with {@code val}. */
    public static void fill(double[][] a, double val) {
        for (double[] row : a) {
            Arrays.fill(row, val);
        }
    }

    /** Fills every element of a 3-d double array with {@code val}. */
    public static void fill(double[][][] a, double val) {
        for (double[][] plane : a) {
            fill(plane, val);
        }
    }

    /** Fills the first {@code until} planes of a 3-d double array with {@code val}. */
    public static void fill(double[][][] a, int until, double val) {
        for (int i = 0; i < until; i++) {
            fill(a[i], val);
        }
    }

    /** Fills the first {@code until} planes of a 3-d int array with {@code val}. */
    public static void fill(int[][][] a, int until, int val) {
        for (int i = 0; i < until; i++) {
            fill(a[i], val);
        }
    }

    /** Fills every element of a 2-d boolean array with {@code val}. */
    public static void fill(boolean[][] a, boolean val) {
        for (boolean[] row : a) {
            Arrays.fill(row, val);
        }
    }

    /**
     * Renders a 2-d float array as "[row, row, ]". Output is identical to the
     * previous implementation (including the trailing ", " before the closing
     * bracket), but built with a StringBuilder instead of O(n^2) concat calls.
     */
    public static String toString(float[][] a) {
        StringBuilder s = new StringBuilder("[");
        for (float[] row : a) {
            s.append(Arrays.toString(row)).append(", ");
        }
        return s.append("]").toString();
    }

    /** Renders a 3-d float array as "[plane, plane, ]"; see {@link #toString(float[][])}. */
    public static String toString(float[][][] a) {
        StringBuilder s = new StringBuilder("[");
        for (float[][] plane : a) {
            s.append(toString(plane)).append(", ");
        }
        return s.append("]").toString();
    }

    /** Renders a 2-d double array as "[row, row, ]"; see {@link #toString(float[][])}. */
    public static String toString(double[][] a) {
        StringBuilder s = new StringBuilder("[");
        for (double[] row : a) {
            s.append(Arrays.toString(row)).append(", ");
        }
        return s.append("]").toString();
    }

    /** Renders a 3-d double array as "[plane, plane, ]"; see {@link #toString(float[][])}. */
    public static String toString(double[][][] a) {
        StringBuilder s = new StringBuilder("[");
        for (double[][] plane : a) {
            s.append(toString(plane)).append(", ");
        }
        return s.append("]").toString();
    }

    /** Renders a 2-d boolean array as "[row, row, ]"; see {@link #toString(float[][])}. */
    public static String toString(boolean[][] a) {
        StringBuilder s = new StringBuilder("[");
        for (boolean[] row : a) {
            s.append(Arrays.toString(row)).append(", ");
        }
        return s.append("]").toString();
    }

    /**
     * Deep-copies a 2-d double array; returns null for null input and keeps
     * null rows as null. (Same behavior as {@link #clone(double[][])} plus the
     * top-level null check.)
     */
    public static double[][] copyArray(double[][] a) {
        if (a == null) return null;
        double[][] res = new double[a.length][];
        for (int i = 0; i < a.length; i++) {
            if (a[i] == null) continue;
            res[i] = a[i].clone();
        }
        return res;
    }

    /**
     * Copies the contents of {@code a} into the pre-allocated {@code res},
     * element by element; null rows in {@code a} are skipped.
     */
    public static void copyArray(double[][] a, double[][] res) {
        for (int i = 0; i < a.length; i++) {
            if (a[i] == null) continue;
            for (int j = 0; j < a[i].length; ++j) {
                res[i][j] = a[i][j];
            }
        }
    }

    /** Deep-copies a 3-d double array; returns null for null input. */
    public static double[][][] copyArray(double[][][] a) {
        if (a == null) return null;
        double[][][] res = new double[a.length][][];
        for (int i = 0; i < a.length; i++) {
            res[i] = copyArray(a[i]);
        }
        return res;
    }

    /** Multiplies every element of a 3-d double array by {@code d}, in place. */
    public static void multiplyInPlace(double[][][] array, double d) {
        for (int i = 0; i < array.length; i++) {
            multiplyInPlace(array[i], d);
        }
    }

    /** Multiplies every element of a 2-d double array by {@code d}, in place. */
    public static void multiplyInPlace(double[][] array, double d) {
        for (int i = 0; i < array.length; i++) {
            multiplyInPlace(array[i], d);
        }
    }

    /** Multiplies every element of a 1-d double array by {@code d}, in place; no-op for null. */
    public static void multiplyInPlace(double[] array, double d) {
        if (array == null) return;
        for (int i = 0; i < array.length; i++) {
            array[i] *= d;
        }
    }

    /**
     * Adds {@code b} into {@code a} element-wise, in place. Silently returns
     * when either argument is null or the lengths disagree.
     */
    public static void addInPlace(double[][][] a, double[][][] b) {
        if (a == null || b == null)
            return;
        if (a.length != b.length)
            return;
        for (int i = 0; i < a.length; ++i) {
            addInPlace(a[i], b[i]);
        }
    }

    /** 4-d variant of {@link #addInPlace(double[][][], double[][][])}. */
    public static void addInPlace(double[][][][] a, double[][][][] b) {
        if (a == null || b == null)
            return;
        if (a.length != b.length)
            return;
        for (int i = 0; i < a.length; ++i) {
            addInPlace(a[i], b[i]);
        }
    }

    /**
     * Adds {@code b} into {@code a} element-wise, in place, delegating rows to
     * ArrayMath. Null arguments, mismatched lengths, and null rows are skipped.
     */
    public static void addInPlace(double[][] a, double[][] b) {
        if (a == null || b == null)
            return;
        if (a.length != b.length)
            return;
        for (int i = 0; i < a.length; ++i) {
            if (a[i] == null || b[i] == null)
                continue;
            ArrayMath.addInPlace(a[i], b[i]);
        }
    }

    /**
     * Subtracts {@code b} from {@code a} element-wise, in place, delegating
     * rows to ArrayMath. Null arguments, mismatched lengths, and null rows are
     * skipped.
     */
    public static void subtractInPlace(double[][] a, double[][] b) {
        if (a == null || b == null)
            return;
        if (a.length != b.length)
            return;
        for (int i = 0; i < a.length; ++i) {
            if (a[i] == null || b[i] == null)
                continue;
            ArrayMath.subtractInPlace(a[i], b[i]);
        }
    }

    /**
     * Product of the NON-ZERO entries of {@code a} — zeros are skipped rather
     * than zeroing the result (deliberate: historically used for variances that
     * had collapsed to zero). Returns 1.0 for an empty array.
     */
    public static double product(double[] a) {
        double retVal = 1.0;
        for (double d : a) {
            if (d != 0)
                retVal *= d;
        }
        return retVal;
    }

    /** Element-wise reciprocal; zero entries map to 0 rather than infinity. */
    public static double[] inverse(double[] a) {
        double[] retVal = new double[a.length];
        for (int i = 0; i < a.length; ++i) {
            retVal[i] = (a[i] == 0.0) ? 0 : // Double.POSITIVE_INFINITY :
                    1.0 / a[i];
        }
        return retVal;
    }

    /**
     * Outer product a b^T of two equal-length vectors; returns null when the
     * lengths differ (caller-checked contract, kept for compatibility).
     */
    static double[][] outerProduct(double[] a, double[] b) {
        if (a.length != b.length) {
            return null;
        }
        double[][] retVal = new double[a.length][a.length];
        for (int i = 0; i < a.length; ++i) {
            for (int j = 0; j < a.length; ++j) {
                retVal[i][j] = a[i] * b[j];
            }
        }
        return retVal;
    }

    /**
     * Returns a new 2-d array with every element of {@code sumSq} multiplied by
     * {@code d}. (Fix: removed a dead per-row allocation that was immediately
     * overwritten by ArrayMath.multiply's result.)
     *
     * @param sumSq the input matrix (not modified)
     * @param d     the scalar multiplier
     * @return a newly allocated scaled matrix
     */
    public static double[][] multiply(double[][] sumSq, double d) {
        double[][] retVal = new double[sumSq.length][];
        for (int i = 0; i < sumSq.length; ++i) {
            retVal[i] = ArrayMath.multiply(sumSq[i], d);
        }
        return retVal;
    }

    /**
     * Renders a 1-d double array as "[x,y,...]" (no spaces); returns "[null]"
     * for null input. Uses StringBuilder (was StringBuffer — no shared state,
     * so synchronization was wasted).
     */
    public static String toString(double[] array) {
        if (array == null) return "[null]";
        StringBuilder s = new StringBuilder("[");
        for (int i = 0; i < array.length; ++i) {
            if (i > 0) s.append(",");
            s.append(array[i]);
        }
        s.append("]");
        return s.toString();
    }
}
| |
/*
* Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ensembl.healthcheck.eg_gui;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.Font;
import java.awt.Rectangle;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.FileOutputStream;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTree;
import javax.swing.ToolTipManager;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeCellRenderer;
import org.ensembl.healthcheck.ReportLine;
import org.ensembl.healthcheck.ReportManager;
import org.ensembl.healthcheck.eg_gui.GuiTestResultWindowTab;
import org.ensembl.healthcheck.eg_gui.JLabelTreeCellRenderer;
import org.ensembl.healthcheck.eg_gui.ResultNode;
import org.ensembl.healthcheck.eg_gui.ResultTreePanel;
/**
* <p>
* This is mostly the same as org.ensembl.healthcheck.gui.GuiTestResultWindow
* with some minor changes so it can appear in the eg_gui for people who want
* to view the test results in the format to which they are used.
* </p>
*
* Display the results of a test run.
*/
public class GuiTestResultWindowTab extends JPanel {

    /** File (in the current working directory) that the "Save" button writes the report to. */
    private static final String OUTPUT_FILE = "GuiTestRunner.txt";

    /**
     * Create a new GuiTestResultWindowTab.
     *
     * @param outputLevelAsString Human-readable name of the minimum output level, shown in the tree title.
     * @param outputLevel         Numeric minimum report level passed through to the result tree.
     */
    public GuiTestResultWindowTab(String outputLevelAsString, int outputLevel) {

        this.setLayout(new BorderLayout());

        JPanel topPanel = new JPanel(new BorderLayout());
        topPanel.setBackground(Color.RED);

        ResultTreePanel resultTreePanel = new ResultTreePanel(outputLevelAsString, outputLevel);
        topPanel.add(resultTreePanel, BorderLayout.CENTER);

        JPanel bottomPanel = new JPanel(new FlowLayout(FlowLayout.CENTER));
        bottomPanel.setBackground(Color.WHITE);

        JButton saveButton = new JButton("Save");
        saveButton.addActionListener(new ActionListener() {

            public void actionPerformed(ActionEvent e) {

                boolean saved = saveResults();

                // Report the actual outcome; previously a success message was
                // shown even when writing the file had failed.
                if (saved) {
                    JOptionPane.showMessageDialog((Component) e.getSource(), "Results saved to " + OUTPUT_FILE);
                } else {
                    JOptionPane.showMessageDialog((Component) e.getSource(), "Error writing results to " + OUTPUT_FILE);
                }
            }
        });

        bottomPanel.add(saveButton);

        this.add(topPanel, BorderLayout.CENTER);
        this.add(bottomPanel, BorderLayout.SOUTH);

        // Centre on screen
        Dimension screen = Toolkit.getDefaultToolkit().getScreenSize();
        Rectangle frame = getBounds();
        setLocation((screen.width - frame.width) / 2, (screen.height - frame.height) / 2);
    }

    /**
     * Write all reports currently held by ReportManager, grouped by test case,
     * to OUTPUT_FILE.
     *
     * @return true if the file was written successfully, false otherwise.
     */
    private boolean saveResults() {

        PrintWriter pw = null;
        try {
            pw = new PrintWriter(new FileOutputStream(OUTPUT_FILE));
            pw.write("---- RESULTS BY TEST CASE ----\n");
            Map map = ReportManager.getAllReportsByTestCase();
            Iterator it = map.keySet().iterator();
            while (it.hasNext()) {
                String key = (String) it.next();
                // Write the test-case name as a section header. The original
                // code printed it to stdout, leaving the saved file with no
                // indication of which test each report line belonged to.
                pw.write("\n" + key + "\n");
                List lines = (List) map.get(key);
                Iterator it2 = lines.iterator();
                while (it2.hasNext()) {
                    ReportLine reportLine = (ReportLine) it2.next();
                    pw.write(" " + reportLine.getDatabaseName() + ": " + reportLine.getMessage() + "\n");
                } // while it2
            } // while it
            return true;
        } catch (Exception ee) {
            System.err.println("Error writing to " + OUTPUT_FILE);
            ee.printStackTrace();
            return false;
        } finally {
            // Always release the file handle, even if writing failed part-way;
            // previously close() was skipped whenever an exception was thrown.
            if (pw != null) {
                pw.close();
            }
        }
    }

    // -------------------------------------------------------------------------

    /**
     * Command-line entry point.
     *
     * @param args The command-line arguments (unused).
     */
    public static void main(String[] args) {

        GuiTestResultWindowTab gtrw = new GuiTestResultWindowTab("", 0);
        gtrw.setVisible(true);
    }

    // -------------------------------------------------------------------------

} // GuiTestResultWindowTab
//-------------------------------------------------------------------------
/**
* A class that creates a panel (in a JScrollPane) containing tests, and provides methods for
* accessing the selected ones.
*/
class ResultTreePanel extends JScrollPane {

    // Tree of test -> database -> HTML-detail nodes displayed in this scroll pane.
    private JTree tree;

    //public ResultTreePanel(GuiTestRunnerFrame gtrf) {
    /**
     * Build the result tree from the reports currently held by ReportManager.
     *
     * @param outputLevelAsString Human-readable name of the minimum output level, shown in the root title.
     * @param outputLevel Minimum report level; passed to ReportManager to filter the reports included.
     */
    public ResultTreePanel(String outputLevelAsString, int outputLevel) {

        JPanel panel = new JPanel();
        panel.setLayout(new BorderLayout());
        panel.setBackground(Color.GREEN);

        String title = "Test Results - minimum output level: " + outputLevelAsString.toLowerCase();
        DefaultMutableTreeNode top = new DefaultMutableTreeNode(new ResultNode(title, false, false, false, false));

        // One child of the root per test case, keyed by test name.
        Map reportsByTest = ReportManager.getAllReportsByTestCase(outputLevel);
        Set tests = reportsByTest.keySet();
        Iterator it = tests.iterator();
        while (it.hasNext()) {

            String test = (String) it.next();
            ResultNode n1 = new ResultNode(test, true, false, false, ReportManager.allDatabasesPassed(test));
            DefaultMutableTreeNode testNode = new DefaultMutableTreeNode(n1);

            List reports = (ArrayList) reportsByTest.get(test);
            Iterator it2 = reports.iterator();
            // NOTE(review): this assumes report lines for the same database are
            // contiguous in the list — a database node (with its single HTML
            // detail child) is only added when the database name changes.
            String lastDB = "";
            while (it2.hasNext()) {

                ReportLine line = (ReportLine) it2.next();
                String database = line.getDatabaseName();
                if (!database.equals(lastDB)) {
                    ResultNode n2 = new ResultNode(database, false, true, false, ReportManager.databasePassed(test, database));
                    DefaultMutableTreeNode dbNode = new DefaultMutableTreeNode(n2);
                    testNode.add(dbNode);
                    // All report lines for this test/database pair are rendered
                    // together as one HTML leaf node.
                    String detail = htmlize(ReportManager.getReports(test, database));
                    ResultNode n3 = new ResultNode(detail, false, false, true, false);
                    DefaultMutableTreeNode detailNode = new DefaultMutableTreeNode(n3);
                    dbNode.add(detailNode);
                }
                lastDB = database;
            } // while it2

            top.add(testNode);
        }

        tree = new JTree(top);
        //tree.setRootVisible(false);
        // Register the tree so the per-node tooltips set by the cell renderer are shown.
        ToolTipManager.sharedInstance().registerComponent(tree);
        tree.setCellRenderer(new JLabelTreeCellRenderer());
        // Row height 0 lets each row size itself, needed for the multi-line HTML detail nodes.
        tree.setRowHeight(0);
        panel.add(tree);

        setViewportView(panel);

        // make window as wide as it needs to be plus a bit of padding, fixed height
        setPreferredSize(new Dimension(getPreferredSize().width + 150, 500));
    }

    // -------------------------------------------------------------------------

    /**
     * Render a list of report lines as a single HTML string, one &lt;br&gt;-separated
     * line per report, coloured according to its report level.
     *
     * @param reports List of ReportLine objects for one test/database pair.
     * @return HTML fragment wrapped in &lt;html&gt; tags.
     */
    private String htmlize(List reports) {

        StringBuffer buf = new StringBuffer();
        buf.append("<html>");
        Iterator it = reports.iterator();
        while (it.hasNext()) {
            ReportLine line = (ReportLine) it.next();
            buf.append(getFontForReport(line));
            buf.append(line.getMessage());
            buf.append("</font>");
            buf.append("<br>");
        }
        buf.append("</html>");
        return buf.toString();
    }

    //---------------------------------------------------------------------

    /**
     * Choose an opening &lt;font&gt; tag whose colour reflects the report level
     * (red for problems, grey for info, green for correct, black otherwise).
     *
     * @param line The report line whose level is inspected.
     * @return Opening font tag; the caller is responsible for the closing tag.
     */
    private String getFontForReport(ReportLine line) {

        String s1 = "";
        switch (line.getLevel()) {
        case (ReportLine.PROBLEM):
            s1 = "<font color='red'>";
            break;
        case (ReportLine.WARNING):
            s1 = "<font color='black'>";
            break;
        case (ReportLine.INFO):
            s1 = "<font color='grey'>";
            break;
        case (ReportLine.CORRECT):
            s1 = "<font color='green'>";
            break;
        default:
            s1 = "<font color='black'>";
        }
        return s1;
    }

    // -------------------------------------------------------------------------

} // ResultTreePanel
// -------------------------------------------------------------------------
/**
* Custom cell renderer for a tree of JLabels.
*/
class JLabelTreeCellRenderer extends DefaultTreeCellRenderer {

    // Colours used to indicate pass (green) and fail (red).
    private final Color green = new Color(0, 192, 0);

    private final Color red = new Color(192, 0, 0);

    public JLabelTreeCellRenderer() {
    }

    /**
     * Render a result-tree node: test names are bold and coloured by their
     * overall pass/fail state, database names are coloured by their per-database
     * pass/fail state, and every node gets a tooltip summarising pass/fail
     * counts obtained from ReportManager.
     */
    @Override
    public Component getTreeCellRendererComponent(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf,
            int row, boolean hasFocus) {

        super.getTreeCellRendererComponent(tree, value, selected, expanded, leaf, row, hasFocus);

        DefaultMutableTreeNode dmtNode = (DefaultMutableTreeNode) value;
        ResultNode node = (ResultNode) (dmtNode.getUserObject());

        String defaultFontName = getFont().getName();
        int defaultFontSize = getFont().getSize();

        // defaults — reset everything because renderers are shared across rows
        setText(node.getText());
        setIcon(null);
        setForeground(Color.BLACK);
        setBackground(Color.WHITE);
        setFont(new Font(defaultFontName, Font.PLAIN, defaultFontSize));
        setToolTipText(null);

        // node is rendered differently depending on how its flags are set
        if (node.isTestName()) {

            setFont(new Font(defaultFontName, Font.BOLD, defaultFontSize));
            setForeground(node.passed() ? green : red);
            int[] passesFails = ReportManager.countPassesAndFailsTest(node.getText());
            setToolTipText(passesFails[0] + " databases passed, " + passesFails[1] + " databases failed");

        } else if (node.isDatabaseName()) {

            setForeground(node.passed() ? green : red);
            int[] passesFails = ReportManager.countPassesAndFailsDatabase(node.getText());
            setToolTipText(node.getText() + " passed a total of " + passesFails[0] + " tests and failed a total of "
                    + passesFails[1] + " tests");

        } else {

            // other nodes - e.g. root
            setFont(new Font(defaultFontName, Font.BOLD, defaultFontSize));
            int[] passesFails = ReportManager.countPassesAndFailsAll();
            setToolTipText("A total of " + passesFails[0] + " individual tests passed and " + passesFails[1] + " failed");
        }

        return this;
    }
}
// -------------------------------------------------------------------------
/**
* Class to store information about a node of the result tree; a node may represent a test, a
* database name, a database label, or nothing in particular.
*/
class ResultNode {

    // Node label: a test name, a database name, or an HTML detail string.
    private final String text;

    private final boolean isDatabaseName;

    private final boolean isTestName;

    private final boolean isDatabaseLabel;

    private final boolean passed;

    /**
     * Create a new ResultNode. Exactly one of the three kind flags is expected
     * to be set (or none, for e.g. the root node).
     *
     * @param text            The text to display for this node.
     * @param isTestName      True if this node represents a test case.
     * @param isDatabaseName  True if this node represents a database.
     * @param isDatabaseLabel True if this node holds the HTML report detail.
     * @param passed          Whether the test/database this node represents passed.
     */
    public ResultNode(String text, boolean isTestName, boolean isDatabaseName, boolean isDatabaseLabel, boolean passed) {

        this.text = text;
        this.isTestName = isTestName;
        this.isDatabaseName = isDatabaseName;
        this.isDatabaseLabel = isDatabaseLabel;
        this.passed = passed;
    }

    /**
     * @return Whether the test/database represented by this node passed.
     */
    public boolean passed() {

        return passed;
    }

    /**
     * @return Returns the isDatabaseLabel.
     */
    public boolean isDatabaseLabel() {

        return isDatabaseLabel;
    }

    /**
     * @return Returns the isDatabaseName.
     */
    public boolean isDatabaseName() {

        return isDatabaseName;
    }

    /**
     * @return Returns the isTestName.
     */
    public boolean isTestName() {

        return isTestName;
    }

    /**
     * @return Returns the text.
     */
    public String getText() {

        return text;
    }

    /**
     * @return The node text; used directly as the tree node's display string.
     */
    @Override
    public String toString() {

        return text;
    }
}
// -------------------------------------------------------------------------
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.percolate;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.percolator.PercolatorService;
import org.elasticsearch.rest.action.support.RestActions;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.facet.InternalFacets;
import org.elasticsearch.search.highlight.HighlightField;
import java.io.IOException;
import java.util.*;
/**
*
*/
/**
 * Response of a percolate request: which registered queries matched the
 * percolated document, plus optional facets and aggregations, and the broadcast
 * shard header inherited from {@link BroadcastOperationResponse}.
 */
public class PercolateResponse extends BroadcastOperationResponse implements Iterable<PercolateResponse.Match>, ToXContent {

    public static final Match[] EMPTY = new Match[0];

    // Time the percolate took, in milliseconds.
    private long tookInMillis;
    // Matching queries; may be null (see getMatches()).
    private Match[] matches;
    // Total number of matching queries (see getCount()).
    private long count;
    // Facets over the query metadata; may be null (see getFacets()).
    private InternalFacets facets;
    // Aggregations over the query metadata; may be null (see getAggregations()).
    private InternalAggregations aggregations;

    /**
     * Full constructor: shard header, matches, total count, timing, and the
     * optional facets/aggregations (either may be null).
     */
    public PercolateResponse(int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures,
                             Match[] matches, long count, long tookInMillis, InternalFacets facets, InternalAggregations aggregations) {
        super(totalShards, successfulShards, failedShards, shardFailures);
        this.tookInMillis = tookInMillis;
        this.matches = matches;
        this.count = count;
        this.facets = facets;
        this.aggregations = aggregations;
    }

    /**
     * Constructor without count/facets/aggregations; those fields keep their
     * defaults (count 0, facets and aggregations null).
     */
    public PercolateResponse(int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures, long tookInMillis, Match[] matches) {
        super(totalShards, successfulShards, failedShards, shardFailures);
        this.tookInMillis = tookInMillis;
        this.matches = matches;
    }

    // No-arg constructor used before deserializing via readFrom(StreamInput).
    PercolateResponse() {
    }

    public PercolateResponse(Match[] matches) {
        this.matches = matches;
    }

    /**
     * How long the percolate took.
     */
    public TimeValue getTook() {
        return new TimeValue(tookInMillis);
    }

    /**
     * How long the percolate took in milliseconds.
     */
    public long getTookInMillis() {
        return tookInMillis;
    }

    /**
     * @return The queries that match with the document being percolated. This can return <code>null</code>
     *         if the response was created or deserialized without a match array.
     */
    public Match[] getMatches() {
        return this.matches;
    }

    /**
     * @return The total number of queries that have matched with the document being percolated.
     */
    public long getCount() {
        return count;
    }

    /**
     * @return Any facet that has been executed on the query metadata. This can return <code>null</code>.
     */
    public InternalFacets getFacets() {
        return facets;
    }

    /**
     * @return Any aggregations that has been executed on the query metadata. This can return <code>null</code>.
     */
    public InternalAggregations getAggregations() {
        return aggregations;
    }

    /**
     * Iterate over the matches.
     * NOTE(review): throws NullPointerException when matches is null — confirm
     * callers only iterate responses constructed with a non-null match array.
     */
    @Override
    public Iterator<Match> iterator() {
        return Arrays.asList(matches).iterator();
    }

    /**
     * Serialize this response to XContent: took, shard header, total count,
     * the matches (as bare ids when the request parameter
     * {@code percolate_format=ids} is set, otherwise as objects with index, id,
     * optional score and optional highlighting), then facets and aggregations.
     */
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Fields.TOOK, tookInMillis);
        RestActions.buildBroadcastShardsHeader(builder, this);

        builder.field(Fields.TOTAL, count);
        if (matches != null) {
            builder.startArray(Fields.MATCHES);
            boolean justIds = "ids".equals(params.param("percolate_format"));
            if (justIds) {
                // Compact format: an array of query ids only.
                for (PercolateResponse.Match match : matches) {
                    builder.value(match.getId());
                }
            } else {
                for (PercolateResponse.Match match : matches) {
                    builder.startObject();
                    builder.field(Fields._INDEX, match.getIndex());
                    builder.field(Fields._ID, match.getId());
                    // NO_SCORE is a sentinel meaning scoring was not requested;
                    // the field is omitted in that case.
                    float score = match.getScore();
                    if (score != PercolatorService.NO_SCORE) {
                        builder.field(Fields._SCORE, match.getScore());
                    }
                    if (match.getHighlightFields() != null) {
                        builder.startObject(Fields.HIGHLIGHT);
                        for (HighlightField field : match.getHighlightFields().values()) {
                            builder.field(field.name());
                            if (field.fragments() == null) {
                                builder.nullValue();
                            } else {
                                builder.startArray();
                                for (Text fragment : field.fragments()) {
                                    builder.value(fragment);
                                }
                                builder.endArray();
                            }
                        }
                        builder.endObject();
                    }
                    builder.endObject();
                }
            }
            builder.endArray();
        }
        if (facets != null) {
            facets.toXContent(builder, params);
        }
        if (aggregations != null) {
            aggregations.toXContent(builder, params);
        }
        builder.endObject();
        return builder;
    }

    /**
     * Deserialize from the transport stream; must mirror writeTo exactly.
     */
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        tookInMillis = in.readVLong();
        count = in.readVLong();
        // -1 is the sentinel writeTo uses for a null matches array.
        // NOTE(review): vInt encoding of negative values is not a natural fit —
        // confirm readVInt round-trips writeVInt(-1) on this wire version.
        int size = in.readVInt();
        if (size != -1) {
            matches = new Match[size];
            for (int i = 0; i < size; i++) {
                matches[i] = new Match();
                matches[i].readFrom(in);
            }
        }
        facets = InternalFacets.readOptionalFacets(in);
        aggregations = InternalAggregations.readOptionalAggregations(in);
    }

    /**
     * Serialize to the transport stream; must mirror readFrom exactly.
     */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeVLong(tookInMillis);
        out.writeVLong(count);
        if (matches == null) {
            // Null sentinel; see the matching check in readFrom.
            out.writeVInt(-1);
        } else {
            out.writeVInt(matches.length);
            for (Match match : matches) {
                match.writeTo(out);
            }
        }
        out.writeOptionalStreamable(facets);
        out.writeOptionalStreamable(aggregations);
    }

    /**
     * A single matching query: the index and id of the registered query, the
     * match score, and optional highlighting of the percolated document.
     */
    public static class Match implements Streamable {

        private Text index;
        private Text id;
        private float score;
        // Highlighting per field; may be null when highlighting was not requested.
        private Map<String, HighlightField> hl;

        public Match(Text index, Text id, float score, Map<String, HighlightField> hl) {
            this.id = id;
            this.score = score;
            this.index = index;
            this.hl = hl;
        }

        public Match(Text index, Text id, float score) {
            this.id = id;
            this.score = score;
            this.index = index;
        }

        // No-arg constructor used before deserializing via readFrom(StreamInput).
        Match() {
        }

        public Text getIndex() {
            return index;
        }

        public Text getId() {
            return id;
        }

        public float getScore() {
            return score;
        }

        public Map<String, HighlightField> getHighlightFields() {
            return hl;
        }

        /**
         * Deserialize; field order (id, index, score, highlight map) must
         * mirror writeTo.
         */
        @Override
        public void readFrom(StreamInput in) throws IOException {
            id = in.readText();
            index = in.readText();
            score = in.readFloat();
            int size = in.readVInt();
            // NOTE(review): size 0 leaves hl null, so an empty (non-null)
            // highlight map does not round-trip — it is read back as null.
            // Presumably callers treat null and empty the same; confirm.
            if (size > 0) {
                hl = new HashMap<String, HighlightField>(size);
                for (int j = 0; j < size; j++) {
                    hl.put(in.readString(), HighlightField.readHighlightField(in));
                }
            }
        }

        /**
         * Serialize; a null highlight map is written as size 0.
         */
        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeText(id);
            out.writeText(index);
            out.writeFloat(score);
            if (hl != null) {
                out.writeVInt(hl.size());
                for (Map.Entry<String, HighlightField> entry : hl.entrySet()) {
                    out.writeString(entry.getKey());
                    entry.getValue().writeTo(out);
                }
            } else {
                out.writeVInt(0);
            }
        }
    }

    // XContent field names used by toXContent.
    static final class Fields {

        static final XContentBuilderString TOOK = new XContentBuilderString("took");
        static final XContentBuilderString TOTAL = new XContentBuilderString("total");
        static final XContentBuilderString MATCHES = new XContentBuilderString("matches");
        static final XContentBuilderString _INDEX = new XContentBuilderString("_index");
        static final XContentBuilderString _ID = new XContentBuilderString("_id");
        static final XContentBuilderString _SCORE = new XContentBuilderString("_score");
        static final XContentBuilderString HIGHLIGHT = new XContentBuilderString("highlight");
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/texttospeech/v1beta1/cloud_tts.proto
package com.google.cloud.texttospeech.v1beta1;
/**
* <pre>
* The top-level message sent by the client for the `SynthesizeSpeech` method.
* </pre>
*
* Protobuf type {@code google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest}
*/
public final class SynthesizeSpeechRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest)
SynthesizeSpeechRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use SynthesizeSpeechRequest.newBuilder() to construct.
private SynthesizeSpeechRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SynthesizeSpeechRequest() {
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SynthesizeSpeechRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownFieldProto3(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
com.google.cloud.texttospeech.v1beta1.SynthesisInput.Builder subBuilder = null;
if (input_ != null) {
subBuilder = input_.toBuilder();
}
input_ = input.readMessage(com.google.cloud.texttospeech.v1beta1.SynthesisInput.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(input_);
input_ = subBuilder.buildPartial();
}
break;
}
case 18: {
com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams.Builder subBuilder = null;
if (voice_ != null) {
subBuilder = voice_.toBuilder();
}
voice_ = input.readMessage(com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(voice_);
voice_ = subBuilder.buildPartial();
}
break;
}
case 26: {
com.google.cloud.texttospeech.v1beta1.AudioConfig.Builder subBuilder = null;
if (audioConfig_ != null) {
subBuilder = audioConfig_.toBuilder();
}
audioConfig_ = input.readMessage(com.google.cloud.texttospeech.v1beta1.AudioConfig.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(audioConfig_);
audioConfig_ = subBuilder.buildPartial();
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto.internal_static_google_cloud_texttospeech_v1beta1_SynthesizeSpeechRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto.internal_static_google_cloud_texttospeech_v1beta1_SynthesizeSpeechRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest.class, com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest.Builder.class);
}
public static final int INPUT_FIELD_NUMBER = 1;
private com.google.cloud.texttospeech.v1beta1.SynthesisInput input_;
/**
* <pre>
* Required. The Synthesizer requires either plain text or SSML as input.
* </pre>
*
* <code>.google.cloud.texttospeech.v1beta1.SynthesisInput input = 1;</code>
*/
public boolean hasInput() {
return input_ != null;
}
/**
* <pre>
* Required. The Synthesizer requires either plain text or SSML as input.
* </pre>
*
* <code>.google.cloud.texttospeech.v1beta1.SynthesisInput input = 1;</code>
*/
public com.google.cloud.texttospeech.v1beta1.SynthesisInput getInput() {
return input_ == null ? com.google.cloud.texttospeech.v1beta1.SynthesisInput.getDefaultInstance() : input_;
}
/**
* <pre>
* Required. The Synthesizer requires either plain text or SSML as input.
* </pre>
*
* <code>.google.cloud.texttospeech.v1beta1.SynthesisInput input = 1;</code>
*/
public com.google.cloud.texttospeech.v1beta1.SynthesisInputOrBuilder getInputOrBuilder() {
return getInput();
}
public static final int VOICE_FIELD_NUMBER = 2;
private com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams voice_;
/**
* <pre>
* Required. The desired voice of the synthesized audio.
* </pre>
*
* <code>.google.cloud.texttospeech.v1beta1.VoiceSelectionParams voice = 2;</code>
*/
public boolean hasVoice() {
return voice_ != null;
}
/**
* <pre>
* Required. The desired voice of the synthesized audio.
* </pre>
*
* <code>.google.cloud.texttospeech.v1beta1.VoiceSelectionParams voice = 2;</code>
*/
public com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams getVoice() {
return voice_ == null ? com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams.getDefaultInstance() : voice_;
}
/**
* <pre>
* Required. The desired voice of the synthesized audio.
* </pre>
*
* <code>.google.cloud.texttospeech.v1beta1.VoiceSelectionParams voice = 2;</code>
*/
public com.google.cloud.texttospeech.v1beta1.VoiceSelectionParamsOrBuilder getVoiceOrBuilder() {
return getVoice();
}
public static final int AUDIO_CONFIG_FIELD_NUMBER = 3;
private com.google.cloud.texttospeech.v1beta1.AudioConfig audioConfig_;
/**
* <pre>
* Required. The configuration of the synthesized audio.
* </pre>
*
* <code>.google.cloud.texttospeech.v1beta1.AudioConfig audio_config = 3;</code>
*/
public boolean hasAudioConfig() {
return audioConfig_ != null;
}
/**
* <pre>
* Required. The configuration of the synthesized audio.
* </pre>
*
* <code>.google.cloud.texttospeech.v1beta1.AudioConfig audio_config = 3;</code>
*/
public com.google.cloud.texttospeech.v1beta1.AudioConfig getAudioConfig() {
return audioConfig_ == null ? com.google.cloud.texttospeech.v1beta1.AudioConfig.getDefaultInstance() : audioConfig_;
}
/**
* <pre>
* Required. The configuration of the synthesized audio.
* </pre>
*
* <code>.google.cloud.texttospeech.v1beta1.AudioConfig audio_config = 3;</code>
*/
public com.google.cloud.texttospeech.v1beta1.AudioConfigOrBuilder getAudioConfigOrBuilder() {
return getAudioConfig();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (input_ != null) {
output.writeMessage(1, getInput());
}
if (voice_ != null) {
output.writeMessage(2, getVoice());
}
if (audioConfig_ != null) {
output.writeMessage(3, getAudioConfig());
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (input_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, getInput());
}
if (voice_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getVoice());
}
if (audioConfig_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, getAudioConfig());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest)) {
return super.equals(obj);
}
com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest other = (com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest) obj;
boolean result = true;
result = result && (hasInput() == other.hasInput());
if (hasInput()) {
result = result && getInput()
.equals(other.getInput());
}
result = result && (hasVoice() == other.hasVoice());
if (hasVoice()) {
result = result && getVoice()
.equals(other.getVoice());
}
result = result && (hasAudioConfig() == other.hasAudioConfig());
if (hasAudioConfig()) {
result = result && getAudioConfig()
.equals(other.getAudioConfig());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasInput()) {
hash = (37 * hash) + INPUT_FIELD_NUMBER;
hash = (53 * hash) + getInput().hashCode();
}
if (hasVoice()) {
hash = (37 * hash) + VOICE_FIELD_NUMBER;
hash = (53 * hash) + getVoice().hashCode();
}
if (hasAudioConfig()) {
hash = (37 * hash) + AUDIO_CONFIG_FIELD_NUMBER;
hash = (53 * hash) + getAudioConfig().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
  /**
   * <pre>
   * The top-level message sent by the client for the `SynthesizeSpeech` method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest)
      com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequestOrBuilder {
    // NOTE(generated): protoc-generated builder for SynthesizeSpeechRequest. Do not hand-edit
    // logic here — regenerating from the .proto will overwrite manual changes. Each of the three
    // singular message fields (input, voice, audio_config) follows the same pattern: a plain
    // field used until a SingleFieldBuilderV3 is lazily created, after which the builder owns
    // the value and the plain field is nulled out.

    /** Descriptor for reflection-based access to this message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto.internal_static_google_cloud_texttospeech_v1beta1_SynthesizeSpeechRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto.internal_static_google_cloud_texttospeech_v1beta1_SynthesizeSpeechRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest.class, com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest.Builder.class);
    }

    // Construct using com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested field builders when the runtime is configured to always use
    // field builders; no-op here because this message has no repeated fields to pre-build.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
          .alwaysUseFieldBuilders) {
      }
    }

    /** Resets all three fields to unset, discarding any nested field builders. */
    public Builder clear() {
      super.clear();
      if (inputBuilder_ == null) {
        input_ = null;
      } else {
        input_ = null;
        inputBuilder_ = null;
      }
      if (voiceBuilder_ == null) {
        voice_ = null;
      } else {
        voice_ = null;
        voiceBuilder_ = null;
      }
      if (audioConfigBuilder_ == null) {
        audioConfig_ = null;
      } else {
        audioConfig_ = null;
        audioConfigBuilder_ = null;
      }
      return this;
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto.internal_static_google_cloud_texttospeech_v1beta1_SynthesizeSpeechRequest_descriptor;
    }

    public com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest getDefaultInstanceForType() {
      return com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest.getDefaultInstance();
    }

    /** Builds the message, throwing if required invariants are unmet (always initialized here). */
    public com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest build() {
      com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    /**
     * Builds the message without the initialization check. For each field, the value comes from
     * the plain field when no nested builder exists, otherwise from the nested builder.
     */
    public com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest buildPartial() {
      com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest result = new com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest(this);
      if (inputBuilder_ == null) {
        result.input_ = input_;
      } else {
        result.input_ = inputBuilder_.build();
      }
      if (voiceBuilder_ == null) {
        result.voice_ = voice_;
      } else {
        result.voice_ = voiceBuilder_.build();
      }
      if (audioConfigBuilder_ == null) {
        result.audioConfig_ = audioConfig_;
      } else {
        result.audioConfig_ = audioConfigBuilder_.build();
      }
      onBuilt();
      return result;
    }

    public Builder clone() {
      return (Builder) super.clone();
    }

    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }

    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }

    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }

    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }

    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }

    /** Dispatches to the typed merge when {@code other} is the same message type. */
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest) {
        return mergeFrom((com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    /** Field-wise merge: only fields set on {@code other} overwrite/merge into this builder. */
    public Builder mergeFrom(com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest other) {
      if (other == com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest.getDefaultInstance()) return this;
      if (other.hasInput()) {
        mergeInput(other.getInput());
      }
      if (other.hasVoice()) {
        mergeVoice(other.getVoice());
      }
      if (other.hasAudioConfig()) {
        mergeAudioConfig(other.getAudioConfig());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    // Always true: proto3 messages have no required fields.
    public final boolean isInitialized() {
      return true;
    }

    /**
     * Parses from the wire and merges the result into this builder. On a parse error, whatever
     * was successfully decoded is still merged (see the finally block) before rethrowing.
     */
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private com.google.cloud.texttospeech.v1beta1.SynthesisInput input_ = null;
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.texttospeech.v1beta1.SynthesisInput, com.google.cloud.texttospeech.v1beta1.SynthesisInput.Builder, com.google.cloud.texttospeech.v1beta1.SynthesisInputOrBuilder> inputBuilder_;
    /**
     * <pre>
     * Required. The Synthesizer requires either plain text or SSML as input.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.SynthesisInput input = 1;</code>
     */
    public boolean hasInput() {
      return inputBuilder_ != null || input_ != null;
    }
    /**
     * <pre>
     * Required. The Synthesizer requires either plain text or SSML as input.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.SynthesisInput input = 1;</code>
     */
    public com.google.cloud.texttospeech.v1beta1.SynthesisInput getInput() {
      if (inputBuilder_ == null) {
        return input_ == null ? com.google.cloud.texttospeech.v1beta1.SynthesisInput.getDefaultInstance() : input_;
      } else {
        return inputBuilder_.getMessage();
      }
    }
    /**
     * <pre>
     * Required. The Synthesizer requires either plain text or SSML as input.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.SynthesisInput input = 1;</code>
     */
    public Builder setInput(com.google.cloud.texttospeech.v1beta1.SynthesisInput value) {
      if (inputBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        input_ = value;
        onChanged();
      } else {
        inputBuilder_.setMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * Required. The Synthesizer requires either plain text or SSML as input.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.SynthesisInput input = 1;</code>
     */
    public Builder setInput(
        com.google.cloud.texttospeech.v1beta1.SynthesisInput.Builder builderForValue) {
      if (inputBuilder_ == null) {
        input_ = builderForValue.build();
        onChanged();
      } else {
        inputBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Required. The Synthesizer requires either plain text or SSML as input.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.SynthesisInput input = 1;</code>
     */
    public Builder mergeInput(com.google.cloud.texttospeech.v1beta1.SynthesisInput value) {
      if (inputBuilder_ == null) {
        if (input_ != null) {
          input_ =
            com.google.cloud.texttospeech.v1beta1.SynthesisInput.newBuilder(input_).mergeFrom(value).buildPartial();
        } else {
          input_ = value;
        }
        onChanged();
      } else {
        inputBuilder_.mergeFrom(value);
      }
      return this;
    }
    /**
     * <pre>
     * Required. The Synthesizer requires either plain text or SSML as input.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.SynthesisInput input = 1;</code>
     */
    public Builder clearInput() {
      if (inputBuilder_ == null) {
        input_ = null;
        onChanged();
      } else {
        input_ = null;
        inputBuilder_ = null;
      }
      return this;
    }
    /**
     * <pre>
     * Required. The Synthesizer requires either plain text or SSML as input.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.SynthesisInput input = 1;</code>
     */
    public com.google.cloud.texttospeech.v1beta1.SynthesisInput.Builder getInputBuilder() {
      onChanged();
      return getInputFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * Required. The Synthesizer requires either plain text or SSML as input.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.SynthesisInput input = 1;</code>
     */
    public com.google.cloud.texttospeech.v1beta1.SynthesisInputOrBuilder getInputOrBuilder() {
      if (inputBuilder_ != null) {
        return inputBuilder_.getMessageOrBuilder();
      } else {
        return input_ == null ?
            com.google.cloud.texttospeech.v1beta1.SynthesisInput.getDefaultInstance() : input_;
      }
    }
    /**
     * <pre>
     * Required. The Synthesizer requires either plain text or SSML as input.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.SynthesisInput input = 1;</code>
     */
    // Lazily creates the nested builder; once created it owns the value, so input_ is nulled.
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.texttospeech.v1beta1.SynthesisInput, com.google.cloud.texttospeech.v1beta1.SynthesisInput.Builder, com.google.cloud.texttospeech.v1beta1.SynthesisInputOrBuilder>
        getInputFieldBuilder() {
      if (inputBuilder_ == null) {
        inputBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.texttospeech.v1beta1.SynthesisInput, com.google.cloud.texttospeech.v1beta1.SynthesisInput.Builder, com.google.cloud.texttospeech.v1beta1.SynthesisInputOrBuilder>(
                getInput(),
                getParentForChildren(),
                isClean());
        input_ = null;
      }
      return inputBuilder_;
    }

    private com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams voice_ = null;
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams, com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams.Builder, com.google.cloud.texttospeech.v1beta1.VoiceSelectionParamsOrBuilder> voiceBuilder_;
    /**
     * <pre>
     * Required. The desired voice of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.VoiceSelectionParams voice = 2;</code>
     */
    public boolean hasVoice() {
      return voiceBuilder_ != null || voice_ != null;
    }
    /**
     * <pre>
     * Required. The desired voice of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.VoiceSelectionParams voice = 2;</code>
     */
    public com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams getVoice() {
      if (voiceBuilder_ == null) {
        return voice_ == null ? com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams.getDefaultInstance() : voice_;
      } else {
        return voiceBuilder_.getMessage();
      }
    }
    /**
     * <pre>
     * Required. The desired voice of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.VoiceSelectionParams voice = 2;</code>
     */
    public Builder setVoice(com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams value) {
      if (voiceBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        voice_ = value;
        onChanged();
      } else {
        voiceBuilder_.setMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * Required. The desired voice of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.VoiceSelectionParams voice = 2;</code>
     */
    public Builder setVoice(
        com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams.Builder builderForValue) {
      if (voiceBuilder_ == null) {
        voice_ = builderForValue.build();
        onChanged();
      } else {
        voiceBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Required. The desired voice of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.VoiceSelectionParams voice = 2;</code>
     */
    public Builder mergeVoice(com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams value) {
      if (voiceBuilder_ == null) {
        if (voice_ != null) {
          voice_ =
            com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams.newBuilder(voice_).mergeFrom(value).buildPartial();
        } else {
          voice_ = value;
        }
        onChanged();
      } else {
        voiceBuilder_.mergeFrom(value);
      }
      return this;
    }
    /**
     * <pre>
     * Required. The desired voice of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.VoiceSelectionParams voice = 2;</code>
     */
    public Builder clearVoice() {
      if (voiceBuilder_ == null) {
        voice_ = null;
        onChanged();
      } else {
        voice_ = null;
        voiceBuilder_ = null;
      }
      return this;
    }
    /**
     * <pre>
     * Required. The desired voice of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.VoiceSelectionParams voice = 2;</code>
     */
    public com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams.Builder getVoiceBuilder() {
      onChanged();
      return getVoiceFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * Required. The desired voice of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.VoiceSelectionParams voice = 2;</code>
     */
    public com.google.cloud.texttospeech.v1beta1.VoiceSelectionParamsOrBuilder getVoiceOrBuilder() {
      if (voiceBuilder_ != null) {
        return voiceBuilder_.getMessageOrBuilder();
      } else {
        return voice_ == null ?
            com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams.getDefaultInstance() : voice_;
      }
    }
    /**
     * <pre>
     * Required. The desired voice of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.VoiceSelectionParams voice = 2;</code>
     */
    // Lazily creates the nested builder; once created it owns the value, so voice_ is nulled.
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams, com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams.Builder, com.google.cloud.texttospeech.v1beta1.VoiceSelectionParamsOrBuilder>
        getVoiceFieldBuilder() {
      if (voiceBuilder_ == null) {
        voiceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams, com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams.Builder, com.google.cloud.texttospeech.v1beta1.VoiceSelectionParamsOrBuilder>(
                getVoice(),
                getParentForChildren(),
                isClean());
        voice_ = null;
      }
      return voiceBuilder_;
    }

    private com.google.cloud.texttospeech.v1beta1.AudioConfig audioConfig_ = null;
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.texttospeech.v1beta1.AudioConfig, com.google.cloud.texttospeech.v1beta1.AudioConfig.Builder, com.google.cloud.texttospeech.v1beta1.AudioConfigOrBuilder> audioConfigBuilder_;
    /**
     * <pre>
     * Required. The configuration of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.AudioConfig audio_config = 3;</code>
     */
    public boolean hasAudioConfig() {
      return audioConfigBuilder_ != null || audioConfig_ != null;
    }
    /**
     * <pre>
     * Required. The configuration of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.AudioConfig audio_config = 3;</code>
     */
    public com.google.cloud.texttospeech.v1beta1.AudioConfig getAudioConfig() {
      if (audioConfigBuilder_ == null) {
        return audioConfig_ == null ? com.google.cloud.texttospeech.v1beta1.AudioConfig.getDefaultInstance() : audioConfig_;
      } else {
        return audioConfigBuilder_.getMessage();
      }
    }
    /**
     * <pre>
     * Required. The configuration of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.AudioConfig audio_config = 3;</code>
     */
    public Builder setAudioConfig(com.google.cloud.texttospeech.v1beta1.AudioConfig value) {
      if (audioConfigBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        audioConfig_ = value;
        onChanged();
      } else {
        audioConfigBuilder_.setMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * Required. The configuration of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.AudioConfig audio_config = 3;</code>
     */
    public Builder setAudioConfig(
        com.google.cloud.texttospeech.v1beta1.AudioConfig.Builder builderForValue) {
      if (audioConfigBuilder_ == null) {
        audioConfig_ = builderForValue.build();
        onChanged();
      } else {
        audioConfigBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Required. The configuration of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.AudioConfig audio_config = 3;</code>
     */
    public Builder mergeAudioConfig(com.google.cloud.texttospeech.v1beta1.AudioConfig value) {
      if (audioConfigBuilder_ == null) {
        if (audioConfig_ != null) {
          audioConfig_ =
            com.google.cloud.texttospeech.v1beta1.AudioConfig.newBuilder(audioConfig_).mergeFrom(value).buildPartial();
        } else {
          audioConfig_ = value;
        }
        onChanged();
      } else {
        audioConfigBuilder_.mergeFrom(value);
      }
      return this;
    }
    /**
     * <pre>
     * Required. The configuration of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.AudioConfig audio_config = 3;</code>
     */
    public Builder clearAudioConfig() {
      if (audioConfigBuilder_ == null) {
        audioConfig_ = null;
        onChanged();
      } else {
        audioConfig_ = null;
        audioConfigBuilder_ = null;
      }
      return this;
    }
    /**
     * <pre>
     * Required. The configuration of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.AudioConfig audio_config = 3;</code>
     */
    public com.google.cloud.texttospeech.v1beta1.AudioConfig.Builder getAudioConfigBuilder() {
      onChanged();
      return getAudioConfigFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * Required. The configuration of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.AudioConfig audio_config = 3;</code>
     */
    public com.google.cloud.texttospeech.v1beta1.AudioConfigOrBuilder getAudioConfigOrBuilder() {
      if (audioConfigBuilder_ != null) {
        return audioConfigBuilder_.getMessageOrBuilder();
      } else {
        return audioConfig_ == null ?
            com.google.cloud.texttospeech.v1beta1.AudioConfig.getDefaultInstance() : audioConfig_;
      }
    }
    /**
     * <pre>
     * Required. The configuration of the synthesized audio.
     * </pre>
     *
     * <code>.google.cloud.texttospeech.v1beta1.AudioConfig audio_config = 3;</code>
     */
    // Lazily creates the nested builder; once created it owns the value, so audioConfig_ is nulled.
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.texttospeech.v1beta1.AudioConfig, com.google.cloud.texttospeech.v1beta1.AudioConfig.Builder, com.google.cloud.texttospeech.v1beta1.AudioConfigOrBuilder>
        getAudioConfigFieldBuilder() {
      if (audioConfigBuilder_ == null) {
        audioConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.texttospeech.v1beta1.AudioConfig, com.google.cloud.texttospeech.v1beta1.AudioConfig.Builder, com.google.cloud.texttospeech.v1beta1.AudioConfigOrBuilder>(
                getAudioConfig(),
                getParentForChildren(),
                isClean());
        audioConfig_ = null;
      }
      return audioConfigBuilder_;
    }

    // Proto3 semantics for unknown fields (setUnknownFieldsProto3).
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFieldsProto3(unknownFields);
    }

    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest)
  }
// @@protoc_insertion_point(class_scope:google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest)
  /** Singleton default (all-fields-unset) instance, created eagerly at class-load time. */
  private static final com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest();
  }
  /** Returns the shared immutable default instance. */
  public static com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  /** Wire-format parser; each call decodes one message from the stream. */
  private static final com.google.protobuf.Parser<SynthesizeSpeechRequest>
      PARSER = new com.google.protobuf.AbstractParser<SynthesizeSpeechRequest>() {
    public SynthesizeSpeechRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new SynthesizeSpeechRequest(input, extensionRegistry);
    }
  };
  /** Static accessor for the shared parser. */
  public static com.google.protobuf.Parser<SynthesizeSpeechRequest> parser() {
    return PARSER;
  }
  /** Instance accessor for the shared parser (Message interface requirement). */
  @java.lang.Override
  public com.google.protobuf.Parser<SynthesizeSpeechRequest> getParserForType() {
    return PARSER;
  }
  /** Instance accessor for the default instance (MessageLite interface requirement). */
  public com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
package net.hockeyapp.android;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.Activity;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.graphics.Color;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.webkit.WebView;
import android.widget.BaseAdapter;
import android.widget.ListView;
import android.widget.RelativeLayout;
import android.widget.TextView;
/**
 * ListView adapter that renders HockeyApp update information. For every available version it
 * produces two rows: a plain-text title row ("Version x (y)", or "Release Notes:" for the first
 * entry) followed by a WebView row containing that version's release notes.
 *
 * <p>The adapter is fed a JSON array with one object per version; the code reads the keys
 * "version", "shortversion", "notes", "appsize" and "timestamp". It also tracks the newest
 * entry whose version code is ahead of the currently installed package.
 */
public class UpdateInfoAdapter extends BaseAdapter {
  /** View id assigned to the notes WebView so recycled rows can be recognized and reused. */
  private static final int WEB_VIEW_ID = 1337;

  Activity activity;
  /** Entry with the highest version code newer than the installed one; empty object if none. */
  JSONObject newest;
  /** All version entries, sorted newest-first by {@link #sortVersions()}. */
  ArrayList<JSONObject> sortedVersions;

  public UpdateInfoAdapter(Activity activity, String infoJSON) {
    super();
    this.activity = activity;
    loadVersions(infoJSON);
    sortVersions();
  }

  /**
   * Parses the JSON payload into {@link #sortedVersions} and remembers in {@link #newest} the
   * highest version that is newer than the installed package. Parse failures are treated as
   * "no update information" (best effort) rather than crashing the update UI.
   */
  private void loadVersions(String infoJSON) {
    this.newest = new JSONObject();
    // Initialize before parsing so a malformed payload leaves an empty list instead of null
    // (getCount()/getItem() would otherwise throw NullPointerException).
    this.sortedVersions = new ArrayList<JSONObject>();
    try {
      JSONArray versions = new JSONArray(infoJSON);
      int versionCode = activity.getPackageManager().getPackageInfo(activity.getPackageName(), PackageManager.GET_META_DATA).versionCode;
      for (int index = 0; index < versions.length(); index++) {
        JSONObject entry = versions.getJSONObject(index);
        if (entry.getInt("version") > versionCode) {
          newest = entry;
          versionCode = entry.getInt("version");
        }
        sortedVersions.add(entry);
      }
    }
    catch (JSONException e) {
      // Malformed server response: show no update info.
    }
    catch (NameNotFoundException e) {
      // Our own package should always resolve; treat as "no update" if it doesn't.
    }
  }

  /**
   * Sorts the version entries newest-first by their "version" code. Entries whose version
   * cannot be read compare as equal and keep their relative order (the sort is stable).
   */
  private void sortVersions() {
    Collections.sort(sortedVersions, new Comparator<JSONObject>() {
      @Override
      public int compare(JSONObject object1, JSONObject object2) {
        try {
          // Descending order: higher version codes come first. (The previous
          // implementation returned 0 on every path, making the sort a no-op.)
          int version1 = object1.getInt("version");
          int version2 = object2.getInt("version");
          return (version1 > version2) ? -1 : ((version1 < version2) ? 1 : 0);
        }
        catch (JSONException e) {
          return 0;
        }
      }
    });
  }

  /** Two rows (title + notes) per version. */
  public int getCount() {
    return 2 * sortedVersions.size();
  }

  /**
   * Returns the display string for a row: even positions are title rows, odd positions are the
   * release notes (HTML) of the corresponding version.
   */
  public Object getItem(int position) {
    int currentVersionCode = -1;
    try {
      currentVersionCode = activity.getPackageManager().getPackageInfo(activity.getPackageName(), PackageManager.GET_META_DATA).versionCode;
    }
    catch (NameNotFoundException e) {
      // Keep -1 so no row is marked [INSTALLED].
    }
    JSONObject version = sortedVersions.get(position / 2);
    int versionCode = 0;
    String versionName = "";
    try {
      versionCode = version.getInt("version");
      versionName = version.getString("shortversion");
    }
    catch (JSONException e) {
      // Fall back to the defaults above.
    }
    if (position % 2 == 0) {
      // Title row; the very first row doubles as the section header.
      return (position == 0 ? "Release Notes:" : "Version " + versionName + " (" + versionCode + "): " + (versionCode == currentVersionCode ? "[INSTALLED]" : ""));
    }
    // Notes row.
    return failSafeGetStringFromJSON(version, "notes", "");
  }

  /** "shortversion (version)" of the newest available update, e.g. "1.2 (42)". */
  public String getVersionString() {
    return failSafeGetStringFromJSON(newest, "shortversion", "") + " (" + failSafeGetStringFromJSON(newest, "version", "") + ")";
  }

  /**
   * Release date and download size of the newest update, e.g. "01.02.2014 - 4.20 MB".
   * NOTE(review): "timestamp" is read as an int (seconds), which overflows after 2038 and
   * silently becomes 0 for larger values — confirm against the server payload.
   */
  public String getFileInfoString() {
    int appSize = failSafeGetIntFromJSON(newest, "appsize", 0);
    long timestamp = failSafeGetIntFromJSON(newest, "timestamp", 0);
    Date date = new Date(timestamp * 1000);
    SimpleDateFormat dateFormat = new SimpleDateFormat("dd.MM.yyyy");
    return dateFormat.format(date) + " - " + String.format("%.2f", appSize / 1024F / 1024F) + " MB";
  }

  /** Reads a string value, returning {@code defaultValue} if the key is missing or not a string. */
  private static String failSafeGetStringFromJSON(JSONObject json, String name, String defaultValue) {
    try {
      return json.getString(name);
    }
    catch (JSONException e) {
      return defaultValue;
    }
  }

  /** Reads an int value, returning {@code defaultValue} if the key is missing or not an int. */
  private static int failSafeGetIntFromJSON(JSONObject json, String name, int defaultValue) {
    try {
      return json.getInt(name);
    }
    catch (JSONException e) {
      return defaultValue;
    }
  }

  /** Positions are stable ids here (was {@code new Integer(position).hashCode()}, i.e. the same value). */
  public long getItemId(int position) {
    return position;
  }

  public View getView(int position, View convertView, ViewGroup parent) {
    switch (position % 2) {
    case 0:
      return getSimpleView(position, convertView, parent);
    case 1:
      return getWebView(position, convertView, parent);
    default:
      return null;
    }
  }

  /** Builds/recycles a plain TextView row for version titles. */
  private View getSimpleView(int position, View convertView, ViewGroup parent) {
    View row = convertView;
    if (!(row instanceof TextView)) {
      LayoutInflater inflater = activity.getLayoutInflater();
      row = inflater.inflate(android.R.layout.simple_list_item_1, parent, false);
    }
    String item = (String)getItem(position);
    TextView textView = (TextView)row.findViewById(android.R.id.text1);
    float scale = activity.getResources().getDisplayMetrics().density;
    // Constant-first equals avoids an NPE when the parent has no tag set.
    boolean leftPadding = "right".equals(parent.getTag());
    boolean topPadding = (position == 0);
    textView.setPadding((int)(20 * scale) * (leftPadding ? 2 : 1), (int)(20 * scale) * (!leftPadding && topPadding ? 1 : 0), (int)(20 * scale), 0);
    textView.setText(item);
    textView.setTextColor(Color.BLACK);
    textView.setTextSize(TypedValue.COMPLEX_UNIT_SP, 16);
    return row;
  }

  /** Builds/recycles a WebView row rendering the release notes as HTML. */
  private View getWebView(int position, View convertView, ViewGroup parent) {
    View row = convertView;
    if ((row == null) || (row.findViewById(WEB_VIEW_ID) == null)) {
      RelativeLayout layout = new RelativeLayout(activity);
      layout.setLayoutParams(new ListView.LayoutParams(ListView.LayoutParams.FILL_PARENT, ListView.LayoutParams.WRAP_CONTENT));
      row = layout;
      WebView webView = new WebView(activity);
      webView.setId(WEB_VIEW_ID);
      RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.FILL_PARENT, RelativeLayout.LayoutParams.WRAP_CONTENT);
      float scale = activity.getResources().getDisplayMetrics().density;
      boolean leftPadding = "right".equals(parent.getTag());
      params.setMargins((int)(20 * scale) * (leftPadding ? 2 : 1), (int)(0 * scale), (int)(20 * scale), 0);
      webView.setLayoutParams(params);
      layout.addView(webView);
    }
    WebView webView = (WebView)row.findViewById(WEB_VIEW_ID);
    String item = (String)getItem(position);
    if (item.trim().length() == 0) {
      webView.loadData("<em>No information.</em>", "text/html", "utf-8");
    }
    else {
      webView.loadData(item, "text/html", "utf-8");
    }
    return row;
  }
}
| |
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.ocaml;
import com.facebook.buck.cxx.CxxHeaders;
import com.facebook.buck.cxx.CxxPreprocessorInput;
import com.facebook.buck.cxx.CxxSource;
import com.facebook.buck.cxx.platform.NativeLinkableInput;
import com.facebook.buck.cxx.platform.Preprocessor;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.UnflavoredBuildTarget;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.RuleKeyAppendable;
import com.facebook.buck.rules.RuleKeyObjectSink;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.args.StringArg;
import com.facebook.buck.util.MoreIterables;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Optional;
import org.immutables.value.Value;
/**
* OCaml build context
*
* <p>OCaml has two build modes, "native" (ocamlopt) and "bytecode" (ocamlc), and that terminology
* is used throughout this file -- not to be confused with the "native" terminology used in
* com.facebook.buck.cxx.platform.NativeLinkableInput.
*/
@Value.Immutable
@BuckStyleImmutable
abstract class AbstractOcamlBuildContext implements RuleKeyAppendable {
  // Subdirectory (next to the native output) holding compiled bytecode artifacts;
  // see getCompileBytecodeOutputDir().
  static final String OCAML_COMPILED_BYTECODE_DIR = "bc";
  // Subdirectory holding native (ocamlopt) compile output; see getCompileNativeOutputDir().
  static final String OCAML_COMPILED_DIR = "opt";
  // Subdirectory for sources produced from lex/yacc inputs; see getGeneratedSourceDir().
  private static final String OCAML_GENERATED_SOURCE_DIR = "gen";
  // NOTE(review): hard-coded system default; presumably overridden via
  // getOcamlInteropIncludesDir() — confirm at the call sites.
  static final Path DEFAULT_OCAML_INTEROP_INCLUDE_DIR = Paths.get("/usr/local/lib/ocaml");
  /** Target being built; all output paths below are derived from it. */
  public abstract UnflavoredBuildTarget getBuildTarget();

  public abstract ProjectFilesystem getProjectFilesystem();

  public abstract SourcePathResolver getSourcePathResolver();

  /** True for library targets (archive outputs) — see getNativeOutputPath/getBytecodeOutputPath. */
  public abstract boolean isLibrary();

  public abstract List<Arg> getFlags();

  /** All input sources; filtered by extension into C/lex/yacc/ML subsets below. */
  public abstract List<SourcePath> getInput();

  /** Include directories contributed for the native build — merged in getIncludeDirectories(). */
  public abstract List<String> getNativeIncludes();

  /** Include directories contributed for the bytecode build — merged in getIncludeDirectories(). */
  public abstract List<String> getBytecodeIncludes();

  /** Inputs for the native (ocamlopt) build */
  public abstract NativeLinkableInput getNativeLinkableInput();
  /** Inputs for the bytecode (ocamlc) build */
  public abstract NativeLinkableInput getBytecodeLinkableInput();
  /** Inputs for the C compiler (both builds) */
  public abstract NativeLinkableInput getCLinkableInput();

  public abstract List<OcamlLibrary> getOcamlInput();

  public abstract CxxPreprocessorInput getCxxPreprocessorInput();

  public abstract ImmutableSortedSet<BuildRule> getNativeCompileDeps();

  public abstract ImmutableSortedSet<BuildRule> getBytecodeCompileDeps();

  public abstract ImmutableSortedSet<BuildRule> getBytecodeLinkDeps();

  // Toolchain entries; Optional because a toolchain component may be unconfigured.
  public abstract Optional<Tool> getOcamlDepTool();

  public abstract Optional<Tool> getOcamlCompiler();

  public abstract Optional<Tool> getOcamlDebug();

  public abstract Optional<Tool> getYaccCompiler();

  public abstract Optional<Tool> getLexCompiler();

  public abstract Optional<Tool> getOcamlBytecodeCompiler();

  protected abstract List<String> getCFlags();

  protected abstract Optional<String> getOcamlInteropIncludesDir();

  protected abstract List<String> getLdFlags();

  protected abstract Preprocessor getCPreprocessor();
public ImmutableList<SourcePath> getCInput() {
return FluentIterable.from(getInput())
.filter(OcamlUtil.sourcePathExt(getSourcePathResolver(), OcamlCompilables.OCAML_C))
.toSet()
.asList();
}
public ImmutableList<SourcePath> getLexInput() {
return FluentIterable.from(getInput())
.filter(OcamlUtil.sourcePathExt(getSourcePathResolver(), OcamlCompilables.OCAML_MLL))
.toSet()
.asList();
}
public ImmutableList<SourcePath> getYaccInput() {
return FluentIterable.from(getInput())
.filter(OcamlUtil.sourcePathExt(getSourcePathResolver(), OcamlCompilables.OCAML_MLY))
.toSet()
.asList();
}
public ImmutableList<SourcePath> getMLInput() {
return FluentIterable.from(getInput())
.filter(
OcamlUtil.sourcePathExt(
getSourcePathResolver(),
OcamlCompilables.OCAML_ML,
OcamlCompilables.OCAML_RE,
OcamlCompilables.OCAML_MLI,
OcamlCompilables.OCAML_REI))
.append(getLexOutput(getLexInput()))
.append(getYaccOutput(getYaccInput()))
.toSet()
.asList();
}
private static Path getArchiveNativeOutputPath(
UnflavoredBuildTarget target, ProjectFilesystem filesystem) {
return BuildTargets.getGenPath(
filesystem,
BuildTarget.of(target),
"%s/lib" + target.getShortName() + OcamlCompilables.OCAML_CMXA);
}
private static Path getArchiveBytecodeOutputPath(
UnflavoredBuildTarget target, ProjectFilesystem filesystem) {
return BuildTargets.getGenPath(
filesystem,
BuildTarget.of(target),
"%s/lib" + target.getShortName() + OcamlCompilables.OCAML_CMA);
}
public Path getNativeOutput() {
return getNativeOutputPath(getBuildTarget(), getProjectFilesystem(), isLibrary());
}
public Path getNativePluginOutput() {
UnflavoredBuildTarget target = getBuildTarget();
return BuildTargets.getGenPath(
getProjectFilesystem(),
BuildTarget.of(target),
"%s/lib" + target.getShortName() + OcamlCompilables.OCAML_CMXS);
}
public static Path getNativeOutputPath(
UnflavoredBuildTarget target, ProjectFilesystem filesystem, boolean isLibrary) {
if (isLibrary) {
return getArchiveNativeOutputPath(target, filesystem);
} else {
return BuildTargets.getScratchPath(
filesystem, BuildTarget.of(target), "%s/" + target.getShortName() + ".opt");
}
}
public Path getBytecodeOutput() {
return getBytecodeOutputPath(getBuildTarget(), getProjectFilesystem(), isLibrary());
}
public static Path getBytecodeOutputPath(
UnflavoredBuildTarget target, ProjectFilesystem filesystem, boolean isLibrary) {
if (isLibrary) {
return getArchiveBytecodeOutputPath(target, filesystem);
} else {
return BuildTargets.getScratchPath(
filesystem, BuildTarget.of(target), "%s/" + target.getShortName());
}
}
public Path getGeneratedSourceDir() {
return getNativeOutput().getParent().resolve(OCAML_GENERATED_SOURCE_DIR);
}
public Path getCompileNativeOutputDir() {
return getCompileNativeOutputDir(getBuildTarget(), getProjectFilesystem(), isLibrary());
}
public static Path getCompileNativeOutputDir(
UnflavoredBuildTarget buildTarget, ProjectFilesystem filesystem, boolean isLibrary) {
return getNativeOutputPath(buildTarget, filesystem, isLibrary)
.getParent()
.resolve(OCAML_COMPILED_DIR);
}
public Path getCompileBytecodeOutputDir() {
return getNativeOutput().getParent().resolve(OCAML_COMPILED_BYTECODE_DIR);
}
public Path getCOutput(Path cSrc) {
String inputFileName = cSrc.getFileName().toString();
String outputFileName =
inputFileName.replaceFirst(OcamlCompilables.OCAML_C_REGEX, OcamlCompilables.OCAML_O);
return getCompileNativeOutputDir().resolve(outputFileName);
}
/**
 * Collects include directories: the parent dirs of all ML inputs, plus
 * (unless {@code excludeDeps}) the bytecode or native includes of dependencies.
 * A set builder is used to dedupe while preserving first-seen order.
 */
public ImmutableList<String> getIncludeDirectories(boolean isBytecode, boolean excludeDeps) {
  ImmutableSet.Builder<String> dirs = ImmutableSet.builder();
  for (SourcePath mlFile : getMLInput()) {
    Path sourceDir = getSourcePathResolver().getAbsolutePath(mlFile).getParent();
    if (sourceDir != null) {
      dirs.add(sourceDir.toString());
    }
  }
  if (!excludeDeps) {
    if (isBytecode) {
      dirs.addAll(getBytecodeIncludes());
    } else {
      dirs.addAll(getNativeIncludes());
    }
  }
  return ImmutableList.copyOf(dirs.build());
}
/** Returns the include directories interleaved with the compiler's include flag. */
public ImmutableList<String> getIncludeFlags(boolean isBytecode, boolean excludeDeps) {
  Iterable<String> flagged =
      MoreIterables.zipAndConcat(
          Iterables.cycle(OcamlCompilables.OCAML_INCLUDE_FLAG),
          getIncludeDirectories(isBytecode, excludeDeps));
  return ImmutableList.copyOf(flagged);
}
/** Returns the bytecode include directories interleaved with the include flag. */
public ImmutableList<String> getBytecodeIncludeFlags() {
  Iterable<String> flagged =
      MoreIterables.zipAndConcat(
          Iterables.cycle(OcamlCompilables.OCAML_INCLUDE_FLAG), getBytecodeIncludeDirectories());
  return ImmutableList.copyOf(flagged);
}
/**
 * Returns the own-source include directories (deps excluded) followed by the
 * bytecode compile output directory.
 */
public ImmutableList<String> getBytecodeIncludeDirectories() {
  return ImmutableList.<String>builder()
      .addAll(getIncludeDirectories(true, /* excludeDeps */ true))
      .add(getCompileBytecodeOutputDir().toString())
      .build();
}
/**
 * Maps each lex (.mll) input to the generated .ml source it will produce in
 * the generated-source directory.
 */
protected FluentIterable<SourcePath> getLexOutput(Iterable<SourcePath> lexInputs) {
  return FluentIterable.from(lexInputs)
      .transform(
          lexInput -> {
            String generatedName =
                getSourcePathResolver()
                    .getAbsolutePath(lexInput)
                    .getFileName()
                    .toString()
                    .replaceFirst(OcamlCompilables.OCAML_MLL_REGEX, OcamlCompilables.OCAML_ML);
            Path generated = getGeneratedSourceDir().resolve(generatedName);
            return new PathSourcePath(getProjectFilesystem(), generated);
          });
}
/**
 * Maps each yacc (.mly) input to the pair of generated sources it produces:
 * an implementation (.ml) and an interface (.mli), both in the
 * generated-source directory.
 */
protected FluentIterable<SourcePath> getYaccOutput(Iterable<SourcePath> yaccInputs) {
  return FluentIterable.from(yaccInputs)
      .transformAndConcat(
          yaccInput -> {
            String yaccFileName =
                getSourcePathResolver().getAbsolutePath(yaccInput).getFileName().toString();
            SourcePath ml =
                new PathSourcePath(
                    getProjectFilesystem(),
                    getGeneratedSourceDir()
                        .resolve(
                            yaccFileName.replaceFirst(
                                OcamlCompilables.OCAML_MLY_REGEX, OcamlCompilables.OCAML_ML)));
            SourcePath mli =
                new PathSourcePath(
                    getProjectFilesystem(),
                    getGeneratedSourceDir()
                        .resolve(
                            yaccFileName.replaceFirst(
                                OcamlCompilables.OCAML_MLY_REGEX, OcamlCompilables.OCAML_MLI)));
            return ImmutableList.of(ml, mli);
          });
}
@Override
public void appendToRuleKey(RuleKeyObjectSink sink) {
  // Appends every tool and flag that affects compilation output.
  // NOTE(review): keys are appended in the original (alphabetical) order —
  // assumed the sink is order-sensitive, so do not reorder; confirm before changing.
  sink.setReflectively("flags", getFlags());
  sink.setReflectively("input", getInput());
  sink.setReflectively("lexCompiler", getLexCompiler());
  sink.setReflectively("ocamlBytecodeCompiler", getOcamlBytecodeCompiler());
  sink.setReflectively("ocamlCompiler", getOcamlCompiler());
  sink.setReflectively("ocamlDebug", getOcamlDebug());
  sink.setReflectively("ocamlDepTool", getOcamlDepTool());
  sink.setReflectively("yaccCompiler", getYaccCompiler());
}
/**
 * Returns the C compile flags: header-include args derived from the C++
 * preprocessor input, followed by its C-specific preprocessor flags.
 */
public ImmutableList<Arg> getCCompileFlags() {
  CxxPreprocessorInput cxxPreprocessorInput = getCxxPreprocessorInput();
  ImmutableList.Builder<Arg> flags = ImmutableList.builder();
  flags.addAll(
      StringArg.from(
          CxxHeaders.getArgs(
              cxxPreprocessorInput.getIncludes(),
              getSourcePathResolver(),
              Optional.empty(),
              getCPreprocessor())));
  flags.addAll(cxxPreprocessorInput.getPreprocessorFlags().get(CxxSource.Type.C));
  return flags.build();
}
/** Interleaves {@code prefix} before each entry of {@code flags}. */
private static ImmutableList<String> addPrefix(String prefix, Iterable<String> flags) {
  Iterable<String> prefixed = MoreIterables.zipAndConcat(Iterables.cycle(prefix), flags);
  return ImmutableList.copyOf(prefixed);
}
/**
 * Returns the common C flags plus an -isystem entry pointing at the OCaml
 * interop include directory (falling back to the built-in default).
 */
public ImmutableList<String> getCommonCFlags() {
  String interopIncludes =
      getOcamlInteropIncludesDir().orElse(DEFAULT_OCAML_INTEROP_INCLUDE_DIR.toString());
  return ImmutableList.<String>builder()
      .addAll(getCFlags())
      .add("-isystem" + interopIncludes)
      .build();
}
/** Returns the linker flags, each preceded by the compiler pass-through flag -ccopt. */
public ImmutableList<String> getCommonCLinkerFlags() {
  Iterable<String> ldFlags = getLdFlags();
  return addPrefix("-ccopt", ldFlags);
}
/** Creates a builder pre-populated with the toolchain settings from {@code config}. */
public static OcamlBuildContext.Builder builder(OcamlBuckConfig config) {
  OcamlBuildContext.Builder contextBuilder = OcamlBuildContext.builder();
  contextBuilder
      .setOcamlDepTool(config.getOcamlDepTool())
      .setOcamlCompiler(config.getOcamlCompiler())
      .setOcamlDebug(config.getOcamlDebug())
      .setYaccCompiler(config.getYaccCompiler())
      .setLexCompiler(config.getLexCompiler())
      .setOcamlBytecodeCompiler(config.getOcamlBytecodeCompiler())
      .setOcamlInteropIncludesDir(config.getOcamlInteropIncludesDir())
      .setCFlags(config.getCFlags())
      .setLdFlags(config.getLdFlags());
  return contextBuilder;
}
}
| |
// Copyright 2010-2015, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package org.mozc.android.inputmethod.japanese.testing;
import android.content.Context;
import android.content.res.Configuration;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Bundle;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.SurfaceHolder.Callback2;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.view.Window;
/**
* @see MockContext
*/
/**
 * A no-op {@link Window} implementation for unit tests.
 *
 * <p>Every content/panel/feature/dispatch method is stubbed: void methods do
 * nothing, and value-returning methods return a fixed default ({@code null},
 * {@code false}, or {@code 0}).
 *
 * @see MockContext
 */
public class MockWindow extends Window {
  public MockWindow(Context context) {
    super(context);
  }

  // --- Content-view operations: all no-ops. ---
  @Override
  public void addContentView(View view, LayoutParams params) {
  }
  @Override
  public void closeAllPanels() {
  }
  @Override
  public void closePanel(int featureId) {
  }

  // --- Accessors: return inert defaults. ---
  @Override
  public View getCurrentFocus() {
    return null;
  }
  @Override
  public View getDecorView() {
    return null;
  }
  @Override
  public LayoutInflater getLayoutInflater() {
    return null;
  }
  @Override
  public int getVolumeControlStream() {
    return 0;
  }
  @Override
  public void invalidatePanelMenu(int featureId) {
  }
  @Override
  public boolean isFloating() {
    return false;
  }
  @Override
  public boolean isShortcutKey(int keyCode, KeyEvent event) {
    return false;
  }
  @Override
  protected void onActive() {
  }
  @Override
  public void onConfigurationChanged(Configuration newConfig) {
  }
  @Override
  public void openPanel(int featureId, KeyEvent event) {
  }
  @Override
  public View peekDecorView() {
    return null;
  }

  // --- Panel/menu actions: report "not handled". ---
  @Override
  public boolean performContextMenuIdentifierAction(int id, int flags) {
    return false;
  }
  @Override
  public boolean performPanelIdentifierAction(int featureId, int id, int flags) {
    return false;
  }
  @Override
  public boolean performPanelShortcut(int featureId, int keyCode, KeyEvent event, int flags) {
    return false;
  }

  // --- State save/restore: nothing is persisted. ---
  @Override
  public void restoreHierarchyState(Bundle savedInstanceState) {
  }
  @Override
  public Bundle saveHierarchyState() {
    return null;
  }

  // --- Appearance/feature setters: all no-ops. ---
  @Override
  public void setBackgroundDrawable(Drawable drawable) {
  }
  @Override
  public void setChildDrawable(int featureId, Drawable drawable) {
  }
  @Override
  public void setChildInt(int featureId, int value) {
  }
  @Override
  public void setContentView(int layoutResID) {
  }
  @Override
  public void setContentView(View view) {
  }
  @Override
  public void setContentView(View view, LayoutParams params) {
  }
  @Override
  public void setFeatureDrawable(int featureId, Drawable drawable) {
  }
  @Override
  public void setFeatureDrawableAlpha(int featureId, int alpha) {
  }
  @Override
  public void setFeatureDrawableResource(int featureId, int resId) {
  }
  @Override
  public void setFeatureDrawableUri(int featureId, Uri uri) {
  }
  @Override
  public void setFeatureInt(int featureId, int value) {
  }
  @Override
  public void setTitle(CharSequence title) {
  }
  @Deprecated
  @Override
  public void setTitleColor(int textColor) {
  }
  @Override
  public void setVolumeControlStream(int streamType) {
  }

  // --- Event dispatch hooks: report "not consumed". ---
  @Override
  public boolean superDispatchGenericMotionEvent(MotionEvent event) {
    return false;
  }
  @Override
  public boolean superDispatchKeyEvent(KeyEvent event) {
    return false;
  }
  @Override
  public boolean superDispatchKeyShortcutEvent(KeyEvent event) {
    return false;
  }
  @Override
  public boolean superDispatchTouchEvent(MotionEvent event) {
    return false;
  }
  @Override
  public boolean superDispatchTrackballEvent(MotionEvent event) {
    return false;
  }
  @Override
  public void takeInputQueue(android.view.InputQueue.Callback callback) {
  }
  @Override
  public void takeKeyEvents(boolean get) {
  }
  @Override
  public void takeSurface(Callback2 callback) {
  }
  @Override
  public void togglePanel(int featureId, KeyEvent event) {
  }

  // Stub for a hidden abstract method in the framework's Window class; no
  // @Override because the method is not part of the public SDK.
  public void alwaysReadCloseOnTouchAttr() {
  }

  // --- System-bar colors: inert defaults. ---
  @Override
  public int getStatusBarColor() {
    return 0;
  }
  @Override
  public void setStatusBarColor(int color) {
  }
  @Override
  public int getNavigationBarColor() {
    return 0;
  }
  @Override
  public void setNavigationBarColor(int color) {
  }
}
| |
// Copyright 2014 Palantir Technologies
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.palantir.stash.stashbot.hooks;
import java.sql.SQLException;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import com.atlassian.stash.build.BuildStats;
import com.atlassian.stash.build.BuildStatusService;
import com.atlassian.stash.commit.CommitService;
import com.atlassian.stash.content.Changeset;
import com.atlassian.stash.content.ChangesetsBetweenRequest;
import com.atlassian.stash.pull.PullRequest;
import com.atlassian.stash.pull.PullRequestRef;
import com.atlassian.stash.repository.Repository;
import com.atlassian.stash.scm.pull.MergeRequest;
import com.atlassian.stash.util.Page;
import com.atlassian.stash.util.PageRequest;
import com.google.common.collect.ImmutableList;
import com.palantir.stash.stashbot.config.ConfigurationPersistenceService;
import com.palantir.stash.stashbot.jobtemplate.JobType;
import com.palantir.stash.stashbot.logger.PluginLoggerFactory;
import com.palantir.stash.stashbot.persistence.PullRequestMetadata;
import com.palantir.stash.stashbot.persistence.RepositoryConfiguration;
/**
 * Unit tests for {@code PullRequestBuildSuccessMergeCheck}: the merge is vetoed
 * unless the pull request's verify build succeeded (or was overridden), with
 * extra per-commit checks in strict-verify mode.
 */
public class PullRequestBuildSuccessMergeCheckTest {

    private static final int REPO_ID = 1;
    private static final long PULL_REQUEST_ID = 1234L;
    private static final String TO_SHA = "refs/heads/master";
    private static final String TO_SHA2 = "OTHER";
    private static final String FROM_SHA = "FROMSHA";
    private static final String VERIFY_REGEX = ".*master";
    private static final String SHA_A = "142a0b425f9b7305e5a966c9b037ef589a3bbeda";
    private static final String SHA_B = "4602ac23d61910110a66fd3456f2a74d62b2d9b2";

    @Mock private ConfigurationPersistenceService cpm;
    @Mock private BuildStatusService bss;
    @Mock private CommitService cs;
    @Mock private PullRequest pr;
    @Mock private MergeRequest mr;
    @Mock private Repository repo;
    @Mock private PullRequestRef fromRef;
    @Mock private PullRequestRef toRef;
    @Mock private RepositoryConfiguration rc;
    @Mock private PullRequestMetadata prm;
    @Mock private PullRequestMetadata prm2;
    @Mock private Page<Changeset> mockPage;
    @Mock private Changeset changeA;
    @Mock private Changeset changeB;
    @Mock private BuildStats bsA;
    @Mock private BuildStats bsB;

    private final PluginLoggerFactory lf = new PluginLoggerFactory();
    private PullRequestBuildSuccessMergeCheck prmc;
    private List<Changeset> changesets;

    @Before
    public void setUp() throws SQLException {
        MockitoAnnotations.initMocks(this);

        // Repository / configuration wiring: CI enabled, PR verification on,
        // target branch matched by the verify regex, non-strict by default.
        Mockito.when(repo.getId()).thenReturn(REPO_ID);
        Mockito.when(cpm.getRepositoryConfigurationForRepository(repo)).thenReturn(rc);
        Mockito.when(cpm.getJobTypeStatusMapping(rc, JobType.VERIFY_PR)).thenReturn(true);
        Mockito.when(rc.getCiEnabled()).thenReturn(true);
        Mockito.when(rc.getVerifyBranchRegex()).thenReturn(VERIFY_REGEX);
        Mockito.when(rc.getRebuildOnTargetUpdate()).thenReturn(true);
        Mockito.when(rc.getStrictVerifyMode()).thenReturn(false);

        // Pull request refs.
        Mockito.when(mr.getPullRequest()).thenReturn(pr);
        Mockito.when(pr.getId()).thenReturn(PULL_REQUEST_ID);
        Mockito.when(pr.getFromRef()).thenReturn(fromRef);
        Mockito.when(pr.getToRef()).thenReturn(toRef);
        Mockito.when(fromRef.getRepository()).thenReturn(repo);
        Mockito.when(fromRef.getLatestChangeset()).thenReturn(TO_SHA);
        Mockito.when(toRef.getRepository()).thenReturn(repo);
        Mockito.when(toRef.getId()).thenReturn(TO_SHA);
        Mockito.when(toRef.getLatestChangeset()).thenReturn(TO_SHA);

        // Metadata: prm and prm2 share a from-sha but differ in to-sha.
        Mockito.when(cpm.getPullRequestMetadata(pr)).thenReturn(prm);
        Mockito.when(cpm.getPullRequestMetadataWithoutToRef(pr)).thenReturn(ImmutableList.of(prm, prm2));
        Mockito.when(prm.getToSha()).thenReturn(TO_SHA);
        Mockito.when(prm.getFromSha()).thenReturn(FROM_SHA);
        Mockito.when(prm2.getToSha()).thenReturn(TO_SHA2);
        Mockito.when(prm2.getFromSha()).thenReturn(FROM_SHA);

        // Two changesets between the refs, both with a successful build.
        changesets = ImmutableList.of(changeA, changeB);
        Mockito.when(
            cs.getChangesetsBetween(Mockito.any(ChangesetsBetweenRequest.class), Mockito.any(PageRequest.class)))
            .thenReturn(mockPage);
        Mockito.when(mockPage.getValues()).thenReturn(changesets);
        Mockito.when(mockPage.getIsLastPage()).thenReturn(true);
        Mockito.when(changeA.getId()).thenReturn(SHA_A);
        Mockito.when(changeB.getId()).thenReturn(SHA_B);
        Mockito.when(bss.getStats(SHA_A)).thenReturn(bsA);
        Mockito.when(bss.getStats(SHA_B)).thenReturn(bsB);
        Mockito.when(bsA.getSuccessfulCount()).thenReturn(1);
        Mockito.when(bsB.getSuccessfulCount()).thenReturn(1);

        prmc = new PullRequestBuildSuccessMergeCheck(cs, bss, cpm, lf);
    }

    /** Stubs the success/override flags on the exact-match metadata row. */
    private void stubPrimaryMetadata(boolean success, boolean override) {
        Mockito.when(prm.getSuccess()).thenReturn(success);
        Mockito.when(prm.getOverride()).thenReturn(override);
    }

    /** Asserts that the merge was allowed (no veto recorded). */
    private void verifyMergeAllowed() {
        Mockito.verify(mr, Mockito.never()).veto(Mockito.anyString(), Mockito.anyString());
    }

    /** Asserts that the merge was vetoed. */
    private void verifyMergeVetoed() {
        Mockito.verify(mr).veto(Mockito.anyString(), Mockito.anyString());
    }

    @Test
    public void testSuccessMergeCheckTest() {
        stubPrimaryMetadata(true, false);
        prmc.check(mr);
        verifyMergeAllowed();
    }

    @Test
    public void testOverrideMergeCheckTest() {
        stubPrimaryMetadata(false, true);
        prmc.check(mr);
        verifyMergeAllowed();
    }

    @Test
    public void testFailsMergeCheckTest() {
        stubPrimaryMetadata(false, false);
        prmc.check(mr);
        verifyMergeVetoed();
    }

    @Test
    public void testSkipsMergeCheckWhenPRVerifyDisabledTest() {
        Mockito.when(cpm.getJobTypeStatusMapping(rc, JobType.VERIFY_PR)).thenReturn(false);
        stubPrimaryMetadata(false, false);
        prmc.check(mr);
        verifyMergeAllowed();
    }

    @Test
    public void testSuccessMergeCheckWhenPartialMatchTest() {
        Mockito.when(toRef.getLatestChangeset()).thenReturn(TO_SHA2); // instead of TO_SHA
        // Only prm2 matches without the to-ref, and it did not succeed.
        Mockito.when(cpm.getPullRequestMetadataWithoutToRef(pr)).thenReturn(ImmutableList.of(prm2));
        Mockito.when(rc.getRebuildOnTargetUpdate()).thenReturn(false);
        stubPrimaryMetadata(true, false);
        Mockito.when(prm2.getSuccess()).thenReturn(false);
        Mockito.when(prm2.getOverride()).thenReturn(false);
        prmc.check(mr);
        verifyMergeAllowed();
    }

    @Test
    public void testFailsMergeCheckWhenPartialMatchTest() {
        Mockito.when(toRef.getLatestChangeset()).thenReturn(TO_SHA2); // instead of TO_SHA
        Mockito.when(rc.getRebuildOnTargetUpdate()).thenReturn(false);
        // Neither the exact match nor the inexact match succeeded.
        stubPrimaryMetadata(false, false);
        Mockito.when(prm2.getSuccess()).thenReturn(false);
        Mockito.when(prm2.getOverride()).thenReturn(false);
        prmc.check(mr);
        verifyMergeVetoed();
    }

    @Test
    public void testSucceedsMergeCheckWhenStrictMode() {
        // The PR build itself succeeded; strict mode adds per-commit checks,
        // and both commits have successful builds (stubbed in setUp).
        Mockito.when(prm.getSuccess()).thenReturn(true);
        Mockito.when(rc.getStrictVerifyMode()).thenReturn(true);
        prmc.check(mr);
        verifyMergeAllowed();
    }

    @Test
    public void testFailsMergeCheckWhenStrictMode() {
        // The PR build succeeded, but one commit's build did not — strict mode
        // must veto on that commit alone.
        Mockito.when(prm.getSuccess()).thenReturn(true);
        Mockito.when(rc.getStrictVerifyMode()).thenReturn(true);
        Mockito.when(bsB.getSuccessfulCount()).thenReturn(0);
        prmc.check(mr);
        verifyMergeVetoed();
    }
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2012 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.coreutils;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryUsage;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import javax.swing.filechooser.FileSystemView;
import org.hyperic.sigar.Sigar;
import org.hyperic.sigar.ptql.ProcessFinder;
import org.openide.modules.InstalledFileLocator;
import org.openide.modules.Places;
import org.sleuthkit.autopsy.casemodule.LocalDisk;
import org.sleuthkit.datamodel.SleuthkitJNI;
import org.sleuthkit.datamodel.TskCoreException;
/**
*
* Platform utilities
*/
public class PlatformUtil {
private static String javaPath = null;
public static final String OS_NAME_UNKNOWN = "unknown";
public static final String OS_VERSION_UNKNOWN = "unknown";
public static final String OS_ARCH_UNKNOWN = "unknown";
private static volatile long pid = -1;
private static volatile Sigar sigar = null;
private static volatile MemoryMXBean memoryManager = null;
/**
* Get root path where the application is installed
*
* @return absolute path string to the install root dir
*/
public static String getInstallPath() {
File coreFolder = InstalledFileLocator.getDefault().locate("core", PlatformUtil.class.getPackage().getName(), false);
File rootPath = coreFolder.getParentFile().getParentFile();
return rootPath.getAbsolutePath();
}
/**
* Get root path where the application modules are installed
*
* @return absolute path string to the install modules root dir, or null if
* not found
*/
public static String getInstallModulesPath() {
File coreFolder = InstalledFileLocator.getDefault().locate("core", PlatformUtil.class.getPackage().getName(), false);
File rootPath = coreFolder.getParentFile();
String modulesPath = rootPath.getAbsolutePath() + File.separator + "modules";
File modulesPathF = new File(modulesPath);
if (modulesPathF.exists() && modulesPathF.isDirectory()) {
return modulesPath;
} else {
rootPath = rootPath.getParentFile();
modulesPath = rootPath.getAbsolutePath() + File.separator + "modules";
modulesPathF = new File(modulesPath);
if (modulesPathF.exists() && modulesPathF.isDirectory()) {
return modulesPath;
} else {
return null;
}
}
}
/**
* Get root path where the user modules are installed
*
* @return absolute path string to the install modules root dir, or null if
* not found
*/
public static String getUserModulesPath() {
return getUserDirectory().getAbsolutePath() + File.separator + "modules";
}
/**
* get file path to the java executable binary use embedded java if
* available, otherwise use system java in PATH no validation is done if
* java exists in PATH
*
* @return file path to java binary
*/
public synchronized static String getJavaPath() {
if (javaPath != null) {
return javaPath;
}
File jrePath = new File(getInstallPath() + File.separator + "jre");
if (jrePath != null && jrePath.exists() && jrePath.isDirectory()) {
System.out.println("Embedded jre directory found in: " + jrePath.getAbsolutePath());
javaPath = jrePath.getAbsolutePath() + File.separator + "bin" + File.separator + "java";
} else {
//else use system installed java in PATH env variable
javaPath = "java";
}
System.out.println("Using java binary path: " + javaPath);
return javaPath;
}
/**
* Get user directory where application wide user settings, cache, temp
* files are stored
*
* @return File object representing user directory
*/
public static File getUserDirectory() {
return Places.getUserDirectory();
}
/**
* Get RCP project dirs
* @return
*/
public static List<String> getProjectsDirs() {
List<String> ret = new ArrayList<String>();
String projectDir = System.getProperty("netbeans.dirs");
if (projectDir == null) {
return ret;
}
String [] split = projectDir.split(";");
if (split == null || split.length == 0) {
return ret;
}
for (String path : split) {
ret.add(path);
}
return ret;
}
/**
* Get user config directory path
*
* @return Get user config directory path string
*/
public static String getUserConfigDirectory() {
return Places.getUserDirectory() + File.separator + "config";
}
/**
* Get log directory path
*
* @return Get log directory path string
*/
public static String getLogDirectory() {
return Places.getUserDirectory().getAbsolutePath() + File.separator
+ "var" + File.separator + "log" + File.separator;
}
public static String getDefaultPlatformFileEncoding() {
return System.getProperty("file.encoding");
}
public static String getDefaultPlatformCharset() {
return Charset.defaultCharset().name();
}
public static String getLogFileEncoding() {
return Charset.forName("UTF-8").name();
}
/**
* Utility to extract a resource file to a user configuration directory, if
* it does not exist - useful for setting up default configurations.
*
* @param resourceClass class in the same package as the resourceFile to
* extract
* @param resourceFile resource file name to extract
* @return true if extracted, false otherwise (if file already exists)
* @throws IOException exception thrown if extract the file failed for IO
* reasons
*/
public static boolean extractResourceToUserConfigDir(final Class resourceClass, final String resourceFile) throws IOException {
final File userDir = new File(getUserConfigDirectory());
final File resourceFileF = new File(userDir + File.separator + resourceFile);
if (resourceFileF.exists()) {
return false;
}
InputStream inputStream = resourceClass.getResourceAsStream(resourceFile);
OutputStream out = null;
InputStream in = null;
try {
in = new BufferedInputStream(inputStream);
OutputStream outFile = new FileOutputStream(resourceFileF);
out = new BufferedOutputStream(outFile);
int readBytes = 0;
while ((readBytes = in.read()) != -1) {
out.write(readBytes);
}
} finally {
if (in != null) {
in.close();
}
if (out != null) {
out.flush();
out.close();
}
}
return true;
}
/**
* Get operating system name, or OS_NAME_UNKNOWN
*
* @return OS name string
*/
public static String getOSName() {
return System.getProperty("os.name", OS_NAME_UNKNOWN);
}
/**
* Get operating system version, or OS_VERSION_UNKNOWN
*
* @return OS version string
*/
public static String getOSVersion() {
return System.getProperty("os.version", OS_VERSION_UNKNOWN);
}
/**
* Get OS arch details, or OS_ARCH_UNKNOWN
*
* @return OS arch string
*/
public static String getOSArch() {
return System.getProperty("os.arch", OS_ARCH_UNKNOWN);
}
/**
* Check if running on Windows OS
*
* @return true if running on Windows OS
*/
public static boolean isWindowsOS() {
return PlatformUtil.getOSName().toLowerCase().contains("windows");
}
/**
* Convert file path (quote) for OS specific
*
* @param origFilePath
* @return converted file path
*/
public static String getOSFilePath(String origFilePath) {
if (isWindowsOS()) {
return "\"" + origFilePath + "\"";
} else {
return origFilePath;
}
}
/**
* Get a list of all physical drives attached to the client's machine. Error
* threshold of 4 non-existent physical drives before giving up.
*
* @return list of physical drives
*/
public static List<LocalDisk> getPhysicalDrives() {
List<LocalDisk> drives = new ArrayList<LocalDisk>();
// Windows drives
if (PlatformUtil.isWindowsOS()) {
int n = 0;
int breakCount = 0;
while (true) {
String path = "\\\\.\\PhysicalDrive" + n;
if (canReadDrive(path)) {
try {
drives.add(new LocalDisk("Drive " + n, path, SleuthkitJNI.findDeviceSize(path)));
} catch (TskCoreException ex) {
// Don't add the drive because we can't read the size
}
n++;
} else {
if (breakCount > 4) { // Give up after 4 non-existent drives
break;
}
breakCount++;
n++;
}
}
// Linux drives
} else {
File dev = new File("/dev/");
File[] files = dev.listFiles();
for (File f : files) {
String name = f.getName();
if ((name.contains("hd") || name.contains("sd")) && f.canRead() && name.length() == 3) {
String path = "/dev/" + name;
if (canReadDrive(path)) {
try {
drives.add(new LocalDisk(path, path, SleuthkitJNI.findDeviceSize(path)));
} catch (TskCoreException ex) {
// Don't add the drive because we can't read the size
}
}
}
}
}
return drives;
}
/**
* Get a list all all the local drives and partitions on the client's
* machine.
*
* @return list of local drives and partitions
*/
public static List<LocalDisk> getPartitions() {
List<LocalDisk> drives = new ArrayList<LocalDisk>();
FileSystemView fsv = FileSystemView.getFileSystemView();
if (PlatformUtil.isWindowsOS()) {
File[] f = File.listRoots();
for (int i = 0; i < f.length; i++) {
String name = fsv.getSystemDisplayName(f[i]);
// Check if it is a drive, readable, and not mapped to the network
if (f[i].canRead() && !name.contains("\\\\") && (fsv.isDrive(f[i]) || fsv.isFloppyDrive(f[i]))) {
String path = f[i].getPath();
String diskPath = "\\\\.\\" + path.substring(0, path.length() - 1);
if (canReadDrive(diskPath)) {
drives.add(new LocalDisk(fsv.getSystemDisplayName(f[i]), diskPath, f[i].getTotalSpace()));
}
}
}
} else {
File dev = new File("/dev/");
File[] files = dev.listFiles();
for (File f : files) {
String name = f.getName();
if ((name.contains("hd") || name.contains("sd")) && f.canRead() && name.length() == 4) {
String path = "/dev/" + name;
if (canReadDrive(path)) {
drives.add(new LocalDisk(path, path, f.getTotalSpace()));
}
}
}
}
return drives;
}
/**
* Are we able to read this drive? Usually related to admin permissions.
*
* For all drives and partitions, we are using Java's ability to read the
* first byte of a drive to determine if TSK would be able to read the drive
* during the add image process. This returns whether the drive is readable
* or not far faster than validating if TSK can open the drive. We are
* assuming the results are almost exactly the same.
*
* @param diskPath path to the disk we want to read
* @return true if we successfully read the first byte
* @throws IOException if we fail to read
*/
private static boolean canReadDrive(String diskPath) {
BufferedInputStream br = null;
try {
File tmp = new File(diskPath);
br = new BufferedInputStream(new FileInputStream(tmp));
int b = br.read();
return b != -1;
} catch (IOException ex) {
return false;
} finally {
try {
if (br != null) {
br.close();
}
} catch (IOException ex) {
}
}
}
/**
* Query and get PID of this process
*
* @return PID of this process or -1 if it couldn't be determined
*/
public static synchronized long getPID() {
if (pid != -1) {
return pid;
}
try {
if (sigar == null) {
sigar = org.sleuthkit.autopsy.corelibs.SigarLoader.getSigar();
}
if (sigar != null) {
pid = sigar.getPid();
} else {
System.out.println("Can't get PID, sigar not initialized");
}
} catch (Exception e) {
System.out.println("Can't get PID," + e.toString());
}
return pid;
}
/**
* Query and get PID of another java process
*
* @param sigarSubQuery a sigar subquery to identify a unique java process among
* other java processes, for example, by class name, use:
* Args.*.eq=org.jboss.Main more examples here:
* http://support.hyperic.com/display/SIGAR/PTQL
*
* @return PID of a java process or -1 if it couldn't be determined
*/
public static synchronized long getJavaPID(String sigarSubQuery) {
long jpid = -1;
final String sigarQuery = "State.Name.sw=java," + sigarSubQuery;
try {
if (sigar == null) {
sigar = org.sleuthkit.autopsy.corelibs.SigarLoader.getSigar();
}
if (sigar != null) {
ProcessFinder finder = new ProcessFinder(sigar);
jpid = finder.findSingleProcess(sigarQuery);
} else {
System.out.println("Can't get PID of a java process, sigar not initialized");
}
} catch (Exception e) {
System.out.println("Can't get PID for query: " + sigarQuery + ", " + e.toString());
}
return jpid;
}
/**
* Query and get PIDs of another java processes matching a query
*
* @param sigarSubQuery a sigar subquery to identify a java processes among other
* java processes, for example, by class name, use: Args.*.eq=org.jboss.Main
* more examples here: http://support.hyperic.com/display/SIGAR/PTQL
*
* @return array of PIDs of a java processes matching the query or null if
* it couldn't be determined
*/
public static synchronized long[] getJavaPIDs(String sigarSubQuery) {
long[] jpids = null;
final String sigarQuery = "State.Name.sw=java," + sigarSubQuery;
try {
if (sigar == null) {
sigar = org.sleuthkit.autopsy.corelibs.SigarLoader.getSigar();
}
if (sigar != null) {
ProcessFinder finder = new ProcessFinder(sigar);
jpids = finder.find(sigarQuery);
} else {
System.out.println("Can't get PIDs of a java process, sigar not initialized");
}
} catch (Exception e) {
System.out.println("Can't get PIDs for query: " + sigarQuery + ", " + e.toString());
}
return jpids;
}
/**
* Kill a process by PID by sending signal to it using Sigar
*
* @param pid pid of the process to kill
*/
public static synchronized void killProcess(long pid) {
try {
if (sigar == null) {
sigar = org.sleuthkit.autopsy.corelibs.SigarLoader.getSigar();
}
if (sigar != null) {
sigar.kill(pid, 9);
} else {
System.out.println("Can't kill process by pid, sigar not initialized.");
}
} catch (Exception e) {
System.out.println("Can't kill process: " + pid + ", " + e.toString());
}
}
/**
 * Query and return virtual memory used by the process
 *
 * @return virt memory used in bytes or -1 if couldn't be queried
 */
public static synchronized long getProcessVirtualMemoryUsed() {
    // Resolve our own PID first; -1 means it could not be determined.
    final long pid = getPID();
    try {
        if (sigar == null) {
            // Lazily initialize the shared Sigar handle on first use.
            sigar = org.sleuthkit.autopsy.corelibs.SigarLoader.getSigar();
        }
        if (sigar == null || pid == -1) {
            System.out.println("Can't get virt mem used, sigar not initialized. ");
            return -1;
        }
        return sigar.getProcMem(pid).getSize();
    } catch (Exception e) {
        System.out.println("Can't get virt mem used, " + e.toString());
        return -1;
    }
}
/**
 * Return formatted string with Jvm heap and non-heap memory usage
 *
 * <p>Thread-safety fix: the original initialized {@code memoryManager} inside a
 * {@code synchronized} block but then read the field OUTSIDE the lock, so a
 * racing thread could observe a stale {@code null} and throw an NPE. The usage
 * snapshots are now taken while still holding the lock.
 *
 * @return formatted string with jvm memory usage
 */
public static String getJvmMemInfo() {
    final MemoryUsage heap;
    final MemoryUsage nonHeap;
    synchronized (PlatformUtil.class) {
        if (memoryManager == null) {
            // Lazily obtain the platform MemoryMXBean on first use.
            memoryManager = ManagementFactory.getMemoryMXBean();
        }
        // Read the field while the lock is held so the lazy write is visible.
        heap = memoryManager.getHeapMemoryUsage();
        nonHeap = memoryManager.getNonHeapMemoryUsage();
    }
    return "JVM heap usage: " + heap.toString() + ", JVM non-heap usage: " + nonHeap.toString();
}
/**
 * Return formatted string with physical memory usage
 *
 * <p>Note: the values come from {@link Runtime} (max/total/free JVM memory),
 * despite the "physical memory" wording in the output string.
 *
 * @return formatted string with physical memory usage
 */
public static String getPhysicalMemInfo() {
    final Runtime rt = Runtime.getRuntime();
    // String concatenation of a long performs the same conversion as Long.toString().
    return "Physical memory usage (max, total, free): "
            + rt.maxMemory() + ", " + rt.totalMemory() + ", " + rt.freeMemory();
}
/**
 * Return formatted string with all memory usage (jvm, physical, native)
 *
 * @return formatted string with all memory usage info
 */
public static String getAllMemUsageInfo() {
    // Three sections, newline-separated; same layout the StringBuilder version produced.
    return PlatformUtil.getPhysicalMemInfo() + "\n"
            + PlatformUtil.getJvmMemInfo() + "\n"
            + "Process Virtual Memory: " + PlatformUtil.getProcessVirtualMemoryUsed();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.websocket;
import java.io.EOFException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.AsynchronousSocketChannel;
import java.nio.channels.CompletionHandler;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLEngineResult;
import javax.net.ssl.SSLEngineResult.HandshakeStatus;
import javax.net.ssl.SSLEngineResult.Status;
import javax.net.ssl.SSLException;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.util.res.StringManager;
/**
 * Wraps the {@link AsynchronousSocketChannel} with SSL/TLS. This needs a lot
 * more testing before it can be considered robust.
 *
 * <p>Fixes applied in this revision:
 * <ul>
 * <li>{@code WrapperFuture.isDone()} returned the inverse of the correct
 *     answer.</li>
 * <li>The handshake thread completed the future even after failing it.</li>
 * <li>Read/write tasks left the {@code reading}/{@code writing} guard set if
 *     an exception escaped, permanently blocking further I/O.</li>
 * <li>{@code close()} now shuts down the worker thread pool.</li>
 * </ul>
 */
public class AsyncChannelWrapperSecure implements AsyncChannelWrapper {

    private static final Log log =
            LogFactory.getLog(AsyncChannelWrapperSecure.class);
    private static final StringManager sm =
            StringManager.getManager(Constants.PACKAGE_NAME);

    // Scratch application-data buffer for handshake wrap()/unwrap() calls,
    // which are not expected to consume or produce application data
    // (checkResult() enforces this).
    private static final ByteBuffer DUMMY = ByteBuffer.allocate(8192);

    private final AsynchronousSocketChannel socketChannel;
    private final SSLEngine sslEngine;
    // Network-side buffers, sized to the SSL session's packet buffer size.
    private final ByteBuffer socketReadBuffer;
    private final ByteBuffer socketWriteBuffer;
    // One thread for read, one for write
    private final ExecutorService executor = Executors.newFixedThreadPool(2);
    // Guard flags: at most one read and one write may be in flight at a time.
    private final AtomicBoolean writing = new AtomicBoolean(false);
    private final AtomicBoolean reading = new AtomicBoolean(false);

    public AsyncChannelWrapperSecure(AsynchronousSocketChannel socketChannel,
            SSLEngine sslEngine) {
        this.socketChannel = socketChannel;
        this.sslEngine = sslEngine;

        int socketBufferSize = sslEngine.getSession().getPacketBufferSize();
        socketReadBuffer = ByteBuffer.allocateDirect(socketBufferSize);
        socketWriteBuffer = ByteBuffer.allocateDirect(socketBufferSize);
    }

    /**
     * Starts an asynchronous, decrypting read into {@code dst}.
     *
     * @return a future yielding the number of plaintext bytes read
     * @throws IllegalStateException if a read is already in progress
     */
    @Override
    public Future<Integer> read(ByteBuffer dst) {
        WrapperFuture<Integer,Void> future = new WrapperFuture<>();

        if (!reading.compareAndSet(false, true)) {
            throw new IllegalStateException(sm.getString(
                    "asyncChannelWrapperSecure.concurrentRead"));
        }

        ReadTask readTask = new ReadTask(dst, future);

        executor.execute(readTask);

        return future;
    }

    /**
     * Starts an asynchronous, decrypting read into {@code dst}, notifying the
     * supplied completion handler with the plaintext byte count.
     *
     * @throws IllegalStateException if a read is already in progress
     */
    @Override
    public <B,A extends B> void read(ByteBuffer dst, A attachment,
            CompletionHandler<Integer,B> handler) {

        WrapperFuture<Integer,B> future =
                new WrapperFuture<>(handler, attachment);

        if (!reading.compareAndSet(false, true)) {
            throw new IllegalStateException(sm.getString(
                    "asyncChannelWrapperSecure.concurrentRead"));
        }

        ReadTask readTask = new ReadTask(dst, future);

        executor.execute(readTask);
    }

    /**
     * Starts an asynchronous, encrypting write of {@code src}.
     *
     * @return a future yielding the number of plaintext bytes consumed
     * @throws IllegalStateException if a write is already in progress
     */
    @Override
    public Future<Integer> write(ByteBuffer src) {

        WrapperFuture<Long,Void> inner = new WrapperFuture<>();

        if (!writing.compareAndSet(false, true)) {
            throw new IllegalStateException(sm.getString(
                    "asyncChannelWrapperSecure.concurrentWrite"));
        }

        WriteTask writeTask =
                new WriteTask(new ByteBuffer[] {src}, 0, 1, inner);

        executor.execute(writeTask);

        // A single-buffer write always fits in an int; adapt the Long future.
        Future<Integer> future = new LongToIntegerFuture(inner);
        return future;
    }

    /**
     * Starts an asynchronous, encrypting gathering write.
     * Note: the timeout parameters are currently not applied to the write.
     *
     * @throws IllegalStateException if a write is already in progress
     */
    @Override
    public <B,A extends B> void write(ByteBuffer[] srcs, int offset, int length,
            long timeout, TimeUnit unit, A attachment,
            CompletionHandler<Long,B> handler) {

        WrapperFuture<Long,B> future =
                new WrapperFuture<>(handler, attachment);

        if (!writing.compareAndSet(false, true)) {
            throw new IllegalStateException(sm.getString(
                    "asyncChannelWrapperSecure.concurrentWrite"));
        }

        WriteTask writeTask = new WriteTask(srcs, offset, length, future);

        executor.execute(writeTask);
    }

    /**
     * Closes the socket channel and shuts down the worker thread pool.
     */
    @Override
    public void close() {
        try {
            socketChannel.close();
        } catch (IOException e) {
            log.info(sm.getString("asyncChannelWrapperSecure.closeFail"));
        }
        // Fix: the original leaked the two non-daemon worker threads. Pending
        // tasks are pointless once the channel is closed, so interrupt them.
        executor.shutdownNow();
    }

    /**
     * Performs the TLS handshake on a dedicated thread.
     *
     * @return a future that completes when the handshake finishes (or fails)
     */
    @Override
    public Future<Void> handshake() throws SSLException {

        WrapperFuture<Void,Void> wFuture = new WrapperFuture<>();

        Thread t = new WebSocketSslHandshakeThread(wFuture);
        t.start();

        return wFuture;
    }

    /**
     * Encrypts the source buffers via the SSLEngine, writes the resulting TLS
     * records to the socket, and completes the future with the total number of
     * plaintext bytes consumed.
     */
    private class WriteTask implements Runnable {

        private final ByteBuffer[] srcs;
        private final int offset;
        private final int length;
        private final WrapperFuture<Long,?> future;

        public WriteTask(ByteBuffer[] srcs, int offset, int length,
                WrapperFuture<Long,?> future) {
            this.srcs = srcs;
            this.future = future;
            this.offset = offset;
            this.length = length;
        }

        @Override
        public void run() {
            long written = 0;

            try {
                for (int i = offset; i < offset + length; i++) {
                    ByteBuffer src = srcs[i];
                    while (src.hasRemaining()) {
                        socketWriteBuffer.clear();

                        // Encrypt the data
                        SSLEngineResult r = sslEngine.wrap(src, socketWriteBuffer);
                        written += r.bytesConsumed();
                        Status s = r.getStatus();

                        if (s == Status.OK || s == Status.BUFFER_OVERFLOW) {
                            // Need to write out the bytes and may need to read
                            // from the source again to empty it
                        } else {
                            // Status.BUFFER_UNDERFLOW - only happens on unwrap
                            // Status.CLOSED - unexpected
                            throw new IllegalStateException(sm.getString(
                                    "asyncChannelWrapperSecure.statusWrap"));
                        }

                        // Run any delegated tasks the engine needs completed.
                        if (r.getHandshakeStatus() == HandshakeStatus.NEED_TASK) {
                            Runnable runnable = sslEngine.getDelegatedTask();
                            while (runnable != null) {
                                runnable.run();
                                runnable = sslEngine.getDelegatedTask();
                            }
                        }

                        socketWriteBuffer.flip();

                        // Blocking write of the encrypted bytes to the socket.
                        int toWrite = r.bytesProduced();
                        while (toWrite > 0) {
                            Future<Integer> f =
                                    socketChannel.write(socketWriteBuffer);
                            Integer socketWrite = f.get();
                            toWrite -= socketWrite.intValue();
                        }
                    }
                }

                if (writing.compareAndSet(true, false)) {
                    future.complete(Long.valueOf(written));
                } else {
                    future.fail(new IllegalStateException(sm.getString(
                            "asyncChannelWrapperSecure.wrongStateWrite")));
                }
            } catch (Exception e) {
                // Fix: clear the guard so a later write is not blocked forever.
                writing.set(false);
                future.fail(e);
            }
        }
    }

    /**
     * Reads TLS records from the socket, decrypts them via the SSLEngine into
     * the destination buffer, and completes the future with the number of
     * plaintext bytes produced.
     */
    private class ReadTask implements Runnable {

        private final ByteBuffer dest;
        private final WrapperFuture<Integer,?> future;

        public ReadTask(ByteBuffer dest, WrapperFuture<Integer,?> future) {
            this.dest = dest;
            this.future = future;
        }

        @Override
        public void run() {
            int read = 0;

            // Set when leftover bytes in socketReadBuffer are insufficient to
            // decrypt a record and more must be read from the socket.
            boolean forceRead = false;

            try {
                while (read == 0) {
                    socketReadBuffer.compact();

                    if (forceRead) {
                        forceRead = false;
                        Future<Integer> f =
                                socketChannel.read(socketReadBuffer);
                        Integer socketRead = f.get();
                        if (socketRead.intValue() == -1) {
                            throw new EOFException(sm.getString(
                                    "asyncChannelWrapperSecure.eof"));
                        }
                    }

                    socketReadBuffer.flip();

                    if (socketReadBuffer.hasRemaining()) {
                        // Decrypt the data in the buffer
                        SSLEngineResult r =
                                sslEngine.unwrap(socketReadBuffer, dest);
                        read += r.bytesProduced();
                        Status s = r.getStatus();

                        if (s == Status.OK) {
                            // Bytes available for reading and there may be
                            // sufficient data in the socketReadBuffer to
                            // support further reads without reading from the
                            // socket
                        } else if (s == Status.BUFFER_UNDERFLOW) {
                            // There is partial data in the socketReadBuffer
                            if (read == 0) {
                                // Need more data before the partial data can be
                                // processed and some output generated
                                forceRead = true;
                            }
                            // else return the data we have and deal with the
                            // partial data on the next read
                        } else if (s == Status.BUFFER_OVERFLOW) {
                            // Not enough space in the destination buffer to
                            // store all of the data. We could use a bytes read
                            // value of -bufferSizeRequired to signal the new
                            // buffer size required but an explicit exception is
                            // clearer.
                            if (reading.compareAndSet(true, false)) {
                                throw new ReadBufferOverflowException(sslEngine.
                                        getSession().getApplicationBufferSize());
                            } else {
                                future.fail(new IllegalStateException(sm.getString(
                                        "asyncChannelWrapperSecure.wrongStateRead")));
                            }
                        } else {
                            // Status.CLOSED - unexpected
                            throw new IllegalStateException(sm.getString(
                                    "asyncChannelWrapperSecure.statusUnwrap"));
                        }

                        // Run any delegated tasks the engine needs completed.
                        if (r.getHandshakeStatus() == HandshakeStatus.NEED_TASK) {
                            Runnable runnable = sslEngine.getDelegatedTask();
                            while (runnable != null) {
                                runnable.run();
                                runnable = sslEngine.getDelegatedTask();
                            }
                        }
                    } else {
                        // Nothing buffered at all - must read from the socket.
                        forceRead = true;
                    }
                }

                if (reading.compareAndSet(true, false)) {
                    future.complete(Integer.valueOf(read));
                } else {
                    future.fail(new IllegalStateException(sm.getString(
                            "asyncChannelWrapperSecure.wrongStateRead")));
                }
            } catch (Exception e) {
                // Fix: clear the guard so a later read is not blocked forever.
                // (Harmless no-op when the BUFFER_OVERFLOW path cleared it.)
                reading.set(false);
                future.fail(e);
            }
        }
    }

    /**
     * Drives the TLS handshake state machine (wrap/unwrap/delegated tasks)
     * using blocking socket I/O and completes/fails the supplied future.
     */
    private class WebSocketSslHandshakeThread extends Thread {

        private final WrapperFuture<Void,Void> hFuture;

        private HandshakeStatus handshakeStatus;
        private Status resultStatus;

        public WebSocketSslHandshakeThread(WrapperFuture<Void,Void> hFuture) {
            this.hFuture = hFuture;
        }

        @Override
        public void run() {
            try {
                sslEngine.beginHandshake();
                // Mark the read buffer fully consumed so the first compact()
                // leaves it empty and ready for a socket read.
                socketReadBuffer.position(socketReadBuffer.limit());

                handshakeStatus = sslEngine.getHandshakeStatus();
                resultStatus = Status.OK;

                boolean handshaking = true;

                while (handshaking) {
                    switch (handshakeStatus) {
                        case NEED_WRAP: {
                            socketWriteBuffer.clear();
                            SSLEngineResult r =
                                    sslEngine.wrap(DUMMY, socketWriteBuffer);
                            checkResult(r, true);
                            socketWriteBuffer.flip();
                            Future<Integer> fWrite =
                                    socketChannel.write(socketWriteBuffer);
                            fWrite.get();
                            break;
                        }
                        case NEED_UNWRAP: {
                            socketReadBuffer.compact();
                            // Only hit the socket when there is nothing
                            // buffered or the engine reported it needs more.
                            if (socketReadBuffer.position() == 0 ||
                                    resultStatus == Status.BUFFER_UNDERFLOW) {
                                Future<Integer> fRead =
                                        socketChannel.read(socketReadBuffer);
                                fRead.get();
                            }
                            socketReadBuffer.flip();
                            SSLEngineResult r =
                                    sslEngine.unwrap(socketReadBuffer, DUMMY);
                            checkResult(r, false);
                            break;
                        }
                        case NEED_TASK: {
                            Runnable r = null;
                            while ((r = sslEngine.getDelegatedTask()) != null) {
                                r.run();
                            }
                            handshakeStatus = sslEngine.getHandshakeStatus();
                            break;
                        }
                        case FINISHED: {
                            handshaking = false;
                            break;
                        }
                        default: {
                            throw new SSLException("TODO");
                        }
                    }
                }
            } catch (SSLException | InterruptedException |
                    ExecutionException e) {
                hFuture.fail(e);
                // Fix: the original fell through and completed the future it
                // had just failed, triggering the handler a second time.
                return;
            }

            hFuture.complete(null);
        }

        /**
         * Caches the status of the latest engine operation and validates that
         * no application data was unexpectedly consumed (wrap) or produced
         * (unwrap) during the handshake.
         *
         * @param wrap {@code true} if the result came from a wrap() call
         */
        private void checkResult(SSLEngineResult result, boolean wrap)
                throws SSLException {

            handshakeStatus = result.getHandshakeStatus();
            resultStatus = result.getStatus();

            if (resultStatus != Status.OK &&
                    (wrap || resultStatus != Status.BUFFER_UNDERFLOW)) {
                throw new SSLException("TODO");
            }
            if (wrap && result.bytesConsumed() != 0) {
                throw new SSLException("TODO");
            }
            if (!wrap && result.bytesProduced() != 0) {
                throw new SSLException("TODO");
            }
        }
    }

    /**
     * A {@link Future} that is completed explicitly by the worker tasks and
     * may additionally notify a {@link CompletionHandler}.
     */
    private static class WrapperFuture<T,A> implements Future<T> {

        private final CompletionHandler<T,A> handler;
        private final A attachment;

        private volatile T result = null;
        private volatile Throwable throwable = null;
        private final CountDownLatch completionLatch = new CountDownLatch(1);

        public WrapperFuture() {
            this(null, null);
        }

        public WrapperFuture(CompletionHandler<T,A> handler, A attachment) {
            this.handler = handler;
            this.attachment = attachment;
        }

        public void complete(T result) {
            this.result = result;
            completionLatch.countDown();
            if (handler != null) {
                handler.completed(result, attachment);
            }
        }

        public void fail(Throwable t) {
            throwable = t;
            completionLatch.countDown();
            if (handler != null) {
                handler.failed(throwable, attachment);
            }
        }

        @Override
        public final boolean cancel(boolean mayInterruptIfRunning) {
            // Could support cancellation by closing the connection
            return false;
        }

        @Override
        public final boolean isCancelled() {
            // Could support cancellation by closing the connection
            return false;
        }

        @Override
        public final boolean isDone() {
            // Fix: done once the latch has been released (count == 0). The
            // original returned the inverse, reporting done while pending.
            return completionLatch.getCount() == 0;
        }

        @Override
        public T get() throws InterruptedException, ExecutionException {
            completionLatch.await();
            if (throwable != null) {
                throw new ExecutionException(throwable);
            }
            return result;
        }

        @Override
        public T get(long timeout, TimeUnit unit)
                throws InterruptedException, ExecutionException,
                TimeoutException {
            boolean latchResult = completionLatch.await(timeout, unit);
            if (latchResult == false) {
                throw new TimeoutException();
            }
            if (throwable != null) {
                throw new ExecutionException(throwable);
            }
            return result;
        }
    }

    /**
     * Adapts a {@code Future<Long>} to {@code Future<Integer>}, failing if the
     * value does not fit in an int.
     */
    private static final class LongToIntegerFuture implements Future<Integer> {

        private final Future<Long> wrapped;

        public LongToIntegerFuture(Future<Long> wrapped) {
            this.wrapped = wrapped;
        }

        @Override
        public boolean cancel(boolean mayInterruptIfRunning) {
            return wrapped.cancel(mayInterruptIfRunning);
        }

        @Override
        public boolean isCancelled() {
            return wrapped.isCancelled();
        }

        @Override
        public boolean isDone() {
            return wrapped.isDone();
        }

        @Override
        public Integer get() throws InterruptedException, ExecutionException {
            Long result = wrapped.get();
            if (result.longValue() > Integer.MAX_VALUE) {
                throw new ExecutionException(sm.getString(
                        "asyncChannelWrapperSecure.tooBig", result), null);
            }
            // Integer.valueOf() rather than the deprecated Integer constructor.
            return Integer.valueOf(result.intValue());
        }

        @Override
        public Integer get(long timeout, TimeUnit unit)
                throws InterruptedException, ExecutionException,
                TimeoutException {
            Long result = wrapped.get(timeout, unit);
            if (result.longValue() > Integer.MAX_VALUE) {
                throw new ExecutionException(sm.getString(
                        "asyncChannelWrapperSecure.tooBig", result), null);
            }
            // Integer.valueOf() rather than the deprecated Integer constructor.
            return Integer.valueOf(result.intValue());
        }
    }
}
| |
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package google.registry.testing;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.truth.Fact.fact;
import static com.google.common.truth.Fact.simpleFact;
import static com.google.common.truth.OptionalSubject.optionals;
import static google.registry.model.EppResourceUtils.isActive;
import static google.registry.testing.DatabaseHelper.getHistoryEntriesOfType;
import static google.registry.testing.HistoryEntrySubject.historyEntries;
import static google.registry.util.DiffUtils.prettyPrintEntityDeepDiff;
import com.google.common.collect.ImmutableSet;
import com.google.common.truth.FailureMetadata;
import com.google.common.truth.Subject;
import google.registry.model.EppResource;
import google.registry.model.ImmutableObject;
import google.registry.model.eppcommon.StatusValue;
import google.registry.model.reporting.HistoryEntry;
import google.registry.testing.TruthChainer.And;
import google.registry.testing.TruthChainer.Which;
import java.util.List;
import java.util.Optional;
import javax.annotation.Nullable;
import org.joda.time.DateTime;
/**
 * Base Truth subject for asserting things about epp resources.
 *
 * <p>{@code T} is the resource type under test; {@code S} is the self type of the concrete
 * subject, enabling fluent chaining via {@link TruthChainer.And}.
 */
abstract class AbstractEppResourceSubject<
    T extends EppResource, S extends AbstractEppResourceSubject<T, S>>
    extends Subject {

  // The resource under test; never null (enforced in the constructor).
  private final T actual;

  public AbstractEppResourceSubject(FailureMetadata failureMetadata, T subject) {
    super(failureMetadata, checkNotNull(subject));
    this.actual = subject;
  }

  /** Loads all history entries for the resource under test from the database. */
  private List<? extends HistoryEntry> getHistoryEntries() {
    return DatabaseHelper.getHistoryEntries(actual);
  }

  /** Returns a chainer wrapping this subject so assertions can be fluently chained. */
  @SuppressWarnings("unchecked")
  protected And<S> andChainer() {
    // The cast is safe because S is declared as the self type of this subject.
    return new And<>((S) this);
  }

  /** Describes the resource in failure messages by class name and foreign key. */
  @Override
  protected String actualCustomStringRepresentation() {
    return String.format(
        "%s with foreign key '%s'", actual.getClass().getSimpleName(), actual.getForeignKey());
  }

  @Override
  public void isEqualTo(@Nullable Object other) {
    // If the objects differ and we can show an interesting ImmutableObject diff, do so.
    if (actual != null && other instanceof ImmutableObject && !actual.equals(other)) {
      String diffText =
          prettyPrintEntityDeepDiff(
              ((ImmutableObject) other).toDiffableFieldMap(), actual.toDiffableFieldMap());
      failWithoutActual(fact("expected", other), fact("but was", actual), fact("diff", diffText));
    }
    // Otherwise, fall back to regular behavior.
    // NOTE(review): this assumes failWithoutActual() aborts the assertion, so the
    // super call only runs when no diff was reported - confirm against the Truth
    // version in use.
    super.isEqualTo(other);
  }

  /** Asserts the resource's repo ID equals the given value. */
  public And<S> hasRepoId(long roid) {
    return hasValue(roid, actual.getRepoId(), "getRepoId()");
  }

  /** Asserts the resource has no history entries at all. */
  public And<S> hasNoHistoryEntries() {
    if (!getHistoryEntries().isEmpty()) {
      failWithActual(simpleFact("expected to have no history entries"));
    }
    return andChainer();
  }

  /** Asserts the resource has exactly {@code num} history entries. */
  public And<S> hasNumHistoryEntries(int num) {
    check("getHistoryEntries()").that(getHistoryEntries()).hasSize(num);
    return andChainer();
  }

  /** Asserts the resource has exactly {@code num} history entries of the given type. */
  public And<S> hasNumHistoryEntriesOfType(HistoryEntry.Type type, int num) {
    List<HistoryEntry> entries = getHistoryEntriesOfType(actual, type);
    check("getHistoryEntriesOfType(%s)", type).that(entries).hasSize(num);
    return andChainer();
  }

  /**
   * Asserts the resource has exactly one history entry per given type and no others
   * (the total count must equal {@code types.length}).
   */
  public And<S> hasOneHistoryEntryEachOfTypes(HistoryEntry.Type ... types) {
    hasNumHistoryEntries(types.length);
    for (HistoryEntry.Type type : types) {
      hasNumHistoryEntriesOfType(type, 1);
    }
    return andChainer();
  }

  /** Asserts the resource has exactly one history entry. */
  public And<S> hasOnlyOneHistoryEntry() {
    return hasNumHistoryEntries(1);
  }

  /** Asserts there is exactly one history entry and returns a subject for it. */
  public HistoryEntrySubject hasOnlyOneHistoryEntryWhich() {
    hasOnlyOneHistoryEntry();
    return check("onlyHistoryEntry()").about(historyEntries()).that(getHistoryEntries().get(0));
  }

  // Temporarily suppressing style warning for Truth 0.45 upgrade
  // TODO(weiminyu): Remove after next Truth update
  /** Asserts at least {@code index + 1} history entries exist and returns a subject for entry {@code index}. */
  @SuppressWarnings("UnnecessaryParentheses")
  public Which<HistoryEntrySubject> hasHistoryEntryAtIndex(int index) {
    List<? extends HistoryEntry> historyEntries = getHistoryEntries();
    check("getHistoryEntries().size()").that(historyEntries.size()).isAtLeast(index + 1);
    return new Which<>(
        check("getHistoryEntries(%s)", index)
            .about(historyEntries())
            .that((getHistoryEntries().get(index))));
  }

  /** Asserts the resource carries the given status value. */
  public And<S> hasStatusValue(StatusValue statusValue) {
    check("getStatusValues()").that(actual.getStatusValues()).contains(statusValue);
    return andChainer();
  }

  /** Asserts the resource does not carry the given status value. */
  public And<S> doesNotHaveStatusValue(StatusValue statusValue) {
    check("getStatusValues()").that(actual.getStatusValues()).doesNotContain(statusValue);
    return andChainer();
  }

  /** Asserts the resource's status values are exactly the given set (order-insensitive). */
  public And<S> hasExactlyStatusValues(StatusValue... statusValues) {
    // Only run the (expensive, message-producing) containsExactly check when the
    // sets actually differ.
    if (!ImmutableSet.copyOf(actual.getStatusValues()).equals(ImmutableSet.copyOf(statusValues))) {
      check("getStatusValues()")
          .that(actual.getStatusValues())
          .containsExactly((Object[]) statusValues);
    }
    return andChainer();
  }

  /** Asserts the resource's deletion time equals the given value. */
  public And<S> hasDeletionTime(DateTime deletionTime) {
    return hasValue(deletionTime, actual.getDeletionTime(), "getDeletionTime()");
  }

  /** Asserts the resource's last EPP update time equals the given value. */
  public And<S> hasLastEppUpdateTime(DateTime lastUpdateTime) {
    return hasValue(lastUpdateTime, actual.getLastEppUpdateTime(), "has lastEppUpdateTime");
  }

  /** Asserts the resource's last EPP update time is at or after the given value. */
  public And<S> hasLastEppUpdateTimeAtLeast(DateTime before) {
    DateTime lastEppUpdateTime = actual.getLastEppUpdateTime();
    check("getLastEppUpdateTime()").that(lastEppUpdateTime).isAtLeast(before);
    return andChainer();
  }

  /** Asserts the registrar that made the last EPP update equals the given ID. */
  public And<S> hasLastEppUpdateClientId(String registrarId) {
    return hasValue(
        registrarId, actual.getLastEppUpdateRegistrarId(), "getLastEppUpdateRegistrarId()");
  }

  /** Asserts the persisted current sponsor registrar equals the given ID. */
  public And<S> hasPersistedCurrentSponsorRegistrarId(String registrarId) {
    return hasValue(
        registrarId,
        actual.getPersistedCurrentSponsorRegistrarId(),
        "getPersistedCurrentSponsorRegistrarId()");
  }

  /** Asserts the resource is active at the given time. */
  public And<S> isActiveAt(DateTime time) {
    if (!isActive(actual, time)) {
      failWithActual("expected to be active at", time);
    }
    return andChainer();
  }

  /** Asserts the resource is not active at the given time. */
  public And<S> isNotActiveAt(DateTime time) {
    if (isActive(actual, time)) {
      failWithActual("expected not to be active at", time);
    }
    return andChainer();
  }

  /** Shared helper: asserts {@code actual} equals {@code expected}, labelled {@code name}. */
  protected <E> And<S> hasValue(@Nullable E expected, @Nullable E actual, String name) {
    check(name).that(actual).isEqualTo(expected);
    return andChainer();
  }

  /** Shared helper: asserts the optional holds exactly {@code expected}. */
  protected <E> And<S> hasValue(E expected, Optional<E> actual, String name) {
    check(name).about(optionals()).that(actual).hasValue(expected);
    return andChainer();
  }

  /** Shared helper: asserts the optional is empty. */
  protected <E> And<S> hasNoValue(Optional<E> actual, String name) {
    check(name).about(optionals()).that(actual).isEmpty();
    return andChainer();
  }

  /** Shared helper: asserts {@code actual} differs from {@code badValue}. */
  protected <E> And<S> doesNotHaveValue(E badValue, E actual, String name) {
    check(name).that(actual).isNotEqualTo(badValue);
    return andChainer();
  }
}
| |
/**
* Copyright 2016 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.ListIterator;
import java.util.Queue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import org.redisson.api.RFuture;
import org.redisson.api.RLock;
import io.netty.util.concurrent.Future;
import io.netty.util.internal.ThreadLocalRandom;
/**
 * Groups multiple independent locks and manages them as one lock.
 *
 * @author Nikita Koksharov
 *
 */
public class RedissonMultiLock implements Lock {

    // The locks managed as a unit, acquired in list order.
    final List<RLock> locks = new ArrayList<RLock>();

    /**
     * Creates instance with multiple {@link RLock} objects.
     * Each RLock object could be created by own Redisson instance.
     *
     * @param locks - array of locks
     */
    public RedissonMultiLock(RLock... locks) {
        if (locks.length == 0) {
            throw new IllegalArgumentException("Lock objects are not defined");
        }
        this.locks.addAll(Arrays.asList(locks));
    }

    /**
     * Acquires all locks, blocking until successful.
     *
     * NOTE(review): if interrupted, this restores the interrupt flag and
     * returns WITHOUT the locks held - callers should check the interrupt
     * status if that matters.
     */
    @Override
    public void lock() {
        try {
            lockInterruptibly();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Acquires all locks with the given lease time applied to each lock.
     * Same interrupt caveat as {@link #lock()}.
     */
    public void lock(long leaseTime, TimeUnit unit) {
        try {
            lockInterruptibly(leaseTime, unit);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }

    /** Acquires all locks interruptibly with no lease time (-1 = no explicit lease). */
    @Override
    public void lockInterruptibly() throws InterruptedException {
        lockInterruptibly(-1, null);
    }

    /**
     * Acquires all locks interruptibly, retrying forever until all are held.
     *
     * The per-attempt wait time is derived from the lease time and randomized
     * (presumably to de-synchronize competing multi-lock holders and reduce
     * livelock - TODO confirm intent):
     * - no lease: fixed 5 seconds per attempt;
     * - lease up to 2s: 2s; up to 5s: random in [lease/2, lease);
     *   above 5s: random in [5s, lease).
     */
    public void lockInterruptibly(long leaseTime, TimeUnit unit) throws InterruptedException {
        long waitTime = -1;
        if (leaseTime == -1) {
            waitTime = 5;
            unit = TimeUnit.SECONDS;
        } else {
            // Compute the randomized wait in milliseconds, then convert back to
            // the caller's unit so it matches leaseTime's unit in tryLock().
            waitTime = unit.toMillis(leaseTime);
            if (waitTime <= 2000) {
                waitTime = 2000;
            } else if (waitTime <= 5000) {
                waitTime = ThreadLocalRandom.current().nextLong(waitTime/2, waitTime);
            } else {
                waitTime = ThreadLocalRandom.current().nextLong(5000, waitTime);
            }
            // NOTE(review): unit.convert() truncates for units coarser than ms.
            waitTime = unit.convert(waitTime, TimeUnit.MILLISECONDS);
        }
        // Retry until every lock is acquired in a single tryLock pass.
        while (true) {
            if (tryLock(waitTime, leaseTime, unit)) {
                return;
            }
        }
    }

    /** Single non-blocking attempt to acquire every lock. */
    @Override
    public boolean tryLock() {
        try {
            return tryLock(-1, -1, null);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return false;
        }
    }

    /**
     * Asynchronously unlocks the given locks and waits (uninterruptibly) for
     * all the unlock operations to finish. Used to roll back partial acquisition.
     */
    protected void unlockInner(Collection<RLock> locks) {
        List<RFuture<Void>> futures = new ArrayList<RFuture<Void>>(locks.size());
        for (RLock lock : locks) {
            futures.add(lock.unlockAsync());
        }

        for (RFuture<Void> unlockFuture : futures) {
            unlockFuture.awaitUninterruptibly();
        }
    }

    @Override
    public boolean tryLock(long waitTime, TimeUnit unit) throws InterruptedException {
        return tryLock(waitTime, -1, unit);
    }

    /**
     * How many individual lock failures are tolerated per pass. Zero here;
     * subclasses (e.g. a RedLock-style quorum) may allow more.
     */
    protected int failedLocksLimit() {
        return 0;
    }

    /**
     * Tries to acquire every lock within the given wait time, applying the
     * lease time to each acquired lock.
     *
     * @param waitTime  total budget for the whole pass, in {@code unit} (-1 = no limit)
     * @param leaseTime per-lock lease, in {@code unit} (-1 = none)
     * @return true if all locks (minus the tolerated failures) were acquired
     */
    public boolean tryLock(long waitTime, long leaseTime, TimeUnit unit) throws InterruptedException {
        long newLeaseTime = -1;
        if (leaseTime != -1) {
            // Presumably doubled so each lock's interim lease outlives the rest
            // of the acquisition pass; the real lease is re-applied via
            // expireAsync() below once all locks are held. TODO confirm intent.
            newLeaseTime = waitTime*2;
        }

        long time = System.currentTimeMillis();
        // Remaining budget for the pass, tracked in milliseconds.
        long remainTime = -1;
        if (waitTime != -1) {
            remainTime = unit.toMillis(waitTime);
        }
        int failedLocksLimit = failedLocksLimit();
        List<RLock> lockedLocks = new ArrayList<RLock>(locks.size());
        for (ListIterator<RLock> iterator = locks.listIterator(); iterator.hasNext();) {
            RLock lock = iterator.next();
            boolean lockAcquired;
            try {
                if (waitTime == -1 && leaseTime == -1) {
                    lockAcquired = lock.tryLock();
                } else {
                    // NOTE(review): converting the remaining ms back to `unit`
                    // truncates for coarse units.
                    long awaitTime = unit.convert(remainTime, TimeUnit.MILLISECONDS);
                    lockAcquired = lock.tryLock(awaitTime, newLeaseTime, unit);
                }
            } catch (Exception e) {
                // Treat any per-lock error as a failed acquisition.
                lockAcquired = false;
            }

            if (lockAcquired) {
                lockedLocks.add(lock);
            } else {
                // Stop early once the remaining locks could not possibly bring
                // the failure count back within the limit.
                if (locks.size() - lockedLocks.size() == failedLocksLimit()) {
                    break;
                }

                if (failedLocksLimit == 0) {
                    // No failures tolerated: release everything acquired so far.
                    unlockInner(lockedLocks);
                    if (waitTime == -1 && leaseTime == -1) {
                        return false;
                    }
                    // Otherwise restart the pass from the first lock.
                    failedLocksLimit = failedLocksLimit();
                    lockedLocks.clear();
                    // reset iterator
                    while (iterator.hasPrevious()) {
                        iterator.previous();
                    }
                } else {
                    failedLocksLimit--;
                }
            }

            if (remainTime != -1) {
                remainTime -= (System.currentTimeMillis() - time);
                time = System.currentTimeMillis();
                if (remainTime <= 0) {
                    // Budget exhausted: roll back and report failure.
                    unlockInner(lockedLocks);
                    return false;
                }
            }
        }

        if (leaseTime != -1) {
            // All locks held: apply the caller's real lease to each of them.
            List<RFuture<Boolean>> futures = new ArrayList<RFuture<Boolean>>(lockedLocks.size());
            for (RLock rLock : lockedLocks) {
                RFuture<Boolean> future = rLock.expireAsync(unit.toMillis(leaseTime), TimeUnit.MILLISECONDS);
                futures.add(future);
            }

            for (RFuture<Boolean> rFuture : futures) {
                rFuture.syncUninterruptibly();
            }
        }

        return true;
    }

    /** Asynchronously unlocks all managed locks and waits for completion. */
    @Override
    public void unlock() {
        List<RFuture<Void>> futures = new ArrayList<RFuture<Void>>(locks.size());

        for (RLock lock : locks) {
            futures.add(lock.unlockAsync());
        }

        for (RFuture<Void> future : futures) {
            future.syncUninterruptibly();
        }
    }

    /** Conditions are not supported for multi-locks. */
    @Override
    public Condition newCondition() {
        throw new UnsupportedOperationException();
    }

    /** Hook for subclasses: whether an async lock attempt should count as failed. */
    protected boolean isLockFailed(Future<Boolean> future) {
        return !future.isSuccess();
    }

    /** Hook for subclasses: whether the overall acquisition succeeded. */
    protected boolean isAllLocksAcquired(AtomicReference<RLock> lockedLockHolder, AtomicReference<Throwable> failed, Queue<RLock> lockedLocks) {
        return lockedLockHolder.get() == null && failed.get() == null;
    }
}
| |
/*
* Copyright (c) 2016 Titan Robotics Club (http://www.titanrobotics.com)
* Based on sample code by Robert Atkinson.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package ftclib;
import android.graphics.Bitmap;
import com.vuforia.HINT;
import com.vuforia.Image;
import com.vuforia.PIXEL_FORMAT;
import com.vuforia.Vuforia;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import hallib.HalVideoSource;
/**
* This class makes using Vuforia a little easier by minimizing the number of calls to it. It only exposes the
* minimum things you need to set for the FTC competition. If you want to do more complex stuff, you may want
* to not use this and call Vuforia directly so you can customize other stuff.
*/
public class FtcVuforia implements HalVideoSource<Mat>
{
/**
 * This class contains information required to make a trackable target. It has two constructors. One with all the
 * rotation/translation info for tracking the robot location on the field. If you don't need to track the robot's
 * location, then you can use the constructor with only the target name.
 */
public static class Target
{
    // Name identifying the trackable target.
    public final String name;
    // Rotation of the target, one component per axis.
    // NOTE(review): units are not established here - presumably degrees, as is
    // conventional for FTC field coordinates; confirm against the consumer.
    public final float rotateX;
    public final float rotateY;
    public final float rotateZ;
    // Translation of the target, one component per axis (units as above).
    public final float translateX;
    public final float translateY;
    public final float translateZ;

    /**
     * Constructor: creates a target with full rotation/translation info for robot localization.
     *
     * @param name specifies the target name.
     * @param rotateX/rotateY/rotateZ specify the target's rotation components.
     * @param translateX/translateY/translateZ specify the target's translation components.
     */
    public Target(
        final String name, final float rotateX, final float rotateY, final float rotateZ,
        final float translateX, final float translateY, final float translateZ)
    {
        this.name = name;
        this.rotateX = rotateX;
        this.rotateY = rotateY;
        this.rotateZ = rotateZ;
        this.translateX = translateX;
        this.translateY = translateY;
        this.translateZ = translateZ;
    }   //Target

    /**
     * Constructor: creates a target identified only by name, with all pose
     * components zeroed (for when robot localization is not needed).
     *
     * @param name specifies the target name.
     */
    public Target(final String name)
    {
        this(name, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f);
    }   //Target
}   //class Target
// Vuforia localizer created in the constructor from the supplied parameters.
public VuforiaLocalizer localizer;
// Camera (front/back) selected at construction time.
private VuforiaLocalizer.CameraDirection cameraDir;
// Trackables loaded from the XML asset, or null if no asset file was given.
private VuforiaTrackables targetList = null;
// NOTE(review): presumably cache the dimensions of the most recent camera
// frame; they are initialized to 0 here and set elsewhere - confirm usage.
private int imageWidth = 0;
private int imageHeight = 0;
/**
* Constructor: Create an instance of this object. It initializes Vuforia with the specified target images and
* other parameters.
*
* @param licenseKey specifies the Vuforia license key.
* @param cameraViewId specifies the camera view ID on the activity, -1 if none given.
* @param cameraDir specifies which camera to use (front or back).
* @param trackablesFile specifies the XML file that contains the target info, can be null.
* @param numTargets specifies the number of simultaneous trackable targets.
* @param cameraMonitorFeedback specifies the feedback image showing the orientation of the target.
*/
public FtcVuforia(
String licenseKey, int cameraViewId, VuforiaLocalizer.CameraDirection cameraDir,
String trackablesFile, int numTargets,
VuforiaLocalizer.Parameters.CameraMonitorFeedback cameraMonitorFeedback)
{
this.cameraDir = cameraDir;
//
// If no camera view ID, do not activate camera monitor view to save power.
//
VuforiaLocalizer.Parameters params =
cameraViewId == -1? new VuforiaLocalizer.Parameters(): new VuforiaLocalizer.Parameters(cameraViewId);
params.vuforiaLicenseKey = licenseKey;
params.cameraDirection = cameraDir;
params.cameraMonitorFeedback = cameraMonitorFeedback;
localizer = ClassFactory.createVuforiaLocalizer(params);
Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, numTargets);
if (trackablesFile != null)
{
targetList = localizer.loadTrackablesFromAsset(trackablesFile);
}
} //FtcVuforia
/**
* Constructor: Create an instance of this object. It initializes Vuforia with the specified target images and
* other parameters.
*
* @param licenseKey specifies the Vuforia license key.
* @param cameraViewId specifies the camera view ID on the activity.
* @param cameraDir specifies which camera to use (front or back).
* @param trackablesFile specifies the XML file that contains the target info.
* @param numTargets specifies the number of simultaneous trackable targets.
*/
public FtcVuforia(
String licenseKey, int cameraViewId, VuforiaLocalizer.CameraDirection cameraDir,
String trackablesFile, int numTargets)
{
this(licenseKey, cameraViewId, cameraDir, trackablesFile, numTargets,
VuforiaLocalizer.Parameters.CameraMonitorFeedback.AXES);
} //FtcVuforia
/**
* Constructor: Create an instance of this object. It initializes Vuforia with the specified target images and
* other parameters.
*
* @param licenseKey specifies the Vuforia license key.
* @param cameraViewId specifies the camera view ID on the activity.
* @param cameraDir specifies which camera to use (front or back).
*/
public FtcVuforia(String licenseKey, int cameraViewId, VuforiaLocalizer.CameraDirection cameraDir)
{
this(licenseKey, cameraViewId, cameraDir, null, 0, VuforiaLocalizer.Parameters.CameraMonitorFeedback.AXES);
} //FtcVuforia
/**
* This method enables/disables target tracking.
*
* @param enabled specifies true to enable target tracking, false otherwise.
*/
public void setTrackingEnabled(boolean enabled)
{
if (targetList != null)
{
if (enabled)
{
targetList.activate();
} else
{
targetList.deactivate();
}
}
} //setTrackingEnabled
/**
* This method sets the properties of the specified target.
*
* @param index specifies the target index in the XML file.
* @param name specifies the target name.
* @param locationOnField specifies the target location on the field, can be null if no robot tracking.
* @param phoneLocationOnRobot specifies the phone location on the robot, can be null if no robot tracking.
*/
public void setTargetInfo(int index, String name, OpenGLMatrix locationOnField, OpenGLMatrix phoneLocationOnRobot)
{
if (targetList != null)
{
VuforiaTrackable target = targetList.get(index);
target.setName(name);
if (locationOnField != null)
{
target.setLocation(locationOnField);
}
if (phoneLocationOnRobot != null)
{
((VuforiaTrackableDefaultListener) target.getListener()).setPhoneInformation(
phoneLocationOnRobot, cameraDir);
}
}
} //setTargetInfo
/**
* This method sets the properties of the specified target.
*
* @param index specifies the target index in the XML file.
* @param name specifies the target name.
*/
public void setTargetInfo(int index, String name)
{
setTargetInfo(index, name, null, null);
} //setTargetInfo
/**
* This method sets tracking info for the targets described in the given target array.
*
* @param targets specifies the array of targets to set tracking info.
* @param phoneLocationOnRobot specifies the location marix of the phone on the robot.
*/
public void setTargets(Target[] targets, OpenGLMatrix phoneLocationOnRobot)
{
for (int i = 0; i < targets.length; i++)
{
OpenGLMatrix targetLocationOnField =
phoneLocationOnRobot == null?
null:
locationMatrix(
targets[i].rotateX, targets[i].rotateY, targets[i].rotateZ,
targets[i].translateX, targets[i].translateY, targets[i].translateZ);
setTargetInfo(i, targets[i].name, targetLocationOnField, phoneLocationOnRobot);
}
} //setTargets
/**
* This method creates a location matrix that can be used to relocate an object to its final location by rotating
* and translating the object from the origin of the field. It is doing the operation in the order of the
* parameters. In other words, it will first rotate the object on the X-axis, then rotate on the Y-axis, then
* rotate on the Z-axis, then translate on the X-axis, then translate on the Y-axis and finally translate on the
* Z-axis.
*
* @param rotateX specifies rotation on the X-axis.
* @param rotateY specifies rotation on the Y-axis.
* @param rotateZ specifies rotation on the Z-axis.
* @param translateX specifies translation on the X-axis.
* @param translateY specifies translation on the Y-axis.
* @param translateZ specifies translation on the Z-axis.
* @return returns the location matrix.
*/
public OpenGLMatrix locationMatrix(
float rotateX, float rotateY, float rotateZ, float translateX, float translateY, float translateZ)
{
return OpenGLMatrix.translation(translateX, translateY, translateZ)
.multiplied(Orientation.getRotationMatrix(
AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES, rotateX, rotateY, rotateZ));
} //locationMatrix
/**
* This method returns the list of trackable targets.
*
* @return list of trackable targets.
*/
public VuforiaTrackables getTargetList()
{
return targetList;
} //getTargetList
/**
* This method returns the target object with the specified index in the target list.
*
* @param index specifies the target index in the list.
* @return target.
*/
public VuforiaTrackable getTarget(int index)
{
return targetList != null? targetList.get(index): null;
} //getTarget
/**
* This method returns the target object with the specified target name.
*
* @param name specifies the name of the target.
* @return target.
*/
public VuforiaTrackable getTarget(String name)
{
VuforiaTrackable target = null;
if (targetList != null)
{
for (int i = 0; i < targetList.size(); i++)
{
target = targetList.get(i);
if (name.equals(target.getName()))
{
break;
} else
{
target = null;
}
}
}
return target;
} //getTarget
/**
* This method determines if the target is visible.
*
* @param target specifies the target object.
* @return true if the target is in view, false otherwise.
*/
public boolean isTargetVisible(VuforiaTrackable target)
{
VuforiaTrackableDefaultListener listener = (VuforiaTrackableDefaultListener)target.getListener();
return listener.isVisible();
} //isTargetVisible
/**
* This method returns the position matrix of the specified target.
*
* @param target specifies the target to get the position matrix.
* @return position matrix of the specified target.
*/
public OpenGLMatrix getTargetPose(VuforiaTrackable target)
{
VuforiaTrackableDefaultListener listener = (VuforiaTrackableDefaultListener)target.getListener();
return listener.getPose();
} //getTargetPose
/**
* This method determines the robot location by the given target.
*
* @param target specifies the target to be used to determine robot location.
* @return robot location matrix.
*/
public OpenGLMatrix getRobotLocation(VuforiaTrackable target)
{
VuforiaTrackableDefaultListener listener = (VuforiaTrackableDefaultListener)target.getListener();
return listener.getRobotLocation();
} //getRobotLocation
/**
* This method configures Vuforia to capture video frames of the given format.
*
* @param imageWidth specifies the image width to capture.
* @param imageHeight specifies the image height to capture.
* @param queueCapacity specifies the frame queue capacity.
*/
public void configVideoSource(int imageWidth, int imageHeight, int queueCapacity)
{
this.imageWidth = imageWidth;
this.imageHeight = imageHeight;
Vuforia.setFrameFormat(PIXEL_FORMAT.RGB565, true);
localizer.setFrameQueueCapacity(queueCapacity);
} //configVideoSource
//
// Implements HalVideoSource interface.
//
/**
* This method gets a frame from the frame queue and returns the image that matches the format specified by the
* configVideoSource method.
*
* @param frame specifies the frame object to hold image.
* @return true if success, false otherwise.
*/
@Override
public boolean getFrame(Mat frame)
{
boolean success = false;
try
{
VuforiaLocalizer.CloseableFrame closeableFrame = localizer.getFrameQueue().take();
for (int i = 0; i < closeableFrame.getNumImages(); i++)
{
Image image = closeableFrame.getImage(i);
if (image.getWidth() == imageWidth && image.getHeight() == imageHeight &&
image.getFormat() == PIXEL_FORMAT.RGB565)
{
Bitmap bm = Bitmap.createBitmap(image.getWidth(), image.getHeight(), Bitmap.Config.RGB_565);
bm.copyPixelsFromBuffer(image.getPixels());
Utils.bitmapToMat(bm, frame);
break;
}
}
closeableFrame.close();
success = true;
}
catch (InterruptedException e)
{
e.printStackTrace();
}
return success;
} //getFrame
/**
* This method draws the given image frame to the display surface.
*
* @param frame specifies the image frame to be displayed.
*/
@Override
public void putFrame(Mat frame)
{
// TODO: figure out how to render frame back to Vuforia.
} //putFrame
} //class FtcVuforia
| |
/*
* Copyright 2015 DECOIT GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.decoit.simu.cbor.xml.dictionary;
import co.nstant.in.cbor.model.DataItem;
import de.decoit.simu.cbor.xml.dictionary.exception.DictionaryPathException;
import de.decoit.simu.cbor.xml.dictionary.parser.DictionaryParser;
import java.io.IOException;
import java.nio.file.Path;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.BidiMap;
import org.apache.commons.collections4.bidimap.DualHashBidiMap;
import org.apache.commons.collections4.bidimap.UnmodifiableBidiMap;
/**
* Single instance of a CBOR-XML dictionary.
* May be loaded with one or more dictionary description files. An object of this class
* should be obtained using the {@link DictionaryProvider} class, which provides a singleton
* Dictionary instance.<br>
* This class provides methods to find a specific element or attribute entry inside this dictionary.
* The target entry is defined by a dictionary path. The dictionary path describes a path from namespace
* to the target element or attribute. It looks like these:<br>
* - <NAMESPACE>ELEMENTNAME<br>
* - <NAMESPACE>ELEMENTNAME@ATTRIBUTENAME<br>
* - <NAMESPACE>ELEMENTNAME_1+ELEMENTNAME_2<br>
* - <NAMESPACE>ELEMENTNAME_1+ELEMENTNAME_2@ATTRIBUTENAME<br>
* A path defines a single NAMESPACE to start the search at. The namespace is followed by one or more ELEMENTNAMEs separated by plus signs.
* Such a path will look for the dictionary entry of the last ELEMENTNAME. If an attribute of ELEMENTNAME is the desired target entry instead of the
* element itself, the path may be suffixed by a single @ATTRIBUTENAME where ATTRIBUTENAME if the XML name of the target attribute.
*
* @author Thomas Rix (rix@decoit.de)
*/
@ToString
@Slf4j
public class Dictionary {
	private static final Pattern FULL_PATH_PATTERN = Pattern.compile("^\\<([^\\<\\>]+)\\>([a-zA-Z_][\\w-.]*(?:\\+[a-zA-Z_][\\w-.]*)*)(?:@([a-zA-Z_:][-a-zA-Z0-9_:.]*))?$");

	// Bidirectional mapping: XML namespace URI <-> namespace dictionary entry.
	private final BidiMap<String, DictionaryNamespace> namespaces;

	/**
	 * Create a new empty dictionary.
	 * Constructor is package private because a dictionary should be provided by
	 * the {@link DictionaryProvider}.
	 */
	Dictionary() {
		this.namespaces = new DualHashBidiMap<>();

		if(log.isTraceEnabled()) {
			log.trace("Dictionary constructed:");
			log.trace(this.toString());
		}
	}

	/**
	 * Load a dictionary from an input file, keeping all existing mappings.
	 * Mappings already present are only changed if the input file overrides them.
	 *
	 * @param inFile File to read the dictionary from
	 * @throws IOException if the file cannot be read
	 */
	public void extendDictionary(Path inFile) throws IOException {
		new DictionaryParser(this).parseDictionary(inFile);
	}

	/**
	 * Load a dictionary from an input file, discarding all existing mappings first.
	 *
	 * @param inFile File to read the dictionary from
	 * @throws IOException if the file cannot be read
	 */
	public void replaceDictionary(Path inFile) throws IOException {
		this.clear();
		this.extendDictionary(inFile);
	}

	/**
	 * Lookup the namespace entry for the provided XML namespace URI.
	 * The CBOR data item representation may be retrieved by calling getCborName()
	 * on the returned object.
	 *
	 * @param xmlName XML name string
	 * @return Dictionary entry for the specified namespace, null if unknown
	 */
	public DictionaryNamespace lookupNamespace(String xmlName) {
		return this.namespaces.get(xmlName);
	}

	/**
	 * Lookup the namespace entry for the specified CBOR data item representation.
	 * The XML namespace URI may be retrieved by calling getXmlName() on the returned
	 * object.
	 *
	 * @param cborName CBOR data item
	 * @return Dictionary entry for the specified namespace, null if unknown
	 */
	public DictionaryNamespace reverseLookupNamespace(DataItem cborName) {
		String xmlKey = this.namespaces.getKey(new DictionaryNamespace(cborName));
		return this.namespaces.get(xmlKey);
	}

	/**
	 * Add a new namespace to this dictionary.
	 * Collisions on either side of the bidirectional mapping are logged as warnings.
	 *
	 * @param dns Namespace object, must not be null
	 */
	public void addNamespace(DictionaryNamespace dns) {
		if(dns == null) {
			throw new IllegalArgumentException("Dictionary namespace must not be null");
		}

		// Check for a CBOR-side collision BEFORE inserting; the put() below would mask it.
		String collidingXmlName = this.namespaces.getKey(dns);
		DictionaryNamespace replaced = this.namespaces.put(dns.getXmlName(), dns);

		if(collidingXmlName != null) {
			log.warn("Two namespaces with same CBOR mapping: old:" + collidingXmlName + ", new:" + dns.getXmlName());
		}
		if(replaced != null) {
			log.warn("Previous namespace mapping overridden: " + dns.getXmlName() + ", Namespace:" + replaced.toString());
		}
	}

	/**
	 * Remove the namespace identified by the specified XML name from this dictionary.
	 *
	 * @param xmlName XML name string
	 */
	public void removeNamespace(String xmlName) {
		this.namespaces.remove(xmlName);
	}

	/**
	 * Remove all entries from this dictionary.
	 */
	public void clear() {
		this.namespaces.clear();
	}

	/**
	 * Evaluate a dictionary path and return the dictionary entry for the target element.
	 * If the target element or any element on the path (including namespace) does not exist in the dictionary,
	 * this method will return null.
	 *
	 * @param path Dictionary path to evaluate
	 * @return Dictionary element of target element or null
	 * @throws DictionaryPathException if the provided path cannot be evaluated
	 */
	public DictionarySimpleElement findElementByPath(final String path) throws DictionaryPathException {
		if(path == null) {
			throw new DictionaryPathException("Null reference for dictionary path");
		}

		Matcher pathMatcher = Dictionary.FULL_PATH_PATTERN.matcher(path);
		if(!pathMatcher.matches()) {
			throw new DictionaryPathException("Cannot evaluate dictionary path: " + path);
		}

		// Resolve the namespace first; an unknown namespace short-circuits to null.
		DictionaryNamespace nsEntry = this.lookupNamespace(pathMatcher.group(1));
		if(nsEntry == null) {
			return null;
		}

		// Walk the '+'-separated element chain, descending through complex elements.
		String[] elementNames = pathMatcher.group(2).split("\\+");
		DictionarySimpleElement current = nsEntry.lookupElement(elementNames[0]);
		for(int idx = 1; idx < elementNames.length; idx++) {
			// Only complex elements can contain nested elements; anything else ends the walk.
			if(!(current instanceof DictionaryComplexElement)) {
				return null;
			}
			current = ((DictionaryComplexElement) current).lookupNestedElement(elementNames[idx]);
		}

		// The last resolved entry is the target element, or null if it does not exist.
		return current;
	}

	/**
	 * Evaluate a dictionary path and return the dictionary entry for the target attribute.
	 * If the target attribute or any element on the path (including namespace) does not exist in the dictionary,
	 * this method will return null.
	 *
	 * @param path Dictionary path to evaluate
	 * @return Dictionary element of target attribute or null
	 * @throws DictionaryPathException if the provided path cannot be evaluated
	 */
	public DictionarySimpleAttribute findAttributeByPath(final String path) throws DictionaryPathException {
		DictionarySimpleElement targetElement = this.findElementByPath(path);
		if(targetElement == null) {
			return null;
		}

		// Re-run the (already validated) path pattern to extract the attribute group.
		Matcher pathMatcher = Dictionary.FULL_PATH_PATTERN.matcher(path);
		if(!pathMatcher.matches()) {
			return null;
		}

		String attributeName = pathMatcher.group(3);
		if(attributeName == null) {
			throw new DictionaryPathException("Path specifies no target attribute: " + path);
		}

		return targetElement.lookupAttribute(attributeName);
	}

	/**
	 * Return an immutable view of the namespaces map for testing purposes.
	 *
	 * @return Immutable map view
	 */
	BidiMap<String, DictionaryNamespace> getUnmodifiableNamespaces() {
		return UnmodifiableBidiMap.unmodifiableBidiMap(this.namespaces);
	}
}
| |
/*
* Copyright (c) 2008-2015 Citrix Systems, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.citrix.netscaler.nitro.resource.config.vpn;
import com.citrix.netscaler.nitro.resource.base.*;
import com.citrix.netscaler.nitro.service.nitro_service;
import com.citrix.netscaler.nitro.service.options;
import com.citrix.netscaler.nitro.util.*;
import com.citrix.netscaler.nitro.exception.nitro_exception;
/**
 * Deserialization wrapper for NITRO GET responses: holds the array of bindings
 * extracted from the JSON payload by the payload formatter.
 */
class vpnvserver_auditnslogpolicy_binding_response extends base_response
{
	// Populated by the payload formatter from the JSON field of the same name.
	public vpnvserver_auditnslogpolicy_binding[] vpnvserver_auditnslogpolicy_binding;
}
/**
* Binding class showing the auditnslogpolicy that can be bound to vpnvserver.
*/
public class vpnvserver_auditnslogpolicy_binding extends base_resource
{
	private String policy;                      // name of the bound auditnslog policy
	private Long priority;                      // binding priority, if any
	private Long acttype;                       // action type (read-only, set by NetScaler)
	private String name;                        // name of the vpn vserver (object identifier)
	private Boolean secondary;                  // bind as secondary policy in two-factor auth
	private Boolean groupextraction;            // bind to tertiary chain used only for group extraction
	private String gotopriorityexpression;      // next-policy expression (NEXT/END/USE_INVOCATION_RESULT/number)
	private String bindpoint;                   // bind point (REQUEST/RESPONSE/ICA_REQUEST/OTHERTCP_REQUEST)
	private Long __count;                       // used by count() requests
/**
* <pre>
* The priority, if any, of the vpn vserver policy.
* </pre>
*/
public void set_priority(long priority) throws Exception {
this.priority = new Long(priority);
}
	/**
	 * <pre>
	 * The priority, if any, of the vpn vserver policy.
	 * </pre>
	 *
	 * @param priority boxed priority value to assign, may be null
	 * @throws Exception declared by the NITRO API surface; not thrown here
	 */
	public void set_priority(Long priority) throws Exception{
		this.priority = priority;
	}
	/**
	 * <pre>
	 * The priority, if any, of the vpn vserver policy.
	 * </pre>
	 *
	 * @return the binding priority, null if not set
	 * @throws Exception declared by the NITRO API surface; not thrown here
	 */
	public Long get_priority() throws Exception {
		return this.priority;
	}
	/**
	 * <pre>
	 * Expression or other value specifying the next policy to evaluate if the current policy evaluates to TRUE. Specify one of the following values:
	 * NEXT - Evaluate the policy with the next higher priority number.
	 * END - End policy evaluation.
	 * USE_INVOCATION_RESULT - Applicable if this policy invokes another policy label. If the final goto in the invoked policy label has a value of END, the evaluation stops. If the final goto is anything other than END, the current policy label performs a NEXT.
	 * A default syntax or classic expression that evaluates to a number.
	If you specify an expression, the number to which it evaluates determines the next policy to evaluate, as follows:
	* If the expression evaluates to a higher numbered priority, the policy with that priority is evaluated next.
	* If the expression evaluates to the priority of the current policy, the policy with the next higher numbered priority is evaluated next.
	* If the expression evaluates to a number that is larger than the largest numbered priority, policy evaluation ends.
	An UNDEF event is triggered if:
	* The expression is invalid.
	* The expression evaluates to a priority number that is numerically lower than the current policy's priority.
	* The expression evaluates to a priority number that is between the current policy's priority number (say, 30) and the highest priority number (say, 100), but does not match any configured priority number (for example, the expression evaluates to the number 85). This example assumes that the priority number increments by 10 for every successive policy, and therefore a priority number of 85 does not exist in the policy label.
	* </pre>
	*
	* @param gotopriorityexpression goto expression string as described above
	* @throws Exception declared by the NITRO API surface; not thrown here
	*/
	public void set_gotopriorityexpression(String gotopriorityexpression) throws Exception{
		this.gotopriorityexpression = gotopriorityexpression;
	}
	/**
	 * <pre>
	 * Expression or other value specifying the next policy to evaluate if the current policy evaluates to TRUE. Specify one of the following values:
	 * NEXT - Evaluate the policy with the next higher priority number.
	 * END - End policy evaluation.
	 * USE_INVOCATION_RESULT - Applicable if this policy invokes another policy label. If the final goto in the invoked policy label has a value of END, the evaluation stops. If the final goto is anything other than END, the current policy label performs a NEXT.
	 * A default syntax or classic expression that evaluates to a number.
	If you specify an expression, the number to which it evaluates determines the next policy to evaluate, as follows:
	* If the expression evaluates to a higher numbered priority, the policy with that priority is evaluated next.
	* If the expression evaluates to the priority of the current policy, the policy with the next higher numbered priority is evaluated next.
	* If the expression evaluates to a number that is larger than the largest numbered priority, policy evaluation ends.
	An UNDEF event is triggered if:
	* The expression is invalid.
	* The expression evaluates to a priority number that is numerically lower than the current policy's priority.
	* The expression evaluates to a priority number that is between the current policy's priority number (say, 30) and the highest priority number (say, 100), but does not match any configured priority number (for example, the expression evaluates to the number 85). This example assumes that the priority number increments by 10 for every successive policy, and therefore a priority number of 85 does not exist in the policy label.
	* </pre>
	*
	* @return the goto expression string, null if not set
	* @throws Exception declared by the NITRO API surface; not thrown here
	*/
	public String get_gotopriorityexpression() throws Exception {
		return this.gotopriorityexpression;
	}
	/**
	 * <pre>
	 * The name of the policy, if any, bound to the vpn vserver.
	 * </pre>
	 *
	 * @param policy policy name to bind
	 * @throws Exception declared by the NITRO API surface; not thrown here
	 */
	public void set_policy(String policy) throws Exception{
		this.policy = policy;
	}
	/**
	 * <pre>
	 * The name of the policy, if any, bound to the vpn vserver.
	 * </pre>
	 *
	 * @return the bound policy name, null if not set
	 * @throws Exception declared by the NITRO API surface; not thrown here
	 */
	public String get_policy() throws Exception {
		return this.policy;
	}
/**
* <pre>
* Bind the Authentication policy to a tertiary chain which will be used only for group extraction. The user will not authenticate against this server, and this will only be called if primary and/or secondary authentication has succeeded.
* </pre>
*/
public void set_groupextraction(boolean groupextraction) throws Exception {
this.groupextraction = new Boolean(groupextraction);
}
	/**
	 * <pre>
	 * Bind the Authentication policy to a tertiary chain which will be used only for group extraction. The user will not authenticate against this server, and this will only be called if primary and/or secondary authentication has succeeded.
	 * </pre>
	 *
	 * @param groupextraction boxed flag, may be null
	 * @throws Exception declared by the NITRO API surface; not thrown here
	 */
	public void set_groupextraction(Boolean groupextraction) throws Exception{
		this.groupextraction = groupextraction;
	}
	/**
	 * <pre>
	 * Bind the Authentication policy to a tertiary chain which will be used only for group extraction. The user will not authenticate against this server, and this will only be called if primary and/or secondary authentication has succeeded.
	 * </pre>
	 *
	 * @return the group-extraction flag, null if not set
	 * @throws Exception declared by the NITRO API surface; not thrown here
	 */
	public Boolean get_groupextraction() throws Exception {
		return this.groupextraction;
	}
	/**
	 * <pre>
	 * Name of the virtual server.<br> Minimum length = 1
	 * </pre>
	 *
	 * @param name virtual server name (this is the object identifier)
	 * @throws Exception declared by the NITRO API surface; not thrown here
	 */
	public void set_name(String name) throws Exception{
		this.name = name;
	}
	/**
	 * <pre>
	 * Name of the virtual server.<br> Minimum length = 1
	 * </pre>
	 *
	 * @return the virtual server name, null if not set
	 * @throws Exception declared by the NITRO API surface; not thrown here
	 */
	public String get_name() throws Exception {
		return this.name;
	}
/**
* <pre>
* Bind the authentication policy as the secondary policy to use in a two-factor configuration. A user must then authenticate not only via a primary authentication method but also via a secondary authentication method. User groups are aggregated across both. The user name must be exactly the same for both authentication methods, but they can require different passwords.
* </pre>
*/
public void set_secondary(boolean secondary) throws Exception {
this.secondary = new Boolean(secondary);
}
	/**
	 * <pre>
	 * Bind the authentication policy as the secondary policy to use in a two-factor configuration. A user must then authenticate not only via a primary authentication method but also via a secondary authentication method. User groups are aggregated across both. The user name must be exactly the same for both authentication methods, but they can require different passwords.
	 * </pre>
	 *
	 * @param secondary boxed flag, may be null
	 * @throws Exception declared by the NITRO API surface; not thrown here
	 */
	public void set_secondary(Boolean secondary) throws Exception{
		this.secondary = secondary;
	}
	/**
	 * <pre>
	 * Bind the authentication policy as the secondary policy to use in a two-factor configuration. A user must then authenticate not only via a primary authentication method but also via a secondary authentication method. User groups are aggregated across both. The user name must be exactly the same for both authentication methods, but they can require different passwords.
	 * </pre>
	 *
	 * @return the secondary-policy flag, null if not set
	 * @throws Exception declared by the NITRO API surface; not thrown here
	 */
	public Boolean get_secondary() throws Exception {
		return this.secondary;
	}
	/**
	 * <pre>
	 * Bind point to which to bind the policy. Applies only to rewrite and cache policies. If you do not set this parameter, the policy is bound to REQ_DEFAULT or RES_DEFAULT, depending on whether the policy rule is a response-time or a request-time expression.<br> Possible values = REQUEST, RESPONSE, ICA_REQUEST, OTHERTCP_REQUEST
	 * </pre>
	 *
	 * @param bindpoint one of REQUEST, RESPONSE, ICA_REQUEST, OTHERTCP_REQUEST
	 * @throws Exception declared by the NITRO API surface; not thrown here
	 */
	public void set_bindpoint(String bindpoint) throws Exception{
		this.bindpoint = bindpoint;
	}
	/**
	 * <pre>
	 * Bind point to which to bind the policy. Applies only to rewrite and cache policies. If you do not set this parameter, the policy is bound to REQ_DEFAULT or RES_DEFAULT, depending on whether the policy rule is a response-time or a request-time expression.<br> Possible values = REQUEST, RESPONSE, ICA_REQUEST, OTHERTCP_REQUEST
	 * </pre>
	 *
	 * @return the bind point, null if not set
	 * @throws Exception declared by the NITRO API surface; not thrown here
	 */
	public String get_bindpoint() throws Exception {
		return this.bindpoint;
	}
	/**
	 * <pre>
	 * .
	 * </pre>
	 * Read-only action type; there is deliberately no setter (value is supplied by NetScaler).
	 *
	 * @return the action type, null if not set
	 * @throws Exception declared by the NITRO API surface; not thrown here
	 */
	public Long get_acttype() throws Exception {
		return this.acttype;
	}
	/**
	 * <pre>
	 * converts nitro response into object and returns the object array in case of get request.
	 * </pre>
	 *
	 * @param service NITRO service used for session handling and payload formatting
	 * @param response raw response payload from NetScaler
	 * @return array of binding objects parsed from the response
	 * @throws nitro_exception if the response reports a fatal error
	 */
	protected base_resource[] get_nitro_response(nitro_service service, String response) throws Exception{
		// Deserialize the raw payload into the typed response wrapper.
		vpnvserver_auditnslogpolicy_binding_response result = (vpnvserver_auditnslogpolicy_binding_response) service.get_payload_formatter().string_to_resource(vpnvserver_auditnslogpolicy_binding_response.class, response);
		if(result.errorcode != 0) {
			// Error code 444: clear the cached session (presumably expired/invalid — NITRO convention).
			if (result.errorcode == 444) {
				service.clear_session();
			}
			if(result.severity != null)
			{
				// When severity is reported, only ERROR is treated as fatal; other severities fall through.
				if (result.severity.equals("ERROR"))
					throw new nitro_exception(result.message,result.errorcode);
			}
			else
			{
				// No severity reported: any non-zero error code is fatal.
				throw new nitro_exception(result.message,result.errorcode);
			}
		}
		return result.vpnvserver_auditnslogpolicy_binding;
	}
	/**
	 * <pre>
	 * Returns the value of object identifier argument
	 * </pre>
	 * For this binding the identifier is the vserver name.
	 *
	 * @return the vserver name used as the object identifier
	 */
	protected String get_object_name() {
		return this.name;
	}
public static base_response add(nitro_service client, vpnvserver_auditnslogpolicy_binding resource) throws Exception {
vpnvserver_auditnslogpolicy_binding updateresource = new vpnvserver_auditnslogpolicy_binding();
updateresource.name = resource.name;
updateresource.policy = resource.policy;
updateresource.priority = resource.priority;
updateresource.secondary = resource.secondary;
updateresource.groupextraction = resource.groupextraction;
updateresource.gotopriorityexpression = resource.gotopriorityexpression;
updateresource.bindpoint = resource.bindpoint;
return updateresource.update_resource(client);
}
	/**
	 * Use this API to add (bind) multiple auditnslogpolicy bindings to vpnvservers in one bulk request.
	 *
	 * @param client NITRO service client used to send the request
	 * @param resources bindings to add; null or empty array results in a null return
	 * @return the bulk NITRO responses, or null if there was nothing to add
	 * @throws Exception if the request fails
	 */
	public static base_responses add(nitro_service client, vpnvserver_auditnslogpolicy_binding resources[]) throws Exception {
		base_responses result = null;
		if (resources != null && resources.length > 0) {
			// Copy only the bindable attributes onto fresh resources for the bulk request.
			vpnvserver_auditnslogpolicy_binding updateresources[] = new vpnvserver_auditnslogpolicy_binding[resources.length];
			for (int i=0;i<resources.length;i++){
				updateresources[i] = new vpnvserver_auditnslogpolicy_binding();
				updateresources[i].name = resources[i].name;
				updateresources[i].policy = resources[i].policy;
				updateresources[i].priority = resources[i].priority;
				updateresources[i].secondary = resources[i].secondary;
				updateresources[i].groupextraction = resources[i].groupextraction;
				updateresources[i].gotopriorityexpression = resources[i].gotopriorityexpression;
				updateresources[i].bindpoint = resources[i].bindpoint;
			}
			result = update_bulk_request(client, updateresources);
		}
		return result;
	}
public static base_response delete(nitro_service client, vpnvserver_auditnslogpolicy_binding resource) throws Exception {
vpnvserver_auditnslogpolicy_binding deleteresource = new vpnvserver_auditnslogpolicy_binding();
deleteresource.name = resource.name;
deleteresource.policy = resource.policy;
deleteresource.secondary = resource.secondary;
deleteresource.groupextraction = resource.groupextraction;
deleteresource.bindpoint = resource.bindpoint;
return deleteresource.delete_resource(client);
}
	/**
	 * Use this API to delete (unbind) multiple auditnslogpolicy bindings in one bulk request.
	 *
	 * @param client NITRO service client used to send the request
	 * @param resources bindings to delete; null or empty array results in a null return
	 * @return the bulk NITRO responses, or null if there was nothing to delete
	 * @throws Exception if the request fails
	 */
	public static base_responses delete(nitro_service client, vpnvserver_auditnslogpolicy_binding resources[]) throws Exception {
		base_responses result = null;
		if (resources != null && resources.length > 0) {
			// Copy only the key attributes needed to identify each binding to remove.
			vpnvserver_auditnslogpolicy_binding deleteresources[] = new vpnvserver_auditnslogpolicy_binding[resources.length];
			for (int i=0;i<resources.length;i++){
				deleteresources[i] = new vpnvserver_auditnslogpolicy_binding();
				deleteresources[i].name = resources[i].name;
				deleteresources[i].policy = resources[i].policy;
				deleteresources[i].secondary = resources[i].secondary;
				deleteresources[i].groupextraction = resources[i].groupextraction;
				deleteresources[i].bindpoint = resources[i].bindpoint;
			}
			result = delete_bulk_request(client, deleteresources);
		}
		return result;
	}
/**
* Use this API to fetch vpnvserver_auditnslogpolicy_binding resources of given name .
*/
public static vpnvserver_auditnslogpolicy_binding[] get(nitro_service service, String name) throws Exception{
    // Fetch every auditnslogpolicy binding configured under the given vserver name.
    vpnvserver_auditnslogpolicy_binding fetcher = new vpnvserver_auditnslogpolicy_binding();
    fetcher.set_name(name);
    return (vpnvserver_auditnslogpolicy_binding[]) fetcher.get_resources(service);
}
/**
* Use this API to fetch filtered set of vpnvserver_auditnslogpolicy_binding resources.
* filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
*/
public static vpnvserver_auditnslogpolicy_binding[] get_filtered(nitro_service service, String name, String filter) throws Exception{
    vpnvserver_auditnslogpolicy_binding fetcher = new vpnvserver_auditnslogpolicy_binding();
    fetcher.set_name(name);
    // Apply the caller-supplied JSON filter expression to the fetch.
    options opts = new options();
    opts.set_filter(filter);
    return (vpnvserver_auditnslogpolicy_binding[]) fetcher.getfiltered(service, opts);
}
/**
* Use this API to fetch filtered set of vpnvserver_auditnslogpolicy_binding resources.
* set the filter parameter values in filtervalue object.
*/
public static vpnvserver_auditnslogpolicy_binding[] get_filtered(nitro_service service, String name, filtervalue[] filter) throws Exception{
    vpnvserver_auditnslogpolicy_binding fetcher = new vpnvserver_auditnslogpolicy_binding();
    fetcher.set_name(name);
    // Apply the structured filtervalue criteria to the fetch.
    options opts = new options();
    opts.set_filter(filter);
    return (vpnvserver_auditnslogpolicy_binding[]) fetcher.getfiltered(service, opts);
}
/**
* Use this API to count vpnvserver_auditnslogpolicy_binding resources configured on NetScaler.
*/
public static long count(nitro_service service, String name) throws Exception{
    vpnvserver_auditnslogpolicy_binding obj = new vpnvserver_auditnslogpolicy_binding();
    obj.set_name(name);
    options option = new options();
    option.set_count(true);
    vpnvserver_auditnslogpolicy_binding response[] = (vpnvserver_auditnslogpolicy_binding[]) obj.get_resources(service, option);
    // Guard against an empty array as well as null: the previous null-only check
    // would throw ArrayIndexOutOfBoundsException on an empty result set.
    if (response != null && response.length > 0) {
        return response[0].__count;
    }
    return 0;
}
/**
* Use this API to count the filtered set of vpnvserver_auditnslogpolicy_binding resources.
* filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
*/
public static long count_filtered(nitro_service service, String name, String filter) throws Exception{
    vpnvserver_auditnslogpolicy_binding obj = new vpnvserver_auditnslogpolicy_binding();
    obj.set_name(name);
    options option = new options();
    option.set_count(true);
    option.set_filter(filter);
    vpnvserver_auditnslogpolicy_binding[] response = (vpnvserver_auditnslogpolicy_binding[]) obj.getfiltered(service, option);
    // Guard against an empty array as well as null: the previous null-only check
    // would throw ArrayIndexOutOfBoundsException on an empty result set.
    if (response != null && response.length > 0) {
        return response[0].__count;
    }
    return 0;
}
/**
* Use this API to count the filtered set of vpnvserver_auditnslogpolicy_binding resources.
* set the filter parameter values in filtervalue object.
*/
public static long count_filtered(nitro_service service, String name, filtervalue[] filter) throws Exception{
    vpnvserver_auditnslogpolicy_binding obj = new vpnvserver_auditnslogpolicy_binding();
    obj.set_name(name);
    options option = new options();
    option.set_count(true);
    option.set_filter(filter);
    vpnvserver_auditnslogpolicy_binding[] response = (vpnvserver_auditnslogpolicy_binding[]) obj.getfiltered(service, option);
    // Guard against an empty array as well as null: the previous null-only check
    // would throw ArrayIndexOutOfBoundsException on an empty result set.
    if (response != null && response.length > 0) {
        return response[0].__count;
    }
    return 0;
}
/**
 * Valid values for the {@code bindpoint} attribute of this binding,
 * identifying at which point the policy is bound.
 */
public static class bindpointEnum {
    public static final String REQUEST = "REQUEST";
    public static final String RESPONSE = "RESPONSE";
    public static final String ICA_REQUEST = "ICA_REQUEST";
    public static final String OTHERTCP_REQUEST = "OTHERTCP_REQUEST";
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.connectors.kinesis.internals;
import org.apache.flink.annotation.Internal;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.operators.StreamingRuntimeContext;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.connectors.kinesis.KinesisShardAssigner;
import org.apache.flink.streaming.connectors.kinesis.config.ConsumerConfigConstants;
import org.apache.flink.streaming.connectors.kinesis.metrics.KinesisConsumerMetricConstants;
import org.apache.flink.streaming.connectors.kinesis.metrics.ShardMetricsReporter;
import org.apache.flink.streaming.connectors.kinesis.model.KinesisStreamShardState;
import org.apache.flink.streaming.connectors.kinesis.model.SentinelSequenceNumber;
import org.apache.flink.streaming.connectors.kinesis.model.SequenceNumber;
import org.apache.flink.streaming.connectors.kinesis.model.StreamShardHandle;
import org.apache.flink.streaming.connectors.kinesis.model.StreamShardMetadata;
import org.apache.flink.streaming.connectors.kinesis.proxy.GetShardListResult;
import org.apache.flink.streaming.connectors.kinesis.proxy.KinesisProxy;
import org.apache.flink.streaming.connectors.kinesis.proxy.KinesisProxyInterface;
import org.apache.flink.streaming.connectors.kinesis.serialization.KinesisDeserializationSchema;
import org.apache.flink.streaming.connectors.kinesis.util.RecordEmitter;
import org.apache.flink.streaming.connectors.kinesis.util.WatermarkTracker;
import org.apache.flink.streaming.runtime.operators.windowing.TimestampedValue;
import org.apache.flink.streaming.runtime.tasks.ProcessingTimeCallback;
import org.apache.flink.streaming.runtime.tasks.ProcessingTimeService;
import org.apache.flink.util.InstantiationUtil;
import org.apache.flink.util.Preconditions;
import com.amazonaws.services.kinesis.model.HashKeyRange;
import com.amazonaws.services.kinesis.model.SequenceNumberRange;
import com.amazonaws.services.kinesis.model.Shard;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
* A KinesisDataFetcher is responsible for fetching data from multiple Kinesis shards. Each parallel subtask instantiates
* and runs a single fetcher throughout the subtask's lifetime. The fetcher accomplishes the following:
* <ul>
* <li>1. continuously poll Kinesis to discover shards that the subtask should subscribe to. The subscribed subset
* of shards, including future new shards, is non-overlapping across subtasks (no two subtasks will be
* subscribed to the same shard) and determinate across subtask restores (the subtask will always subscribe
* to the same subset of shards even after restoring)</li>
* <li>2. decide where in each discovered shard should the fetcher start subscribing to</li>
* <li>3. subscribe to shards by creating a single thread for each shard</li>
* </ul>
*
* <p>The fetcher manages two states: 1) last seen shard ids of each subscribed stream (used for continuous shard discovery),
* and 2) last processed sequence numbers of each subscribed shard. Since operations on the second state will be performed
* by multiple threads, these operations should only be done using the handler methods provided in this class.
*/
@Internal
public class KinesisDataFetcher<T> {
public static final KinesisShardAssigner DEFAULT_SHARD_ASSIGNER = (shard, subtasks) -> shard.hashCode();
private static final Logger LOG = LoggerFactory.getLogger(KinesisDataFetcher.class);
// ------------------------------------------------------------------------
// Consumer-wide settings
// ------------------------------------------------------------------------
/** Configuration properties for the Flink Kinesis Consumer. */
private final Properties configProps;
/** The list of Kinesis streams that the consumer is subscribing to. */
private final List<String> streams;
/**
* The deserialization schema we will be using to convert Kinesis records to Flink objects.
* Note that since this might not be thread-safe, {@link ShardConsumer}s using this must
* clone a copy using {@link KinesisDataFetcher#getClonedDeserializationSchema()}.
*/
private final KinesisDeserializationSchema<T> deserializationSchema;
/**
* The function that determines which subtask a shard should be assigned to.
*/
private final KinesisShardAssigner shardAssigner;
// ------------------------------------------------------------------------
// Consumer metrics
// ------------------------------------------------------------------------
/** The metric group that all metrics should be registered to. */
private final MetricGroup consumerMetricGroup;
// ------------------------------------------------------------------------
// Subtask-specific settings
// ------------------------------------------------------------------------
/** Runtime context of the subtask that this fetcher was created in. */
private final RuntimeContext runtimeContext;
private final int totalNumberOfConsumerSubtasks;
private final int indexOfThisConsumerSubtask;
// ------------------------------------------------------------------------
// Executor services to run created threads
// ------------------------------------------------------------------------
/** Executor service to run {@link ShardConsumer}s to consume Kinesis shards. */
private final ExecutorService shardConsumersExecutor;
// ------------------------------------------------------------------------
// Managed state, accessed and updated across multiple threads
// ------------------------------------------------------------------------
/** The last discovered shard ids of each subscribed stream, updated as the fetcher discovers new shards in the subscribed streams.
* Note: this state will be updated if new shards are found when {@link KinesisDataFetcher#discoverNewShardsToSubscribe()} is called.
*/
private final Map<String, String> subscribedStreamsToLastDiscoveredShardIds;
/**
* The shards, along with their last processed sequence numbers, that this fetcher is subscribed to. The fetcher
* will add new subscribed shard states to this list as it discovers new shards. {@link ShardConsumer} threads update
* the last processed sequence number of subscribed shards as they fetch and process records.
*
* <p>Note that since multiple {@link ShardConsumer} threads will be performing operations on this list, all operations
* must be wrapped in synchronized blocks on the {@link KinesisDataFetcher#checkpointLock} lock. For this purpose,
* all threads must use the following thread-safe methods this class provides to operate on this list:
* <ul>
* <li>{@link KinesisDataFetcher#registerNewSubscribedShardState(KinesisStreamShardState)}</li>
* <li>{@link KinesisDataFetcher#updateState(int, SequenceNumber)}</li>
* <li>{@link KinesisDataFetcher#emitRecordAndUpdateState(T, long, int, SequenceNumber)}</li>
* </ul>
*/
private final List<KinesisStreamShardState> subscribedShardsState;
private final SourceFunction.SourceContext<T> sourceContext;
/** Checkpoint lock, also used to synchronize operations on subscribedShardsState. */
private final Object checkpointLock;
/** Reference to the first error thrown by any of the {@link ShardConsumer} threads. */
private final AtomicReference<Throwable> error;
/** The Kinesis proxy factory that will be used to create instances for discovery and shard consumers. */
private final FlinkKinesisProxyFactory kinesisProxyFactory;
/** The Kinesis proxy that the fetcher will be using to discover new shards. */
private final KinesisProxyInterface kinesis;
/** Thread that executed runFetcher(). */
private volatile Thread mainThread;
/**
* The current number of shards that are actively read by this fetcher.
*
* <p>This value is updated in {@link KinesisDataFetcher#registerNewSubscribedShardState(KinesisStreamShardState)},
* and {@link KinesisDataFetcher#updateState(int, SequenceNumber)}.
*/
private final AtomicInteger numberOfActiveShards = new AtomicInteger(0);
private volatile boolean running = true;
private final AssignerWithPeriodicWatermarks<T> periodicWatermarkAssigner;
private final WatermarkTracker watermarkTracker;
private final transient RecordEmitter recordEmitter;
private transient boolean isIdle;
/**
* The watermark related state for each shard consumer. Entries in this map will be created when shards
* are discovered. After recovery, this shard map will be recreated, possibly with different shard index keys,
* since those are transient and not part of checkpointed state.
*/
private ConcurrentHashMap<Integer, ShardWatermarkState> shardWatermarks = new ConcurrentHashMap<>();
/**
* The most recent watermark, calculated from the per shard watermarks. The initial value will never be emitted and
* also apply after recovery. The first watermark that will be emitted is derived from actually consumed records.
* In case of recovery and replay, the watermark will rewind, consistent with the shard consumer sequence.
*/
private long lastWatermark = Long.MIN_VALUE;
/**
* The next watermark used for synchronization.
* For purposes of global watermark calculation, we need to consider the next watermark based
* on the buffered records vs. the last emitted watermark to allow for progress.
*/
private long nextWatermark = Long.MIN_VALUE;
/**
* The time span since last consumed record, after which a shard will be considered idle for purpose of watermark
* calculation. A positive value will allow the watermark to progress even when some shards don't receive new records.
*/
private long shardIdleIntervalMillis = ConsumerConfigConstants.DEFAULT_SHARD_IDLE_INTERVAL_MILLIS;
/**
* Factory to create Kinesis proxy instances used by a fetcher.
*/
public interface FlinkKinesisProxyFactory {
    /** Creates a {@link KinesisProxyInterface} configured from the given consumer properties. */
    KinesisProxyInterface create(Properties configProps);
}
/**
* The wrapper that holds the watermark handling related parameters
* of a record produced by the shard consumer thread.
*
* @param <T>
*/
private static class RecordWrapper<T> extends TimestampedValue<T> {
    // Index of this record's shard in subscribedShardsState, used to update state after emission.
    int shardStateIndex;
    // Sequence number to record as "last processed" for the shard once the record is emitted.
    SequenceNumber lastSequenceNumber;
    // Event timestamp; duplicates the value passed to the superclass so getTimestamp()
    // can return it directly.
    long timestamp;
    // Per-shard watermark computed when the record was produced by the shard consumer.
    Watermark watermark;

    private RecordWrapper(T record, long timestamp) {
        super(record, timestamp);
        this.timestamp = timestamp;
    }

    @Override
    public long getTimestamp() {
        return timestamp;
    }
}
/** Kinesis data fetcher specific, asynchronous record emitter. */
private class AsyncKinesisRecordEmitter extends RecordEmitter<RecordWrapper<T>> {

    private AsyncKinesisRecordEmitter() {
        this(DEFAULT_QUEUE_CAPACITY);
    }

    private AsyncKinesisRecordEmitter(int queueCapacity) {
        super(queueCapacity);
    }

    @Override
    public void emit(RecordWrapper<T> record, RecordQueue<RecordWrapper<T>> queue) {
        // Emit downstream and advance the shard's last-processed sequence number.
        emitRecordAndUpdateState(record);
        // Remember the watermark of the most recently emitted record for this queue's shard,
        // so idle/progress decisions can be made per shard.
        ShardWatermarkState<T> sws = shardWatermarks.get(queue.getQueueId());
        sws.lastEmittedRecordWatermark = record.watermark;
    }
}
/** Synchronous emitter for use w/o watermark synchronization. */
private class SyncKinesisRecordEmitter extends AsyncKinesisRecordEmitter {
    /** Lazily created pass-through queues, one per producer index. */
    private final ConcurrentHashMap<Integer, RecordQueue<RecordWrapper<T>>> queues =
        new ConcurrentHashMap<>();

    @Override
    public RecordQueue<RecordWrapper<T>> getQueue(int producerIndex) {
        return queues.computeIfAbsent(producerIndex, key -> newPassThroughQueue(producerIndex));
    }

    /** Builds a queue that emits each record synchronously instead of buffering it. */
    private RecordQueue<RecordWrapper<T>> newPassThroughQueue(int producerIndex) {
        return new RecordQueue<RecordWrapper<T>>() {
            @Override
            public void put(RecordWrapper<T> record) {
                // No buffering: hand the record straight to the emitter.
                emit(record, this);
            }

            @Override
            public int getQueueId() {
                return producerIndex;
            }

            @Override
            public int getSize() {
                // Nothing is ever queued.
                return 0;
            }

            @Override
            public RecordWrapper<T> peek() {
                return null;
            }
        };
    }
}
/**
 * Creates a Kinesis Data Fetcher.
 *
 * @param streams the streams to subscribe to
 * @param sourceContext context of the source function
 * @param runtimeContext this subtask's runtime context
 * @param configProps the consumer configuration properties
 * @param deserializationSchema deserialization schema
 * @param shardAssigner function determining which subtask a shard should be assigned to
 * @param periodicWatermarkAssigner assigner for periodic per-shard watermarks, or null when disabled
 * @param watermarkTracker tracker for global watermark synchronization, or null when disabled
 */
public KinesisDataFetcher(List<String> streams,
        SourceFunction.SourceContext<T> sourceContext,
        RuntimeContext runtimeContext,
        Properties configProps,
        KinesisDeserializationSchema<T> deserializationSchema,
        KinesisShardAssigner shardAssigner,
        AssignerWithPeriodicWatermarks<T> periodicWatermarkAssigner,
        WatermarkTracker watermarkTracker) {
    // Delegate to the testing constructor, using the source context's checkpoint lock
    // and freshly initialized (empty) managed state.
    this(streams,
        sourceContext,
        sourceContext.getCheckpointLock(),
        runtimeContext,
        configProps,
        deserializationSchema,
        shardAssigner,
        periodicWatermarkAssigner,
        watermarkTracker,
        new AtomicReference<>(),
        new ArrayList<>(),
        createInitialSubscribedStreamsToLastDiscoveredShardsState(streams),
        KinesisProxy::create);
}
/**
 * Constructor with all collaborators injected; exposed for unit testing.
 */
@VisibleForTesting
protected KinesisDataFetcher(List<String> streams,
        SourceFunction.SourceContext<T> sourceContext,
        Object checkpointLock,
        RuntimeContext runtimeContext,
        Properties configProps,
        KinesisDeserializationSchema<T> deserializationSchema,
        KinesisShardAssigner shardAssigner,
        AssignerWithPeriodicWatermarks<T> periodicWatermarkAssigner,
        WatermarkTracker watermarkTracker,
        AtomicReference<Throwable> error,
        List<KinesisStreamShardState> subscribedShardsState,
        HashMap<String, String> subscribedStreamsToLastDiscoveredShardIds,
        FlinkKinesisProxyFactory kinesisProxyFactory) {
    this.streams = checkNotNull(streams);
    this.configProps = checkNotNull(configProps);
    this.sourceContext = checkNotNull(sourceContext);
    this.checkpointLock = checkNotNull(checkpointLock);
    this.runtimeContext = checkNotNull(runtimeContext);
    this.totalNumberOfConsumerSubtasks = runtimeContext.getNumberOfParallelSubtasks();
    this.indexOfThisConsumerSubtask = runtimeContext.getIndexOfThisSubtask();
    this.deserializationSchema = checkNotNull(deserializationSchema);
    this.shardAssigner = checkNotNull(shardAssigner);
    // Watermark assigner and tracker are optional; null disables the corresponding feature.
    this.periodicWatermarkAssigner = periodicWatermarkAssigner;
    this.watermarkTracker = watermarkTracker;
    this.kinesisProxyFactory = checkNotNull(kinesisProxyFactory);
    // Proxy used by this fetcher's own shard-discovery loop (shard consumers get their own).
    this.kinesis = kinesisProxyFactory.create(configProps);
    this.consumerMetricGroup = runtimeContext.getMetricGroup()
        .addGroup(KinesisConsumerMetricConstants.KINESIS_CONSUMER_METRICS_GROUP);
    this.error = checkNotNull(error);
    this.subscribedShardsState = checkNotNull(subscribedShardsState);
    this.subscribedStreamsToLastDiscoveredShardIds = checkNotNull(subscribedStreamsToLastDiscoveredShardIds);
    this.shardConsumersExecutor =
        createShardConsumersThreadPool(runtimeContext.getTaskNameWithSubtasks());
    this.recordEmitter = createRecordEmitter(configProps);
}
private RecordEmitter createRecordEmitter(Properties configProps) {
    // Watermark synchronization requires the asynchronous (buffering) emitter;
    // otherwise records are emitted synchronously without queuing.
    boolean needsAsyncEmitter = periodicWatermarkAssigner != null && watermarkTracker != null;
    if (!needsAsyncEmitter) {
        return new SyncKinesisRecordEmitter();
    }
    String configuredCapacity = configProps.getProperty(
        ConsumerConfigConstants.WATERMARK_SYNC_QUEUE_CAPACITY,
        Integer.toString(AsyncKinesisRecordEmitter.DEFAULT_QUEUE_CAPACITY));
    return new AsyncKinesisRecordEmitter(Integer.parseInt(configuredCapacity));
}
/**
* Create a new shard consumer.
* Override this method to customize shard consumer behavior in subclasses.
* @param subscribedShardStateIndex the state index of the shard this consumer is subscribed to
* @param subscribedShard the shard this consumer is subscribed to
* @param lastSequenceNum the sequence number in the shard to start consuming
* @param shardMetricsReporter the reporter to report metrics to
* @return shard consumer
*/
protected ShardConsumer createShardConsumer(
        Integer subscribedShardStateIndex,
        StreamShardHandle subscribedShard,
        SequenceNumber lastSequenceNum,
        ShardMetricsReporter shardMetricsReporter) {
    // Each consumer receives its own proxy instance rather than sharing the
    // fetcher's discovery proxy.
    KinesisProxyInterface shardProxy = this.kinesisProxyFactory.create(configProps);
    return new ShardConsumer<>(
        this,
        subscribedShardStateIndex,
        subscribedShard,
        lastSequenceNum,
        shardProxy,
        shardMetricsReporter);
}
/**
* Starts the fetcher. After starting the fetcher, it can only
* be stopped by calling {@link KinesisDataFetcher#shutdownFetcher()}.
*
* @throws Exception the first error or exception thrown by the fetcher or any of the threads created by the fetcher.
*/
public void runFetcher() throws Exception {
    // check that we are running before proceeding
    if (!running) {
        return;
    }
    // Remembered so shutdownFetcher() can interrupt the discovery sleep below.
    this.mainThread = Thread.currentThread();

    // ------------------------------------------------------------------------
    //  Procedures before starting the infinite while loop:
    // ------------------------------------------------------------------------

    // 1. check that there is at least one shard in the subscribed streams to consume from (can be done by
    // checking if at least one value in subscribedStreamsToLastDiscoveredShardIds is not null)
    boolean hasShards = false;
    StringBuilder streamsWithNoShardsFound = new StringBuilder();
    for (Map.Entry<String, String> streamToLastDiscoveredShardEntry : subscribedStreamsToLastDiscoveredShardIds.entrySet()) {
        if (streamToLastDiscoveredShardEntry.getValue() != null) {
            hasShards = true;
        } else {
            streamsWithNoShardsFound.append(streamToLastDiscoveredShardEntry.getKey()).append(", ");
        }
    }

    if (streamsWithNoShardsFound.length() != 0 && LOG.isWarnEnabled()) {
        LOG.warn("Subtask {} has failed to find any shards for the following subscribed streams: {}",
            indexOfThisConsumerSubtask, streamsWithNoShardsFound.toString());
    }

    if (!hasShards) {
        throw new RuntimeException("No shards can be found for all subscribed streams: " + streams);
    }

    // 2. start consuming any shard state we already have in the subscribedShardState up to this point; the
    // subscribedShardState may already be seeded with values due to step 1., or explicitly added by the
    // consumer using a restored state checkpoint
    for (int seededStateIndex = 0; seededStateIndex < subscribedShardsState.size(); seededStateIndex++) {
        KinesisStreamShardState seededShardState = subscribedShardsState.get(seededStateIndex);

        // only start a consuming thread if the seeded subscribed shard has not been completely read already
        if (!seededShardState.getLastProcessedSequenceNum().equals(SentinelSequenceNumber.SENTINEL_SHARD_ENDING_SEQUENCE_NUM.get())) {

            if (LOG.isInfoEnabled()) {
                LOG.info("Subtask {} will start consuming seeded shard {} from sequence number {} with ShardConsumer {}",
                    indexOfThisConsumerSubtask, seededShardState.getStreamShardHandle().toString(),
                    seededShardState.getLastProcessedSequenceNum(), seededStateIndex);
            }

            shardConsumersExecutor.submit(
                createShardConsumer(
                    seededStateIndex,
                    subscribedShardsState.get(seededStateIndex).getStreamShardHandle(),
                    subscribedShardsState.get(seededStateIndex).getLastProcessedSequenceNum(),
                    // registerShardMetrics is defined outside this excerpt; presumably
                    // it registers per-shard gauges on the consumer metric group — confirm.
                    registerShardMetrics(consumerMetricGroup, subscribedShardsState.get(seededStateIndex))));
        }
    }

    // start periodic watermark emitter, if a watermark assigner was configured
    if (periodicWatermarkAssigner != null) {
        long periodicWatermarkIntervalMillis = runtimeContext.getExecutionConfig().getAutoWatermarkInterval();
        if (periodicWatermarkIntervalMillis > 0) {
            ProcessingTimeService timerService = ((StreamingRuntimeContext) runtimeContext).getProcessingTimeService();
            LOG.info("Starting periodic watermark emitter with interval {}", periodicWatermarkIntervalMillis);
            new PeriodicWatermarkEmitter(timerService, periodicWatermarkIntervalMillis).start();
            if (watermarkTracker != null) {
                // setup global watermark tracking
                long watermarkSyncMillis = Long.parseLong(
                    getConsumerConfiguration().getProperty(ConsumerConfigConstants.WATERMARK_SYNC_MILLIS,
                        Long.toString(ConsumerConfigConstants.DEFAULT_WATERMARK_SYNC_MILLIS)));
                watermarkTracker.setUpdateTimeoutMillis(watermarkSyncMillis * 3); // synchronization latency
                watermarkTracker.open(runtimeContext);
                new WatermarkSyncCallback(timerService, watermarkSyncMillis).start();
                // emit records ahead of watermark to offset synchronization latency
                long lookaheadMillis = Long.parseLong(
                    getConsumerConfiguration().getProperty(ConsumerConfigConstants.WATERMARK_LOOKAHEAD_MILLIS,
                        Long.toString(0)));
                recordEmitter.setMaxLookaheadMillis(Math.max(lookaheadMillis, watermarkSyncMillis * 3));
            }
        }
        this.shardIdleIntervalMillis = Long.parseLong(
            getConsumerConfiguration().getProperty(ConsumerConfigConstants.SHARD_IDLE_INTERVAL_MILLIS,
                Long.toString(ConsumerConfigConstants.DEFAULT_SHARD_IDLE_INTERVAL_MILLIS)));

        // run record emitter in separate thread since main thread is used for discovery
        Thread thread = new Thread(this.recordEmitter);
        thread.setName("recordEmitter-" + runtimeContext.getTaskNameWithSubtasks());
        thread.setDaemon(true);
        thread.start();
    }

    // ------------------------------------------------------------------------

    // finally, start the infinite shard discovery and consumer launching loop;
    // we will escape from this loop only when shutdownFetcher() or stopWithError() is called
    // TODO: have this thread emit the records for tracking backpressure

    final long discoveryIntervalMillis = Long.valueOf(
        configProps.getProperty(
            ConsumerConfigConstants.SHARD_DISCOVERY_INTERVAL_MILLIS,
            Long.toString(ConsumerConfigConstants.DEFAULT_SHARD_DISCOVERY_INTERVAL_MILLIS)));

    if (this.numberOfActiveShards.get() == 0) {
        LOG.info("Subtask {} has no active shards to read on startup; marking the subtask as temporarily idle ...",
            indexOfThisConsumerSubtask);
        sourceContext.markAsTemporarilyIdle();
    }

    while (running) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Subtask {} is trying to discover new shards that were created due to resharding ...",
                indexOfThisConsumerSubtask);
        }
        List<StreamShardHandle> newShardsDueToResharding = discoverNewShardsToSubscribe();

        for (StreamShardHandle shard : newShardsDueToResharding) {
            // since there may be delay in discovering a new shard, all new shards due to
            // resharding should be read starting from the earliest record possible
            KinesisStreamShardState newShardState =
                new KinesisStreamShardState(convertToStreamShardMetadata(shard), shard, SentinelSequenceNumber.SENTINEL_EARLIEST_SEQUENCE_NUM.get());
            int newStateIndex = registerNewSubscribedShardState(newShardState);

            if (LOG.isInfoEnabled()) {
                LOG.info("Subtask {} has discovered a new shard {} due to resharding, and will start consuming " +
                        "the shard from sequence number {} with ShardConsumer {}",
                    indexOfThisConsumerSubtask, newShardState.getStreamShardHandle().toString(),
                    newShardState.getLastProcessedSequenceNum(), newStateIndex);
            }

            shardConsumersExecutor.submit(
                createShardConsumer(
                    newStateIndex,
                    newShardState.getStreamShardHandle(),
                    newShardState.getLastProcessedSequenceNum(),
                    registerShardMetrics(consumerMetricGroup, newShardState)));
        }

        // we also check if we are running here so that we won't start the discovery sleep
        // interval if the running flag was set to false during the middle of the while loop
        if (running && discoveryIntervalMillis != 0) {
            try {
                Thread.sleep(discoveryIntervalMillis);
            } catch (InterruptedException iex) {
                // the sleep may be interrupted by shutdownFetcher()
            }
        }
    }

    // make sure all resources have been terminated before leaving
    try {
        awaitTermination();
    } catch (InterruptedException ie) {
        // If there is an original exception, preserve it, since that's more important/useful.
        this.error.compareAndSet(null, ie);
    }

    // any error thrown in the shard consumer threads will be thrown to the main thread
    Throwable throwable = this.error.get();
    if (throwable != null) {
        if (throwable instanceof Exception) {
            throw (Exception) throwable;
        } else if (throwable instanceof Error) {
            throw (Error) throwable;
        } else {
            throw new Exception(throwable);
        }
    }
}
/**
* Creates a snapshot of the current last processed sequence numbers of each subscribed shard.
*
* @return state snapshot
*/
public HashMap<StreamShardMetadata, SequenceNumber> snapshotState() {
    // The caller must hold the checkpoint lock while snapshotting.
    assert Thread.holdsLock(checkpointLock);

    final HashMap<StreamShardMetadata, SequenceNumber> snapshot = new HashMap<>();
    subscribedShardsState.forEach(shardState ->
        snapshot.put(shardState.getStreamShardMetadata(), shardState.getLastProcessedSequenceNum()));
    return snapshot;
}
/**
* Starts shutting down the fetcher. Must be called to allow {@link KinesisDataFetcher#runFetcher()} to complete.
* Once called, the shutdown procedure will be executed and all shard consuming threads will be interrupted.
*/
public void shutdownFetcher() {
    // Flip the flag first so the discovery loop exits on its next check.
    running = false;

    final Thread fetcherThread = mainThread;
    if (fetcherThread != null) {
        // Wake the main thread, which may be sleeping in the discovery interval.
        fetcherThread.interrupt();
    }

    if (watermarkTracker != null) {
        watermarkTracker.close();
    }
    this.recordEmitter.stop();

    if (LOG.isInfoEnabled()) {
        LOG.info("Shutting down the shard consumer threads of subtask {} ...", indexOfThisConsumerSubtask);
    }
    shardConsumersExecutor.shutdownNow();
}
/** After calling {@link KinesisDataFetcher#shutdownFetcher()}, this can be called to await the fetcher shutdown. */
public void awaitTermination() throws InterruptedException {
    // Block until the shard consumer pool has fully terminated, polling in
    // one-minute intervals.
    boolean terminated = false;
    while (!terminated) {
        terminated = shardConsumersExecutor.awaitTermination(1, TimeUnit.MINUTES);
    }
}
/** Called by created threads to pass on errors. Only the first thrown error is set.
* Once set, the shutdown process will be executed and all shard consuming threads will be interrupted. */
protected void stopWithError(Throwable throwable) {
    // Only the first reported error wins; later ones are ignored and only the
    // winner triggers the shutdown sequence.
    boolean firstError = this.error.compareAndSet(null, throwable);
    if (firstError) {
        shutdownFetcher();
    }
}
// ------------------------------------------------------------------------
// Functions that update the subscribedStreamToLastDiscoveredShardIds state
// ------------------------------------------------------------------------
/** Updates the last discovered shard of a subscribed stream; only updates if the update is valid. */
public void advanceLastDiscoveredShardOfStream(String stream, String shardId) {
    String previousShardId = this.subscribedStreamsToLastDiscoveredShardIds.get(stream);
    // Record the shard id when nothing was seen yet for this stream, or when it
    // advances past the previously recorded last-seen shard id.
    if (previousShardId == null || shouldAdvanceLastDiscoveredShardId(shardId, previousShardId)) {
        this.subscribedStreamsToLastDiscoveredShardIds.put(stream, shardId);
    }
}
/** Given lastSeenShardId, check if last discovered shardId should be advanced. */
protected boolean shouldAdvanceLastDiscoveredShardId(String shardId, String lastSeenShardIdOfStream) {
    // Advance only when the candidate shard id compares strictly greater.
    int comparison = StreamShardHandle.compareShardIds(shardId, lastSeenShardIdOfStream);
    return comparison > 0;
}
/**
* A utility function that does the following:
*
* <p>1. Find new shards for each stream that we haven't seen before
* 2. For each new shard, determine whether this consumer subtask should subscribe to them;
* if yes, it is added to the returned list of shards
* 3. Update the subscribedStreamsToLastDiscoveredShardIds state so that we won't get shards
* that we have already seen before the next time this function is called
*/
public List<StreamShardHandle> discoverNewShardsToSubscribe() throws InterruptedException {
    final List<StreamShardHandle> shardsForThisSubtask = new LinkedList<>();

    final GetShardListResult shardListResult = kinesis.getShardList(subscribedStreamsToLastDiscoveredShardIds);
    if (!shardListResult.hasRetrievedShards()) {
        return shardsForThisSubtask;
    }

    for (String stream : shardListResult.getStreamsWithRetrievedShards()) {
        // Keep only the shards that the assigner maps to this subtask.
        for (StreamShardHandle candidate : shardListResult.getRetrievedShardListOfStream(stream)) {
            int assignedHash = shardAssigner.assign(candidate, totalNumberOfConsumerSubtasks);
            if (isThisSubtaskShouldSubscribeTo(assignedHash, totalNumberOfConsumerSubtasks, indexOfThisConsumerSubtask)) {
                shardsForThisSubtask.add(candidate);
            }
        }
        // Remember the newest shard id so the next discovery round skips shards already seen.
        advanceLastDiscoveredShardOfStream(
            stream, shardListResult.getLastSeenShardOfStream(stream).getShard().getShardId());
    }
    return shardsForThisSubtask;
}
// ------------------------------------------------------------------------
//  Functions to get / set information about the consumer
// ------------------------------------------------------------------------

/** Returns the configuration properties this consumer was created with. */
protected Properties getConsumerConfiguration() {
    return configProps;
}
/**
 * Returns a deep copy of the user-supplied deserialization schema, cloned via serialization
 * with the user-code class loader, so each caller can use its own schema instance.
 *
 * @throws RuntimeException wrapping any (unexpected) failure to clone the schema
 */
protected KinesisDeserializationSchema<T> getClonedDeserializationSchema() {
    try {
        return InstantiationUtil.clone(deserializationSchema, runtimeContext.getUserCodeClassLoader());
    } catch (IOException | ClassNotFoundException ex) {
        // this really shouldn't happen; simply wrap it around a runtime exception
        throw new RuntimeException(ex);
    }
}
// ------------------------------------------------------------------------
//  Thread-safe operations for record emitting and shard state updating
//  that assure atomicity with respect to the checkpoint lock
// ------------------------------------------------------------------------

/**
 * Atomic operation to collect a record and update state to the sequence number of the record.
 * This method is called by {@link ShardConsumer}s.
 *
 * <p>When a periodic watermark assigner is configured, the record timestamp is replaced by
 * the assigner-extracted timestamp and the current per-record watermark is captured here
 * (extractTimestamp has a side effect, so the watermark must be read per record).
 *
 * @param record the record to collect
 * @param recordTimestamp timestamp to attach to the collected record
 * @param shardStateIndex index of the shard to update in subscribedShardsState;
 *     this index should be the returned value from
 *     {@link KinesisDataFetcher#registerNewSubscribedShardState(KinesisStreamShardState)}, called
 *     when the shard state was registered.
 * @param lastSequenceNumber the last sequence number value to update
 */
protected void emitRecordAndUpdateState(T record, long recordTimestamp, int shardStateIndex, SequenceNumber lastSequenceNumber) {
    ShardWatermarkState sws = shardWatermarks.get(shardStateIndex);
    Preconditions.checkNotNull(
        sws, "shard watermark state initialized in registerNewSubscribedShardState");
    Watermark watermark = null;
    if (sws.periodicWatermarkAssigner != null) {
        recordTimestamp =
            sws.periodicWatermarkAssigner.extractTimestamp(record, sws.lastRecordTimestamp);
        // track watermark per record since extractTimestamp has side effect
        watermark = sws.periodicWatermarkAssigner.getCurrentWatermark();
    }
    sws.lastRecordTimestamp = recordTimestamp;
    sws.lastUpdated = getCurrentTimeMillis();
    RecordWrapper<T> recordWrapper = new RecordWrapper<>(record, recordTimestamp);
    recordWrapper.shardStateIndex = shardStateIndex;
    recordWrapper.lastSequenceNumber = lastSequenceNumber;
    recordWrapper.watermark = watermark;
    try {
        sws.emitQueue.put(recordWrapper);
    } catch (InterruptedException e) {
        // FIX: restore the interrupt flag before rethrowing so callers up the stack
        // can still observe that the thread was interrupted
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    }
}
/**
 * Actual record emission called from the record emitter.
 *
 * <p>Responsible for tracking per shard watermarks and emit timestamps extracted from
 * the record, when a watermark assigner was configured.
 *
 * <p>Runs under the checkpoint lock so collecting the record and committing its sequence
 * number happen atomically with respect to checkpoints.
 *
 * @param rw the queued record wrapper carrying the (possibly null) deserialized value,
 *     its timestamp, the owning shard's state index, and the sequence number to commit
 */
private void emitRecordAndUpdateState(RecordWrapper<T> rw) {
    synchronized (checkpointLock) {
        if (rw.getValue() != null) {
            sourceContext.collectWithTimestamp(rw.getValue(), rw.timestamp);
        } else {
            // a null value could not be deserialized; the sequence number is still
            // advanced below so the record is not re-read
            LOG.warn("Skipping non-deserializable record at sequence number {} of shard {}.",
                rw.lastSequenceNumber,
                subscribedShardsState.get(rw.shardStateIndex).getStreamShardHandle());
        }
        updateState(rw.shardStateIndex, rw.lastSequenceNumber);
    }
}
/**
 * Update the shard to last processed sequence number state.
 * This method is called by {@link ShardConsumer}s.
 *
 * <p>Synchronized on the checkpoint lock. When the sequence number is the end-of-shard
 * sentinel, the shard is counted as finished; when no active shards remain, the source
 * is marked temporarily idle.
 *
 * @param shardStateIndex index of the shard to update in subscribedShardsState;
 *     this index should be the returned value from
 *     {@link KinesisDataFetcher#registerNewSubscribedShardState(KinesisStreamShardState)}, called
 *     when the shard state was registered.
 * @param lastSequenceNumber the last sequence number value to update
 */
protected final void updateState(int shardStateIndex, SequenceNumber lastSequenceNumber) {
    synchronized (checkpointLock) {
        subscribedShardsState.get(shardStateIndex).setLastProcessedSequenceNum(lastSequenceNumber);
        // if a shard's state is updated to be SENTINEL_SHARD_ENDING_SEQUENCE_NUM by its consumer thread,
        // we've finished reading the shard and should determine it to be non-active
        if (lastSequenceNumber.equals(SentinelSequenceNumber.SENTINEL_SHARD_ENDING_SEQUENCE_NUM.get())) {
            LOG.info("Subtask {} has reached the end of subscribed shard: {}",
                indexOfThisConsumerSubtask, subscribedShardsState.get(shardStateIndex).getStreamShardHandle());
            // check if we need to mark the source as idle;
            // note that on resharding, if registerNewSubscribedShardState was invoked for newly discovered shards
            // AFTER the old shards had reached the end, the subtask's status will be automatically toggled back to
            // be active immediately afterwards as soon as we collect records from the new shards
            if (this.numberOfActiveShards.decrementAndGet() == 0) {
                LOG.info("Subtask {} has reached the end of all currently subscribed shards; marking the subtask as temporarily idle ...",
                    indexOfThisConsumerSubtask);
                sourceContext.markAsTemporarilyIdle();
            }
        }
    }
}
/**
 * Register a new subscribed shard state.
 *
 * <p>Synchronized on the checkpoint lock. A shard whose restored sequence number is not the
 * end-of-shard sentinel is counted as a new active shard. A {@link ShardWatermarkState}
 * entry is also created for the shard's index, if one does not already exist.
 *
 * @param newSubscribedShardState the new shard state that this fetcher is to be subscribed to
 * @return the index of the registered state in {@code subscribedShardsState}; callers pass
 *     this index to {@code updateState} and the record-emitting methods
 */
public int registerNewSubscribedShardState(KinesisStreamShardState newSubscribedShardState) {
    synchronized (checkpointLock) {
        subscribedShardsState.add(newSubscribedShardState);
        // If a registered shard has initial state that is not SENTINEL_SHARD_ENDING_SEQUENCE_NUM (will be the case
        // if the consumer had already finished reading a shard before we failed and restored), we determine that
        // this subtask has a new active shard
        if (!newSubscribedShardState.getLastProcessedSequenceNum().equals(SentinelSequenceNumber.SENTINEL_SHARD_ENDING_SEQUENCE_NUM.get())) {
            this.numberOfActiveShards.incrementAndGet();
        }
        int shardStateIndex = subscribedShardsState.size() - 1;
        // track all discovered shards for watermark determination
        ShardWatermarkState sws = shardWatermarks.get(shardStateIndex);
        if (sws == null) {
            sws = new ShardWatermarkState();
            try {
                // clone so each shard owns its own (possibly stateful) assigner instance
                sws.periodicWatermarkAssigner = InstantiationUtil.clone(periodicWatermarkAssigner);
            } catch (Exception e) {
                throw new RuntimeException("Failed to instantiate new WatermarkAssigner", e);
            }
            sws.emitQueue = recordEmitter.getQueue(shardStateIndex);
            sws.lastUpdated = getCurrentTimeMillis();
            sws.lastRecordTimestamp = Long.MIN_VALUE;
            shardWatermarks.put(shardStateIndex, sws);
        }
        return shardStateIndex;
    }
}
/**
 * Return the current system time. Allow tests to override this to simulate progress for watermark
 * logic. Used for shard idle-time tracking as well as watermark evaluation.
 *
 * @return current processing time in epoch milliseconds
 */
@VisibleForTesting
protected long getCurrentTimeMillis() {
    return System.currentTimeMillis();
}
/**
 * Called periodically to emit a watermark. Checks all shards for the current event time
 * watermark, and possibly emits the next watermark.
 *
 * <p>Shards that have not received an update for a certain interval are considered inactive so as
 * to not hold back the watermark indefinitely. When all shards are inactive, the subtask will be
 * marked as temporarily idle to not block downstream operators.
 */
@VisibleForTesting
protected void emitWatermark() {
    LOG.debug("Evaluating watermark for subtask {} time {}", indexOfThisConsumerSubtask, getCurrentTimeMillis());
    long potentialWatermark = Long.MAX_VALUE;
    long potentialNextWatermark = Long.MAX_VALUE;
    // cutoff timestamp: shards not updated since this time count as idle
    // (idleness is disabled entirely when shardIdleIntervalMillis <= 0)
    long idleTime =
        (shardIdleIntervalMillis > 0)
            ? getCurrentTimeMillis() - shardIdleIntervalMillis
            : Long.MAX_VALUE;
    for (Map.Entry<Integer, ShardWatermarkState> e : shardWatermarks.entrySet()) {
        Watermark w = e.getValue().lastEmittedRecordWatermark;
        // consider only active shards, or those that would advance the watermark
        if (w != null && (e.getValue().lastUpdated >= idleTime
                || e.getValue().emitQueue.getSize() > 0
                || w.getTimestamp() > lastWatermark)) {
            potentialWatermark = Math.min(potentialWatermark, w.getTimestamp());
            // for sync, use the watermark of the next record, when available
            // otherwise watermark may stall when record is blocked by synchronization
            RecordEmitter.RecordQueue<RecordWrapper<T>> q = e.getValue().emitQueue;
            RecordWrapper<T> nextRecord = q.peek();
            Watermark nextWatermark = (nextRecord != null) ? nextRecord.watermark : w;
            potentialNextWatermark = Math.min(potentialNextWatermark, nextWatermark.getTimestamp());
        }
    }
    // advance watermark if possible (watermarks can only be ascending)
    if (potentialWatermark == Long.MAX_VALUE) {
        // no shard contributed a watermark candidate
        if (shardWatermarks.isEmpty() || shardIdleIntervalMillis > 0) {
            LOG.info("No active shard for subtask {}, marking the source idle.",
                indexOfThisConsumerSubtask);
            // no active shard, signal downstream operators to not wait for a watermark
            sourceContext.markAsTemporarilyIdle();
            isIdle = true;
        }
    } else {
        if (potentialWatermark > lastWatermark) {
            LOG.debug("Emitting watermark {} from subtask {}",
                potentialWatermark,
                indexOfThisConsumerSubtask);
            sourceContext.emitWatermark(new Watermark(potentialWatermark));
            lastWatermark = potentialWatermark;
            isIdle = false;
        }
        // expose the next candidate to the watermark sync callback (read there as nextWatermark)
        nextWatermark = potentialNextWatermark;
    }
}
/** Per shard tracking of watermark and last activity. */
private static class ShardWatermarkState<T> {
    // per-shard clone of the user's periodic watermark assigner; checked for null before use
    private AssignerWithPeriodicWatermarks<T> periodicWatermarkAssigner;
    // queue through which this shard's records flow to the record emitter
    private RecordEmitter.RecordQueue<RecordWrapper<T>> emitQueue;
    // timestamp extracted from the most recent record of this shard
    private volatile long lastRecordTimestamp;
    // processing time of the last record update; used to detect idle shards
    private volatile long lastUpdated;
    // watermark associated with the last emitted record, if any (set by the emit path)
    private volatile Watermark lastEmittedRecordWatermark;
}
/**
 * The periodic watermark emitter. In its given interval, it checks all shards for the current
 * event time watermark, and possibly emits the next watermark.
 */
private class PeriodicWatermarkEmitter implements ProcessingTimeCallback {

    private final ProcessingTimeService timerService;
    private final long interval;

    PeriodicWatermarkEmitter(ProcessingTimeService timerService, long autoWatermarkInterval) {
        this.timerService = checkNotNull(timerService);
        this.interval = autoWatermarkInterval;
    }

    /** Arms the first evaluation timer; later ones are re-armed from the callback. */
    public void start() {
        LOG.debug("registering periodic watermark timer with interval {}", interval);
        scheduleNextEvaluation();
    }

    @Override
    public void onProcessingTime(long timestamp) {
        emitWatermark();
        // schedule the next watermark evaluation
        scheduleNextEvaluation();
    }

    /** Registers this callback to fire one interval from the current processing time. */
    private void scheduleNextEvaluation() {
        timerService.registerTimer(timerService.getCurrentProcessingTime() + interval, this);
    }
}
/** Timer task to update shared watermark state. */
private class WatermarkSyncCallback implements ProcessingTimeCallback {

    private final ProcessingTimeService timerService;
    private final long interval;
    private final MetricGroup shardMetricsGroup;
    // most recent watermark obtained from the shared watermark tracker
    private long lastGlobalWatermark = Long.MIN_VALUE;
    // last local watermark that was pushed to the tracker
    private long propagatedLocalWatermark = Long.MIN_VALUE;
    // how often (ms) the informational log line below is emitted
    private long logIntervalMillis = 60_000;
    // counts consecutive log intervals with a non-advancing global watermark
    private int stalledWatermarkIntervalCount = 0;
    private long lastLogged;

    WatermarkSyncCallback(ProcessingTimeService timerService, long interval) {
        this.timerService = checkNotNull(timerService);
        this.interval = interval;
        this.shardMetricsGroup = consumerMetricGroup.addGroup("subtaskId",
            String.valueOf(indexOfThisConsumerSubtask));
        this.shardMetricsGroup.gauge("localWatermark", () -> nextWatermark);
        this.shardMetricsGroup.gauge("globalWatermark", () -> lastGlobalWatermark);
    }

    /** Registers the first timer; subsequent firings re-register from the callback. */
    public void start() {
        LOG.info("Registering watermark tracker with interval {}", interval);
        timerService.registerTimer(timerService.getCurrentProcessingTime() + interval, this);
    }

    @Override
    public void onProcessingTime(long timestamp) {
        // only sync once this subtask has produced a local watermark candidate
        if (nextWatermark != Long.MIN_VALUE) {
            long globalWatermark = lastGlobalWatermark;
            // TODO: refresh watermark while idle
            if (!(isIdle && nextWatermark == propagatedLocalWatermark)) {
                globalWatermark = watermarkTracker.updateWatermark(nextWatermark);
                propagatedLocalWatermark = nextWatermark;
            } else {
                LOG.info("WatermarkSyncCallback subtask: {} is idle", indexOfThisConsumerSubtask);
            }
            if (timestamp - lastLogged > logIntervalMillis) {
                lastLogged = System.currentTimeMillis();
                LOG.info("WatermarkSyncCallback subtask: {} local watermark: {}"
                        + ", global watermark: {}, delta: {} timeouts: {}, emitter: {}",
                    indexOfThisConsumerSubtask,
                    nextWatermark,
                    globalWatermark,
                    nextWatermark - globalWatermark,
                    watermarkTracker.getUpdateTimeoutCount(),
                    recordEmitter.printInfo());
                // Following is for debugging non-reproducible issue with stalled watermark
                if (globalWatermark == nextWatermark && globalWatermark == lastGlobalWatermark
                        && stalledWatermarkIntervalCount++ > 5) {
                    // subtask blocks watermark, log to aid troubleshooting
                    stalledWatermarkIntervalCount = 0;
                    for (Map.Entry<Integer, ShardWatermarkState> e : shardWatermarks.entrySet()) {
                        RecordEmitter.RecordQueue<RecordWrapper<T>> q = e.getValue().emitQueue;
                        RecordWrapper<T> nextRecord = q.peek();
                        if (nextRecord != null) {
                            LOG.info("stalled watermark {} key {} next watermark {} next timestamp {}",
                                nextWatermark,
                                e.getKey(),
                                nextRecord.watermark,
                                nextRecord.timestamp);
                        }
                    }
                }
            }
            lastGlobalWatermark = globalWatermark;
            recordEmitter.setCurrentWatermark(globalWatermark);
        }
        // schedule next callback
        timerService.registerTimer(timerService.getCurrentProcessingTime() + interval, this);
    }
}
/**
 * Registers a metric group associated with the shard id of the provided {@link KinesisStreamShardState shardState}.
 *
 * <p>Gauges are registered under a per-stream, per-shard subgroup so values from different
 * shards do not collide.
 *
 * @param metricGroup the parent metric group to register under
 * @param shardState the shard whose stream name and shard id name the subgroup
 * @return a {@link ShardMetricsReporter} that can be used to update metric values
 */
private static ShardMetricsReporter registerShardMetrics(MetricGroup metricGroup, KinesisStreamShardState shardState) {
    ShardMetricsReporter reporter = new ShardMetricsReporter();
    StreamShardHandle handle = shardState.getStreamShardHandle();
    MetricGroup shardGroup = metricGroup
        .addGroup(KinesisConsumerMetricConstants.STREAM_METRICS_GROUP, handle.getStreamName())
        .addGroup(KinesisConsumerMetricConstants.SHARD_METRICS_GROUP, handle.getShard().getShardId());
    shardGroup.gauge(KinesisConsumerMetricConstants.MILLIS_BEHIND_LATEST_GAUGE, reporter::getMillisBehindLatest);
    shardGroup.gauge(KinesisConsumerMetricConstants.MAX_RECORDS_PER_FETCH, reporter::getMaxNumberOfRecordsPerFetch);
    shardGroup.gauge(KinesisConsumerMetricConstants.NUM_AGGREGATED_RECORDS_PER_FETCH, reporter::getNumberOfAggregatedRecords);
    shardGroup.gauge(KinesisConsumerMetricConstants.NUM_DEAGGREGATED_RECORDS_PER_FETCH, reporter::getNumberOfDeaggregatedRecords);
    shardGroup.gauge(KinesisConsumerMetricConstants.AVG_RECORD_SIZE_BYTES, reporter::getAverageRecordSizeBytes);
    shardGroup.gauge(KinesisConsumerMetricConstants.BYTES_PER_READ, reporter::getBytesPerRead);
    shardGroup.gauge(KinesisConsumerMetricConstants.RUNTIME_LOOP_NANOS, reporter::getRunLoopTimeNanos);
    shardGroup.gauge(KinesisConsumerMetricConstants.LOOP_FREQUENCY_HZ, reporter::getLoopFrequencyHz);
    shardGroup.gauge(KinesisConsumerMetricConstants.SLEEP_TIME_MILLIS, reporter::getSleepTimeMillis);
    return reporter;
}
// ------------------------------------------------------------------------
//  Miscellaneous utility functions
// ------------------------------------------------------------------------

/**
 * Utility function to determine whether a shard should be subscribed by this consumer subtask.
 *
 * <p>A shard belongs to the subtask whose index equals the shard hash modulo the subtask
 * count; {@code Math.abs} keeps the assignment in range for negative hash codes.
 *
 * @param shardHash hash code for the shard
 * @param totalNumberOfConsumerSubtasks total number of consumer subtasks
 * @param indexOfThisConsumerSubtask index of this consumer subtask
 * @return true when this subtask owns the shard
 */
public static boolean isThisSubtaskShouldSubscribeTo(int shardHash,
        int totalNumberOfConsumerSubtasks,
        int indexOfThisConsumerSubtask) {
    int assignedSubtask = Math.abs(shardHash % totalNumberOfConsumerSubtasks);
    return assignedSubtask == indexOfThisConsumerSubtask;
}
/**
 * Creates the cached thread pool on which the per-shard consumer loops run.
 *
 * <p>Threads are daemon threads named {@code shardConsumers-<subtaskName>-thread-<n>}.
 * The counter is a field of the factory (not a local of {@code newThread}) so each created
 * thread gets a distinct, increasing index; previously the counter was re-created on every
 * {@code newThread} call, naming every thread {@code ...-thread-0}.
 *
 * @param subtaskName name of this consumer subtask, embedded in the thread names
 * @return the executor service for shard consumer threads
 */
@VisibleForTesting
protected ExecutorService createShardConsumersThreadPool(final String subtaskName) {
    return Executors.newCachedThreadPool(new ThreadFactory() {
        // shared across all newThread() invocations of this factory
        private final AtomicLong threadCount = new AtomicLong(0);

        @Override
        public Thread newThread(Runnable runnable) {
            Thread thread = new Thread(runnable);
            thread.setName("shardConsumers-" + subtaskName + "-thread-" + threadCount.getAndIncrement());
            thread.setDaemon(true);
            return thread;
        }
    });
}
/** Returns the live list of shard states this fetcher is subscribed to (not a defensive copy). */
@VisibleForTesting
public List<KinesisStreamShardState> getSubscribedShardsState() {
    return subscribedShardsState;
}
/**
 * Utility function to create an initial map of the last discovered shard id of each subscribed
 * stream, set to null; this is called in the constructor, and correct values are filled in
 * later by calling advanceLastDiscoveredShardOfStream().
 *
 * @param streams the list of subscribed streams
 * @return the initial map for subscribedStreamsToLastDiscoveredShardIds
 */
protected static HashMap<String, String> createInitialSubscribedStreamsToLastDiscoveredShardsState(List<String> streams) {
    HashMap<String, String> initialState = new HashMap<>();
    // every stream starts with a null placeholder — no shard discovered yet
    streams.forEach(stream -> initialState.put(stream, null));
    return initialState;
}
/**
 * Utility function to convert {@link StreamShardHandle} into {@link StreamShardMetadata}.
 *
 * <p>Hash key and sequence number ranges are copied only when present on the shard.
 *
 * @param streamShardHandle the {@link StreamShardHandle} to be converted
 * @return a {@link StreamShardMetadata} object
 */
public static StreamShardMetadata convertToStreamShardMetadata(StreamShardHandle streamShardHandle) {
    Shard shard = streamShardHandle.getShard();
    StreamShardMetadata metadata = new StreamShardMetadata();
    metadata.setStreamName(streamShardHandle.getStreamName());
    metadata.setShardId(shard.getShardId());
    metadata.setParentShardId(shard.getParentShardId());
    metadata.setAdjacentParentShardId(shard.getAdjacentParentShardId());
    HashKeyRange hashKeyRange = shard.getHashKeyRange();
    if (hashKeyRange != null) {
        metadata.setStartingHashKey(hashKeyRange.getStartingHashKey());
        metadata.setEndingHashKey(hashKeyRange.getEndingHashKey());
    }
    SequenceNumberRange sequenceNumberRange = shard.getSequenceNumberRange();
    if (sequenceNumberRange != null) {
        metadata.setStartingSequenceNumber(sequenceNumberRange.getStartingSequenceNumber());
        metadata.setEndingSequenceNumber(sequenceNumberRange.getEndingSequenceNumber());
    }
    return metadata;
}
/**
 * Utility function to convert {@link StreamShardMetadata} into {@link StreamShardHandle}.
 *
 * @param streamShardMetadata the {@link StreamShardMetadata} to be converted
 * @return a {@link StreamShardHandle} object
 */
public static StreamShardHandle convertToStreamShardHandle(StreamShardMetadata streamShardMetadata) {
    // rebuild the hash key range carried by the metadata
    HashKeyRange hashKeyRange = new HashKeyRange();
    hashKeyRange.withStartingHashKey(streamShardMetadata.getStartingHashKey());
    hashKeyRange.withEndingHashKey(streamShardMetadata.getEndingHashKey());
    // rebuild the sequence number range
    SequenceNumberRange sequenceNumberRange = new SequenceNumberRange();
    sequenceNumberRange.withStartingSequenceNumber(streamShardMetadata.getStartingSequenceNumber());
    sequenceNumberRange.withEndingSequenceNumber(streamShardMetadata.getEndingSequenceNumber());
    // assemble the shard and pair it with the stream name
    Shard shard = new Shard();
    shard.withShardId(streamShardMetadata.getShardId());
    shard.withParentShardId(streamShardMetadata.getParentShardId());
    shard.withAdjacentParentShardId(streamShardMetadata.getAdjacentParentShardId());
    shard.withHashKeyRange(hashKeyRange);
    shard.withSequenceNumberRange(sequenceNumberRange);
    return new StreamShardHandle(streamShardMetadata.getStreamName(), shard);
}
}
| |
package net.glowstone.util.config;
import lombok.Getter;
import net.glowstone.GlowServer;
import net.glowstone.util.CompatibilityBundle;
import net.glowstone.util.DynamicallyTypedMap;
import org.bukkit.Difficulty;
import org.bukkit.GameMode;
import org.bukkit.WorldType;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.configuration.InvalidConfigurationException;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.util.FileUtil;
import org.jetbrains.annotations.NonNls;
import org.yaml.snakeyaml.error.YAMLException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.function.Predicate;
import java.util.logging.Level;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static net.glowstone.util.config.ServerConfig.Validators.typeCheck;
/**
* Utilities for handling the server configuration files.
*/
public class ServerConfig implements DynamicallyTypedMap<ServerConfig.Key> {
/** The default TCP port used when no port override is configured. */
public static final int DEFAULT_PORT = 25565;

/**
 * The directory configurations are stored in.
 */
@Getter
private final File directory;

/**
 * The main configuration file.
 */
private final File configFile;

/**
 * The actual configuration data.
 */
private final YamlConfiguration config = new YamlConfiguration();

/**
 * Extra configuration files (help, permissions, commands), keyed by file name.
 */
private final Map<String, YamlConfiguration> extraConfig = new HashMap<>();

/**
 * Parameters with which the server is ran; acts as an override/cache layer over the
 * YAML configuration for the value getters.
 */
private final Map<Key, Object> parameters;
/**
 * Initialize a new ServerConfig and associated settings.
 *
 * @param directory The config directory, or null for default.
 * @param configFile The config file, or null for default.
 * @param parameters The command-line parameters used as overrides.
 * @throws NullPointerException if any argument is null
 */
public ServerConfig(File directory, File configFile, Map<Key, Object> parameters) {
    this.directory = checkNotNull(directory);
    this.configFile = checkNotNull(configFile);
    this.parameters = checkNotNull(parameters);
    // the header is written to the top of glowstone.yml on save
    config.options().indent(4).copyHeader(true).header(
        "glowstone.yml is the main configuration file for a Glowstone server\n"
            + "It contains everything from server.properties and bukkit.yml in a\n"
            + "normal CraftBukkit installation.\n\n"
            + "Configuration entries are documented on the wiki: "
            + "https://docs.glowstone.net/en/latest/Configuration_Guide/index.html\n"
            + "For help, join us on Discord: https://discord.gg/TFJqhsC");
}
////////////////////////////////////////////////////////////////////////////
// Modification

/**
 * Save the configuration back to file. Errors are logged rather than propagated.
 */
public void save() {
    try {
        config.save(configFile);
    } catch (IOException e) {
        GlowServer.logger.log(Level.SEVERE, "Failed to write config: " + configFile, e);
    }
}
/**
 * Change a configuration value at runtime.
 *
 * <p>Updates the in-memory override map and the backing YAML configuration; the change is
 * not written to disk until {@link ServerConfig#save()} is called.
 *
 * @param key the config key to write the value to
 * @param value value to write to config key
 * @see ServerConfig#save()
 */
public void set(Key key, Object value) {
    // NOTE(review): replace() only updates 'parameters' when the key is already cached
    // there; otherwise only the YAML config changes. The getters repopulate the cache
    // from config, so reads stay consistent — confirm this asymmetry is intentional.
    parameters.replace(key, value);
    config.set(key.path, value);
}
////////////////////////////////////////////////////////////////////////////
// Value getters

/**
 * Looks up a string value: cached/command-line overrides win; otherwise the YAML config
 * (falling back to the key's default) is consulted and the result is cached.
 */
@Override
public String getString(Key key) {
    if (parameters.containsKey(key)) {
        return parameters.get(key).toString();
    }
    String value = config.getString(key.path, key.def.toString());
    parameters.put(key, value);
    return value;
}
/**
 * Looks up an integer value: cached/command-line overrides win; otherwise the YAML config
 * (falling back to the key's default) is consulted and the result is cached.
 */
@Override
public int getInt(Key key) {
    if (parameters.containsKey(key)) {
        return (Integer) parameters.get(key);
    }
    int value = config.getInt(key.path, (Integer) key.def);
    parameters.put(key, value);
    return value;
}
/**
 * Looks up a boolean value: cached/command-line overrides win; otherwise the YAML config
 * (falling back to the key's default) is consulted and the result is cached.
 */
@Override
public boolean getBoolean(Key key) {
    if (parameters.containsKey(key)) {
        return (Boolean) parameters.get(key);
    }
    boolean value = config.getBoolean(key.path, (Boolean) key.def);
    parameters.put(key, value);
    return value;
}
/**
 * Retrieves a section as a list of maps.
 *
 * @param key the key to look up
 * @return the value as a list of maps
 */
@SuppressWarnings("unchecked")
public List<Map<?, ?>> getMapList(Key key) {
    if (parameters.containsKey(key)) {
        return (List<Map<?, ?>>) parameters.get(key);
    }
    // there's no get or default method for the getMapList method, so using contains.
    if (!config.contains(key.path)) {
        parameters.put(key, key.def);
        return (List<Map<?, ?>>) key.def;
    }
    // NOTE(review): unlike the other getters, a value read from config here is not cached
    // in 'parameters' — confirm whether re-reading config on every call is intentional.
    return config.getMapList(key.path);
}
////////////////////////////////////////////////////////////////////////////
// Fancy stuff

/**
 * Returns the file that contains a given setting. If it doesn't exist, it is created and
 * populated with defaults.
 *
 * <p>Loaded files are cached in {@code extraConfig} under their file name, so each extra
 * config file is read from disk at most once per {@link #load()} cycle.
 *
 * @param key the configuration setting
 * @return the file containing that setting
 */
public ConfigurationSection getConfigFile(Key key) {
    // the setting's value names the file holding the section
    String filename = getString(key);
    if (extraConfig.containsKey(filename)) {
        return extraConfig.get(filename);
    }
    YamlConfiguration conf = new YamlConfiguration();
    File file = getFile(filename);
    // the key's default doubles as a legacy path to migrate an old file from
    File migrateFrom = new File(key.def.toString());
    // create file if it doesn't exist
    if (!file.exists()) {
        if (migrateFrom.exists()) {
            FileUtil.copy(migrateFrom, file);
        } else {
            copyDefaults(key.def.toString(), file);
        }
    }
    // read in config; failures are logged and an empty configuration is cached
    try {
        conf.load(file);
    } catch (IOException e) {
        GlowServer.logger.log(Level.SEVERE, "Failed to read config: " + file, e);
    } catch (InvalidConfigurationException e) {
        report(file, e);
    }
    extraConfig.put(filename, conf);
    return conf;
}
/** Returns the {@code worlds} section of the main configuration. */
public ConfigurationSection getWorlds() {
    return config.getConfigurationSection("worlds");
}
/** Resolves a file name against the configuration directory. */
public File getFile(@NonNls String filename) {
    return new File(directory, filename);
}
////////////////////////////////////////////////////////////////////////////
// Load and internals

/**
 * Loads the server config from disk. If it doesn't exist, the default config is written,
 * creating the folder if necessary. If it's in the old bukkit.yml format and/or incomplete, it
 * is converted to canonical form and saved.
 */
public void load() {
    // load extra config files again next time they're needed
    extraConfig.clear();
    boolean changed = false;
    // create default file if needed
    if (!configFile.exists()) {
        GlowServer.logger.info("Creating default config: " + configFile);
        // create config directory
        if (!directory.isDirectory() && !directory.mkdirs()) {
            GlowServer.logger.severe("Cannot create directory: " + directory);
            return;
        }
        // load default config
        for (Key key : Key.values()) {
            config.set(key.path, key.def);
        }
        // attempt to migrate
        if (migrate()) {
            GlowServer.logger.info("Migrated configuration from previous installation");
        }
        changed = true;
    } else {
        // load config; parse errors are logged and leave 'config' partially populated
        try {
            config.load(configFile);
        } catch (IOException e) {
            GlowServer.logger.log(Level.SEVERE, "Failed to read config: " + configFile, e);
        } catch (InvalidConfigurationException e) {
            report(configFile, e);
        }
        // add missing keys to the current config
        for (Key key : Key.values()) {
            if (!config.contains(key.path)) {
                config.set(key.path, key.def);
                changed = true;
            } else if (key.validator != null) {
                // validate existing values
                Object val = config.get(key.path);
                if (!key.validator.test(val)) {
                    GlowServer.logger.warning(
                        "Invalid config value for '" + key.path + "' (" + val + "), "
                            + "resetting to default (" + key.def + ")");
                    config.set(key.path, key.def);
                    changed = true;
                }
            }
        }
    }
    // persist any defaults, migrations, or validation fixes applied above
    if (changed) {
        save();
    }
}
/**
 * Copies a bundled default file from the {@code defaults/} classpath directory to the
 * given destination, logging (rather than throwing) on failure.
 *
 * @param source name of the default resource, relative to {@code defaults/}
 * @param dest the file to create
 */
private void copyDefaults(String source, File dest) {
    URL resource = getClass().getClassLoader().getResource("defaults/" + source);
    if (resource == null) {
        GlowServer.logger.warning("Could not find default " + source + " on classpath");
        return;
    }
    try (final InputStream in = resource.openStream();
            final OutputStream out = new FileOutputStream(dest)) {
        // stream the resource to disk in fixed-size chunks
        byte[] chunk = new byte[2048];
        for (int read = in.read(chunk); read > 0; read = in.read(chunk)) {
            out.write(chunk, 0, read);
        }
    } catch (IOException e) {
        GlowServer.logger.log(Level.WARNING, "Could not save default config: " + dest, e);
        return;
    }
    GlowServer.logger.info("Created default config: " + dest);
}
/**
 * Logs a description of an invalid configuration file, choosing the message based on the
 * underlying cause of the parse failure.
 *
 * @param file the file that failed to load
 * @param e the parse failure
 */
private void report(File file, InvalidConfigurationException e) {
    Throwable cause = e.getCause();
    if (cause instanceof YAMLException) {
        // YAML syntax error; include the parser's own description
        GlowServer.logger.severe("Config file " + file + " isn't valid! " + cause);
    } else if (cause == null || cause instanceof ClassCastException) {
        // structurally invalid with no useful cause detail to append
        GlowServer.logger.severe("Config file " + file + " isn't valid!");
    } else {
        // unexpected failure; log with the full stack trace
        GlowServer.logger
            .log(Level.SEVERE, "Cannot load " + file + ": " + cause.getClass(), e);
    }
}
/** Returns the live backing {@link YamlConfiguration} (not a copy). */
public YamlConfiguration getConfig() {
    return config;
}
/**
 * Imports settings from a previous CraftBukkit/vanilla installation, if present.
 *
 * <p>Values are pulled from {@code bukkit.yml} (keys marked {@code Migrate.BUKKIT}) and
 * {@code server.properties} (keys marked {@code Migrate.PROPS}) in the working directory.
 *
 * @return true if at least one key was migrated into this config
 */
private boolean migrate() {
    boolean migrateStatus = false;

    // migrate from a CraftBukkit-style bukkit.yml, if present
    File bukkitYml = new File("bukkit.yml");
    if (bukkitYml.exists()) {
        YamlConfiguration bukkit = new YamlConfiguration();
        try {
            bukkit.load(bukkitYml);
        } catch (InvalidConfigurationException e) {
            report(bukkitYml, e);
        } catch (IOException e) {
            GlowServer.logger.log(Level.WARNING, "Could not migrate from " + bukkitYml, e);
        }
        for (Key key : Key.values()) {
            if (key.migrate == Migrate.BUKKIT && bukkit.contains(key.migratePath)) {
                config.set(key.path, bukkit.get(key.migratePath));
                migrateStatus = true;
            }
        }
        config.set("aliases", bukkit.get("aliases"));
        config.set("worlds", bukkit.get("worlds"));
    }

    // migrate from vanilla server.properties, if present
    File serverProps = new File("server.properties");
    if (serverProps.exists()) {
        Properties props = new Properties();
        // FIX: try-with-resources — the stream was previously opened and never closed
        try (FileInputStream in = new FileInputStream(serverProps)) {
            props.load(in);
        } catch (IOException e) {
            GlowServer.logger.log(Level.WARNING, "Could not migrate from " + serverProps, e);
        }
        for (Key key : Key.values()) {
            if (key.migrate == Migrate.PROPS && props.containsKey(key.migratePath)) {
                String value = props.getProperty(key.migratePath);
                // coerce the raw property string to the type of the key's default
                if (key.def instanceof Integer) {
                    try {
                        config.set(key.path, Integer.parseInt(value));
                    } catch (NumberFormatException e) {
                        GlowServer.logger.log(Level.WARNING,
                            "Could not migrate " + key.migratePath + " from "
                                + serverProps, e);
                        continue;
                    }
                } else if (key.def instanceof Boolean) {
                    config.set(key.path, Boolean.parseBoolean(value));
                } else {
                    config.set(key.path, value);
                }
                migrateStatus = true;
            }
        }
    }
    return migrateStatus;
}
/**
* An enum containing configuration keys used by the server.
*
* <p>NOTE: Do not use Collections.emptyList() as a default value because Jackson will alias it
* with any other instances of emptyLIst. Use a new instance of an empty ArrayList instead.
*/
public enum Key {
// server
SERVER_IP("server.ip", "", Migrate.PROPS, "server-ip", String.class::isInstance),
SERVER_PORT("server.port", DEFAULT_PORT, Migrate.PROPS, "server-port", Validators.PORT),
SERVER_NAME("server.name", "Glowstone Server", Migrate.PROPS, "server-name",
String.class::isInstance),
LOG_FILE("server.log-file", "logs/log-%D.txt", String.class::isInstance),
ONLINE_MODE("server.online-mode", true, Migrate.PROPS, "online-mode",
Boolean.class::isInstance),
MAX_PLAYERS("server.max-players", 20, Migrate.PROPS, "max-players",
Validators.POSITIVE_INTEGER),
WHITELIST("server.whitelisted", false, Migrate.PROPS, "white-list",
Boolean.class::isInstance),
MOTD("server.motd", "A Glowstone server", Migrate.PROPS, "motd",
String.class::isInstance),
SHUTDOWN_MESSAGE("server.shutdown-message", "Server shutting down.", Migrate.BUKKIT,
"settings.shutdown-message", String.class::isInstance),
ALLOW_CLIENT_MODS("server.allow-client-mods", true, Boolean.class::isInstance),
DNS_OVERRIDES("server.dns", new ArrayList<>()),
// console
USE_JLINE("console.use-jline", true, Boolean.class::isInstance),
CONSOLE_PROMPT("console.prompt", "> ", String.class::isInstance),
CONSOLE_DATE("console.date-format", "HH:mm:ss", String.class::isInstance),
CONSOLE_LOG_DATE("console.log-date-format", "yyyy/MM/dd HH:mm:ss",
String.class::isInstance),
// game props
GAMEMODE("game.gamemode", "SURVIVAL", Migrate.PROPS, "gamemode",
Validators.forEnum(GameMode.class)),
FORCE_GAMEMODE("game.gamemode-force", false, Migrate.PROPS, "force-gamemode",
Boolean.class::isInstance),
DIFFICULTY("game.difficulty", "NORMAL", Migrate.PROPS, "difficulty",
Validators.forEnum(Difficulty.class)),
HARDCORE("game.hardcore", false, Migrate.PROPS, "hardcore",
Boolean.class::isInstance),
PVP_ENABLED("game.pvp", true, Migrate.PROPS, "pvp",
Boolean.class::isInstance),
MAX_BUILD_HEIGHT("game.max-build-height", 256, Migrate.PROPS, "max-build-height",
Validators.POSITIVE_INTEGER),
// server.properties keys
ALLOW_FLIGHT("game.allow-flight", false, Migrate.PROPS, "allow-flight",
Boolean.class::isInstance),
ENABLE_COMMAND_BLOCK("game.command-blocks", false, Migrate.PROPS, "enable-command-block",
Boolean.class::isInstance),
//OP_PERMISSION_LEVEL(null, Migrate.PROPS, "op-permission-level"),
RESOURCE_PACK("game.resource-pack", "", Migrate.PROPS, "resource-pack",
String.class::isInstance),
RESOURCE_PACK_HASH("game.resource-pack-hash", "", Migrate.PROPS, "resource-pack-hash",
String.class::isInstance),
SNOOPER_ENABLED("server.snooper-enabled", false, Migrate.PROPS, "snooper-enabled",
Boolean.class::isInstance),
PREVENT_PROXY("server.prevent-proxy-connections", true, Migrate.PROPS,
"prevent-proxy-connections", Boolean.class::isInstance),
// creatures
SPAWN_MONSTERS("creatures.enable.monsters", true, Migrate.PROPS, "spawn-monsters",
Boolean.class::isInstance),
SPAWN_ANIMALS("creatures.enable.animals", true, Migrate.PROPS, "spawn-animals",
Boolean.class::isInstance),
SPAWN_NPCS("creatures.enable.npcs", true, Migrate.PROPS, "spawn-npcs",
Boolean.class::isInstance),
MONSTER_LIMIT("creatures.limit.monsters", 70, Migrate.BUKKIT, "spawn-limits.monsters",
Validators.NON_NEGATIVE_INTEGER),
ANIMAL_LIMIT("creatures.limit.animals", 15, Migrate.BUKKIT, "spawn-limits.animals",
Validators.NON_NEGATIVE_INTEGER),
WATER_ANIMAL_LIMIT("creatures.limit.water", 5, Migrate.BUKKIT,
"spawn-limits.water-animals",
Validators.NON_NEGATIVE_INTEGER),
AMBIENT_LIMIT("creatures.limit.ambient", 15, Migrate.BUKKIT, "spawn-limits.ambient",
Validators.NON_NEGATIVE_INTEGER),
WATER_AMBIENT_LIMIT("creatures.limit.water-ambient", 20, Migrate.BUKKIT,
"spawn-limits.water-ambient",
Validators.NON_NEGATIVE_INTEGER),
MONSTER_TICKS("creatures.ticks.monsters", 1, Migrate.BUKKIT, "ticks-per.monster-spawns",
Validators.NON_NEGATIVE_INTEGER),
ANIMAL_TICKS("creatures.ticks.animal", 400, Migrate.BUKKIT, "ticks-per.animal-spawns",
Validators.NON_NEGATIVE_INTEGER),
WATER_TICKS("creatures.ticks.water", 1, Migrate.BUKKIT, "ticks-per.water-spawns",
Validators.NON_NEGATIVE_INTEGER),
WATER_AMBIENT_TICKS("creatures.ticks.water-ambient", 1, Migrate.BUKKIT,
"ticks-per.water-ambient-spawns",
Validators.NON_NEGATIVE_INTEGER),
AMBIENT_TICKS("creatures.ticks.ambient", 1, Migrate.BUKKIT, "ticks-per.ambient-spawns",
Validators.NON_NEGATIVE_INTEGER),
// folders
PLUGIN_FOLDER("folders.plugins", "plugins", Validators.PATH),
UPDATE_FOLDER("folders.update", "update", Migrate.BUKKIT, "settings.update-folder",
Validators.PATH),
WORLD_FOLDER("folders.worlds", "worlds", Migrate.BUKKIT, "settings.world-container",
Validators.PATH),
LIBRARIES_FOLDER("folders.libraries", "lib", Validators.PATH),
// files
PERMISSIONS_FILE("files.permissions", "permissions.yml", Migrate.BUKKIT,
"settings.permissions-file", Validators.PATH),
COMMANDS_FILE("files.commands", "commands.yml", Validators.PATH),
HELP_FILE("files.help", "help.yml", Validators.PATH),
// advanced
CONNECTION_THROTTLE("advanced.connection-throttle", 4000, Migrate.BUKKIT,
"settings.connection-throttle",
Validators.NON_NEGATIVE_INTEGER),
//PING_PACKET_LIMIT(
// "advanced.ping-packet-limit", 100, Migrate.BUKKIT, "settings.ping-packet-limit"),
PLAYER_IDLE_TIMEOUT("advanced.idle-timeout", 0, Migrate.PROPS, "player-idle-timeout",
Validators.NON_NEGATIVE_INTEGER),
WARN_ON_OVERLOAD("advanced.warn-on-overload", true, Migrate.BUKKIT,
"settings.warn-on-overload", Boolean.class::isInstance),
EXACT_LOGIN_LOCATION("advanced.exact-login-location", false, Migrate.BUKKIT,
"settings.use-exact-login-location", Boolean.class::isInstance),
PLUGIN_PROFILING("advanced.plugin-profiling", false, Migrate.BUKKIT,
"settings.plugin-profiling", Boolean.class::isInstance),
WARNING_STATE("advanced.deprecated-verbose", "false", Migrate.BUKKIT,
"settings.deprecated-verbose"),
COMPRESSION_THRESHOLD("advanced.compression-threshold", 256, Migrate.PROPS,
"network-compression-threshold",
typeCheck(Integer.class).and(value -> value >= -1)),
PROXY_SUPPORT("advanced.proxy-support", false, Boolean.class::isInstance),
PLAYER_SAMPLE_COUNT("advanced.player-sample-count", 12,
Validators.NON_NEGATIVE_INTEGER),
GRAPHICS_COMPUTE("advanced.graphics-compute.enable", false),
GRAPHICS_COMPUTE_ANY_DEVICE("advanced.graphics-compute.use-any-device", false,
Boolean.class::isInstance),
REGION_CACHE_SIZE("advanced.region-file.cache-size", 256,
Validators.NON_NEGATIVE_INTEGER),
REGION_COMPRESSION("advanced.region-file.compression", true,
Boolean.class::isInstance),
PROFILE_LOOKUP_TIMEOUT("advanced.profile-lookup-timeout", 5,
Validators.NON_NEGATIVE_INTEGER),
SUGGEST_PLAYER_NAMES_WHEN_NULL_TAB_COMPLETIONS(
"advanced.suggest-player-name-when-null-tab-completions", true,
Boolean.class::isInstance),
MAX_WORLD_SIZE("advanced.max-world-size", 29999984, Validators.POSITIVE_INTEGER),
// query rcon etc
QUERY_ENABLED("extras.query-enabled", false, Migrate.PROPS, "enable-query",
Boolean.class::isInstance),
QUERY_PORT("extras.query-port", 25614, Migrate.PROPS, "query.port", Validators.PORT),
QUERY_PLUGINS("extras.query-plugins", true, Migrate.BUKKIT, "settings.query-plugins",
Boolean.class::isInstance),
RCON_ENABLED("extras.rcon-enabled", false, Migrate.PROPS, "enable-rcon",
Boolean.class::isInstance),
RCON_PASSWORD("extras.rcon-password", "glowstone", Migrate.PROPS, "rcon.password",
String.class::isInstance),
RCON_PORT("extras.rcon-port", 25575, Migrate.PROPS, "rcon.port", Validators.PORT),
RCON_COLORS("extras.rcon-colors", true,
Boolean.class::isInstance),
// level props
LEVEL_NAME("world.name", "world", Migrate.PROPS, "level-name",
String.class::isInstance),
LEVEL_SEED("world.seed", "", Migrate.PROPS, "level-seed"),
LEVEL_TYPE("world.level-type", "DEFAULT", Migrate.PROPS, "level-type", Validators
.WORLD_TYPE),
SPAWN_RADIUS("world.spawn-radius", 16, Migrate.PROPS, "spawn-protection",
Validators.NON_NEGATIVE_INTEGER),
VIEW_DISTANCE("world.view-distance", 8, Migrate.PROPS, "view-distance",
Validators.POSITIVE_INTEGER),
GENERATE_STRUCTURES("world.gen-structures", true, Migrate.PROPS, "generate-structures",
Boolean.class::isInstance),
ALLOW_NETHER("world.allow-nether", true, Migrate.PROPS, "allow-nether",
Boolean.class::isInstance),
ALLOW_END("world.allow-end", true, Migrate.BUKKIT, "settings.allow-end",
Boolean.class::isInstance),
PERSIST_SPAWN("world.keep-spawn-loaded", true,
Boolean.class::isInstance),
POPULATE_ANCHORED_CHUNKS("world.populate-anchored-chunks", true,
Boolean.class::isInstance),
WATER_CLASSIC("world.classic-style-water", false,
Boolean.class::isInstance),
DISABLE_GENERATION("world.disable-generation", false,
Boolean.class::isInstance),
// libraries
LIBRARY_CHECKSUM_VALIDATION("libraries.checksum-validation", true,
Boolean.class::isInstance),
LIBRARY_REPOSITORY_URL("libraries.repository-url",
"https://repo.glowstone.net/repository/maven-public/",
String.class::isInstance),
LIBRARY_DOWNLOAD_ATTEMPTS("libraries.download-attempts", 2,
Validators.POSITIVE_INTEGER),
COMPATIBILITY_BUNDLE("libraries.compatibility-bundle",
CompatibilityBundle.CRAFTBUKKIT.name(), String.class::isInstance),
LIBRARIES_LIST("libraries.list", new ArrayList<>());
// Dot-separated path of this setting inside the Glowstone config file.
@Getter
private final String path;
// Default value used when the key is absent from the config.
private final Object def;
// Legacy source this key can be migrated from; null when there is no migration.
private final Migrate migrate;
// Key path inside the legacy source (server.properties / bukkit.yml style); null if none.
private final String migratePath;
// Value validator supplied by the enum constant; null when no validator was given.
// NOTE(review): raw Predicate type — the consumer of this field is outside this
// view, so it is left raw here; consider Predicate<?> after checking usages.
private final Predicate validator;
/**
 * Creates a key with no migration source and no validator.
 */
Key(String path, Object def) {
    this(path, def, null, null);
}

/**
 * Creates a key with a validator but no migration source.
 */
Key(String path, Object def, Predicate<?> validator) {
    this(path, def, null, null, validator);
}

/**
 * Creates a key migrated from a legacy source, with no validator.
 */
Key(String path, Object def, Migrate migrate, String migratePath) {
    this(path, def, migrate, migratePath, null);
}

/**
 * Canonical constructor; every other constructor delegates here.
 *
 * @param path the config path of this key
 * @param def the default value used when the key is absent
 * @param migrate the legacy source to migrate from, or null
 * @param migratePath the key path within the legacy source, or null
 * @param validator the value validator, or null when unvalidated
 */
Key(String path, Object def, Migrate migrate, String migratePath, Predicate<?> validator) {
    this.path = path;
    this.def = def;
    this.migrate = migrate;
    this.migratePath = migratePath;
    this.validator = validator;
}
@Override
public String toString() {
    // Renders as e.g. "SERVER_PORT(server.port, 25565)"; null default prints as "null",
    // matching plain string concatenation.
    return String.format("%s(%s, %s)", name(), path, def);
}
}
/**
 * The legacy configuration source a {@link Key} can be migrated from.
 *
 * <p>Judging by the migrate-paths used above, PROPS corresponds to
 * server.properties-style keys and BUKKIT to bukkit.yml-style
 * "settings.*" keys — confirm against the migration code that consumes
 * this enum, which is not visible here.
 */
private enum Migrate {
    BUKKIT, PROPS
}
/**
* A predicate wrapper to check if a value is a valid element of an enum.
*
* <p>See {@link Validators#forEnum(Class)}
*
* @param <T> the type of the enum
*/
static final class EnumPredicate<T extends Enum<T>> implements Predicate<String> {
final Class<T> enumClass;
EnumPredicate(Class<T> enumClass) {
checkNotNull(enumClass);
checkArgument(enumClass.isEnum());
this.enumClass = enumClass;
}
@Override
public boolean test(String value) {
if (!typeCheck(String.class).test(value)) {
return false;
}
if (value == null || value.isEmpty()) {
return false;
}
try {
Enum.valueOf(enumClass, value);
} catch (Exception e) {
return false;
}
return true;
}
}
/**
 * Common value validators used by config {@link Key}s.
 *
 * <p>Static utility holder; not instantiable.
 */
static class Validators {
    /**
     * Checks if the value is positive (over zero).
     */
    static final Predicate<Number> POSITIVE = (number) -> number.doubleValue() > 0;
    /**
     * Checks if the value is integer-typed and positive.
     */
    static final Predicate<Integer> POSITIVE_INTEGER = typeCheck(Integer.class).and(
            POSITIVE);
    /**
     * Checks if the value is zero.
     */
    static final Predicate<Number> ZERO = (number) -> number.doubleValue() == 0;
    /**
     * Checks if the value is greater than (positive) or equal to zero.
     */
    static final Predicate<Number> ABSOLUTE = POSITIVE.or(ZERO);
    /**
     * Checks if the value is integer-typed and either positive or zero.
     */
    // Typed Predicate<Integer> for consistency with POSITIVE_INTEGER (was Predicate<?>).
    static final Predicate<Integer> NON_NEGATIVE_INTEGER = typeCheck(Integer.class).and(ABSOLUTE);
    /**
     * Checks if the value is a valid port number (1..49151, i.e. below the
     * dynamic/ephemeral port range).
     */
    static final Predicate<Integer> PORT = typeCheck(Integer.class)
            .and(POSITIVE).and((number) -> number < 49152);
    /**
     * Checks if the value is a valid {@link WorldType} name.
     */
    static final Predicate<String> WORLD_TYPE = typeCheck(String.class)
            .and((value) -> WorldType.getByName(value) != null);
    /**
     * Checks if the value is a valid file/directory path.
     *
     * <p>Note that the behavior of this predicate may be platform-dependent.
     */
    static final Predicate<String> PATH = typeCheck(String.class).and((value) -> {
        try {
            // Paths.get never returns null; it throws InvalidPathException (a
            // runtime exception) for malformed paths, so a successful call means
            // the string is a syntactically valid path on this platform.
            Paths.get(value);
            return true;
        } catch (Exception ex) {
            return false;
        }
    });

    private Validators() {
        // utility class — no instances
    }

    /**
     * Creates a {@link EnumPredicate} that checks if the value is a member of the given enum
     * class.
     *
     * @param enumClass the enum class
     * @param <T> the type of the enum
     * @return the predicate
     */
    static <T extends Enum<T>> EnumPredicate<T> forEnum(Class<T> enumClass) {
        return new EnumPredicate<>(enumClass);
    }

    /**
     * Creates a {@link Predicate} that checks if the value is an instance of the
     * specified class.
     *
     * @param expected the expected class.
     * @return the predicate
     */
    static <T> Predicate<T> typeCheck(Class<T> expected) {
        return expected::isInstance;
    }
}
}
| |
package io.fabric8.openshift.api.model.installer.baremetal.v1;
import java.util.HashMap;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import io.fabric8.kubernetes.api.model.Container;
import io.fabric8.kubernetes.api.model.IntOrString;
import io.fabric8.kubernetes.api.model.KubernetesResource;
import io.fabric8.kubernetes.api.model.LabelSelector;
import io.fabric8.kubernetes.api.model.LocalObjectReference;
import io.fabric8.kubernetes.api.model.ObjectMeta;
import io.fabric8.kubernetes.api.model.ObjectReference;
import io.fabric8.kubernetes.api.model.PersistentVolumeClaim;
import io.fabric8.kubernetes.api.model.PodTemplateSpec;
import io.fabric8.kubernetes.api.model.ResourceRequirements;
import io.sundr.builder.annotations.Buildable;
import io.sundr.builder.annotations.BuildableReference;
import lombok.EqualsAndHashCode;
import lombok.Setter;
import lombok.ToString;
import lombok.experimental.Accessors;
/**
 * Generated Jackson/Sundrio model for the bare-metal installer's root-device
 * hints: criteria (device name, HCTL address, vendor/model, serial number,
 * WWN identifiers, minimum size, rotational flag) apparently used to pick the
 * root disk of a host. Fields map 1:1 to the JSON properties listed in
 * {@code @JsonPropertyOrder}; unknown properties are kept in
 * {@code additionalProperties}.
 *
 * <p>NOTE(review): this class is code-generated — do not hand-edit logic.
 */
@JsonDeserialize(using = com.fasterxml.jackson.databind.JsonDeserializer.None.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonPropertyOrder({
    "apiVersion",
    "kind",
    "metadata",
    "deviceName",
    "hctl",
    "minSizeGigabytes",
    "model",
    "rotational",
    "serialNumber",
    "vendor",
    "wwn",
    "wwnVendorExtension",
    "wwnWithExtension"
})
@ToString
@EqualsAndHashCode
@Setter
@Accessors(prefix = {
    "_",
    ""
})
@Buildable(editableEnabled = false, validationEnabled = false, generateBuilderPackage = false, lazyCollectionInitEnabled = false, builderPackage = "io.fabric8.kubernetes.api.builder", refs = {
    @BuildableReference(ObjectMeta.class),
    @BuildableReference(LabelSelector.class),
    @BuildableReference(Container.class),
    @BuildableReference(PodTemplateSpec.class),
    @BuildableReference(ResourceRequirements.class),
    @BuildableReference(IntOrString.class),
    @BuildableReference(ObjectReference.class),
    @BuildableReference(LocalObjectReference.class),
    @BuildableReference(PersistentVolumeClaim.class)
})
public class RootDeviceHints implements KubernetesResource
{

    // Linux device node, e.g. "/dev/sda" — assumed; confirm against installer docs.
    @JsonProperty("deviceName")
    private String deviceName;
    // SCSI Host:Channel:Target:Lun address — assumed from the field name.
    @JsonProperty("hctl")
    private String hctl;
    @JsonProperty("minSizeGigabytes")
    private Integer minSizeGigabytes;
    @JsonProperty("model")
    private String model;
    // True for spinning disks, false for SSDs — assumed; confirm upstream.
    @JsonProperty("rotational")
    private Boolean rotational;
    @JsonProperty("serialNumber")
    private String serialNumber;
    @JsonProperty("vendor")
    private String vendor;
    // WWN = World Wide Name storage identifier.
    @JsonProperty("wwn")
    private String wwn;
    @JsonProperty("wwnVendorExtension")
    private String wwnVendorExtension;
    @JsonProperty("wwnWithExtension")
    private String wwnWithExtension;
    // Catch-all for JSON properties that have no declared field above.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<String, Object>();

    /**
     * No args constructor for use in serialization
     *
     */
    public RootDeviceHints() {
    }

    /**
     * All-args constructor.
     *
     * @param wwnVendorExtension vendor extension portion of the WWN
     * @param hctl SCSI Host:Channel:Target:Lun address
     * @param serialNumber device serial number
     * @param vendor device vendor string
     * @param wwnWithExtension WWN including the vendor extension
     * @param model device model string
     * @param rotational whether the device is a rotational (spinning) disk
     * @param deviceName device node name
     * @param minSizeGigabytes minimum device size, in gigabytes
     * @param wwn World Wide Name identifier
     */
    public RootDeviceHints(String deviceName, String hctl, Integer minSizeGigabytes, String model, Boolean rotational, String serialNumber, String vendor, String wwn, String wwnVendorExtension, String wwnWithExtension) {
        super();
        this.deviceName = deviceName;
        this.hctl = hctl;
        this.minSizeGigabytes = minSizeGigabytes;
        this.model = model;
        this.rotational = rotational;
        this.serialNumber = serialNumber;
        this.vendor = vendor;
        this.wwn = wwn;
        this.wwnVendorExtension = wwnVendorExtension;
        this.wwnWithExtension = wwnWithExtension;
    }

    @JsonProperty("deviceName")
    public String getDeviceName() {
        return deviceName;
    }

    @JsonProperty("deviceName")
    public void setDeviceName(String deviceName) {
        this.deviceName = deviceName;
    }

    @JsonProperty("hctl")
    public String getHctl() {
        return hctl;
    }

    @JsonProperty("hctl")
    public void setHctl(String hctl) {
        this.hctl = hctl;
    }

    @JsonProperty("minSizeGigabytes")
    public Integer getMinSizeGigabytes() {
        return minSizeGigabytes;
    }

    @JsonProperty("minSizeGigabytes")
    public void setMinSizeGigabytes(Integer minSizeGigabytes) {
        this.minSizeGigabytes = minSizeGigabytes;
    }

    @JsonProperty("model")
    public String getModel() {
        return model;
    }

    @JsonProperty("model")
    public void setModel(String model) {
        this.model = model;
    }

    @JsonProperty("rotational")
    public Boolean getRotational() {
        return rotational;
    }

    @JsonProperty("rotational")
    public void setRotational(Boolean rotational) {
        this.rotational = rotational;
    }

    @JsonProperty("serialNumber")
    public String getSerialNumber() {
        return serialNumber;
    }

    @JsonProperty("serialNumber")
    public void setSerialNumber(String serialNumber) {
        this.serialNumber = serialNumber;
    }

    @JsonProperty("vendor")
    public String getVendor() {
        return vendor;
    }

    @JsonProperty("vendor")
    public void setVendor(String vendor) {
        this.vendor = vendor;
    }

    @JsonProperty("wwn")
    public String getWwn() {
        return wwn;
    }

    @JsonProperty("wwn")
    public void setWwn(String wwn) {
        this.wwn = wwn;
    }

    @JsonProperty("wwnVendorExtension")
    public String getWwnVendorExtension() {
        return wwnVendorExtension;
    }

    @JsonProperty("wwnVendorExtension")
    public void setWwnVendorExtension(String wwnVendorExtension) {
        this.wwnVendorExtension = wwnVendorExtension;
    }

    @JsonProperty("wwnWithExtension")
    public String getWwnWithExtension() {
        return wwnWithExtension;
    }

    @JsonProperty("wwnWithExtension")
    public void setWwnWithExtension(String wwnWithExtension) {
        this.wwnWithExtension = wwnWithExtension;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

}
| |
package org.shaolin.uimaster.page.flow;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.Enumeration;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.log4j.NDC;
import org.shaolin.bmdp.datamodel.pagediagram.NextType;
import org.shaolin.bmdp.datamodel.pagediagram.OutType;
import org.shaolin.bmdp.exceptions.BusinessOperationException;
import org.shaolin.bmdp.i18n.LocaleContext;
import org.shaolin.bmdp.persistence.HibernateUtil;
import org.shaolin.bmdp.runtime.AppContext;
import org.shaolin.bmdp.runtime.ce.IConstantEntity;
import org.shaolin.bmdp.runtime.security.IPermissionService;
import org.shaolin.bmdp.runtime.security.UserContext;
import org.shaolin.bmdp.runtime.spi.IServerServiceManager;
import org.shaolin.javacc.exception.EvaluationException;
import org.shaolin.javacc.exception.ParsingException;
import org.shaolin.uimaster.page.AjaxContextHelper;
import org.shaolin.uimaster.page.MobilitySupport;
import org.shaolin.uimaster.page.WebConfig;
import org.shaolin.uimaster.page.cache.UIFlowCacheManager;
import org.shaolin.uimaster.page.exception.AjaxException;
import org.shaolin.uimaster.page.exception.NoWebflowAPException;
import org.shaolin.uimaster.page.exception.NoWebflowNodeAPException;
import org.shaolin.uimaster.page.exception.UIPageException;
import org.shaolin.uimaster.page.exception.WebFlowException;
import org.shaolin.uimaster.page.flow.error.WebflowError;
import org.shaolin.uimaster.page.flow.error.WebflowErrorUtil;
import org.shaolin.uimaster.page.flow.nodes.WebNode;
import org.shaolin.uimaster.page.javacc.HttpRequestEvaluationContext;
import org.shaolin.uimaster.page.javacc.WebFlowContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@RestController
public class WebflowController {
private static Logger logger = LoggerFactory.getLogger(WebflowController.class);
// Character encoding applied to request bodies before parameter parsing
// (see _processPageSubmit).
protected String charset = "UTF-8";
// Presumably controls no-cache response headers via processNoCache —
// that method is outside this view; confirm before relying on it.
protected boolean nocache = false;
/**
 * Convenience accessor for the webflow-related values of a request.
 *
 * <p>Each value is resolved from a request <em>attribute</em> first, falling
 * back to the request <em>parameter</em> of the same name (this dedupes the
 * lookup pattern that was previously repeated for every field).
 */
public static class AttributesAccessor {
    private final transient HttpServletRequest request;
    /** Source chunk name. */
    public String chunkName;
    /** Source node name. */
    public String nodeName;
    /** Page "out" name selected by the submit. */
    public String outName;
    /** Source entity name. */
    public String entityName;
    /** Destination node name, if explicitly given. */
    public String destnodename;
    /** Destination chunk name, if explicitly given. */
    public String destchunkname;
    String orgId;
    String orgCode;

    public AttributesAccessor(HttpServletRequest request) {
        this.request = request;
        this.chunkName = resolve(WebflowConstants.SOURCE_CHUNK_NAME);
        this.nodeName = resolve(WebflowConstants.SOURCE_NODE_NAME);
        this.outName = resolve(WebflowConstants.OUT_NAME);
        this.entityName = resolve(WebflowConstants.SOURCE_ENTITY_NAME);
        this.destnodename = resolve(WebflowConstants.DEST_NODE_NAME);
        this.destchunkname = resolve(WebflowConstants.DEST_CHUNK_NAME);
        this.orgId = resolve(WebflowConstants.USER_ORGID);
        this.orgCode = resolve(WebflowConstants.USER_ORGNAME);
    }

    /**
     * Looks {@code name} up as a request attribute, falling back to the request
     * parameter of the same name. Matches the original per-field lookups,
     * including the String cast of the attribute value.
     */
    private String resolve(String name) {
        String value = (String) request.getAttribute(name);
        return (value != null) ? value : request.getParameter(name);
    }

    /** Sets an arbitrary request attribute. */
    public void setAttribute(String constant, Object obj) {
        request.setAttribute(constant, obj);
    }

    /** Stores the processing flag on the request. */
    public void setFlag(Boolean flag) {
        request.setAttribute(WebflowConstants.ATTRIBUTE_FLAG, flag);
    }

    /**
     * Returns the processing flag, defaulting it to TRUE (and storing that
     * default back on the request) when absent.
     */
    public boolean getFlag() {
        Boolean attributeFlag = (Boolean)
                request.getAttribute(WebflowConstants.ATTRIBUTE_FLAG);
        if (attributeFlag == null) {
            attributeFlag = Boolean.TRUE;
            setFlag(Boolean.TRUE);
        }
        return attributeFlag.booleanValue();
    }

    /** Overrides the page "out" name on the request. */
    public void setOut(String outName) {
        request.setAttribute(WebflowConstants.OUT_NAME, outName);
    }

    /** Returns the page "out" name attribute (not the parameter fallback). */
    public String getOut() {
        return (String) request.getAttribute(WebflowConstants.OUT_NAME);
    }
}
/**
 * Handles a normal link (HTTP GET) action on the webflow.
 *
 * <p>Registers the per-request user/locale context, resolves the destination
 * node from the flow cache, checks access permission, then executes the node
 * chain until no next node is returned. The thread-bound contexts are always
 * unregistered in the finally block.
 *
 * @param request the current HTTP request
 * @param response the current HTTP response
 * @param _chunkname name of the webflow chunk containing the destination node
 * @param _nodename name of the destination node within the chunk
 * @throws IOException if forwarding or error reporting fails
 */
@RequestMapping(name="/webflow.do", method=RequestMethod.GET)
public void doWebflowGet(HttpServletRequest request, HttpServletResponse response,
        @RequestParam(value="_chunkname", required=true) String _chunkname,
        @RequestParam(value="_nodename", required=true) String _nodename)
        throws IOException {
    // JAAS mode: anonymous users must have visited the index page before
    // following any flow link; otherwise redirect them there.
    if (WebConfig.isJAAS()) {
        if (request.getParameter("_login") == null) {
            HttpSession session = request.getSession(false);
            if ((session == null) || (session.getAttribute("indexPageVisited") == null)) {
                ProcessHelper.processDirectForward(WebConfig.replaceWebContext(WebConfig.getIndexPage()), request,response);
                return;
            }
        }
    }
    HttpSession session = request.getSession();
    UserContext currentUserContext = (UserContext)session.getAttribute(WebflowConstants.USER_SESSION_KEY);
    if (currentUserContext == null) {
        // create a fake user context for guest users when accessing the website
        // for the first time; orgId -1 marks the guest.
        currentUserContext = new UserContext();
        currentUserContext.setOrgCode(null);
        currentUserContext.setOrgId(-1);
        currentUserContext.setUserRequestIP(request.getRemoteAddr());
        session.setAttribute(WebflowConstants.USER_SESSION_KEY, currentUserContext);
    }
    String userLocale = WebConfig.getUserLocale(request);
    List userRoles = (List)session.getAttribute(WebflowConstants.USER_ROLE_KEY);
    String userAgent = request.getHeader("user-agent");
    boolean isMobile = MobilitySupport.isMobileRequest(userAgent);
    // bind the user context to the current thread
    UserContext.register(session, currentUserContext, userLocale, userRoles, isMobile);
    UserContext.setAppClient(request);
    // bind the request to the current thread
    HttpRequestEvaluationContext.registerCurrentRequest(request);
    AppContext.register(IServerServiceManager.INSTANCE);
    String locale = UserContext.getUserLocale();
    if (logger.isDebugEnabled()) {
        logger.debug("Detected user locale:" + locale);
    }
    LocaleContext.createLocaleContext(locale);
    // Resolve the destination node from the flow cache; 400 when unknown.
    WebNode destNode = UIFlowCacheManager.getInstance().findWebNode(_chunkname, _nodename);
    if (destNode == null) {
        ProcessHelper.processResponseSendError(response, HttpServletResponse.SC_BAD_REQUEST,
                "can't find destination node");
        return;
    }
    if (!checkAccessPermission(_chunkname, _nodename, request)) {
        ProcessHelper.processDirectForward(WebConfig.getNoPermissionPage(), request, response);
        return;
    }
    ProcessHelper.convertParameter2Attribute(request, destNode.getType());
    if (logger.isInfoEnabled()) {
        logger.info("Process destination node " + destNode.toString());
    }
    WebFlowContext flowContext = null;
    try {
        // Execute the chain of nodes until a node returns no successor,
        // then commit the Hibernate session.
        while (destNode != null) {
            flowContext = new WebFlowContext(destNode, request, response);
            WebNode nextNode = destNode.execute(flowContext);
            destNode = nextNode;
        }
        HibernateUtil.releaseSession(true);
    } catch (Throwable ex) {
        // roll the session back and route the error through the flow error handler
        HibernateUtil.releaseSession(false);
        handleFlowException(request, response, destNode, flowContext, ex);
    } finally {
        // always drop the thread-bound user/locale contexts
        UserContext.unregister();
        LocaleContext.clearLocaleContext();
    }
}
/**
 * Handles a page submit (HTTP POST) action on the webflow.
 *
 * <p>Validates the page "out" name and session, registers the per-request
 * user/locale context, checks access permission, and delegates the actual
 * submit processing to {@code _processPageSubmit}. The thread-bound contexts
 * are always unregistered in the finally block.
 *
 * @param request the current HTTP request
 * @param response the current HTTP response
 * @throws IOException if forwarding or error reporting fails
 * @throws ServletException if submit processing fails at the servlet level
 */
@RequestMapping(name="/webflow.post", method=RequestMethod.POST)
public void doWebflowPost(HttpServletRequest request, HttpServletResponse response)
        throws IOException, ServletException {
    AttributesAccessor attrAccessor = new AttributesAccessor(request);
    // a submit must name the page "out" it is triggering
    if(attrAccessor.outName == null || attrAccessor.outName.trim().length() == 0) {
        ProcessHelper.processResponseSendError(response, HttpServletResponse.SC_BAD_REQUEST,
                "page out does not specify!");
        return;
    }
    if (checkSessionTimeout(request)) {
        ProcessHelper.processDirectForward(WebConfig.getTimeoutPage(), request, response);
        return;
    }
    HttpSession session = request.getSession();
    UserContext currentUserContext = (UserContext)session.getAttribute(WebflowConstants.USER_SESSION_KEY);
    if (currentUserContext == null) {
        // user context must exist before a page can be submitted.
        ProcessHelper.processDirectForward(WebConfig.getTimeoutPage(), request, response);
        return;
    }
    try
    {
        String userLocale = WebConfig.getUserLocale(request);
        List userRoles = (List)session.getAttribute(WebflowConstants.USER_ROLE_KEY);
        String userAgent = request.getHeader("user-agent");
        boolean isMobile = MobilitySupport.isMobileRequest(userAgent);
        // bind the user context to the current thread
        UserContext.register(session, currentUserContext, userLocale, userRoles, isMobile);
        UserContext.setAppClient(request);
        // bind the request to the current thread
        HttpRequestEvaluationContext.registerCurrentRequest(request);
        AppContext.register(IServerServiceManager.INSTANCE);
        String locale = UserContext.getUserLocale();
        if (logger.isDebugEnabled()) {
            logger.debug("Detected user locale:" + locale);
        }
        LocaleContext.createLocaleContext(locale);
        if (!checkAccessPermission(attrAccessor.chunkName, attrAccessor.nodeName, request)) {
            ProcessHelper.processDirectForward(WebConfig.getNoPermissionPage(), request, response);
            return;
        }
        _processPageSubmit(request, response, attrAccessor);
    }
    catch(RuntimeException e)
    {
        // log and rethrow — the container/framework handles the error page
        logger.warn("Error while processing webflow:" + e.getMessage(), e);
        throw e;
    }
    catch(Error e)
    {
        logger.warn("Error while processing webflow:" + e.getMessage(), e);
        throw e;
    }
    finally {
        // always drop the thread-bound user/locale contexts
        UserContext.unregister();
        LocaleContext.clearLocaleContext();
    }
}
/**
 * Checks whether the current user's roles may access the given chunk/node,
 * via the permission service. Missing org code falls back to the master node
 * name; ACCEPTABLE and NOT_SPECIFIED decisions both grant access.
 */
private boolean checkAccessPermission(final String chunkName, final String nodeName, final HttpServletRequest request)
{
    final String userOrg = (String) UserContext.getUserData(UserContext.CURRENT_USER_ORGNAME);
    final String effectiveOrg = (userOrg != null)
            ? userOrg
            : IServerServiceManager.INSTANCE.getMasterNodeName();
    @SuppressWarnings("unchecked")
    final List<IConstantEntity> roles =
            (List<IConstantEntity>) request.getSession().getAttribute(WebflowConstants.USER_ROLE_KEY);
    final IPermissionService permissions = AppContext.get().getService(IPermissionService.class);
    final int verdict = permissions.checkModule(effectiveOrg, chunkName, nodeName, roles);
    return verdict == IPermissionService.ACCEPTABLE || verdict == IPermissionService.NOT_SPECIFIED;
}
/**
 * Core page-submit processing: syncs widget values, validates/converts the
 * source node's output data, resolves the destination node from the page out,
 * and executes the destination node chain.
 *
 * <p>Any failure before destination execution forwards an error back to the
 * source node; failures during execution roll back the Hibernate session and
 * are routed to {@code handleFlowException}. An NDC context is pushed for the
 * duration when this thread has none.
 *
 * @param request the current HTTP request
 * @param response the current HTTP response
 * @param attrAccessor accessor for the webflow attributes of this submit
 */
private void _processPageSubmit(HttpServletRequest request, HttpServletResponse response, AttributesAccessor attrAccessor)
{
    //the flag whether current method should call NDC.pop before returning
    boolean needNDCPop = false;
    if(NDC.getDepth() == 0)
    {
        NDC.push(request.getRemoteAddr());
        needNDCPop = true;
    }
    try
    {
        try {
            request.setCharacterEncoding(charset);
        } catch (UnsupportedEncodingException e1) {
            // charset is the fixed "UTF-8" default, so this cannot realistically
            // happen; deliberately ignored.
        }
        // Identify the path component
        String path = ProcessHelper.processPath(request);
        if(logger.isDebugEnabled())
            logger.debug("Processing a " + request.getMethod() + " for " + path);
        if (logger.isDebugEnabled())
        {
            // dump all request parameters for debugging
            for (Enumeration<String> enup = request.getParameterNames();
                    enup.hasMoreElements();) {
                String paramName = enup.nextElement();
                String paramValue = request.getParameter(paramName);
                logger.debug("Parameter:{}={}", new Object[] {paramName, paramValue});
            }
        }
        // An NPE from the ajax widget map indicates an expired session (or a
        // duplicated submit); forward the user to the error/login page.
        try {
            AjaxContextHelper.getAjaxWidgetMap(request.getSession());
        } catch (NullPointerException e) {
            logger.info("Session time out or submit duplication. forward to login page");
            WebflowErrorUtil.addError(request, "submit.error",
                    new WebflowError(e.getMessage(), e));
            WebNode srcNode = processSourceWebNode(request, attrAccessor);
            ProcessHelper.processForwardError(srcNode, request, response);
            return;
        }
        // sync widget values from the client into the server-side state
        try {
            ProcessHelper.processSyncValues(request);
        } catch (AjaxException e) {
            logger.error("Error occurs when synchronize the widget values: "
                    + e.getMessage(), e);
            WebflowErrorUtil.addError(request, "ajax.sync.error",
                    new WebflowError(e.getMessage(), e));
            WebNode srcNode = processSourceWebNode(request, attrAccessor);
            ProcessHelper.processForwardError(srcNode, request, response);
            return;
        }
        // Set the content type and no-caching headers if requested
        processNoCache(response);
        // find the source WebNode first.
        WebNode srcNode = processSourceWebNode(request, attrAccessor);
        if (logger.isInfoEnabled()) {
            logger.info("source node " + srcNode.toString());
        }
        try
        {
            WebFlowContext flowContext = new WebFlowContext(srcNode, request, response);
            //validate and convert the output data of DisplayNode srcNode
            srcNode.prepareOutputData(flowContext);
        }
        catch (Throwable ex)
        {
            // output-data preparation failed: roll back and forward an error
            // keyed by the concrete failure type back to the source node.
            HibernateUtil.releaseSession(false);
            if (ex instanceof ParsingException)
            {
                logger.error("ParsingException when prepare OutputData for node "
                        + srcNode.toString(), ex);
                WebflowErrorUtil.addError(request, srcNode.getName() + ".parsing.error",
                        new WebflowError(ex.getMessage(), ex));
                ProcessHelper.processForwardError(srcNode, request, response);
            }
            else if (ex instanceof EvaluationException)
            {
                logger.error("EvaluationException when prepare OutputData for node "
                        + srcNode.toString(), ex);
                WebflowErrorUtil.addError(request, srcNode.getName() + ".evaluation.error",
                        new WebflowError(ex.getMessage(), ex));
                ProcessHelper.processForwardError(srcNode, request, response);
            }
            else if (ex instanceof UIPageException)
            {
                logger.error("UIPageException when prepare OutputData for node "
                        + srcNode.toString(), ex);
                WebflowErrorUtil.addError(request, srcNode.getName() + ".uipage.error",
                        new WebflowError(ex.getMessage(), ex));
                ProcessHelper.processForwardError(srcNode, request, response);
            }
            else
            {
                logger.error("Exception when prepare OutputData for node "
                        + srcNode.toString(), ex);
                WebflowErrorUtil.addError(request, srcNode.getName() + ".prepareOutputData.error",
                        new WebflowError(ex.getMessage(), ex));
                ProcessHelper.processForwardError(srcNode, request, response);
            }
            return;
        }
        // find the page out's dest node
        WebNode destNode = processDestWebNode(srcNode, request, attrAccessor);
        if (destNode == null)
        {
            ProcessHelper.processResponseSendError(response,
                    HttpServletResponse.SC_BAD_REQUEST,
                    "can't find destination node");
            return;
        }
        ProcessHelper.convertParameter2Attribute(request, destNode.getType());
        if(logger.isInfoEnabled()) {
            logger.info("Process destination node " + destNode.toString());
        }
        WebFlowContext flowContext = null;
        try
        {
            // execute the destination node chain until no successor remains,
            // then commit the Hibernate session.
            while(destNode != null)
            {
                flowContext = new WebFlowContext(destNode, request, response);
                WebNode nextNode = destNode.execute(flowContext);
                destNode = nextNode;
            }
            HibernateUtil.releaseSession(true);
        }
        catch (Throwable ex)
        {
            // roll back and delegate to the common flow error handler
            HibernateUtil.releaseSession(false);
            handleFlowException(request, response, destNode, flowContext, ex);
        }
    }
    finally
    {
        if (needNDCPop)
        {
            NDC.pop();
        }
    }
}
/**
 * Maps a failure raised while executing the web flow onto a registered
 * {@code WebflowError}, forwards to the node's error page and rolls back any
 * open user transaction.
 *
 * <p>Each exception type contributes its own error key, message and log line;
 * the registration/forward/rollback tail is common to all branches.
 *
 * @param request     the current request (errors are attached to it)
 * @param response    the current response (used for the error forward)
 * @param destNode    the node whose execution failed; may be null when the
 *                    flow failed before a destination node was resolved
 * @param flowContext the flow context, or null if none was created yet
 * @param ex          the failure being handled
 */
private void handleFlowException(HttpServletRequest request, HttpServletResponse response, WebNode destNode,
    WebFlowContext flowContext, Throwable ex) {
    if (destNode == null) {
        // Nothing to forward to: the flow failed before any node was resolved.
        logger.error("*******webflow access error!", ex);
        return;
    }
    String key;           // error key under which the WebflowError is registered
    String message;       // human-readable description stored with the error
    Throwable cause = ex; // throwable recorded in the WebflowError (may change below)
    if (ex instanceof NoWebflowAPException)
    {
        String entityName = destNode.getChunk().getEntityName();
        key = entityName + ".access.error";
        message = "----PermissionError: access the webflow chunk" +
            entityName + " error ";
        logger.error("*******" + entityName + ".webflow access denied!", ex);
    }
    else if (ex instanceof NoWebflowNodeAPException)
    {
        String nodeName = destNode.getName();
        key = nodeName + ".access.error";
        message = "----PermissionError: access the webflow node" +
            nodeName + " error ";
        logger.error("*******" + nodeName + ".webflowNode access denied!", ex);
    }
    else if (ex instanceof WebFlowException)
    {
        String nodename = destNode.getName();
        key = nodename + ".execute.error";
        message = "execute the node " + destNode.toString() + " error ";
        Throwable t = ((WebFlowException) ex).getNestedThrowable();
        String nestedMessage = "";
        if (t != null && t.getMessage() != null) {
            nestedMessage = t.getMessage();
        }
        // Refine key/message according to the nested cause, if any.
        if (t instanceof ParsingException) {
            key = nodename + ".parsing.error";
            message += "----ParsingError:" + nestedMessage;
        } else if (t instanceof BusinessOperationException) {
            key = nodename + ".bo.error";
            message += "----BusinessOperationError:"
                + nestedMessage;
        } else if (t instanceof UIPageException) {
            key = nodename + ".uipage.error";
            message += "----UIPageError:" + nestedMessage;
        }
        else if (t != null) {
            message += "----" + nestedMessage;
        }
        logger.error("*******execute the node " + destNode.toString() + " error ", ex);
        // The WebflowError intentionally records the nested cause (possibly
        // null), not the WebFlowException wrapper — preserved from original.
        cause = t;
    }
    else
    {
        message = "execute the node " + destNode.toString() + " error ";
        logger.error("*******" + message, ex);
        message = message + "------" + ex.getMessage();
        key = destNode.getName() + ".execute.error";
    }
    // Common failure handling for every branch: register the error, forward to
    // the node's error page, then undo any transactional work.
    WebflowErrorUtil.addError(request, key, new WebflowError(message, cause));
    ProcessHelper.processForwardError(destNode, request, response);
    rollbackTransaction(request, flowContext);
}
/**
 * Render the HTTP headers to defeat browser caching if requested.
 * Does nothing unless the <code>nocache</code> flag is set. This method
 * declares no checked exceptions; header manipulation is performed in memory.
 *
 * @param response The servlet response we are creating
 */
private void processNoCache(HttpServletResponse response)
{
    if (nocache) {
        // Standard trio of anti-caching headers: HTTP/1.0 Pragma, HTTP/1.1
        // Cache-Control, and an Expires date in the past.
        response.setHeader("Pragma", "No-cache");
        response.setHeader("Cache-Control", "no-cache");
        response.setDateHeader("Expires", 1);
    }
}
/**
 * Identify and return an appropriate source WebNode for this request.
 * The lookup order is:
 * <li> node name from request attributes
 * <li> node name from request parameters
 * <li> source entity name from request parameters
 *
 * The information about the source WebNode is stored in the request,
 * either as attributes or as parameters. If no such WebNode can be
 * identified, <code>null</code> is returned. The <code>request</code>
 * parameter is available if you need to make decisions on available
 * mappings (such as checking permissions) based on request parameters or
 * other properties, but it is not used in the default implementation.
 *
 * @param request The request we are processing
 * @param attrAccessor accessor over the request's node-related attributes and parameters
 */
private WebNode processSourceWebNode(HttpServletRequest request, AttributesAccessor attrAccessor)
{
    if (logger.isDebugEnabled())
        logger.debug("processSourceWebNode()");
    UIFlowCacheManager cache = UIFlowCacheManager.getInstance();
    // Assume node information came in via request attributes until proven otherwise.
    attrAccessor.setFlag(Boolean.TRUE);
    if (attrAccessor.nodeName == null)
    {
        //....do?_destchunkname=xxx&_destnodename=
        if (logger.isInfoEnabled())
            logger.info("processSourceWebNode():the nodename is null in request attribute, get nodename from parameter");
        // Fall back to request parameters.
        attrAccessor.setFlag(Boolean.FALSE);
        if (logger.isDebugEnabled())
            logger.debug("processSourceWebNode():the nodename is null in request parameter, get pagename");
        if (attrAccessor.entityName != null)
        {
            if (logger.isDebugEnabled())
                logger.debug("processSourceWebNode(): source node sourceentity name: {}",
                    new Object[]{attrAccessor.entityName});
            return cache.findWebNodeBySourceEntity(attrAccessor.entityName);
        }
    }
    String chunk = attrAccessor.chunkName;
    String node = attrAccessor.nodeName;
    if (chunk != null && node != null)
    {
        return cache.findWebNode(chunk, node);
    }
    if (logger.isInfoEnabled())
        logger.info("processSourceWebNode(): chunkName is {}, nodeName is {}",
            new Object[]{chunk, node});
    return null;
}
/**
 * Identify and return an appropriate destination WebNode:
 * <li> destination node named in request attributes
 * <li> srcNode != null: resolve the out name from attributes or parameters
 * <li> srcNode == null: destination node named in request parameters
 *
 * Identify and return an appropriate WebNode with the source node and its
 * out. If no such WebNode can be identified, <code>null</code> is returned.
 *
 * @param srcNode the source node (may be null)
 * @param request The request we are processing
 * @param attrAccessor accessor over the request's node-related attributes and parameters
 */
private WebNode processDestWebNode(WebNode srcNode, HttpServletRequest request, AttributesAccessor attrAccessor)
{
    if (logger.isDebugEnabled())
        logger.debug("processDestWebNode()");
    if (logger.isDebugEnabled()) {
        logger.debug("processDestWebNode(): get destnode from request attribute");
    }
    UIFlowCacheManager manager = UIFlowCacheManager.getInstance();
    // Case 1: an explicit destination node name was placed in the request attributes.
    String destnodename = attrAccessor.destnodename;
    if(destnodename != null && !destnodename.isEmpty())
    {
        if (logger.isDebugEnabled())
            logger.debug("processDestWebNode(): the destnode in request attribute is " + destnodename);
        String destchunkname = attrAccessor.destchunkname;
        // When no chunk is given, default to the source node's own chunk.
        if((srcNode != null) &&
            (destchunkname == null || destchunkname.equals("")))
            destchunkname = srcNode.getChunk().getEntityName();
        // Clear the routing attributes so downstream processing (and any
        // re-entry) does not see stale values.
        attrAccessor.setAttribute(WebflowConstants.OUT_NAME, null);//don't do the convert
        attrAccessor.setAttribute(WebflowConstants.DEST_NODE_NAME, null);
        attrAccessor.setAttribute(WebflowConstants.DEST_CHUNK_NAME, null);
        if(destchunkname == null)
        {
            // A node name without a chunk is unresolvable.
            logger.error("processDestWebNode(): the destchunkname is null in request attribute, the destnodename is "
                + destnodename);
            return null;
        }
        return manager.findWebNode(destchunkname, destnodename);
    }
    //attribute: outname
    //parameter: outname
    // Case 2: resolve via the source node's named "out" transition.
    if(srcNode != null)
    {
        String outName = null;
        // Only consult attributes when the accessor flag says the node info
        // originated from attributes (set in processSourceWebNode).
        if(attrAccessor.getFlag())
        {
            if (logger.isDebugEnabled())
                logger.debug("processDestWebNode(): find outname in request attribute");
            outName = attrAccessor.getOut();
        }
        if (outName == null)
        {
            if (logger.isDebugEnabled())
                logger.debug("processDestWebNode(): find outname in request parameter");
            outName = request.getParameter(WebflowConstants.OUT_NAME);
        }
        if(outName != null)
        {
            if (logger.isDebugEnabled())
                logger.debug("processDestWebNode():the outname is " + outName);
            // Remember the resolved out name on the accessor for later stages.
            attrAccessor.setOut(outName);
            //find out
            OutType out = srcNode.findOut(outName);
            if(out != null)
            {
                NextType next = out.getNext();
                if(next != null)
                {
                    return manager.findNextWebNode(srcNode, next);
                }
                else
                {
                    // An out without a "next" is a configuration error.
                    logger.error("the next is null, out=" + outName + srcNode.toString());
                    return null;
                }
            }
            // NOTE: if the out name does not match any out on srcNode we fall
            // through to the parameter-based lookup below.
        }
    }
    //no source node or no out name
    //parameter: destnode
    // Case 3: fall back to destination node/chunk taken from request parameters.
    if (logger.isDebugEnabled())
        logger.debug("processDestWebNode():finding destnode in request parameter");
    destnodename = attrAccessor.destnodename;
    String destchunkname = attrAccessor.destchunkname;
    if((srcNode != null) && (destchunkname == null || destchunkname.equals("")))
        destchunkname = srcNode.getChunk().getEntityName();
    // Clear routing attributes, as in case 1.
    attrAccessor.setAttribute(WebflowConstants.OUT_NAME, null);//don't do the convert
    attrAccessor.setAttribute(WebflowConstants.DEST_NODE_NAME, null);
    attrAccessor.setAttribute(WebflowConstants.DEST_CHUNK_NAME, null);
    if(destchunkname == null || destnodename == null)
    {
        if (logger.isDebugEnabled())
        {
            logger.debug("cant find dynamicout destnode:destchunkname=" +
                destchunkname + ",destnodename=" + destnodename);
        }
        return null;
    }
    return manager.findWebNode(destchunkname, destnodename);
}
/**
 * Rolls back the user transaction associated with the current flow, if one is
 * open, and always clears the transaction marker from the request.
 *
 * @param request the current request; its transaction attribute is cleared
 * @param context the flow context, or null when the failure happened before a
 *                WebFlowContext was created
 */
private void rollbackTransaction(HttpServletRequest request, WebFlowContext context)
{
    try
    {
        // Guard against null: handleFlowException can pass a null flowContext
        // when the WebFlowContext constructor itself threw. The original code
        // NPE'd here (swallowed by the catch below, logging a spurious error).
        if (context != null && context.isInTransaction())
        {
            if (logger.isInfoEnabled())
                logger.info("rollback the userTransaction");
            context.rollbackTransaction();
        }
    }
    catch (Exception e)
    {
        // NOTE(review): toString() here is the servlet's own toString, which
        // presumably was meant to identify the failing node — confirm intent.
        logger.error("error when rollback the user transaction, execute node "
            + toString(), e);
    }
    // Always clear the marker so later requests do not see a stale handle.
    request.setAttribute(WebflowConstants.USERTRANSACTION_KEY, null);
}
/**
 * Returns true when the client asked for a session-timeout check and the
 * current session is missing or no longer holds a logged-in user.
 */
protected boolean checkSessionTimeout(HttpServletRequest request)
{
    // Only perform the check when the client explicitly requested it.
    if (!"true".equals(request.getParameter("_needCheckSessionTimeOut")))
    {
        return false;
    }
    HttpSession session = request.getSession(false);
    // No session at all, or a session without the user marker, counts as timed out.
    return session == null
        || session.getAttribute(WebflowConstants.USER_SESSION_KEY) == null;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.controller.internal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.DuplicateResourceException;
import org.apache.ambari.server.StaticallyInject;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.predicate.EqualsPredicate;
import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
import org.apache.ambari.server.controller.spi.NoSuchResourceException;
import org.apache.ambari.server.controller.spi.Predicate;
import org.apache.ambari.server.controller.spi.Request;
import org.apache.ambari.server.controller.spi.RequestStatus;
import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
import org.apache.ambari.server.controller.spi.SystemException;
import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
import org.apache.ambari.server.orm.dao.RemoteAmbariClusterDAO;
import org.apache.ambari.server.orm.entities.RemoteAmbariClusterEntity;
import org.apache.ambari.server.orm.entities.RemoteAmbariClusterServiceEntity;
import org.apache.ambari.server.security.authorization.RoleAuthorization;
import org.apache.ambari.server.view.RemoteAmbariClusterRegistry;
import org.apache.ambari.view.MaskException;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets;
import com.google.inject.Inject;
/**
* Resource Provider for Remote Cluster
*/
@StaticallyInject
public class RemoteClusterResourceProvider extends AbstractAuthorizedResourceProvider {

  /**
   * Remote Cluster property id constants.
   */
  public static final String CLUSTER_NAME_PROPERTY_ID = "ClusterInfo/name";
  public static final String CLUSTER_ID_PROPERTY_ID = "ClusterInfo/cluster_id";
  public static final String CLUSTER_URL_PROPERTY_ID = "ClusterInfo/url";
  public static final String USERNAME_PROPERTY_ID = "ClusterInfo/username";
  public static final String PASSWORD_PROPERTY_ID = "ClusterInfo/password";
  public static final String SERVICES_PROPERTY_ID = "ClusterInfo/services";

  /**
   * The logger.
   */
  private final static Logger LOG = LoggerFactory.getLogger(RemoteClusterResourceProvider.class);

  /**
   * The key property ids for a Remote Cluster resource.
   */
  private static final Map<Resource.Type, String> keyPropertyIds = ImmutableMap.<Resource.Type, String>builder()
      .put(Resource.Type.RemoteCluster, CLUSTER_NAME_PROPERTY_ID)
      .build();

  /**
   * The property ids for a Remote Cluster resource.
   */
  private static final Set<String> propertyIds = Sets.newHashSet(
      CLUSTER_NAME_PROPERTY_ID,
      CLUSTER_ID_PROPERTY_ID,
      CLUSTER_URL_PROPERTY_ID,
      USERNAME_PROPERTY_ID,
      PASSWORD_PROPERTY_ID,
      SERVICES_PROPERTY_ID);

  @Inject
  private static RemoteAmbariClusterDAO remoteAmbariClusterDAO;

  // NOTE(review): not referenced in this class; presumably kept for static
  // injection elsewhere — confirm before removing.
  @Inject
  private static Configuration configuration;

  @Inject
  private static RemoteAmbariClusterRegistry remoteAmbariClusterRegistry;

  /**
   * Create a new resource provider. Create, update and delete all require the
   * AMBARI_ADD_DELETE_CLUSTERS authorization.
   */
  protected RemoteClusterResourceProvider() {
    super(Resource.Type.RemoteCluster, propertyIds, keyPropertyIds);

    EnumSet<RoleAuthorization> requiredAuthorizations = EnumSet.of(RoleAuthorization.AMBARI_ADD_DELETE_CLUSTERS);
    setRequiredCreateAuthorizations(requiredAuthorizations);
    setRequiredDeleteAuthorizations(requiredAuthorizations);
    setRequiredUpdateAuthorizations(requiredAuthorizations);
  }

  @Override
  public Map<Resource.Type, String> getKeyPropertyIds() {
    return keyPropertyIds;
  }

  @Override
  protected Set<String> getPKPropertyIds() {
    return new HashSet<>(keyPropertyIds.values());
  }

  @Override
  public RequestStatus createResourcesAuthorized(Request request) throws SystemException, UnsupportedPropertyException, ResourceAlreadyExistsException, NoSuchParentResourceException {
    // One create command per property map; each map describes one cluster.
    for (Map<String, Object> properties : request.getProperties()) {
      createResources(getCreateCommand(properties));
    }
    notifyCreate(Resource.Type.RemoteCluster, request);
    return getRequestStatus(null);
  }

  @Override
  public Set<Resource> getResources(Request request, Predicate predicate) throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
    Set<Resource> resources = new HashSet<>();
    Set<String> requestedIds = getRequestPropertyIds(request, predicate);
    Set<Map<String, Object>> propertyMaps = getPropertyMaps(predicate);

    // With no predicate-derived maps, fall through to the "list all" branch.
    if (propertyMaps.isEmpty()) {
      propertyMaps.add(Collections.emptyMap());
    }

    for (Map<String, Object> propertyMap : propertyMaps) {
      String clusterName = (String) propertyMap.get(CLUSTER_NAME_PROPERTY_ID);
      if (!Strings.isNullOrEmpty(clusterName)) {
        // Exact-name lookup; a miss is a 404-style error.
        RemoteAmbariClusterEntity cluster = remoteAmbariClusterDAO.findByName(clusterName);
        if (cluster == null) {
          throw new NoSuchResourceException(String.format("Cluster with name %s cannot be found", clusterName));
        }
        resources.add(toResource(requestedIds, cluster));
      } else {
        // No name given: return every registered remote cluster.
        for (RemoteAmbariClusterEntity cluster : remoteAmbariClusterDAO.findAll()) {
          Resource resource = toResource(requestedIds, cluster);
          resources.add(resource);
        }
      }
    }
    return resources;
  }

  /**
   * Converts a persisted remote-cluster entity into a Resource, copying only
   * the requested property ids. The password is intentionally never exposed.
   */
  protected Resource toResource(Set<String> requestedIds, RemoteAmbariClusterEntity cluster) {
    Resource resource = new ResourceImpl(Resource.Type.RemoteCluster);
    setResourceProperty(resource, CLUSTER_NAME_PROPERTY_ID, cluster.getName(), requestedIds);
    setResourceProperty(resource, CLUSTER_ID_PROPERTY_ID, cluster.getId(), requestedIds);
    setResourceProperty(resource, CLUSTER_URL_PROPERTY_ID, cluster.getUrl(), requestedIds);
    setResourceProperty(resource, USERNAME_PROPERTY_ID, cluster.getUsername(), requestedIds);
    ArrayList<String> services = new ArrayList<>();
    for (RemoteAmbariClusterServiceEntity remoteClusterServiceEntity : cluster.getServices()) {
      services.add(remoteClusterServiceEntity.getServiceName());
    }
    setResourceProperty(resource, SERVICES_PROPERTY_ID, services, requestedIds);
    return resource;
  }

  @Override
  public RequestStatus updateResourcesAuthorized(Request request, Predicate predicate) throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
    // Only the first property map of the request is applied, matching the
    // original behavior.
    Iterator<Map<String, Object>> iterator = request.getProperties().iterator();
    if (iterator.hasNext()) {
      for (Map<String, Object> propertyMap : getPropertyMaps(iterator.next(), predicate)) {
        modifyResources(getUpdateCommand(propertyMap));
      }
    }
    notifyUpdate(Resource.Type.RemoteCluster, request, predicate);
    return getRequestStatus(null);
  }

  @Override
  protected RequestStatus deleteResourcesAuthorized(Request request, Predicate predicate)
    throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
    modifyResources(getDeleteCommand(predicate));
    // BUGFIX: this provider manages RemoteCluster resources, so the delete
    // notification must be for RemoteCluster (the original notified
    // ViewInstance, an apparent copy-paste error).
    notifyDelete(Resource.Type.RemoteCluster, predicate);
    return getRequestStatus(null);
  }

  /**
   * Get the command to create the RemoteAmbariCluster.
   *
   * @param properties the request property map for the new cluster
   * @return A command to create the RemoteAmbariCluster
   */
  private Command<Void> getCreateCommand(final Map<String, Object> properties) {
    return new Command<Void>() {
      @Override
      public Void invoke() throws AmbariException {
        String name = (String) properties.get(CLUSTER_NAME_PROPERTY_ID);
        if (StringUtils.isEmpty(name)) {
          // BUGFIX: original message read "cannot ne null".
          throw new IllegalArgumentException("Cluster Name cannot be null or Empty");
        }
        if (remoteAmbariClusterDAO.findByName(name) != null) {
          throw new DuplicateResourceException(String.format("Remote cluster with name %s already exists", name));
        }
        saveOrUpdateRemoteAmbariClusterEntity(properties, false);
        return null;
      }
    };
  }

  /**
   * Get the command to update the RemoteAmbariCluster.
   *
   * @param properties the request property map; must contain name and id
   * @return A command to update the RemoteAmbariCluster
   */
  private Command<Void> getUpdateCommand(final Map<String, Object> properties) {
    return new Command<Void>() {
      @Override
      public Void invoke() throws AmbariException {
        String name = (String) properties.get(CLUSTER_NAME_PROPERTY_ID);
        if (StringUtils.isEmpty(name)) {
          throw new IllegalArgumentException("Cluster Name cannot be null or Empty");
        }
        String id = (String) properties.get(CLUSTER_ID_PROPERTY_ID);
        if (StringUtils.isEmpty(id)) {
          throw new IllegalArgumentException("Cluster Id cannot be null or Empty");
        }
        saveOrUpdateRemoteAmbariClusterEntity(properties, true);
        return null;
      }
    };
  }

  /**
   * Save or update a Remote Ambari Cluster Entity in the database.
   *
   * @param properties the request property map (name, url, username, password)
   * @param update     true for an update of an existing cluster, false for create
   * @throws AmbariException on persistence failures
   * @throws IllegalArgumentException when required fields are missing or invalid
   */
  private void saveOrUpdateRemoteAmbariClusterEntity(Map<String, Object> properties, boolean update) throws AmbariException {
    String name = (String) properties.get(CLUSTER_NAME_PROPERTY_ID);
    String url = (String) properties.get(CLUSTER_URL_PROPERTY_ID);
    String username = (String) properties.get(USERNAME_PROPERTY_ID);
    String password = (String) properties.get(PASSWORD_PROPERTY_ID);

    // BUGFIX: the original used &&, which only rejected the request when BOTH
    // url and username were missing — contradicting the error message. Both
    // fields are required.
    if (StringUtils.isEmpty(url) || StringUtils.isEmpty(username)) {
      throw new IllegalArgumentException("Url or username cannot be null");
    }

    RemoteAmbariClusterEntity entity;
    if (update) {
      Long id = Long.valueOf((String) properties.get(CLUSTER_ID_PROPERTY_ID));
      entity = remoteAmbariClusterDAO.findById(id);
      if (entity == null) {
        throw new IllegalArgumentException(String.format("Cannot find cluster with Id : \"%s\"", id));
      }
    } else {
      entity = remoteAmbariClusterDAO.findByName(name);
      if (entity != null) {
        throw new DuplicateResourceException(String.format("Cluster with name : \"%s\" already exists", name));
      }
    }

    // Check Password not null for create.
    // Check username matches the entity username if password not present.
    if (StringUtils.isBlank(password) && !update) {
      throw new IllegalArgumentException("Password cannot be null");
    } else if (StringUtils.isBlank(password) && update && !username.equals(entity.getUsername())) {
      throw new IllegalArgumentException("Failed to update. Username does not match.");
    }

    if (entity == null) {
      entity = new RemoteAmbariClusterEntity();
    }

    entity.setName(name);
    entity.setUrl(url);
    try {
      // Credentials are only replaced when a password was supplied; setPassword
      // may fail masking, reported as an illegal password.
      if (password != null) {
        entity.setUsername(username);
        entity.setPassword(password);
      }
    } catch (MaskException e) {
      throw new IllegalArgumentException("Failed to create new Remote Cluster " + name + ". Illegal Password");
    }

    try {
      remoteAmbariClusterRegistry.saveOrUpdate(entity, update);
    } catch (Exception e) {
      throw new IllegalArgumentException("Failed to create new Remote Cluster " + name + ". " + e.getMessage(), e);
    }
  }

  /**
   * Get the command to delete the Cluster.
   *
   * @param predicate an equality predicate whose value is the cluster name
   * @return The delete command
   */
  private Command<Void> getDeleteCommand(final Predicate predicate) {
    return new Command<Void>() {
      @Override
      public Void invoke() throws AmbariException {
        // NOTE(review): assumes the predicate is a simple EqualsPredicate on
        // the cluster name; a compound predicate would throw a CCE here.
        Comparable deletedCluster = ((EqualsPredicate) predicate).getValue();
        String toDelete = deletedCluster.toString();
        RemoteAmbariClusterEntity clusterEntity = remoteAmbariClusterDAO.findByName(toDelete);
        if (clusterEntity == null) {
          throw new IllegalArgumentException("The Cluster " + toDelete + " does not exist");
        }
        remoteAmbariClusterRegistry.delete(clusterEntity);
        return null;
      }
    };
  }
}
| |
/*
* Copyright 2017 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.domain;
import com.thoughtworks.go.config.AgentConfig;
import com.thoughtworks.go.config.Agents;
import com.thoughtworks.go.domain.AgentInstance;
import com.thoughtworks.go.domain.AgentStatus;
import com.thoughtworks.go.domain.NullAgentInstance;
import com.thoughtworks.go.domain.exception.MaxPendingAgentsLimitReachedException;
import com.thoughtworks.go.helper.AgentInstanceMother;
import com.thoughtworks.go.listener.AgentStatusChangeListener;
import com.thoughtworks.go.server.service.AgentRuntimeInfo;
import com.thoughtworks.go.util.SystemEnvironment;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.Mock;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertThat;
import static org.junit.matchers.JUnitMatchers.hasItems;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
public class AgentInstancesTest {
@Rule
public ExpectedException thrown = ExpectedException.none();
private AgentInstance idle;
private AgentInstance building;
private AgentInstance pending;
private AgentInstance disabled;
private AgentInstance local;
@Mock
private SystemEnvironment systemEnvironment;
private AgentStatusChangeListener agentStatusChangeListener;
@Before
public void setUp() throws Exception {
initMocks(this);
idle = AgentInstanceMother.idle(new Date(), "CCeDev01", systemEnvironment);
AgentInstanceMother.updateOS(idle, "linux");
building = AgentInstanceMother.building("buildLocator", systemEnvironment);
AgentInstanceMother.updateOS(building, "macOS");
pending = AgentInstanceMother.pending(systemEnvironment);
AgentInstanceMother.updateOS(pending, "windows");
disabled = AgentInstanceMother.disabled("10.18.5.4", systemEnvironment);
local = AgentInstanceMother.local(systemEnvironment);
agentStatusChangeListener = mock(AgentStatusChangeListener.class);
}
@Test
public void shouldUnderstandFilteringAgentListBasedOnUuid() {
AgentInstances instances = new AgentInstances(mock(AgentStatusChangeListener.class));
AgentRuntimeInfo agent1 = AgentRuntimeInfo.fromServer(new AgentConfig("uuid-1", "host-1", "192.168.1.2"), true, "/foo/bar", 100l, "linux", false);
AgentRuntimeInfo agent2 = AgentRuntimeInfo.fromServer(new AgentConfig("uuid-2", "host-2", "192.168.1.3"), true, "/bar/baz", 200l, "linux", false);
AgentRuntimeInfo agent3 = AgentRuntimeInfo.fromServer(new AgentConfig("uuid-3", "host-3", "192.168.1.4"), true, "/baz/quux", 300l, "linux", false);
AgentInstance instance1 = AgentInstance.createFromLiveAgent(agent1, systemEnvironment, mock(AgentStatusChangeListener.class));
instances.add(instance1);
instances.add(AgentInstance.createFromLiveAgent(agent2, systemEnvironment, mock(AgentStatusChangeListener.class)));
AgentInstance instance3 = AgentInstance.createFromLiveAgent(agent3, systemEnvironment, mock(AgentStatusChangeListener.class));
instances.add(instance3);
List<AgentInstance> agents = instances.filter(Arrays.asList("uuid-1", "uuid-3"));
assertThat(agents, hasItems(instance1, instance3));
assertThat(agents.size(), is(2));
}
@Test
public void shouldFindEnabledAgents() {
AgentInstances agentInstances = sample();
AgentInstances enabledAgents = agentInstances.findEnabledAgents();
assertThat(enabledAgents.size(), is(2));
assertThat(enabledAgents.findAgentAndRefreshStatus("uuid2"), is(idle));
assertThat(enabledAgents.findAgentAndRefreshStatus("uuid3"), is(building));
}
@Test
public void shouldFindRegisteredAgents() {
AgentInstances agentInstances = sample();
AgentInstances agents = agentInstances.findRegisteredAgents();
assertThat(agents.size(), is(3));
assertThat(agents.findAgentAndRefreshStatus("uuid2"), is(idle));
assertThat(agents.findAgentAndRefreshStatus("uuid3"), is(building));
assertThat(agents.findAgentAndRefreshStatus("uuid5"), is(disabled));
}
@Test
public void shouldFindAgentsByItHostName() throws Exception {
AgentInstance idle = AgentInstanceMother.idle(new Date(), "ghost-name");
AgentInstances agentInstances = new AgentInstances(systemEnvironment, agentStatusChangeListener, idle, AgentInstanceMother.building());
AgentInstance byHostname = agentInstances.findFirstByHostname("ghost-name");
assertThat(byHostname, is(idle));
}
@Test
public void shouldReturnNullAgentsWhenHostNameIsNotFound() throws Exception {
AgentInstances agentInstances = new AgentInstances(systemEnvironment, agentStatusChangeListener, AgentInstanceMother.building());
agentInstances.add(idle);
agentInstances.add(building);
AgentInstance byHostname = agentInstances.findFirstByHostname("not-exist");
assertThat(byHostname, is(instanceOf(NullAgentInstance.class)));
}
@Test
public void shouldReturnFirstMatchedAgentsWhenHostNameHasMoreThanOneMatch() throws Exception {
AgentInstance agent = AgentInstance.createFromConfig(new AgentConfig("uuid20", "CCeDev01", "10.18.5.20"), systemEnvironment, null);
AgentInstance duplicatedAgent = AgentInstance.createFromConfig(new AgentConfig("uuid21", "CCeDev01", "10.18.5.20"), systemEnvironment, null);
AgentInstances agentInstances = new AgentInstances(systemEnvironment, agentStatusChangeListener, agent, duplicatedAgent);
AgentInstance byHostname = agentInstances.findFirstByHostname("CCeDev01");
assertThat(byHostname, is(agent));
}
@Test
public void shouldAddAgentIntoMemoryAfterAgentIsManuallyAddedInConfigFile() throws Exception {
AgentInstances agentInstances = new AgentInstances(mock(AgentStatusChangeListener.class));
AgentConfig agentConfig = new AgentConfig("uuid20", "CCeDev01", "10.18.5.20");
agentInstances.sync(new Agents(agentConfig));
assertThat(agentInstances.size(), is(1));
assertThat(agentInstances.findAgentAndRefreshStatus("uuid20").agentConfig(), is(agentConfig));
}
@Test
public void shouldRemoveAgentWhenAgentIsRemovedFromConfigFile() throws Exception {
AgentInstances agentInstances = new AgentInstances(systemEnvironment, agentStatusChangeListener, idle, building);
Agents oneAgentIsRemoved = new Agents(new AgentConfig("uuid2", "CCeDev01", "10.18.5.1"));
agentInstances.sync(oneAgentIsRemoved);
assertThat(agentInstances.size(), is(1));
assertThat(agentInstances.findAgentAndRefreshStatus("uuid2"), is(idle));
assertThat(agentInstances.findAgentAndRefreshStatus("uuid1"), is(new NullAgentInstance("uuid1")));
}
@Test
public void shouldSyncAgent() throws Exception {
AgentInstances agentInstances = new AgentInstances(systemEnvironment, agentStatusChangeListener, AgentInstanceMother.building(), idle);
AgentConfig agentConfig = new AgentConfig("uuid2", "CCeDev01", "10.18.5.1");
agentConfig.setDisabled(true);
Agents oneAgentIsRemoved = new Agents(agentConfig);
agentInstances.sync(oneAgentIsRemoved);
assertThat(agentInstances.findAgentAndRefreshStatus("uuid2").getStatus(), is(AgentStatus.Disabled));
}
@Test
public void shouldNotRemovePendingAgentDuringSync() throws Exception {
AgentInstances agentInstances = new AgentInstances(systemEnvironment, agentStatusChangeListener, AgentInstanceMother.building());
agentInstances.add(pending);
Agents agents = new Agents();
agentInstances.sync(agents);
assertThat(agentInstances.size(), is(1));
assertThat(agentInstances.findAgentAndRefreshStatus("uuid4").getStatus(), is(AgentStatus.Pending));
}
@Test
public void agentHostnameShouldBeUnique() {
AgentConfig agentConfig = new AgentConfig("uuid2", "CCeDev01", "10.18.5.1");
AgentInstances agentInstances = new AgentInstances(mock(AgentStatusChangeListener.class));
agentInstances.register(AgentRuntimeInfo.fromServer(agentConfig, false, "/var/lib", 0L, "linux", false));
agentInstances.register(AgentRuntimeInfo.fromServer(agentConfig, false, "/var/lib", 0L, "linux", false));
}
@Test(expected = MaxPendingAgentsLimitReachedException.class)
public void registerShouldErrorOutIfMaxPendingAgentsLimitIsReached() {
AgentConfig agentConfig = new AgentConfig("uuid2", "CCeDev01", "10.18.5.1");
AgentInstances agentInstances = new AgentInstances(systemEnvironment, agentStatusChangeListener, AgentInstanceMother.pending());
when(systemEnvironment.get(SystemEnvironment.MAX_PENDING_AGENTS_ALLOWED)).thenReturn(1);
agentInstances.register(AgentRuntimeInfo.fromServer(agentConfig, false, "/var/lib", 0L, "linux", false));
}
@Test
public void shouldRemovePendingAgentThatIsTimedOut() {
when(systemEnvironment.getAgentConnectionTimeout()).thenReturn(-1);
AgentInstances agentInstances = new AgentInstances(systemEnvironment, agentStatusChangeListener, pending, building, disabled);
agentInstances.refresh();
assertThat(agentInstances.findAgentAndRefreshStatus("uuid4"), is(instanceOf(NullAgentInstance.class)));
}
@Test
public void shouldSupportConcurrentOperations() throws Exception {
final AgentInstances agentInstances = new AgentInstances(mock(AgentStatusChangeListener.class));
// register 100 agents
for (int i = 0; i < 100; i++) {
AgentConfig agentConfig = new AgentConfig("uuid" + i, "CCeDev_" + i, "10.18.5." + i);
agentInstances.register(AgentRuntimeInfo.fromServer(agentConfig, false, "/var/lib", Long.MAX_VALUE, "linux", false));
}
thrown.expect(MaxPendingAgentsLimitReachedException.class);
thrown.expectMessage("Max pending agents allowed 100, limit reached");
AgentConfig agentConfig = new AgentConfig("uuid" + 200, "CCeDev_" + 200, "10.18.5." + 200);
agentInstances.register(AgentRuntimeInfo.fromServer(agentConfig, false, "/var/lib", Long.MAX_VALUE, "linux", false));
}
private AgentInstances sample() {
AgentInstances agentInstances = new AgentInstances(null);
agentInstances.add(idle);
agentInstances.add(building);
agentInstances.add(pending);
agentInstances.add(disabled);
return agentInstances;
}
/**
 * Registers a stream of new agents from a background daemon thread until
 * {@link #stop()} is called; used to put concurrent registration load on an
 * {@link AgentInstances} while the test thread performs other operations.
 */
private static class AgentAdder implements Runnable {
    private final AgentInstances agentInstances;
    // volatile: written by the test thread via stop() and read by the daemon
    // thread's loop. Without it there is no happens-before edge, so the loop
    // may never observe the stop request and spin forever.
    private volatile boolean stop;

    /** Creates an adder and starts it on a daemon thread (won't block JVM exit). */
    public static AgentAdder startAdding(AgentInstances agentInstances) {
        AgentAdder agentAdder = new AgentAdder(agentInstances);
        Thread thread = new Thread(agentAdder);
        thread.setDaemon(true);
        thread.start();
        return agentAdder;
    }

    private AgentAdder(AgentInstances agentInstances) {
        this.agentInstances = agentInstances;
    }

    public void run() {
        int count = 0;
        while (!stop) {
            AgentConfig agentConfig = new AgentConfig("uuid" + count, "CCeDev_" + count, "10.18.5." + count);
            agentInstances.register(AgentRuntimeInfo.fromServer(agentConfig, false, "/var/lib", Long.MAX_VALUE, "linux", false));
            count++;
        }
    }

    /** Signals the adder thread to exit after finishing its current iteration. */
    public void stop() {
        this.stop = true;
    }
}
}
| |
package edu.gemini.wdba.tcc;
import edu.gemini.shared.util.immutable.DefaultImList;
import edu.gemini.shared.util.immutable.ImCollections;
import edu.gemini.shared.util.immutable.ImList;
import edu.gemini.shared.util.immutable.Pair;
import edu.gemini.spModel.ext.ObservationNode;
import edu.gemini.spModel.ext.TargetNode;
import edu.gemini.spModel.gemini.flamingos2.Flamingos2;
import edu.gemini.spModel.guide.GuideProbe;
import edu.gemini.spModel.target.SPTarget;
import edu.gemini.spModel.target.env.GuideProbeTargets;
import edu.gemini.spModel.target.env.TargetEnvironment;
import edu.gemini.spModel.target.obsComp.PwfsGuideProbe;
import edu.gemini.spModel.target.obsComp.TargetObsComp;
import edu.gemini.spModel.telescope.IssPort;
import org.junit.Test;
import scala.actors.threadpool.Arrays;
import java.util.ArrayList;
import java.util.List;
import static edu.gemini.spModel.gemini.flamingos2.Flamingos2.Disperser;
import static edu.gemini.spModel.gemini.flamingos2.Flamingos2.Filter;
/**
 * Test cases for {@link Flamingos2Support}: instrument config names per ISS
 * port / guide probe, and wavelength selection per filter and disperser.
 *
 * <p>All test methods now carry {@code @Test}; previously only the point-origin
 * tests were annotated, so the rest silently did not run under a JUnit 4
 * runner. (Under a JUnit 3 runner the annotations are inert.)
 */
public final class Flamingos2SupportTest extends InstrumentSupportTestBase<Flamingos2> {

    // Base position shared by every target environment built in these tests.
    private final SPTarget base;

    public Flamingos2SupportTest() {
        super(Flamingos2.SP_TYPE);
        base = new SPTarget();
        base.setName("Base Pos");
    }

    /** Creates a guide target list for the probe with a single primary target. */
    private static GuideProbeTargets createGuideTargets(GuideProbe probe) {
        final SPTarget target = new SPTarget();
        return GuideProbeTargets.create(probe, target).withExistingPrimary(target);
    }

    /** Builds a target environment around {@link #base} using the given probes. */
    private TargetEnvironment create(GuideProbe... probes) {
        final ImList<GuideProbeTargets> gtCollection = createGuideTargetsList(probes);
        final ImList<SPTarget> userTargets = ImCollections.emptyList();
        return TargetEnvironment.create(base).setAllPrimaryGuideProbeTargets(gtCollection).setUserTargets(userTargets);
    }

    private static ImList<GuideProbeTargets> createGuideTargetsList(GuideProbe... probes) {
        final List<GuideProbeTargets> res = new ArrayList<GuideProbeTargets>();
        for (GuideProbe probe : probes) {
            res.add(createGuideTargets(probe));
        }
        return DefaultImList.create(res);
    }

    /** Stores a target environment with the given guide probes on the test observation. */
    private void setTargetEnv(GuideProbe... probes) throws Exception {
        final TargetEnvironment env = create(probes);
        final ObservationNode obsNode = getObsNode();
        final TargetNode targetNode = obsNode.getTarget();
        final TargetObsComp obsComp = targetNode.getDataObject();
        obsComp.setTargetEnvironment(env);
        targetNode.getRemoteNode().setDataObject(obsComp);
    }

    @Test
    public void testF2_SIDE() throws Exception {
        final Flamingos2 flam2 = getInstrument();
        flam2.setIssPort(IssPort.SIDE_LOOKING);
        assertEquals(Flamingos2.Disperser.NONE, flam2.getDisperser());
        setInstrument(flam2);
        verifyInstrumentConfig(getSouthResults(), "F25");
    }

    @Test
    public void testF2_UP() throws Exception {
        final Flamingos2 flam2 = getInstrument();
        flam2.setIssPort(IssPort.UP_LOOKING);
        assertEquals(Flamingos2.Disperser.NONE, flam2.getDisperser());
        setInstrument(flam2);
        verifyInstrumentConfig(getSouthResults(), "F2");
    }

    @Test
    public void testF2_P2_SIDE() throws Exception {
        final Flamingos2 flam2 = getInstrument();
        flam2.setIssPort(IssPort.SIDE_LOOKING);
        assertEquals(Flamingos2.Disperser.NONE, flam2.getDisperser());
        setInstrument(flam2);
        setTargetEnv(PwfsGuideProbe.pwfs2);
        verifyInstrumentConfig(getSouthResults(), "F25_P2");
    }

    @Test
    public void testF2_P2_UP() throws Exception {
        final Flamingos2 flam2 = getInstrument();
        flam2.setIssPort(IssPort.UP_LOOKING);
        assertEquals(Flamingos2.Disperser.NONE, flam2.getDisperser());
        setInstrument(flam2);
        setTargetEnv(PwfsGuideProbe.pwfs2);
        verifyInstrumentConfig(getSouthResults(), "F2_P2");
    }

    @Test
    public void testF2_SIDE_SPEC() throws Exception {
        final Flamingos2 flam2 = getInstrument();
        flam2.setIssPort(IssPort.SIDE_LOOKING);
        flam2.setDisperser(Flamingos2.Disperser.R3000);
        setInstrument(flam2);
        // Spectroscopy currently maps to the same config name as imaging
        // ("F25", not "F25_SPEC") — TODO confirm whether a _SPEC config is planned.
        verifyInstrumentConfig(getSouthResults(), "F25");
    }

    @Test
    public void testF2_UP_SPEC() throws Exception {
        final Flamingos2 flam2 = getInstrument();
        flam2.setIssPort(IssPort.UP_LOOKING);
        flam2.setDisperser(Flamingos2.Disperser.R3000);
        setInstrument(flam2);
        // Same note as testF2_SIDE_SPEC: "F2", not "F2_SPEC".
        verifyInstrumentConfig(getSouthResults(), "F2");
    }

    @Test
    public void testWavelength() throws Exception {
        final Flamingos2 flam2 = getInstrument();

        // Imaging mode with no filter: the default imaging wavelength is used.
        flam2.setFilter(Filter.OPEN);
        flam2.setDisperser(Disperser.NONE);
        setInstrument(flam2);
        assertEquals("1.6", getWavelength(getSouthResults()));

        // Spectroscopy with no filter: the disperser wavelength is used.
        flam2.setFilter(Filter.OPEN);
        final Pair[] dtA = new Pair[] {
            new Pair<>(Disperser.R1200JH, "1.39"),
            new Pair<>(Disperser.R1200HK, "1.871"),
            new Pair<>(Disperser.R3000, "1.65"),
        };
        //noinspection unchecked
        for (final Pair<Disperser, String> t : (Pair<Disperser, String>[]) dtA) {
            flam2.setDisperser(t._1());
            setInstrument(flam2);
            assertEquals(t._2(), getWavelength(getSouthResults()));
        }

        // With an explicit filter, the filter wavelength wins even in spectroscopy mode.
        flam2.setDisperser(Disperser.R3000);
        final Pair[] ftA = new Pair[] {
            new Pair<>(Filter.Y, "1.02"),
            new Pair<>(Filter.J_LOW, "1.15"),
            new Pair<>(Filter.J, "1.25"),
            new Pair<>(Filter.H, "1.65"),
            new Pair<>(Filter.K_LONG, "2.2"),
            new Pair<>(Filter.K_SHORT, "2.15"),
            new Pair<>(Filter.JH, "1.39"),
            new Pair<>(Filter.HK, "1.871"),
        };
        //noinspection unchecked
        for (final Pair<Filter, String> t : (Pair<Filter, String>[]) ftA) {
            flam2.setFilter(t._1());
            setInstrument(flam2);
            assertEquals(t._2(), getWavelength(getSouthResults()));
        }

        // Imaging mode uses the filter wavelength as well.
        flam2.setDisperser(Disperser.NONE);
        //noinspection unchecked
        for (final Pair<Filter, String> t : (Pair<Filter, String>[]) ftA) {
            flam2.setFilter(t._1());
            setInstrument(flam2);
            assertEquals(t._2(), getWavelength(getSouthResults()));
        }

        // For darks any wavelength is acceptable, but it must parse as a number.
        flam2.setFilter(Filter.DARK);
        setInstrument(flam2);
        try {
            Double.parseDouble(getWavelength(getSouthResults()));
        } catch (Exception ex) {
            fail("dark wavelength not set");
        }
    }

    @Test public void testNoAoPointOrig() throws Exception {
        verifyPointOrig(getSouthResults(), "f2");
    }

    @Test public void testLgsPointOrig() throws Exception {
        addGems();
        verifyPointOrig(getSouthResults(), "lgs2f2");
    }
}
| |
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.envers.test.integration.manytomany;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;
import java.util.SortedMap;
import java.util.SortedSet;
import javax.persistence.EntityManager;
import org.junit.Test;
import org.hibernate.ejb.Ejb3Configuration;
import org.hibernate.envers.test.BaseEnversJPAFunctionalTestCase;
import org.hibernate.envers.test.Priority;
import org.hibernate.envers.test.entities.StrTestEntity;
import org.hibernate.envers.test.entities.StrTestEntityComparator;
import org.hibernate.envers.test.entities.manytomany.SortedSetEntity;
import static org.junit.Assert.assertEquals;
/**
 * Verifies that Envers audits {@link SortedSet}/{@link SortedMap} collections
 * with a custom comparator, and that every audited revision snapshot preserves
 * the comparator ordering ({@code StrTestEntityComparator} orders by string).
 *
 * @author Michal Skowronek (mskowr at o2 pl)
 */
public class CustomComparatorEntityTest extends BaseEnversJPAFunctionalTestCase {
    // Ids of the StrTestEntity elements added in revisions 2..5; their string
    // values ("abc", "aaa", "aba", "aac") drive the comparator ordering below.
    private Integer id1;
    private Integer id2;
    private Integer id3;
    private Integer id4;

    public void configure(Ejb3Configuration cfg) {
        cfg.addAnnotatedClass(StrTestEntity.class);
        cfg.addAnnotatedClass(SortedSetEntity.class);
    }

    @Test
    @Priority(10)
    public void initData() {
        EntityManager em = getEntityManager();

        // Revision 1: the (still empty) sorted-set entity.
        em.getTransaction().begin();
        SortedSetEntity entity1 = new SortedSetEntity(1, "sortedEntity1");
        em.persist(entity1);
        em.getTransaction().commit();

        // Revisions 2-5: one element added per revision.
        id1 = addElement(em, "abc");
        id2 = addElement(em, "aaa");
        id3 = addElement(em, "aba");
        id4 = addElement(em, "aac");
    }

    /**
     * Adds a new StrTestEntity to the audited set and map in its own
     * transaction (producing one revision); returns the generated id.
     */
    private Integer addElement(EntityManager em, String str) {
        em.getTransaction().begin();
        final SortedSetEntity entity = em.find(SortedSetEntity.class, 1);
        final StrTestEntity element = new StrTestEntity(str);
        em.persist(element);
        entity.getSortedSet().add(element);
        entity.getSortedMap().put(element, str);
        em.getTransaction().commit();
        return element.getId();
    }

    @Test
    public void testRevisionsCounts() {
        assertEquals(Arrays.asList(1, 2, 3, 4, 5), getAuditReader().getRevisions(SortedSetEntity.class, 1));
        assertEquals(Arrays.asList(2), getAuditReader().getRevisions(StrTestEntity.class, id1));
        assertEquals(Arrays.asList(3), getAuditReader().getRevisions(StrTestEntity.class, id2));
        assertEquals(Arrays.asList(4), getAuditReader().getRevisions(StrTestEntity.class, id3));
        assertEquals(Arrays.asList(5), getAuditReader().getRevisions(StrTestEntity.class, id4));
    }

    @Test
    public void testCurrentStateOfEntity1() {
        final SortedSetEntity entity1 = getEntityManager().find(SortedSetEntity.class, 1);
        checkContents(entity1,
                      new Integer[] {id2, id4, id3, id1},
                      new String[] {"aaa", "aac", "aba", "abc"});
    }

    @Test
    public void testHistoryOfEntity1() throws Exception {
        // Expected comparator-ordered contents after each revision.
        checkContents(getAuditReader().find(SortedSetEntity.class, 1, 1),
                      new Integer[0], new String[0]);
        checkContents(getAuditReader().find(SortedSetEntity.class, 1, 2),
                      new Integer[] {id1}, new String[] {"abc"});
        checkContents(getAuditReader().find(SortedSetEntity.class, 1, 3),
                      new Integer[] {id2, id1}, new String[] {"aaa", "abc"});
        checkContents(getAuditReader().find(SortedSetEntity.class, 1, 4),
                      new Integer[] {id2, id3, id1}, new String[] {"aaa", "aba", "abc"});
        checkContents(getAuditReader().find(SortedSetEntity.class, 1, 5),
                      new Integer[] {id2, id4, id3, id1}, new String[] {"aaa", "aac", "aba", "abc"});
    }

    /**
     * Asserts that the entity's sorted set and sorted map use the custom
     * comparator and contain exactly the given elements in the given
     * (comparator) iteration order. {@code ids} and {@code strs} are parallel.
     */
    private void checkContents(SortedSetEntity entity, Integer[] ids, String[] strs) {
        assertEquals("sortedEntity1", entity.getData());
        assertEquals(Integer.valueOf(1), entity.getId());

        final SortedSet<StrTestEntity> sortedSet = entity.getSortedSet();
        assertEquals(StrTestEntityComparator.class, sortedSet.comparator().getClass());
        assertEquals(ids.length, sortedSet.size());
        final Iterator<StrTestEntity> setIterator = sortedSet.iterator();
        for (int i = 0; i < ids.length; i++) {
            checkStrTestEntity(setIterator.next(), ids[i], strs[i]);
        }

        final SortedMap<StrTestEntity, String> sortedMap = entity.getSortedMap();
        assertEquals(StrTestEntityComparator.class, sortedMap.comparator().getClass());
        assertEquals(ids.length, sortedMap.size());
        final Iterator<Map.Entry<StrTestEntity, String>> mapIterator = sortedMap.entrySet().iterator();
        for (int i = 0; i < ids.length; i++) {
            final Map.Entry<StrTestEntity, String> entry = mapIterator.next();
            checkStrTestEntity(entry.getKey(), ids[i], strs[i]);
            // expected first, actual second (the original had these swapped)
            assertEquals(strs[i], entry.getValue());
        }
    }

    private void checkStrTestEntity(StrTestEntity entity, Integer id, String sortKey) {
        assertEquals(id, entity.getId());
        assertEquals(sortKey, entity.getStr());
    }
}
| |
import java.io.BufferedReader;
import org.apache.hadoop.conf.Configuration;
import java.io.BufferedWriter;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.util.*;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;
import org.apache.hadoop.fs.*;
import java.io.*;
/**
 * Naive-Bayes training driver. Runs the counting MapReduce jobs, converts the
 * emitted counts into P(Y) and P(X|Y) probability tables on HDFS (written to
 * {@code args[2] + "/prob_y"} and {@code args[2] + "/prob_xGivenY"}), then
 * runs the classifier over the test data.
 *
 * <p>{@code args[2]} is the output base path; each job writes its counts to
 * {@code args[2] + suffix + "/part-00000"}.
 */
public class MainDriver {

    /** Total number of training records; denominator of the class priors P(Y). */
    private static final double TOTAL_TRAINING_RECORDS = 110421d;

    /** Output-folder suffixes of the discrete-attribute counting jobs. */
    private static final String[] DISCRETE_SUFFIXES =
            {"_user", "_gender", "_age", "_tall", "_weight", "_bmi"};

    public static void main(String[] args) throws Exception {
        // Run the counting jobs; each writes its counts under args[2] + suffix.
        new DriverOne().run(args);          // class label counts (-> "_one")
        new DriverTwo().run(args);          // user attribute (-> "_user")
        new DriverGender().run(args);
        new DriverAge().run(args);
        new DriverTall().run(args);
        new DriverWeight().run(args);
        new DriverBMI().run(args);
        new DriverContinuous().run(args);
        System.out.println("Done making the two folders.");

        FileSystem fs = FileSystem.get(new Configuration());
        String base = fs.getName() + args[2];
        System.out.println(">>>>>>>>>>>>>>>>>>>>>>>> BETA PATH IS <<<<<<<<<<< \n " + base + "_one/part-00000");

        FSDataOutputStream probY = fs.create(new Path(base + "/prob_y"));
        FSDataOutputStream probXGivenY = fs.create(new Path(base + "/prob_xGivenY"));
        try {
            // P(Y): class counts divided by the total record count. Also keeps
            // the per-class counts, which are the denominators for P(X|Y).
            Map<String, Integer> countLabels = writeClassPriors(fs, base + "_one/part-00000", probY);

            // P(X|Y) for each discrete attribute.
            for (String suffix : DISCRETE_SUFFIXES) {
                writeConditionalProbabilities(fs, base + suffix + "/part-00000", countLabels, probXGivenY, true);
            }

            // Continuous sensor attributes (x1..z4) are emitted by one job since
            // they are not assumed independent of one another; per-line printing
            // is suppressed because of the volume.
            int countContinuous =
                    writeConditionalProbabilities(fs, base + "_cont/part-00000", countLabels, probXGivenY, false);
            System.out.println("The unique count of continuous attributes are : " + countContinuous);
        } finally {
            // Close explicitly: HDFS output is only guaranteed durable on close.
            // (The original leaked both streams.)
            probY.close();
            probXGivenY.close();
        }

        // Testing runs serially; parallelizing it may give undesired results.
        new ClassifyTest().run(args);
    }

    /**
     * Reads the class-label counts (label TAB count, one header row) and writes
     * "label=prior" lines to {@code out}.
     *
     * @return map from class label to its raw count
     */
    private static Map<String, Integer> writeClassPriors(FileSystem fs, String inputPath, FSDataOutputStream out)
            throws IOException {
        Map<String, Integer> countLabels = new HashMap<String, Integer>();
        BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(new Path(inputPath))));
        try {
            String line;
            boolean header = true;
            while ((line = br.readLine()) != null) {
                if (header) { header = false; continue; }   // skip the header row
                String[] token = line.split("\t");
                System.out.println("Token[0] is : " + token[0] + " and Token 1 is : " + token[1]);
                String classLabel = token[0];
                int count = Integer.parseInt(token[1]);
                countLabels.put(classLabel, count);
                double prior = count / TOTAL_TRAINING_RECORDS;
                out.write((classLabel + "=" + prior + "\n").getBytes());
                System.out.println("Probability of " + classLabel + " :" + prior);
            }
        } finally {
            br.close();
        }
        return countLabels;
    }

    /**
     * Reads "value;label TAB count" lines and writes "value|label=count/countLabels(label)"
     * lines to {@code out}. Lines whose label field is the literal "class"
     * (header rows) are skipped.
     *
     * @param verbose whether to also print each probability to stdout
     * @return number of lines read (including the header), used for reporting
     */
    private static int writeConditionalProbabilities(FileSystem fs, String inputPath,
                                                     Map<String, Integer> countLabels,
                                                     FSDataOutputStream out, boolean verbose)
            throws IOException {
        int linesRead = 0;
        BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(new Path(inputPath))));
        try {
            String line;
            while ((line = br.readLine()) != null) {
                linesRead++;
                String[] separate = line.split("\t");
                String[] token = separate[0].split(";");
                if (token[1].equals("class")) { continue; } // header row
                double numerator = Double.parseDouble(separate[1]);
                double denominator = countLabels.get(token[1]);
                double result = numerator / denominator;
                if (verbose) {
                    System.out.println(token[0] + "|" + token[1] + "=" + result);
                }
                out.write((token[0] + "|" + token[1] + "=" + result + "\n").getBytes());
            }
        } finally {
            br.close();
        }
        return linesRead;
    }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.siyeh.ig.resources;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.dataFlow.JavaMethodContractUtil;
import com.intellij.codeInspection.resources.ImplicitResourceCloser;
import com.intellij.codeInspection.ui.ListTable;
import com.intellij.codeInspection.ui.ListWrappingTableModel;
import com.intellij.codeInspection.ui.MultipleCheckboxOptionsPanel;
import com.intellij.java.JavaBundle;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.WriteExternalException;
import com.intellij.psi.*;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.CheckBox;
import com.intellij.util.ui.UI;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.callMatcher.CallMatcher;
import com.siyeh.ig.psiutils.ExpressionUtils;
import com.siyeh.ig.psiutils.MethodMatcher;
import com.siyeh.ig.psiutils.TypeUtils;
import com.siyeh.ig.ui.UiUtils;
import org.jdom.Element;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.event.ItemEvent;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static com.intellij.util.ObjectUtils.tryCast;
/**
* @author Bas Leijdekkers
*/
public class AutoCloseableResourceInspection extends ResourceInspection {
// Matches calls to AutoCloseable#close(); used elsewhere in the inspection to
// recognize explicit closing of a resource.
private static final CallMatcher CLOSE = CallMatcher.instanceCall(CommonClassNames.JAVA_LANG_AUTO_CLOSEABLE, "close");

// AutoCloseable types ignored by default — presumably because leaving them
// unclosed is harmless (in-memory streams/buffers and the stream interfaces).
private static final List<String> DEFAULT_IGNORED_TYPES =
    Arrays.asList("java.util.stream.Stream",
                  "java.util.stream.IntStream",
                  "java.util.stream.LongStream",
                  "java.util.stream.DoubleStream",
                  "java.io.ByteArrayOutputStream",
                  "java.io.ByteArrayInputStream",
                  "java.io.StringBufferInputStream",
                  "java.io.CharArrayWriter",
                  "java.io.CharArrayReader",
                  "java.io.StringWriter",
                  "java.io.StringReader",
                  "java.util.Formatter",
                  "java.util.Scanner");

// Methods whose AutoCloseable result is ignored (populated in the constructor).
protected final MethodMatcher myMethodMatcher;

// User-editable copy of the ignored-type list; serialized in writeSettings()
// only when it differs from DEFAULT_IGNORED_TYPES.
final List<String> ignoredTypes = new ArrayList<>(DEFAULT_IGNORED_TYPES);

// Persisted user options (public so the settings framework can reflect on them).
@SuppressWarnings("PublicField")
public boolean ignoreFromMethodCall = false;
public boolean ignoreConstructorMethodReferences = true;
public boolean ignoreGettersReturningResource = true;

// java.nio.file.Files factory methods that return a Stream holding an open OS
// resource; these are reported even though Stream itself is in ignoredTypes.
CallMatcher STREAM_HOLDING_RESOURCE = CallMatcher.staticCall("java.nio.file.Files", "lines", "walk", "list", "find");
public AutoCloseableResourceInspection() {
    // Default ignored methods: calls whose AutoCloseable return value exists
    // purely for fluent chaining (e.g. Formatter.format, Writer.append) — the
    // receiver, not the caller, is responsible for the underlying resource.
    myMethodMatcher = new MethodMatcher()
        .add("java.util.Formatter", "format")
        .add("java.io.Writer", "append")
        .add("com.google.common.base.Preconditions", "checkNotNull")
        .add("org.hibernate.Session", "close")
        .add("java.io.PrintWriter", "printf")
        .add("java.io.PrintStream", "printf")
        .finishDefault();
}
/**
 * Builds the inspection's settings UI.
 *
 * <p>Warning! This class has to manually save settings to xml using its
 * {@code readSettings()} and {@code writeSettings()} methods.
 */
@NotNull
@Override
public JComponent createOptionsPanel() {
    final MultipleCheckboxOptionsPanel panel = new MultipleCheckboxOptionsPanel(this);
    // Table of AutoCloseable subtypes to ignore (backed by ignoredTypes).
    final ListTable table =
        new ListTable(new ListWrappingTableModel(ignoredTypes, InspectionGadgetsBundle.message("ignored.autocloseable.types.column.label")));
    final JPanel tablePanel =
        UiUtils.createAddRemoveTreeClassChooserPanel(
            InspectionGadgetsBundle.message("choose.autocloseable.type.to.ignore.title"),
            InspectionGadgetsBundle.message("ignored.autocloseable.types.label"),
            table,
            true,
            "java.lang.AutoCloseable");
    // Table of class/method-name-pattern pairs backed by myMethodMatcher.
    final ListTable table2 = new ListTable(
        new ListWrappingTableModel(Arrays.asList(myMethodMatcher.getClassNames(), myMethodMatcher.getMethodNamePatterns()),
                                   InspectionGadgetsBundle.message("result.of.method.call.ignored.class.column.title"),
                                   InspectionGadgetsBundle.message("method.name.regex")));
    // The method table only applies when ignoreFromMethodCall is off.
    table2.setEnabled(!ignoreFromMethodCall);
    final JPanel tablePanel2 =
        UI.PanelFactory.panel(UiUtils.createAddRemoveTreeClassChooserPanel(table2, JavaBundle.message("dialog.title.choose.class")))
            .withLabel(InspectionGadgetsBundle.message("inspection.autocloseable.resource.ignored.methods.title")).moveLabelOnTop()
            .resizeY(true).createPanel();
    panel.add(tablePanel, "growx, wrap");
    panel.add(tablePanel2, "growx, wrap");
    // Checkbox bound to ignoreFromMethodCall; toggling it enables/disables table2.
    final CheckBox checkBox =
        new CheckBox(InspectionGadgetsBundle.message("auto.closeable.resource.returned.option"), this, "ignoreFromMethodCall");
    checkBox.addItemListener(e -> table2.setEnabled(e.getStateChange() == ItemEvent.DESELECTED));
    panel.add(checkBox, "growx, wrap");
    // Remaining checkboxes bind directly to the public boolean fields by name.
    panel.addCheckbox(InspectionGadgetsBundle.message("any.method.may.close.resource.argument"), "anyMethodMayClose");
    panel.addCheckbox(InspectionGadgetsBundle.message("ignore.constructor.method.references"), "ignoreConstructorMethodReferences");
    panel.addCheckbox(InspectionGadgetsBundle.message("ignore.getters.returning.resource"), "ignoreGettersReturningResource");
    return ScrollPaneFactory.createScrollPane(panel);
}
@NotNull
@Override
public String getID() {
    // Kept as "resource" so suppression IDs written for Eclipse's equivalent
    // inspection continue to work.
    return "resource"; // matches Eclipse inspection
}
/** Formats the problem message; {@code infos[0]} carries the resource's PsiType. */
@NotNull
@Override
protected String buildErrorString(Object... infos) {
    final PsiType resourceType = (PsiType)infos[0];
    return InspectionGadgetsBundle.message(
        "auto.closeable.resource.problem.descriptor", resourceType.getPresentableText());
}
/** Returns the quickfix, or null when {@code infos[1]} says none applies here. */
@Nullable
@Override
protected InspectionGadgetsFix buildFix(Object... infos) {
    final boolean quickfixRequested = (Boolean)infos[1];
    return quickfixRequested ? new AutoCloseableResourceFix() : null;
}
/**
 * Restores settings from XML. The booleans are handled by the superclass;
 * only the "ignoredTypes" option and the method matcher are read manually.
 */
@Override
public void readSettings(@NotNull Element node) throws InvalidDataException {
    super.readSettings(node);
    for (Element option : node.getChildren("option")) {
        if (!"ignoredTypes".equals(option.getAttributeValue("name"))) {
            continue;
        }
        final String serialized = option.getAttributeValue("value");
        if (serialized == null) {
            continue;
        }
        // Replace the defaults with the persisted list.
        ignoredTypes.clear();
        parseString(serialized, ignoredTypes);
    }
    myMethodMatcher.readSettings(node);
}
/**
 * Serializes inspection settings into the XML node. Options whose value
 * equals the listed default are omitted; the ignored-types list is only
 * written when it differs from {@code DEFAULT_IGNORED_TYPES}.
 */
@Override
public void writeSettings(@NotNull Element node) throws WriteExternalException {
    writeBooleanOption(node, "ignoreFromMethodCall", false);
    writeBooleanOption(node, "anyMethodMayClose", true);
    writeBooleanOption(node, "ignoreConstructorMethodReferences", true);
    writeBooleanOption(node, "ignoreGettersReturningResource", true);
    if (!DEFAULT_IGNORED_TYPES.equals(ignoredTypes)) {
        final Element option = new Element("option");
        option.setAttribute("name", "ignoredTypes");
        option.setAttribute("value", formatString(ignoredTypes));
        node.addContent(option);
    }
    myMethodMatcher.writeSettings(node);
}
/**
 * An expression creates a resource when it is (a subtype of) AutoCloseable,
 * is not an ignored type (streams holding resources bypass the ignore list),
 * and — if the corresponding option is on — is not a getter call.
 */
@Override
protected boolean isResourceCreation(PsiExpression expression) {
    if (!TypeUtils.expressionHasTypeOrSubtype(expression, CommonClassNames.JAVA_LANG_AUTO_CLOSEABLE)) {
        return false;
    }
    // Resource-holding streams are flagged even when their type is on the ignore list.
    if (!isStreamHoldingResource(expression) && TypeUtils.expressionHasTypeOrSubtype(expression, ignoredTypes)) {
        return false;
    }
    return !ignoreGettersReturningResource || !isGetter(expression);
}
/**
 * Heuristic: a call is a "getter" when its name starts with "get",
 * excluding getClass() and getResourceAsStream(), which are not
 * property accessors in the relevant sense.
 */
private static boolean isGetter(@NotNull PsiExpression expression) {
    final PsiMethodCallExpression call = tryCast(expression, PsiMethodCallExpression.class);
    if (call == null) {
        return false;
    }
    final String name = call.getMethodExpression().getReferenceName();
    return name != null
           && name.startsWith("get")
           && !"getClass".equals(name)
           && !"getResourceAsStream".equals(name);
}
/**
 * Any expression whose type is (a subtype of) AutoCloseable may assume
 * ownership of a resource passed to it, i.e. become responsible for closing it.
 */
@Override
protected boolean canTakeOwnership(@NotNull PsiExpression expression) {
    return TypeUtils.expressionHasTypeOrSubtype(expression, CommonClassNames.JAVA_LANG_AUTO_CLOSEABLE);
}
/** True when the expression is a call matching the STREAM_HOLDING_RESOURCE matcher. */
private boolean isStreamHoldingResource(PsiExpression expression) {
    final PsiMethodCallExpression call = tryCast(expression, PsiMethodCallExpression.class);
    return STREAM_HOLDING_RESOURCE.matches(call);
}
/**
 * Only relevant from Java 7 onward, when AutoCloseable and
 * try-with-resources were introduced.
 */
@Override
public boolean shouldInspect(@NotNull PsiFile file) {
    return PsiUtil.isLanguageLevel7OrHigher(file);
}
/** Creates a fresh visitor instance for each inspection pass. */
@Override
public BaseInspectionVisitor buildVisitor() {
    return new AutoCloseableResourceVisitor();
}
/**
 * Quickfix that adds the offending method call to the user's
 * "ignored methods" matcher, so it is no longer reported.
 */
private class AutoCloseableResourceFix extends InspectionGadgetsFix {
    @Override
    public boolean startInWriteAction() {
        // Only inspection settings are modified, never the PSI tree.
        return false;
    }

    @Nls
    @NotNull
    @Override
    public String getFamilyName() {
        return InspectionGadgetsBundle.message("auto.closeable.resource.quickfix");
    }

    @Override
    protected void doFix(Project project, ProblemDescriptor descriptor) {
        final PsiMethodCallExpression call =
                PsiTreeUtil.getParentOfType(descriptor.getPsiElement(), PsiMethodCallExpression.class);
        if (call != null) {
            myMethodMatcher.add(call);
        }
    }
}
/**
 * Visitor that reports AutoCloseable resources created via {@code new} or a
 * method call and never safely closed.
 */
private class AutoCloseableResourceVisitor extends BaseInspectionVisitor {
    @Override
    public void visitNewExpression(PsiNewExpression expression) {
        super.visitNewExpression(expression);
        if (isSafelyClosedResource(expression)) {
            return;
        }
        // Boolean.FALSE: constructor calls never get the "ignore method" quickfix.
        registerNewExpressionError(expression, expression.getType(), Boolean.FALSE);
    }

    @Override
    public void visitMethodCallExpression(PsiMethodCallExpression expression) {
        super.visitMethodCallExpression(expression);
        // Skip when the option ignores factory calls, the call is user-whitelisted,
        // or the resource is provably closed.
        if (ignoreFromMethodCall || myMethodMatcher.matches(expression) || isSafelyClosedResource(expression)) {
            return;
        }
        // A call whose contract returns its own qualifier/argument did not create
        // a new resource, so it is the original creator's responsibility.
        if (isReturnedByContract(expression)) return;
        // Quickfix offered only for non-stream-holding calls.
        registerMethodCallError(expression, expression.getType(), !isStreamHoldingResource(expression));
    }

    /**
     * True when the method's contract says it returns one of its inputs
     * (the qualifier or an argument) rather than a freshly created resource.
     */
    private boolean isReturnedByContract(PsiMethodCallExpression expression) {
        PsiExpression returnedValue = JavaMethodContractUtil.findReturnedValue(expression);
        PsiExpression[] arguments = expression.getArgumentList().getExpressions();
        PsiExpression qualifier = expression.getMethodExpression().getQualifierExpression();
        if (returnedValue != null && qualifier == returnedValue) return true;
        for (PsiExpression argument : arguments) {
            // Identity comparison is intentional: the contract returns that exact expression.
            if (returnedValue == argument) {
                return true;
            }
        }
        return false;
    }

    @Override
    public void visitMethodReferenceExpression(PsiMethodReferenceExpression expression) {
        super.visitMethodReferenceExpression(expression);
        if (ignoreConstructorMethodReferences) return;
        // Only constructor references (Foo::new) can create a resource here.
        if (!expression.isConstructor()) {
            return;
        }
        final PsiType type = PsiMethodReferenceUtil.getQualifierType(expression);
        if (!InheritanceUtil.isInheritor(type, CommonClassNames.JAVA_LANG_AUTO_CLOSEABLE)) {
            return;
        }
        for (String ignoredType : ignoredTypes) {
            if (InheritanceUtil.isInheritor(type, ignoredType)) {
                return;
            }
        }
        registerError(expression, type, Boolean.FALSE);
    }

    /**
     * Returns true when nothing should be reported: either the expression is
     * not a resource creation at all (note the inverted first check), or the
     * resource is demonstrably closed/escaping (close() called on it, bound to
     * a try-with-resources variable, escaping to a potential owner, or handled
     * by an {@link ImplicitResourceCloser} extension).
     */
    private boolean isSafelyClosedResource(PsiExpression expression) {
        if (!isResourceCreation(expression)) {
            return true;
        }
        if (CLOSE.test(ExpressionUtils.getCallForQualifier(expression))) return true;
        final PsiVariable variable = ResourceInspection.getVariable(expression);
        if (variable instanceof PsiResourceVariable || isResourceEscaping(variable, expression)) return true;
        if (variable == null) return false;
        return ContainerUtil.or(ImplicitResourceCloser.EP_NAME.getExtensionList(), closer -> closer.isSafelyClosed(variable));
    }
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.accessanalyzer.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * Contains the text for the generated policy and its details.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/accessanalyzer-2019-11-01/GeneratedPolicyResult"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GeneratedPolicyResult implements Serializable, Cloneable, StructuredPojo {

    /**
     * Policy documents to use as content for new policies, suitable for the IAM <a
     * href="https://docs.aws.amazon.com/IAM/latest/APIReference/API_CreatePolicy.html">CreatePolicy</a> action.
     */
    private java.util.List<GeneratedPolicy> generatedPolicies;

    /** Properties describing the generated policy. */
    private GeneratedPolicyProperties properties;

    /**
     * Returns the text to use as the content for the new policy. The policy is created using the <a
     * href="https://docs.aws.amazon.com/IAM/latest/APIReference/API_CreatePolicy.html">CreatePolicy</a> action.
     *
     * @return The generated policies, or {@code null} if none were set.
     */
    public java.util.List<GeneratedPolicy> getGeneratedPolicies() {
        return generatedPolicies;
    }

    /**
     * Sets the text to use as the content for the new policy. The policy is created using the <a
     * href="https://docs.aws.amazon.com/IAM/latest/APIReference/API_CreatePolicy.html">CreatePolicy</a> action.
     * A defensive copy of the supplied collection is stored.
     *
     * @param generatedPolicies
     *        The policies to store; {@code null} clears the field.
     */
    public void setGeneratedPolicies(java.util.Collection<GeneratedPolicy> generatedPolicies) {
        this.generatedPolicies = (generatedPolicies == null)
                ? null
                : new java.util.ArrayList<GeneratedPolicy>(generatedPolicies);
    }

    /**
     * Appends the given policies to the existing list.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setGeneratedPolicies(java.util.Collection)} or {@link #withGeneratedPolicies(java.util.Collection)} if
     * you want to override the existing values.
     * </p>
     *
     * @param generatedPolicies
     *        The policies to append.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GeneratedPolicyResult withGeneratedPolicies(GeneratedPolicy... generatedPolicies) {
        if (this.generatedPolicies == null) {
            this.generatedPolicies = new java.util.ArrayList<GeneratedPolicy>(generatedPolicies.length);
        }
        for (GeneratedPolicy policy : generatedPolicies) {
            this.generatedPolicies.add(policy);
        }
        return this;
    }

    /**
     * Replaces the stored policies with the given collection.
     *
     * @param generatedPolicies
     *        The policies to store; {@code null} clears the field.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GeneratedPolicyResult withGeneratedPolicies(java.util.Collection<GeneratedPolicy> generatedPolicies) {
        setGeneratedPolicies(generatedPolicies);
        return this;
    }

    /**
     * Sets the <code>GeneratedPolicyProperties</code> object that contains properties of the generated policy.
     *
     * @param properties
     *        The properties of the generated policy.
     */
    public void setProperties(GeneratedPolicyProperties properties) {
        this.properties = properties;
    }

    /**
     * Returns the <code>GeneratedPolicyProperties</code> object that contains properties of the generated policy.
     *
     * @return The properties of the generated policy.
     */
    public GeneratedPolicyProperties getProperties() {
        return this.properties;
    }

    /**
     * Sets the <code>GeneratedPolicyProperties</code> object and returns this result for chaining.
     *
     * @param properties
     *        The properties of the generated policy.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GeneratedPolicyResult withProperties(GeneratedPolicyProperties properties) {
        setProperties(properties);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("{");
        if (getGeneratedPolicies() != null) {
            sb.append("GeneratedPolicies: ").append(getGeneratedPolicies()).append(",");
        }
        if (getProperties() != null) {
            sb.append("Properties: ").append(getProperties());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof GeneratedPolicyResult)) {
            return false;
        }
        final GeneratedPolicyResult other = (GeneratedPolicyResult) obj;
        return java.util.Objects.equals(getGeneratedPolicies(), other.getGeneratedPolicies())
                && java.util.Objects.equals(getProperties(), other.getProperties());
    }

    @Override
    public int hashCode() {
        // Objects.hash computes 31 * (31 * 1 + h(policies)) + h(properties) with
        // null hashing to 0 — identical to the hand-rolled prime accumulation.
        return java.util.Objects.hash(getGeneratedPolicies(), getProperties());
    }

    @Override
    public GeneratedPolicyResult clone() {
        try {
            return (GeneratedPolicyResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.accessanalyzer.model.transform.GeneratedPolicyResultMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
Derby - Class org.apache.derby.iapi.sql.compile.Optimizable
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.iapi.sql.compile;
import org.apache.derby.iapi.sql.dictionary.ConglomerateDescriptor;
import org.apache.derby.iapi.sql.dictionary.TableDescriptor;
import org.apache.derby.iapi.sql.dictionary.DataDictionary;
import org.apache.derby.shared.common.error.StandardException;
import org.apache.derby.iapi.util.JBitSet;
import java.util.Properties;
/**
 * Optimizable provides services for optimizing a table in a query.
 */
public interface Optimizable extends Visitable {

    /**
     * Choose the next access path to evaluate for this Optimizable.
     *
     * @param optimizer   Optimizer to use.
     * @param predList    The predicate list for this optimizable.
     *                    The optimizer always passes null, and it is up
     *                    to the optimizable object to pass along its
     *                    own predicate list, if appropriate, when delegating
     *                    this method.
     * @param rowOrdering The row ordering for all the outer tables in
     *                    the join order. This method will add the ordering
     *                    of the next access path to the given RowOrdering.
     *
     * @return true means another access path was chosen, false means
     *         no more access paths to evaluate.
     *
     * @exception StandardException Thrown on error
     */
    boolean nextAccessPath(Optimizer optimizer,
                           OptimizablePredicateList predList,
                           RowOrdering rowOrdering)
            throws StandardException;

    /**
     * Choose the best access path for this Optimizable.
     *
     * @param optimizer   Optimizer to use.
     * @param predList    The predicate list to optimize against
     * @param outerCost   The CostEstimate for the outer tables in the join order,
     *                    telling how many times this Optimizable will be scanned.
     * @param rowOrdering The row ordering for all the tables in the
     *                    join order, including this one.
     *
     * @return The optimizer's estimated cost of the best access path.
     *
     * @exception StandardException Thrown on error
     */
    CostEstimate optimizeIt(
            Optimizer optimizer,
            OptimizablePredicateList predList,
            CostEstimate outerCost,
            RowOrdering rowOrdering)
            throws StandardException;

    /**
     * Get the current access path under consideration for this Optimizable.
     */
    AccessPath getCurrentAccessPath();

    /**
     * Get the best access path for this Optimizable.
     */
    AccessPath getBestAccessPath();

    /**
     * Get the best sort-avoidance path for this Optimizable.
     */
    AccessPath getBestSortAvoidancePath();

    /**
     * Get the best access path overall for this Optimizable.
     */
    AccessPath getTrulyTheBestAccessPath();

    /**
     * Mark this optimizable so that its sort avoidance path will be
     * considered.
     */
    void rememberSortAvoidancePath();

    /**
     * Check whether this optimizable's sort avoidance path should
     * be considered.
     */
    boolean considerSortAvoidancePath();

    /**
     * Remember the current join strategy as the best one so far in this
     * join order.
     */
    void rememberJoinStrategyAsBest(AccessPath ap);

    /**
     * Get the table descriptor for this table (if any). Only base tables
     * have table descriptors - for the rest of the optimizables, this
     * method returns null.
     */
    TableDescriptor getTableDescriptor();

    /**
     * Get the map of referenced tables for this Optimizable.
     *
     * @return JBitSet Referenced table map.
     */
    JBitSet getReferencedTableMap();

    /**
     * Push an OptimizablePredicate down, if this node accepts it.
     *
     * @param optimizablePredicate OptimizablePredicate to push down.
     *
     * @return Whether or not the predicate was pushed down.
     *
     * @exception StandardException Thrown on error
     */
    boolean pushOptPredicate(OptimizablePredicate optimizablePredicate)
            throws StandardException;

    /**
     * Pull all the OptimizablePredicates from this Optimizable and put them
     * in the given OptimizablePredicateList.
     *
     * @param optimizablePredicates The list to put the pulled predicates
     *                              in.
     *
     * @exception StandardException Thrown on error
     */
    void pullOptPredicates(OptimizablePredicateList optimizablePredicates)
            throws StandardException;

    /**
     * Modify the access path for this Optimizable, as necessary. This includes
     * things like adding a result set to translate from index rows to base rows.
     *
     * @param outerTables Bit map of the tables that are outer to this one
     *                    in the join order.
     *
     * @return The (potentially new) Optimizable at the top of the tree.
     *
     * @exception StandardException Thrown on error
     */
    Optimizable modifyAccessPath(JBitSet outerTables) throws StandardException;

    /**
     * Return whether or not this is a covering index. We expect to call this
     * during generation, after access path selection is complete.
     *
     * @param cd ConglomerateDescriptor for index to consider
     *
     * @return boolean Whether or not this is a covering index.
     *
     * @exception StandardException Thrown on error
     */
    public boolean isCoveringIndex(ConglomerateDescriptor cd) throws StandardException;

    /**
     * Get the Properties list, if any, associated with this optimizable.
     *
     * @return The Properties list, if any, associated with this optimizable.
     */
    public Properties getProperties();

    /**
     * Set the Properties list for this optimizable.
     *
     * @param tableProperties The Properties list for this optimizable.
     */
    public void setProperties(Properties tableProperties);

    /**
     * Verify that the Properties list with optimizer overrides, if specified, is valid.
     *
     * @param dDictionary The DataDictionary to use.
     *
     * @exception StandardException Thrown on error
     */
    public void verifyProperties(DataDictionary dDictionary) throws StandardException;

    /**
     * Get the (exposed) name of this Optimizable.
     *
     * @return The name of this Optimizable.
     * @exception StandardException Thrown on error
     */
    public String getName() throws StandardException;

    /**
     * Get the table name of this Optimizable. Only base tables have
     * table names (by the time we use this method, all views will have
     * been resolved).
     */
    public String getBaseTableName();

    /**
     * Convert an absolute to a relative 0-based column position.
     * This is useful when generating qualifiers for partial rows
     * from the store.
     *
     * @param absolutePosition The absolute 0-based column position for the column.
     *
     * @return The relative 0-based column position for the column.
     */
    public int convertAbsoluteToRelativeColumnPosition(int absolutePosition);

    /**
     * When remembering "truly the best" access path for an Optimizable, we
     * have to keep track of which OptimizerImpl the "truly the best" access
     * is for. In most queries there will only be one OptimizerImpl in
     * question, but in cases where there are nested subqueries, there will be
     * one OptimizerImpl for every level of nesting, and each OptimizerImpl
     * might have its own idea of what this Optimizable's "truly the best path"
     * access path really is. In addition, there could be Optimizables
     * above this Optimizable that might need to override the best path
     * chosen during optimization. So whenever we save a "truly the best" path,
     * we take note of which Optimizer/Optimizable told us to do so. Then
     * as each level of subquery finishes optimization, the corresponding
     * OptimizerImpl/Optimizable can load its preferred access path into this
     * Optimizable's trulyTheBestAccessPath field and pass it up the tree, until
     * eventually the outer-most OptimizerImpl can choose to either use the best
     * path that it received from below (by calling "rememberAsBest()") or else
     * use the path that it found to be "best" for itself.
     *
     * This method is what allows us to keep track of which OptimizerImpl or
     * Optimizable saved which "best plan", and allows us to load the
     * appropriate plans after each round of optimization.
     *
     * @param action  Indicates whether we're adding, loading, or removing
     *                a best plan for the OptimizerImpl/Optimizable.
     * @param planKey Object to use as the map key when adding/looking up
     *                a plan. If it is an instance of OptimizerImpl then it corresponds
     *                to an outer query; otherwise it's some Optimizable above this
     *                Optimizable that could potentially reject plans chosen by the
     *                OptimizerImpl to which this Optimizable belongs.
     */
    public void updateBestPlanMap(short action,
                                  Object planKey) throws StandardException;

    /**
     * Remember the current access path as the best one (so far).
     *
     * @param planType  The type of plan (one of Optimizer.NORMAL_PLAN
     *                  or Optimizer.SORT_AVOIDANCE_PLAN)
     * @param optimizer The OptimizerImpl that is telling this Optimizable
     *                  to remember its current path as "truly the best".
     *
     * @exception StandardException thrown on error.
     */
    public void rememberAsBest(int planType, Optimizer optimizer)
            throws StandardException;

    /**
     * Begin the optimization process for this Optimizable. This can be
     * called many times for an Optimizable while optimizing a query -
     * it will typically be called every time the Optimizable is placed
     * in a potential join order.
     */
    public void startOptimizing(Optimizer optimizer, RowOrdering rowOrdering);

    /**
     * Estimate the cost of scanning this Optimizable using the given
     * predicate list with the given conglomerate. It is assumed that the
     * predicate list has already been classified. This cost estimate is
     * just for one scan, not for the life of the query.
     *
     * @see OptimizablePredicateList#classify
     *
     * @param predList    The predicate list to optimize against
     * @param cd          The conglomerate descriptor to get the cost of
     * @param outerCost   The estimated cost of the part of the plan outer
     *                    to this optimizable.
     * @param optimizer   The optimizer to use to help estimate the cost
     * @param rowOrdering The row ordering for all the tables in the
     *                    join order, including this one.
     *
     * @return The estimated cost of doing the scan
     *
     * @exception StandardException Thrown on error
     */
    CostEstimate estimateCost(OptimizablePredicateList predList,
                              ConglomerateDescriptor cd,
                              CostEstimate outerCost,
                              Optimizer optimizer,
                              RowOrdering rowOrdering)
            throws StandardException;

    /** Tell whether this Optimizable represents a base table */
    boolean isBaseTable();

    /** Tell whether this Optimizable is materializable
     *
     * @exception StandardException Thrown on error
     */
    boolean isMaterializable() throws StandardException;

    /** Tell whether this Optimizable can be instantiated multiple times */
    boolean supportsMultipleInstantiations();

    /** Tell whether this Optimizable has any large object (LOB) columns. */
    boolean hasLargeObjectColumns();

    /** Get this Optimizable's result set number */
    int getResultSetNumber();

    /** Get this Optimizable's table number */
    int getTableNumber();

    /** Return true if this Optimizable has a table number */
    boolean hasTableNumber();

    /** Return true if this is the target table of an update */
    public boolean forUpdate();

    /** Return the initial capacity of the hash table, for hash join strategy */
    public int initialCapacity();

    /** Return the load factor of the hash table, for hash join strategy */
    public float loadFactor();

    /** Return the hash key column numbers, for hash join strategy */
    public int[] hashKeyColumns();

    /** Set the hash key column numbers, for hash join strategy */
    public void setHashKeyColumns(int[] columnNumbers);

    /**
     * Is the current proposed join strategy for this optimizable feasible
     * given the predicate list?
     *
     * @param predList  The predicate list that has been pushed down to
     *                  this optimizable
     * @param optimizer The optimizer to use.
     *
     * @return true means feasible
     *
     * @exception StandardException Thrown on error
     */
    public boolean feasibleJoinStrategy(OptimizablePredicateList predList,
                                       Optimizer optimizer)
            throws StandardException;

    /**
     * @param rowCount
     * @param maxMemoryPerTable
     * @return true if the memory usage of the proposed access path is OK, false if not.
     *
     * @exception StandardException standard error policy
     */
    public boolean memoryUsageOK( double rowCount, int maxMemoryPerTable)
            throws StandardException;

    /**
     * Return the maximum capacity of the hash table, for hash join strategy.
     *
     * @param maxMemoryPerTable The maximum number of bytes to be used. Ignored if the user has set a maximum
     *                          number of rows for the Optimizable.
     *
     * @exception StandardException Standard error policy
     */
    public int maxCapacity( JoinStrategy joinStrategy, int maxMemoryPerTable) throws StandardException;

    /**
     * Can this Optimizable appear at the current location in the join order?
     * In other words, have the Optimizable's dependencies been satisfied?
     *
     * @param assignedTableMap The tables that have been placed so far in the join order.
     *
     * @return Whether or not this Optimizable can appear at the current location in the join order.
     */
    public boolean legalJoinOrder(JBitSet assignedTableMap);

    /**
     * Get the DataDictionary from this Optimizable. This is useful for code generation
     * because we need to get the constraint name if scanning a back index so that
     * RunTimeStatistics can display the correct info.
     *
     * @return The DataDictionary to use.
     *
     * @exception StandardException Thrown on error
     */
    public DataDictionary getDataDictionary() throws StandardException;

    /**
     * Is the optimizable the target table of an update or delete?
     *
     * @return Whether or not the optimizable is the target table of an update or delete.
     */
    public boolean isTargetTable();

    /**
     * Get the number of columns returned by this Optimizable.
     *
     * @return The number of columns returned by this Optimizable.
     */
    public int getNumColumnsReturned();

    /**
     * Will the optimizable return at most 1 row per scan?
     *
     * @return Whether or not the optimizable will return at most 1 row per scan.
     *
     * @exception StandardException Thrown on error
     */
    public boolean isOneRowScan() throws StandardException;

    /**
     * Init the access paths for this optimizable.
     *
     * @param optimizer The optimizer being used.
     */
    public void initAccessPaths(Optimizer optimizer);

    /**
     * Does this optimizable have a uniqueness condition on the
     * given predicate list, and if so, how many unique keys will be
     * returned per scan.
     *
     * @param predList The predicate list to check
     *
     * @return &lt;= 0 means there is no uniqueness condition
     *         &gt; 0 means there is a uniqueness condition,
     *         and the return value is the number of rows per scan.
     *
     * @exception StandardException Thrown on error
     */
    public double uniqueJoin(OptimizablePredicateList predList)
            throws StandardException;

    /** Get the optimizer tracer, if any */
    public OptTrace getOptimizerTracer();

    /** Report whether optimizer tracing is on */
    public boolean optimizerTracingIsOn();
}
| |
package gui;
import java.awt.BorderLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import javax.swing.JCheckBoxMenuItem;
import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import main.Dic;
import main.Game;
import main.AI;
import pawn.APawn;
import pawn.Bomb;
import pawn.Flag;
import com.esotericsoftware.kryonet.Client;
import com.esotericsoftware.kryonet.Connection;
import com.esotericsoftware.kryonet.Listener;
/**
* This class creates the window of the game.
*
* @author CAREDDA Giuliano, DUCOBU Alexandre
*/
public class WindowGame extends JFrame {
private static final long serialVersionUID = 1L;
public PaneGame pane;
public PaneGamePawn paneRed, paneBlue;
public int posX, posY;
public Game game;
public APawn focus;
public APawn Flag = new Flag(1), Bomb = new Bomb(1);
public int[] arrow = { -1, -1, -1, -1, -1, -1 };
public final int[] arrowN = { -1, -1, -1, -1, -1, -1 };
public boolean att = false;
JOptionPane jop, jopWin;
Dic startTeam;
public final String[] resultName = { "Rouge", "Bleu" };
public boolean playGame = true;
public AI ia;
public Client client;
public int Oplayer;
private JMenuBar menuBar = new JMenuBar();
private JMenu menu1 = new JMenu("Triche");
private JCheckBoxMenuItem showGrid = new JCheckBoxMenuItem(
"Montrer toute la grille");
private JCheckBoxMenuItem showPawn = new JCheckBoxMenuItem(
"Montrer les pions connus");
/**
 * Main constructor of the class.
 *
 * <p>No-argument constructor; performs no initialization.
 * NOTE(review): presumably kept for subclassing or framework use —
 * confirm before removing.</p>
 */
public WindowGame() {
}
/**
 * Constructor of the class.
 *
 * <p>Builds the full game window: the two side panels listing each team's
 * pawns, the central board panel, the cheat menu, and the mouse handling.
 * Initialization order matters (panels are added before the window is
 * shown, listeners attached after).</p>
 *
 * @param ngame
 *            The 'game' object of the current game.
 */
public WindowGame(Game ngame) {
    this.game = ngame;
    startTeam = game.getStartTeam();
    // Side panels: team 1 (red) on the left, team 2 (blue) on the right.
    paneRed = new PaneGamePawn(startTeam, game, 1);
    paneBlue = new PaneGamePawn(startTeam, game, 2);
    pane = new PaneGame(game);
    pane.setView(game.getNextTeam());
    // When we use the last game, the current team is viewable.
    pane.setLayout(new BorderLayout());
    this.add(paneRed, BorderLayout.WEST);
    this.add(paneBlue, BorderLayout.EAST);
    this.setSize(900, 700);
    this.setResizable(true);
    this.setTitle("Game");
    this.setLocationRelativeTo(null); // The window is centered.
    this.add(pane, BorderLayout.CENTER);
    // Cheat menu: the "show grid" item is hidden in online mode (player == 3).
    menu1.add(showPawn);
    if (game.getPlayer() != 3) {
        menu1.add(showGrid);
    }
    menuBar.add(menu1);
    this.setJMenuBar(menuBar);
    showPawn.addActionListener(new PawnListener());
    showGrid.addActionListener(new GridListener());
    this.setVisible(true);
    // Single-player mode (player == 1) plays against the computer.
    if (game.getPlayer() == 1) {
        ia = new AI(game.getLevel());
    }
    pane.addMouseListener(new MouseGame());
}
/**
 * Constructor of the class when the game is online.
 *
 * <p>Delegates window construction to {@link #WindowGame(Game)}, then wires a
 * Kryonet listener: a received {@code Game} replaces the local state (and ends
 * the game when someone has won); a received {@code int[]} toggles visibility
 * of the pawn at that position.</p>
 *
 * @param ngame
 *            The 'game' object of the current game.
 *
 * @param client
 *            The client object used to communicate with the server.
 *
 * @param Oplayer
 *            The number of this player: 1 or 2.
 */
public WindowGame(Game ngame, Client client, int Oplayer) {
    this(ngame);
    this.game.setGameN(1);
    this.client = client;
    this.Oplayer = Oplayer;
    client.addListener(new Listener() {
        public void received(Connection connection, Object object) {
            if (object instanceof Game) {
                if (game.getGameN() == 1) {
                    game = (Game) object;
                    pane.recupGame(game);
                    repaint();
                    int result = game.win();
                    if (result != 0) {
                        // Game over: reveal the whole board and stop accepting moves.
                        pane.setView(0);
                        playGame = false;
                        repaint();
                        jopWin = new JOptionPane();
                        // showMessageDialog is static — invoke it on the class instead of
                        // the instance (the old instance call needed @SuppressWarnings).
                        JOptionPane.showMessageDialog(null, "Le joueur "
                                + resultName[result - 1] + " gagne !",
                                "Resultat", JOptionPane.INFORMATION_MESSAGE);
                        clientClose();
                    }
                }
            }
            if (object instanceof int[]) {
                // The opponent toggled a pawn's visibility: res = {line, row}.
                int[] res = (int[]) object;
                APawn pawn = game.getPawn(res[0], res[1]);
                pawn.setShow(!pawn.getShow());
                pane.recupGame(game);
                pane.repaint();
            }
        }
    });
    this.setDefaultCloseOperation(EXIT_ON_CLOSE);
    pane.setView(Oplayer);
    pane.recupGame(game);
    repaint();
}
/**
 * Closes the network client's connection to the server.
 * (Note: this does not close the window itself.)
 */
public void clientClose() {
    client.close();
}
/**
 * Action listener to display (or hide) the known pawns.
 *
 * @author CAREDDA Giuliano, DUCOBU Alexandre
 */
class PawnListener implements ActionListener {
    public void actionPerformed(ActionEvent e) {
        // Mirror the checkbox state onto the board panel, then redraw.
        pane.setShowKnow(showPawn.isSelected());
        repaint();
    }
}
/**
 * Action listener to display (or hide) the entire grid.
 *
 * @author CAREDDA Giuliano, DUCOBU Alexandre
 */
class GridListener implements ActionListener {
    public void actionPerformed(ActionEvent e) {
        if (!showGrid.isSelected()) {
            // Hide the full grid again: restore the view matching the
            // current game mode.
            int mode = game.getPlayer();
            if (mode == 2) {
                pane.setView(game.getNextTeam());
            } else if (mode == 1) {
                pane.setView(1);
            }
        } else {
            // View 0 reveals the whole grid.
            pane.setView(0);
        }
        repaint();
    }
}
/**
 * Transforms the coordinates of the cursor into coordinates of the game.
 *
 * @param game
 *            The chosen game.
 *
 * @param pane
 *            The panel containing the game.
 *
 * @param posX
 *            The abscissa of the cursor.
 *
 * @param posY
 *            The ordinate of the cursor.
 *
 * @return An array with the line (index 0) and the row (index 1) in the
 *         game.
 */
public int[] getRes(Game game, JPanel pane, int posX, int posY) {
    // Each cell is (panel size / cell count) pixels wide/high; mapping a
    // pixel to its cell index is plain integer division.  The original
    // form `(p - p % size) / size` is exactly `p / size` under Java's
    // integer arithmetic (JLS 15.17.3), so the modulo round-trip is
    // dropped.
    int cellWidth = pane.getWidth() / (game.getRow() + 1);
    int cellHeight = pane.getHeight() / (game.getLine() + 1);
    int[] res = { 0, 0 };
    res[0] = posY / cellHeight;
    res[1] = posX / cellWidth;
    return res;
}
/**
 * This class redefines the right click event.
 *
 * <p>All game interaction is driven from here: a released left click
 * selects a pawn or plays a move, depending on the game mode (2 players,
 * versus AI, or online).  Each move runs on a freshly spawned background
 * thread because it may sleep to reveal an attacked pawn for a while.
 *
 * @author CAREDDA Giuliano, DUCOBU Alexandre
 */
class MouseGame implements MouseListener {
    // Pixel coordinates of the last left click, consumed by the click
    // handlers below.
    int posX, posY;
    /**
     * Redefines the right click event:<br/>
     * based on the game mode, the known pawns can be displayed (hidden).
     */
    @Override
    public void mouseReleased(MouseEvent e) {
        // Releasing the right button ends the temporary "show known
        // pawns" peek started in mousePressed (2-player mode only).
        if (e.getButton() == MouseEvent.BUTTON3 && game.getPlayer() == 2) {
            pane.setShowKnow(false);
            repaint();
        }
        if (playGame) {
            posX = e.getX();
            posY = e.getY();
            // Dispatch the left click to the handler for the current
            // game mode: 2 = two players, 1 = versus AI, 3 = online.
            if (e.getButton() == MouseEvent.BUTTON1
                    && game.getPlayer() == 2) {
                click2player();
            }
            if (e.getButton() == MouseEvent.BUTTON1
                    && game.getPlayer() == 1) {
                click1player();
            }
            if (e.getButton() == MouseEvent.BUTTON1
                    && game.getPlayer() == 3) {
                pane.setView(Oplayer);
                clickOnline();
            }
        }
    }
    /**
     * This method contains the 'game' for the 2 players mode.
     */
    private void click2player() {
        // Runs on its own thread: the move animation sleeps for 2s.
        new Thread(new Runnable() {
            /**
             * Runs the game.
             */
            @SuppressWarnings("static-access")
            public void run() {
                int[] res = getRes(game, pane, posX, posY);
                int line = res[0];
                int row = res[1];
                APawn pawn = game.getPawn(line, row);
                // A pawn is already selected: try to move it to the
                // clicked cell.
                if (focus != null) {
                    if (focus.movePoss(game, line, row)) {
                        // Attacking an occupied cell: reveal the target
                        // pawn for 2 seconds before resolving the move.
                        if (game.getPawn(line, row) != null) {
                            game.getPawn(line, row).setShow(true);
                            pane.recupArrow(arrowN);
                            repaint();
                            try {
                                Thread.sleep(2000);
                            } catch (InterruptedException e) {
                            }
                            game.getPawn(line, row).setShow(false);
                        }
                        game = focus.move(game, line, row);
                        att = true;
                        game.addTurn();
                        pane.recupArrow(arrowN);
                        game.save();
                        focus = null;
                        repaint();
                        paneRed.upGame(game);
                        paneBlue.upGame(game);
                        int result = game.win();
                        if (result != 0) {
                            // Game over: show the whole board and
                            // announce the winner.
                            pane.setView(0);
                            playGame = false;
                            repaint();
                            jopWin = new JOptionPane();
                            jopWin.showMessageDialog(null, "Le joueur "
                                    + resultName[result - 1] + " gagne !",
                                    "Resultat",
                                    JOptionPane.INFORMATION_MESSAGE);
                        } else {
                            // Hot-seat handover: hide both sides (view 3),
                            // prompt the next player, then show their side.
                            pane.setView(3);
                            repaint();
                            jop = new JOptionPane();
                            jop.showMessageDialog(
                                    null,
                                    "C'est votre tour, joueur "
                                            + resultName[((game.getTurn() + 1) % 2)]
                                            + " !", "Fin du tour",
                                    JOptionPane.INFORMATION_MESSAGE);
                            pane.setView((((game.getTurn() + 1) % 2) + 1));
                            repaint();
                            paneRed.upGame(game);
                            paneBlue.upGame(game);
                        }
                    }
                }
                // Clicking one of the current team's pawns (re)selects it,
                // unless a move just happened (att guards against
                // immediately refocusing after an attack).
                if (pawn != null) {
                    if (pawn.getTeam() == ((game.getTurn() + 1) % 2) + 1) {
                        if (pawn != null && !att) {
                            focus = pawn;
                            arrow = pawn.focus(game);
                            pane.recupArrow(arrow);
                            repaint();
                        } else {
                            pane.recupArrow(arrowN);
                            focus = null;
                            repaint();
                        }
                        att = false;
                        repaint();
                    }
                }
                att = false;
            }
        }).start();
    }
    /**
     * This method contains the 'game' for the player versus AI mode.
     */
    private void click1player() {
        // Runs on its own thread: sleeps animate both the human's attack
        // and the AI's reply.
        new Thread(new Runnable() {
            /**
             * Runs the game.
             */
            @SuppressWarnings("static-access")
            public void run() {
                // Only act when it is the human player's (team 1) turn.
                if ((((game.getTurn() + 1) % 2) + 1) == 1) {
                    int[] res = getRes(game, pane, posX, posY);
                    int line = res[0];
                    int row = res[1];
                    APawn pawn = game.getPawn(line, row);
                    if (focus != null) {
                        if (focus.movePoss(game, line, row)) {
                            // Reveal an attacked pawn for 2 seconds.
                            if (game.getPawn(line, row) != null) {
                                game.getPawn(line, row).setShow(true);
                                pane.recupArrow(arrowN);
                                repaint();
                                try {
                                    Thread.sleep(2000);
                                } catch (InterruptedException e) {
                                }
                                game.getPawn(line, row).setShow(false);
                            }
                            game = focus.move(game, line, row);
                            att = true;
                            game.addTurn();
                            pane.recupArrow(arrowN);
                            focus = null;
                            repaint();
                            paneRed.upGame(game);
                            paneBlue.upGame(game);
                            int result = game.win();
                            if (result != 0) {
                                pane.setView(0);
                                playGame = false;
                                repaint();
                                jopWin = new JOptionPane();
                                jopWin.showMessageDialog(null, "Le joueur "
                                        + resultName[result - 1]
                                        + " gagne !", "Resultat",
                                        JOptionPane.INFORMATION_MESSAGE);
                            } else {
                                // Brief pause, then let the AI choose and
                                // play its move; next[0] is the source
                                // cell, next[1] the destination.
                                try {
                                    Thread.sleep(1000);
                                } catch (InterruptedException e) {
                                }
                                int[][] next = ia.getNext(game);
                                APawn currentPawn = game.getPawn(
                                        next[0][0], next[0][1]);
                                // AI attack: reveal its pawn for 3 seconds.
                                if (game.getPawn(next[1][0], next[1][1]) != null) {
                                    currentPawn.setShow(true);
                                    repaint();
                                    try {
                                        Thread.sleep(3000);
                                    } catch (InterruptedException e) {
                                    }
                                    currentPawn.setShow(false);
                                    repaint();
                                }
                                currentPawn.move(game, next[1][0],
                                        next[1][1]);
                                game.addTurn();
                                game.save();
                                repaint();
                                paneRed.upGame(game);
                                paneBlue.upGame(game);
                                result = game.win();
                                if (result != 0) {
                                    pane.setView(0);
                                    playGame = false;
                                    repaint();
                                    jopWin = new JOptionPane();
                                    jopWin.showMessageDialog(
                                            null,
                                            "Le joueur "
                                                    + resultName[result - 1]
                                                    + " gagne !",
                                            "Resultat",
                                            JOptionPane.INFORMATION_MESSAGE);
                                }
                            }
                        }
                    }
                    // Select / deselect a pawn of the current team, same
                    // logic as in click2player().
                    if (pawn != null) {
                        if (pawn.getTeam() == ((game.getTurn() + 1) % 2) + 1) {
                            if (pawn != null && !att) {
                                focus = pawn;
                                arrow = pawn.focus(game);
                                pane.recupArrow(arrow);
                                repaint();
                            } else {
                                pane.recupArrow(arrowN);
                                focus = null;
                                repaint();
                            }
                            att = false;
                            repaint();
                        }
                    }
                    att = false;
                }
            }
        }).start();
    }
    /**
     * This method contains the 'game' for the online mode.
     */
    private void clickOnline() {
        // Runs on its own thread: sleeps during the attack reveal and
        // pushes state changes to the remote player over TCP.
        new Thread(new Runnable() {
            /**
             * Runs the game.
             */
            @SuppressWarnings("static-access")
            public void run() {
                pane.recupGame(game);
                repaint();
                // Only act when it is this client's turn.
                if (game.getNextTeam() == Oplayer) {
                    int[] res = getRes(game, pane, posX, posY);
                    int line = res[0];
                    int row = res[1];
                    APawn pawn = game.getPawn(line, row);
                    if (focus != null) {
                        if (focus.movePoss(game, line, row)) {
                            if (game.getPawn(line, row) != null) {
                                game.getPawn(line, row).setShow(true);
                                // Send the attacker's coordinates so the
                                // remote side toggles the pawn's
                                // visibility; sent again after the 2s
                                // reveal to toggle it back.  NOTE: this
                                // local int[] shadows the boolean field
                                // `att` used below.
                                int[] att = { focus.posX, focus.posY };
                                client.sendTCP(att);
                                pane.recupArrow(arrowN);
                                repaint();
                                try {
                                    Thread.sleep(2000);
                                } catch (InterruptedException e) {
                                }
                                client.sendTCP(att);
                                game.getPawn(line, row).setShow(false);
                            }
                            game = focus.move(game, line, row);
                            att = true;
                            game.addTurn();
                            pane.recupArrow(arrowN);
                            focus = null;
                            repaint();
                            paneRed.upGame(game);
                            paneBlue.upGame(game);
                            // Publish the updated game state to the
                            // remote player.
                            client.sendTCP(game);
                            int result = game.win();
                            if (result != 0) {
                                pane.setView(0);
                                playGame = false;
                                repaint();
                                jopWin = new JOptionPane();
                                jopWin.showMessageDialog(null, "Le joueur "
                                        + resultName[result - 1]
                                        + " gagne !", "Resultat",
                                        JOptionPane.INFORMATION_MESSAGE);
                                client.close();
                            }
                        }
                    }
                    // Select / deselect a pawn of the current team, same
                    // logic as in click2player().
                    if (pawn != null) {
                        if (pawn.getTeam() == ((game.getTurn() + 1) % 2) + 1) {
                            if (pawn != null && !att) {
                                focus = pawn;
                                arrow = pawn.focus(game);
                                pane.recupArrow(arrow);
                                repaint();
                            } else {
                                pane.recupArrow(arrowN);
                                focus = null;
                                repaint();
                            }
                            att = false;
                            repaint();
                        }
                    }
                    att = false;
                }
            }
        }).start();
    }
    @Override
    public void mouseClicked(MouseEvent e) {
    }
    @Override
    public void mouseEntered(MouseEvent e) {
    }
    @Override
    public void mouseExited(MouseEvent e) {
    }
    @Override
    public void mousePressed(MouseEvent e) {
        // Holding the right button in 2-player mode temporarily shows the
        // known pawns; mouseReleased() hides them again.
        if (e.getButton() == MouseEvent.BUTTON3 && game.getPlayer() == 2) {
            pane.setShowKnow(true);
            repaint();
        }
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cli;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.BytesType;
import org.apache.thrift.*;
import org.antlr.runtime.tree.*;
import static org.apache.cassandra.thrift.ThriftGlue.*;
import org.apache.cassandra.thrift.Cassandra;
import org.apache.cassandra.thrift.Column;
import org.apache.cassandra.thrift.ColumnOrSuperColumn;
import org.apache.cassandra.thrift.ColumnParent;
import org.apache.cassandra.thrift.ColumnPath;
import org.apache.cassandra.thrift.ConsistencyLevel;
import org.apache.cassandra.thrift.InvalidRequestException;
import org.apache.cassandra.thrift.NotFoundException;
import org.apache.cassandra.thrift.SliceRange;
import org.apache.cassandra.thrift.SuperColumn;
import org.apache.cassandra.thrift.TimedOutException;
import org.apache.cassandra.thrift.UnavailableException;
import java.util.*;
import java.io.UnsupportedEncodingException;
import org.apache.commons.lang.ArrayUtils;
// Cli Client Side Library
public class CliClient
{
private Cassandra.Client thriftClient_ = null;
private CliSessionState css_ = null;
private Map<String, Map<String, Map<String, String>>> keyspacesMap = new HashMap<String, Map<String,Map<String,String>>>();
public CliClient(CliSessionState css, Cassandra.Client thriftClient)
{
css_ = css;
thriftClient_ = thriftClient;
}
// Execute a CLI Statement
public void executeCLIStmt(String stmt) throws TException, NotFoundException, InvalidRequestException, UnavailableException, TimedOutException, IllegalAccessException, ClassNotFoundException, InstantiationException
{
CommonTree ast = null;
ast = CliCompiler.compileQuery(stmt);
try
{
switch (ast.getType()) {
case CliParser.NODE_EXIT:
cleanupAndExit();
break;
case CliParser.NODE_THRIFT_GET:
executeGet(ast);
break;
case CliParser.NODE_HELP:
printCmdHelp();
break;
case CliParser.NODE_THRIFT_SET:
executeSet(ast);
break;
case CliParser.NODE_THRIFT_DEL:
executeDelete(ast);
break;
case CliParser.NODE_THRIFT_COUNT:
executeCount(ast);
break;
case CliParser.NODE_SHOW_CLUSTER_NAME:
executeShowProperty(ast, "cluster name");
break;
case CliParser.NODE_SHOW_CONFIG_FILE:
executeShowProperty(ast, "config file");
break;
case CliParser.NODE_SHOW_VERSION:
executeShowProperty(ast, "version");
break;
case CliParser.NODE_SHOW_TABLES:
executeShowTables(ast);
break;
case CliParser.NODE_DESCRIBE_TABLE:
executeDescribeTable(ast);
break;
case CliParser.NODE_CONNECT:
executeConnect(ast);
break;
case CliParser.NODE_NO_OP:
// comment lines come here; they are treated as no ops.
break;
default:
css_.err.println("Invalid Statement (Type: " + ast.getType() + ")");
break;
}
}
catch (UnsupportedEncodingException e)
{
throw new RuntimeException("Unable to encode string as UTF-8", e);
}
}
private void printCmdHelp()
{
css_.out.println("List of all CLI commands:");
css_.out.println("? Same as help.");
css_.out.println("help Display this help.");
css_.out.println("connect <hostname>/<port> Connect to thrift service.");
css_.out.println("describe keyspace <keyspacename> Describe keyspace.");
css_.out.println("exit Exit CLI.");
css_.out.println("quit Exit CLI.");
css_.out.println("show config file Display contents of config file.");
css_.out.println("show cluster name Display cluster name.");
css_.out.println("show keyspaces Show list of keyspaces.");
css_.out.println("show api version Show server API version.");
css_.out.println("get <ksp>.<cf>['<key>'] Get a slice of columns.");
css_.out.println("get <ksp>.<cf>['<key>']['<super>'] Get a slice of sub columns.");
css_.out.println("get <ksp>.<cf>['<key>']['<col>'] Get a column value.");
css_.out.println("get <ksp>.<cf>['<key>']['<super>']['<col>'] Get a sub column value.");
css_.out.println("set <ksp>.<cf>['<key>']['<col>'] = '<value>' Set a column.");
css_.out.println("set <ksp>.<cf>['<key>']['<super>']['<col>'] = '<value>' Set a sub column.");
css_.out.println("del <ksp>.<cf>['<key>'] Delete record.");
css_.out.println("del <ksp>.<cf>['<key>']['<col>'] Delete column.");
css_.out.println("del <ksp>.<cf>['<key>']['<super>']['<col>'] Delete sub column.");
css_.out.println("count <ksp>.<cf>['<key>'] Count columns in record.");
css_.out.println("count <ksp>.<cf>['<key>']['<super>'] Count columns in a super column.");
}
private void cleanupAndExit()
{
CliMain.disconnect();
System.exit(0);
}
Map<String, Map<String, String>> getCFMetaData(String keyspace) throws NotFoundException, TException
{
// Lazily lookup column family meta-data.
if (!(keyspacesMap.containsKey(keyspace)))
keyspacesMap.put(keyspace, thriftClient_.describe_keyspace(keyspace));
return keyspacesMap.get(keyspace);
}
private void executeCount(CommonTree ast) throws TException, InvalidRequestException, UnavailableException, TimedOutException, UnsupportedEncodingException
{
if (!CliMain.isConnected())
return;
int childCount = ast.getChildCount();
assert(childCount == 1);
CommonTree columnFamilySpec = (CommonTree)ast.getChild(0);
if (!(columnFamilySpec.getType() == CliParser.NODE_COLUMN_ACCESS))
return;
String tableName = CliCompiler.getTableName(columnFamilySpec);
String key = CliCompiler.getKey(columnFamilySpec);
String columnFamily = CliCompiler.getColumnFamily(columnFamilySpec);
int columnSpecCnt = CliCompiler.numColumnSpecifiers(columnFamilySpec);
ColumnParent colParent;
if (columnSpecCnt == 0)
{
colParent = createColumnParent(columnFamily, null);
}
else
{
assert (columnSpecCnt == 1);
colParent = createColumnParent(columnFamily, CliCompiler.getColumn(columnFamilySpec, 0).getBytes("UTF-8"));
}
int count = thriftClient_.get_count(tableName, key, colParent, ConsistencyLevel.ONE);
css_.out.printf("%d columns\n", count);
}
private void executeDelete(CommonTree ast) throws TException, InvalidRequestException, UnavailableException, TimedOutException, UnsupportedEncodingException
{
if (!CliMain.isConnected())
return;
int childCount = ast.getChildCount();
assert(childCount == 1);
CommonTree columnFamilySpec = (CommonTree)ast.getChild(0);
if (!(columnFamilySpec.getType() == CliParser.NODE_COLUMN_ACCESS))
return;
String tableName = CliCompiler.getTableName(columnFamilySpec);
String key = CliCompiler.getKey(columnFamilySpec);
String columnFamily = CliCompiler.getColumnFamily(columnFamilySpec);
int columnSpecCnt = CliCompiler.numColumnSpecifiers(columnFamilySpec);
byte[] superColumnName = null;
byte[] columnName = null;
boolean isSuper;
try
{
if (!(getCFMetaData(tableName).containsKey(columnFamily)))
{
css_.out.println("No such column family: " + columnFamily);
return;
}
isSuper = getCFMetaData(tableName).get(columnFamily).get("Type").equals("Super") ? true : false;
}
catch (NotFoundException nfe)
{
css_.out.printf("No such keyspace: %s\n", tableName);
return;
}
if ((columnSpecCnt < 0) || (columnSpecCnt > 2))
{
css_.out.println("Invalid row, super column, or column specification.");
return;
}
if (columnSpecCnt == 1)
{
// table.cf['key']['column']
if (isSuper)
superColumnName = CliCompiler.getColumn(columnFamilySpec, 0).getBytes("UTF-8");
else
columnName = CliCompiler.getColumn(columnFamilySpec, 0).getBytes("UTF-8");
}
else if (columnSpecCnt == 2)
{
// table.cf['key']['column']['column']
superColumnName = CliCompiler.getColumn(columnFamilySpec, 0).getBytes("UTF-8");
columnName = CliCompiler.getColumn(columnFamilySpec, 1).getBytes("UTF-8");
}
thriftClient_.remove(tableName, key, createColumnPath(columnFamily, superColumnName, columnName),
timestampMicros(), ConsistencyLevel.ONE);
css_.out.println(String.format("%s removed.", (columnSpecCnt == 0) ? "row" : "column"));
}
private static long timestampMicros()
{
// we use microsecond resolution for compatibility with other client libraries, even though
// we can't actually get microsecond precision.
return System.currentTimeMillis() * 1000;
}
private void doSlice(String keyspace, String key, String columnFamily, byte[] superColumnName)
throws InvalidRequestException, UnavailableException, TimedOutException, TException, UnsupportedEncodingException, IllegalAccessException, NotFoundException, InstantiationException, ClassNotFoundException
{
SliceRange range = new SliceRange(ArrayUtils.EMPTY_BYTE_ARRAY, ArrayUtils.EMPTY_BYTE_ARRAY, true, 1000000);
List<ColumnOrSuperColumn> columns = thriftClient_.get_slice(keyspace, key,
createColumnParent(columnFamily, superColumnName),
createSlicePredicate(null, range), ConsistencyLevel.ONE);
int size = columns.size();
// Print out super columns or columns.
for (ColumnOrSuperColumn cosc : columns)
{
if (cosc.isSetSuper_column())
{
SuperColumn superColumn = cosc.super_column;
css_.out.printf("=> (super_column=%s,", formatSuperColumnName(keyspace, columnFamily, superColumn));
for (Column col : superColumn.getColumns())
css_.out.printf("\n (column=%s, value=%s, timestamp=%d)", formatSubcolumnName(keyspace, columnFamily, col),
new String(col.value, "UTF-8"), col.timestamp);
css_.out.println(")");
}
else
{
Column column = cosc.column;
css_.out.printf("=> (column=%s, value=%s, timestamp=%d)\n", formatColumnName(keyspace, columnFamily, column),
new String(column.value, "UTF-8"), column.timestamp);
}
}
css_.out.println("Returned " + size + " results.");
}
private String formatSuperColumnName(String keyspace, String columnFamily, SuperColumn column) throws NotFoundException, TException, ClassNotFoundException, IllegalAccessException, InstantiationException
{
return getFormatTypeForColumn(getCFMetaData(keyspace).get(columnFamily).get("CompareWith")).getString(column.name);
}
private String formatSubcolumnName(String keyspace, String columnFamily, Column subcolumn) throws NotFoundException, TException, ClassNotFoundException, IllegalAccessException, InstantiationException
{
return getFormatTypeForColumn(getCFMetaData(keyspace).get(columnFamily).get("CompareSubcolumnsWith")).getString(subcolumn.name);
}
private String formatColumnName(String keyspace, String columnFamily, Column column) throws ClassNotFoundException, NotFoundException, TException, IllegalAccessException, InstantiationException
{
return getFormatTypeForColumn(getCFMetaData(keyspace).get(columnFamily).get("CompareWith")).getString(column.name);
}
private AbstractType getFormatTypeForColumn(String compareWith) throws ClassNotFoundException, IllegalAccessException, InstantiationException
{
AbstractType type;
try {
type = (AbstractType) Class.forName(compareWith).newInstance();
} catch (ClassNotFoundException e) {
type = BytesType.class.newInstance();
}
return type;
}
// Execute GET statement
private void executeGet(CommonTree ast) throws TException, NotFoundException, InvalidRequestException, UnavailableException, TimedOutException, UnsupportedEncodingException, IllegalAccessException, InstantiationException, ClassNotFoundException
{
if (!CliMain.isConnected())
return;
// This will never happen unless the grammar is broken
assert (ast.getChildCount() == 1) : "serious parsing error (this is a bug).";
CommonTree columnFamilySpec = (CommonTree)ast.getChild(0);
if (!(columnFamilySpec.getType() == CliParser.NODE_COLUMN_ACCESS))
return;
String tableName = CliCompiler.getTableName(columnFamilySpec);
String key = CliCompiler.getKey(columnFamilySpec);
String columnFamily = CliCompiler.getColumnFamily(columnFamilySpec);
int columnSpecCnt = CliCompiler.numColumnSpecifiers(columnFamilySpec);
if (!(getCFMetaData(tableName).containsKey(columnFamily)))
{
css_.out.println("No such column family: " + columnFamily);
return;
}
boolean isSuper = getCFMetaData(tableName).get(columnFamily).get("Type").equals("Super") ? true : false;
byte[] superColumnName = null;
byte[] columnName = null;
// table.cf['key'] -- row slice
if (columnSpecCnt == 0)
{
doSlice(tableName, key, columnFamily, superColumnName);
return;
}
// table.cf['key']['column'] -- slice of a super, or get of a standard
if (columnSpecCnt == 1)
{
if (isSuper)
{
superColumnName = CliCompiler.getColumn(columnFamilySpec, 0).getBytes("UTF-8");
doSlice(tableName, key, columnFamily, superColumnName);
return;
}
else
{
columnName = CliCompiler.getColumn(columnFamilySpec, 0).getBytes("UTF-8");
}
}
// table.cf['key']['column']['column'] -- get of a sub-column
else if (columnSpecCnt == 2)
{
superColumnName = CliCompiler.getColumn(columnFamilySpec, 0).getBytes("UTF-8");
columnName = CliCompiler.getColumn(columnFamilySpec, 1).getBytes("UTF-8");
}
// The parser groks an arbitrary number of these so it is possible to get here.
else
{
css_.out.println("Invalid row, super column, or column specification.");
return;
}
// Perform a get(), print out the results.
ColumnPath path = createColumnPath(columnFamily, superColumnName, columnName);
Column column = thriftClient_.get(tableName, key, path, ConsistencyLevel.ONE).column;
css_.out.printf("=> (column=%s, value=%s, timestamp=%d)\n", formatColumnName(tableName, columnFamily, column),
new String(column.value, "UTF-8"), column.timestamp);
}
// Execute SET statement
private void executeSet(CommonTree ast) throws TException, InvalidRequestException, UnavailableException, TimedOutException, UnsupportedEncodingException
{
if (!CliMain.isConnected())
return;
assert (ast.getChildCount() == 2) : "serious parsing error (this is a bug).";
CommonTree columnFamilySpec = (CommonTree)ast.getChild(0);
if (!(columnFamilySpec.getType() == CliParser.NODE_COLUMN_ACCESS))
return;
String tableName = CliCompiler.getTableName(columnFamilySpec);
String key = CliCompiler.getKey(columnFamilySpec);
String columnFamily = CliCompiler.getColumnFamily(columnFamilySpec);
int columnSpecCnt = CliCompiler.numColumnSpecifiers(columnFamilySpec);
String value = CliUtils.unescapeSQLString(ast.getChild(1).getText());
byte[] superColumnName = null;
byte[] columnName = null;
// table.cf['key']
if (columnSpecCnt == 0)
{
css_.err.println("No column name specified, (type 'help' or '?' for help on syntax).");
return;
}
// table.cf['key']['column'] = 'value'
else if (columnSpecCnt == 1)
{
// get the column name
columnName = CliCompiler.getColumn(columnFamilySpec, 0).getBytes("UTF-8");
}
// table.cf['key']['super_column']['column'] = 'value'
else
{
assert (columnSpecCnt == 2) : "serious parsing error (this is a bug).";
// get the super column and column names
superColumnName = CliCompiler.getColumn(columnFamilySpec, 0).getBytes("UTF-8");
columnName = CliCompiler.getColumn(columnFamilySpec, 1).getBytes("UTF-8");
}
// do the insert
thriftClient_.insert(tableName, key, createColumnPath(columnFamily, superColumnName, columnName),
value.getBytes(), timestampMicros(), ConsistencyLevel.ONE);
css_.out.println("Value inserted.");
}
private void executeShowProperty(CommonTree ast, String propertyName) throws TException
{
if (!CliMain.isConnected())
return;
String propertyValue = thriftClient_.get_string_property(propertyName);
css_.out.println(propertyValue);
}
// process "show tables" statement
private void executeShowTables(CommonTree ast) throws TException
{
if (!CliMain.isConnected())
return;
List<String> tables = thriftClient_.get_string_list_property("keyspaces");
for (String table : tables)
{
css_.out.println(table);
}
}
// process a statement of the form: describe table <tablename>
private void executeDescribeTable(CommonTree ast) throws TException
{
if (!CliMain.isConnected())
return;
// Get table name
int childCount = ast.getChildCount();
assert(childCount == 1);
String tableName = ast.getChild(0).getText();
if( tableName == null ) {
css_.out.println("Keyspace argument required");
return;
}
// Describe and display
Map<String, Map<String, String>> columnFamiliesMap;
try {
columnFamiliesMap = thriftClient_.describe_keyspace(tableName);
for (String columnFamilyName: columnFamiliesMap.keySet()) {
Map<String, String> columnMap = columnFamiliesMap.get(columnFamilyName);
String desc = columnMap.get("Desc");
String columnFamilyType = columnMap.get("Type");
String sort = columnMap.get("CompareWith");
String flushperiod = columnMap.get("FlushPeriodInMinutes");
css_.out.println(desc);
css_.out.println("Column Family Type: " + columnFamilyType);
css_.out.println("Column Sorted By: " + sort);
css_.out.println("flush period: " + flushperiod + " minutes");
css_.out.println("------");
}
} catch (NotFoundException e) {
css_.out.println("Keyspace " + tableName + " could not be found.");
}
}
// process a statement of the form: connect hostname/port
private void executeConnect(CommonTree ast)
{
int portNumber = Integer.parseInt(ast.getChild(1).getText());
Tree idList = ast.getChild(0);
StringBuilder hostName = new StringBuilder();
int idCount = idList.getChildCount();
for (int idx = 0; idx < idCount; idx++)
{
hostName.append(idList.getChild(idx).getText());
}
// disconnect current connection, if any.
// This is a no-op, if you aren't currently connected.
CliMain.disconnect();
// now, connect to the newly specified host name and port
css_.hostName = hostName.toString();
css_.thriftPort = portNumber;
CliMain.connect(css_.hostName, css_.thriftPort);
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/videointelligence/v1/video_intelligence.proto
package com.google.cloud.videointelligence.v1;
/**
*
*
* <pre>
* Normalized bounding box.
* The normalized vertex coordinates are relative to the original image.
* Range: [0, 1].
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1.NormalizedBoundingBox}
*/
public final class NormalizedBoundingBox extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1.NormalizedBoundingBox)
NormalizedBoundingBoxOrBuilder {
private static final long serialVersionUID = 0L;
// Use NormalizedBoundingBox.newBuilder() to construct.
private NormalizedBoundingBox(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private NormalizedBoundingBox() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new NormalizedBoundingBox();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private NormalizedBoundingBox(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 13:
{
left_ = input.readFloat();
break;
}
case 21:
{
top_ = input.readFloat();
break;
}
case 29:
{
right_ = input.readFloat();
break;
}
case 37:
{
bottom_ = input.readFloat();
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1_NormalizedBoundingBox_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1_NormalizedBoundingBox_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.videointelligence.v1.NormalizedBoundingBox.class,
com.google.cloud.videointelligence.v1.NormalizedBoundingBox.Builder.class);
}
public static final int LEFT_FIELD_NUMBER = 1;
private float left_;
/**
*
*
* <pre>
* Left X coordinate.
* </pre>
*
* <code>float left = 1;</code>
*
* @return The left.
*/
@java.lang.Override
public float getLeft() {
return left_;
}
public static final int TOP_FIELD_NUMBER = 2;
private float top_;
/**
*
*
* <pre>
* Top Y coordinate.
* </pre>
*
* <code>float top = 2;</code>
*
* @return The top.
*/
@java.lang.Override
public float getTop() {
return top_;
}
public static final int RIGHT_FIELD_NUMBER = 3;
private float right_;
/**
*
*
* <pre>
* Right X coordinate.
* </pre>
*
* <code>float right = 3;</code>
*
* @return The right.
*/
@java.lang.Override
public float getRight() {
return right_;
}
public static final int BOTTOM_FIELD_NUMBER = 4;
private float bottom_;
/**
*
*
* <pre>
* Bottom Y coordinate.
* </pre>
*
* <code>float bottom = 4;</code>
*
* @return The bottom.
*/
@java.lang.Override
public float getBottom() {
return bottom_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes only fields whose value differs from the proto3 default (0F);
// unknown fields captured at parse time are re-emitted at the end.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (left_ != 0F) {
    output.writeFloat(1, left_);
  }
  if (top_ != 0F) {
    output.writeFloat(2, top_);
  }
  if (right_ != 0F) {
    output.writeFloat(3, right_);
  }
  if (bottom_ != 0F) {
    output.writeFloat(4, bottom_);
  }
  unknownFields.writeTo(output);
}
// Returns the serialized size in bytes, memoized in memoizedSize (-1 means
// "not yet computed"). Mirrors the field-skipping logic of writeTo above.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (left_ != 0F) {
    size += com.google.protobuf.CodedOutputStream.computeFloatSize(1, left_);
  }
  if (top_ != 0F) {
    size += com.google.protobuf.CodedOutputStream.computeFloatSize(2, top_);
  }
  if (right_ != 0F) {
    size += com.google.protobuf.CodedOutputStream.computeFloatSize(3, right_);
  }
  if (bottom_ != 0F) {
    size += com.google.protobuf.CodedOutputStream.computeFloatSize(4, bottom_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-by-field value equality. Floats are compared via their raw bit
// patterns (floatToIntBits) so NaN compares equal to NaN and +0.0 differs
// from -0.0, matching the protobuf-generated equals contract.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.videointelligence.v1.NormalizedBoundingBox)) {
    return super.equals(obj);
  }
  com.google.cloud.videointelligence.v1.NormalizedBoundingBox other =
      (com.google.cloud.videointelligence.v1.NormalizedBoundingBox) obj;
  if (java.lang.Float.floatToIntBits(getLeft())
      != java.lang.Float.floatToIntBits(other.getLeft())) return false;
  if (java.lang.Float.floatToIntBits(getTop()) != java.lang.Float.floatToIntBits(other.getTop()))
    return false;
  if (java.lang.Float.floatToIntBits(getRight())
      != java.lang.Float.floatToIntBits(other.getRight())) return false;
  if (java.lang.Float.floatToIntBits(getBottom())
      != java.lang.Float.floatToIntBits(other.getBottom())) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
// Hash over the descriptor, all four coordinates (as raw float bits) and the
// unknown fields; memoized in memoizedHashCode (0 means "not yet computed").
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + LEFT_FIELD_NUMBER;
  hash = (53 * hash) + java.lang.Float.floatToIntBits(getLeft());
  hash = (37 * hash) + TOP_FIELD_NUMBER;
  hash = (53 * hash) + java.lang.Float.floatToIntBits(getTop());
  hash = (37 * hash) + RIGHT_FIELD_NUMBER;
  hash = (53 * hash) + java.lang.Float.floatToIntBits(getRight());
  hash = (37 * hash) + BOTTOM_FIELD_NUMBER;
  hash = (53 * hash) + java.lang.Float.floatToIntBits(getBottom());
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parseFrom overloads. All byte-based overloads delegate
// directly to the shared PARSER singleton; stream-based overloads go through
// GeneratedMessageV3's IOException-translating helpers. The "delimited"
// variants read a varint length prefix before the message body.
public static com.google.cloud.videointelligence.v1.NormalizedBoundingBox parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1.NormalizedBoundingBox parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1.NormalizedBoundingBox parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1.NormalizedBoundingBox parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1.NormalizedBoundingBox parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1.NormalizedBoundingBox parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1.NormalizedBoundingBox parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1.NormalizedBoundingBox parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1.NormalizedBoundingBox parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1.NormalizedBoundingBox parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1.NormalizedBoundingBox parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1.NormalizedBoundingBox parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factory methods. newBuilder() and toBuilder() are the public entry
// points; the BuilderParent overload is used internally by the runtime.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
    com.google.cloud.videointelligence.v1.NormalizedBoundingBox prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// The default instance carries no state worth copying, so it yields a fresh
// Builder instead of going through mergeFrom.
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Builder for {@code google.cloud.videointelligence.v1.NormalizedBoundingBox}.
 *
 * <pre>
 * Normalized bounding box.
 * The normalized vertex coordinates are relative to the original image.
 * Range: [0, 1].
 * </pre>
 *
 * Protobuf type {@code google.cloud.videointelligence.v1.NormalizedBoundingBox}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1.NormalizedBoundingBox)
    com.google.cloud.videointelligence.v1.NormalizedBoundingBoxOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1_NormalizedBoundingBox_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1_NormalizedBoundingBox_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.videointelligence.v1.NormalizedBoundingBox.class,
            com.google.cloud.videointelligence.v1.NormalizedBoundingBox.Builder.class);
  }
  // Construct using com.google.cloud.videointelligence.v1.NormalizedBoundingBox.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }
  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    // No repeated or message-typed fields here, so nothing to eagerly build.
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
  }
  // Resets all four coordinates to the proto3 default (0F).
  @java.lang.Override
  public Builder clear() {
    super.clear();
    left_ = 0F;
    top_ = 0F;
    right_ = 0F;
    bottom_ = 0F;
    return this;
  }
  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1_NormalizedBoundingBox_descriptor;
  }
  @java.lang.Override
  public com.google.cloud.videointelligence.v1.NormalizedBoundingBox getDefaultInstanceForType() {
    return com.google.cloud.videointelligence.v1.NormalizedBoundingBox.getDefaultInstance();
  }
  // Builds and validates. isInitialized() is constant-true for this message
  // (see below), so the exception path is effectively unreachable.
  @java.lang.Override
  public com.google.cloud.videointelligence.v1.NormalizedBoundingBox build() {
    com.google.cloud.videointelligence.v1.NormalizedBoundingBox result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  @java.lang.Override
  public com.google.cloud.videointelligence.v1.NormalizedBoundingBox buildPartial() {
    com.google.cloud.videointelligence.v1.NormalizedBoundingBox result =
        new com.google.cloud.videointelligence.v1.NormalizedBoundingBox(this);
    result.left_ = left_;
    result.top_ = top_;
    result.right_ = right_;
    result.bottom_ = bottom_;
    onBuilt();
    return result;
  }
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.videointelligence.v1.NormalizedBoundingBox) {
      return mergeFrom((com.google.cloud.videointelligence.v1.NormalizedBoundingBox) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  // Copies only non-default (non-zero) fields from other, per proto3 merge rules.
  public Builder mergeFrom(com.google.cloud.videointelligence.v1.NormalizedBoundingBox other) {
    if (other == com.google.cloud.videointelligence.v1.NormalizedBoundingBox.getDefaultInstance())
      return this;
    if (other.getLeft() != 0F) {
      setLeft(other.getLeft());
    }
    if (other.getTop() != 0F) {
      setTop(other.getTop());
    }
    if (other.getRight() != 0F) {
      setRight(other.getRight());
    }
    if (other.getBottom() != 0F) {
      setBottom(other.getBottom());
    }
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }
  // Proto3 message with only scalar fields: always initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }
  // Parses from a stream; on failure the partially-parsed message (if any)
  // is still merged into this builder before the exception propagates.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    com.google.cloud.videointelligence.v1.NormalizedBoundingBox parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage =
          (com.google.cloud.videointelligence.v1.NormalizedBoundingBox) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  private float left_;
  /**
   * Left X coordinate.
   *
   * <code>float left = 1;</code>
   *
   * @return The left.
   */
  @java.lang.Override
  public float getLeft() {
    return left_;
  }
  /**
   * Left X coordinate.
   *
   * <code>float left = 1;</code>
   *
   * @param value The left to set.
   * @return This builder for chaining.
   */
  public Builder setLeft(float value) {
    left_ = value;
    onChanged();
    return this;
  }
  /**
   * Resets the left X coordinate to its default (0).
   *
   * <code>float left = 1;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearLeft() {
    left_ = 0F;
    onChanged();
    return this;
  }
  private float top_;
  /**
   * Top Y coordinate.
   *
   * <code>float top = 2;</code>
   *
   * @return The top.
   */
  @java.lang.Override
  public float getTop() {
    return top_;
  }
  /**
   * Top Y coordinate.
   *
   * <code>float top = 2;</code>
   *
   * @param value The top to set.
   * @return This builder for chaining.
   */
  public Builder setTop(float value) {
    top_ = value;
    onChanged();
    return this;
  }
  /**
   * Resets the top Y coordinate to its default (0).
   *
   * <code>float top = 2;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearTop() {
    top_ = 0F;
    onChanged();
    return this;
  }
  private float right_;
  /**
   * Right X coordinate.
   *
   * <code>float right = 3;</code>
   *
   * @return The right.
   */
  @java.lang.Override
  public float getRight() {
    return right_;
  }
  /**
   * Right X coordinate.
   *
   * <code>float right = 3;</code>
   *
   * @param value The right to set.
   * @return This builder for chaining.
   */
  public Builder setRight(float value) {
    right_ = value;
    onChanged();
    return this;
  }
  /**
   * Resets the right X coordinate to its default (0).
   *
   * <code>float right = 3;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearRight() {
    right_ = 0F;
    onChanged();
    return this;
  }
  private float bottom_;
  /**
   * Bottom Y coordinate.
   *
   * <code>float bottom = 4;</code>
   *
   * @return The bottom.
   */
  @java.lang.Override
  public float getBottom() {
    return bottom_;
  }
  /**
   * Bottom Y coordinate.
   *
   * <code>float bottom = 4;</code>
   *
   * @param value The bottom to set.
   * @return This builder for chaining.
   */
  public Builder setBottom(float value) {
    bottom_ = value;
    onChanged();
    return this;
  }
  /**
   * Resets the bottom Y coordinate to its default (0).
   *
   * <code>float bottom = 4;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearBottom() {
    bottom_ = 0F;
    onChanged();
    return this;
  }
  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }
  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }
  // @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1.NormalizedBoundingBox)
}
// @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.NormalizedBoundingBox)
// Shared immutable default instance; also serves as the proto3 "empty" value.
private static final com.google.cloud.videointelligence.v1.NormalizedBoundingBox DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1.NormalizedBoundingBox();
}
public static com.google.cloud.videointelligence.v1.NormalizedBoundingBox getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless parser singleton backing all parseFrom overloads above.
private static final com.google.protobuf.Parser<NormalizedBoundingBox> PARSER =
    new com.google.protobuf.AbstractParser<NormalizedBoundingBox>() {
      @java.lang.Override
      public NormalizedBoundingBox parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new NormalizedBoundingBox(input, extensionRegistry);
      }
    };
public static com.google.protobuf.Parser<NormalizedBoundingBox> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<NormalizedBoundingBox> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.videointelligence.v1.NormalizedBoundingBox getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package com.filetransfer.baidu.bcs;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.HttpURLConnection;
import java.net.URLConnection;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.HashMap;
import java.util.Iterator;
import java.util.zip.GZIPInputStream;
import java.util.zip.Inflater;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import org.apache.cordova.Config;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaResourceApi;
import org.apache.cordova.CordovaResourceApi.OpenForReadResult;
import org.apache.cordova.PluginManager;
import org.apache.cordova.PluginResult;
import org.apache.cordova.file.FileUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.net.Uri;
import android.os.Build;
import android.util.Log;
import org.xwalk.core.internal.XWalkCookieManager;
public class FileTransferBCS extends CordovaPlugin {
// Logging tag plus multipart framing constants. NOTE(review): the framing
// constants are currently unused — all multipart writes in upload() are
// commented out, so the request body is sent without multipart boundaries.
private static final String LOG_TAG = "FileTransferBCS";
private static final String LINE_START = "--";
private static final String LINE_END = "\r\n";
private static final String BOUNDARY = "+++++";
// Error codes surfaced to the JS side in FileTransferError objects.
// NOTE(review): public and mutable — consider declaring these final.
public static int FILE_NOT_FOUND_ERR = 1;
public static int INVALID_URL_ERR = 2;
public static int CONNECTION_ERR = 3;
public static int ABORTED_ERR = 4;
public static int NOT_MODIFIED_ERR = 5;
// In-flight transfers keyed by objectId; all access is guarded by
// synchronized (activeRequests) blocks in upload()/abort().
private static HashMap<String, RequestContext> activeRequests = new HashMap<String, RequestContext>();
// Upper bound for both the chunked-streaming chunk size and read buffers.
private static final int MAX_BUFFER_SIZE = 16 * 1024;
// Crosswalk cookie store, queried so upload requests carry the page's cookies.
private XWalkCookieManager mCookieManager = null;
public FileTransferBCS() {
    mCookieManager = new XWalkCookieManager();
}
/**
 * Per-transfer bookkeeping: the source/target pair, the live connection
 * (so an abort can reach it), and the callback used to report results.
 */
private static final class RequestContext {
    String source;
    String target;
    File targetFile;
    CallbackContext callbackContext;
    HttpURLConnection connection;
    // Once set, no further plugin results are delivered for this transfer.
    boolean aborted;
    RequestContext(String source, String target, CallbackContext callbackContext) {
        this.source = source;
        this.target = target;
        this.callbackContext = callbackContext;
    }
    // Delivers a result unless aborted; synchronized on this so an abort
    // cannot race with a result being sent.
    void sendPluginResult(PluginResult pluginResult) {
        synchronized (this) {
            if (!aborted) {
                callbackContext.sendPluginResult(pluginResult);
            }
        }
    }
}
/**
 * Adds an interface method to an InputStream to return the number of bytes
 * read from the raw stream. This is used to track total progress against
 * the HTTP Content-Length header value from the server.
 */
private static abstract class TrackingInputStream extends FilterInputStream {
    public TrackingInputStream(final InputStream in) {
        super(in);
    }
    public abstract long getTotalRawBytesRead();
}
// GZIPInputStream keeps its Inflater in the protected field "inf"; this
// subclass exposes it so the compressed byte count can be read for progress.
private static class ExposedGZIPInputStream extends GZIPInputStream {
    public ExposedGZIPInputStream(final InputStream in) throws IOException {
        super(in);
    }
    public Inflater getInflater() {
        return inf;
    }
}
/**
 * Provides raw bytes-read tracking for a GZIP input stream. Reports the
 * total number of compressed bytes read from the input, rather than the
 * number of uncompressed bytes.
 */
private static class TrackingGZIPInputStream extends TrackingInputStream {
    private ExposedGZIPInputStream gzin;
    public TrackingGZIPInputStream(final ExposedGZIPInputStream gzin) throws IOException {
        super(gzin);
        this.gzin = gzin;
    }
    public long getTotalRawBytesRead() {
        // The Inflater counts compressed input bytes, which is the figure
        // comparable to the server's Content-Length.
        return gzin.getInflater().getBytesRead();
    }
}
/**
 * Provides simple total-bytes-read tracking for an existing InputStream.
 */
private static class SimpleTrackingInputStream extends TrackingInputStream {
    private long bytesRead = 0;
    public SimpleTrackingInputStream(InputStream stream) {
        super(stream);
    }
    // Accumulates the running count; the EOF marker (-1) passes through
    // unchanged and is not added to the total.
    private int updateBytesRead(int newBytesRead) {
        if (newBytesRead != -1) {
            bytesRead += newBytesRead;
        }
        return newBytesRead;
    }
    @Override
    public int read() throws IOException {
        return updateBytesRead(super.read());
    }
    // Note: FilterInputStream delegates read(byte[] bytes) to the below method,
    // so we don't override it or else double count (CB-5631).
    @Override
    public int read(byte[] bytes, int offset, int count) throws IOException {
        return updateBytesRead(super.read(bytes, offset, count));
    }
    public long getTotalRawBytesRead() {
        return bytesRead;
    }
}
/**
 * Entry point for calls from the JS bridge. Dispatches the "upload",
 * "download" and "abort" actions; anything else is reported as unhandled.
 *
 * @param action          the action to execute
 * @param args            JSON-encoded arguments from the JS side
 * @param callbackContext callback id for result and progress reporting
 * @return true when the action was recognized, false otherwise
 * @throws JSONException if a required argument is missing or malformed
 */
@Override
public boolean execute(String action, JSONArray args, final CallbackContext callbackContext) throws JSONException {
    final boolean isUpload = action.equals("upload");
    if (isUpload || action.equals("download")) {
        final String source = args.getString(0);
        final String target = args.getString(1);
        if (isUpload) {
            upload(source, target, args, callbackContext);
        } else {
            download(source, target, args, callbackContext);
        }
        return true;
    }
    if (action.equals("abort")) {
        abort(args.getString(0));
        callbackContext.success();
        return true;
    }
    return false;
}
/**
 * Copies HTTP headers from a JSON object onto an outgoing request.
 * A header value may be a single string or a JSON array of strings; the
 * first value replaces any default via setRequestProperty and any further
 * values are appended via addRequestProperty.
 *
 * @param connection the request to decorate
 * @param headers    map of header name to value (string or array of strings)
 */
private static void addHeadersToRequest(URLConnection connection, JSONObject headers) {
    try {
        for (Iterator<?> iter = headers.keys(); iter.hasNext(); ) {
            String headerKey = iter.next().toString();
            JSONArray headerValues = headers.optJSONArray(headerKey);
            if (headerValues == null) {
                // Single-valued header: wrap it so both cases share one path.
                headerValues = new JSONArray();
                headerValues.put(headers.getString(headerKey));
            }
            connection.setRequestProperty(headerKey, headerValues.getString(0));
            for (int i = 1; i < headerValues.length(); ++i) {
                connection.addRequestProperty(headerKey, headerValues.getString(i));
            }
        }
    } catch (JSONException e1) {
        // Malformed header map: log it instead of silently dropping the error,
        // so misconfigured callers can diagnose missing headers.
        Log.w(LOG_TAG, "Failed to apply request headers", e1);
    }
}
/**
* Uploads the specified file to the server URL provided using an HTTP multipart request.
* @param source Full path of the file on the file system
* @param target URL of the server to receive the file
* @param args JSON Array of args
* @param callbackContext callback id for optional progress reports
*
* args[2] fileKey Name of file request parameter
* args[3] fileName File name to be used on server
* args[4] mimeType Describes file content type
* args[5] params key:value pairs of user-defined parameters
* @return FileUploadResult containing result of upload request
*/
private void upload(final String source, final String target, JSONArray args, CallbackContext callbackContext) throws JSONException {
    Log.d(LOG_TAG, "upload " + source + " to " + target);
    // Setup the options
    final String fileKey = getArgument(args, 2, "file");
    final String fileName = getArgument(args, 3, "image.jpg");
    final String mimeType = getArgument(args, 4, "image/jpeg");
    final JSONObject params = args.optJSONObject(5) == null ? new JSONObject() : args.optJSONObject(5);
    final boolean trustEveryone = args.optBoolean(6);
    // Always use chunked mode unless set to false as per API
    final boolean chunkedMode = args.optBoolean(7) || args.isNull(7);
    // Look for headers on the params map for backwards compatibility with older Cordova versions.
    final JSONObject headers = args.optJSONObject(8) == null ? params.optJSONObject("headers") : args.optJSONObject(8);
    final String objectId = args.getString(9);
    final String httpMethod = getArgument(args, 10, "POST");
    final CordovaResourceApi resourceApi = webView.getResourceApi();
    Log.d(LOG_TAG, "fileKey: " + fileKey);
    Log.d(LOG_TAG, "fileName: " + fileName);
    Log.d(LOG_TAG, "mimeType: " + mimeType);
    Log.d(LOG_TAG, "params: " + params);
    Log.d(LOG_TAG, "trustEveryone: " + trustEveryone);
    Log.d(LOG_TAG, "chunkedMode: " + chunkedMode);
    Log.d(LOG_TAG, "headers: " + headers);
    Log.d(LOG_TAG, "objectId: " + objectId);
    Log.d(LOG_TAG, "httpMethod: " + httpMethod);
    final Uri targetUri = resourceApi.remapUri(Uri.parse(target));
    // Accept a path or a URI for the source.
    Uri tmpSrc = Uri.parse(source);
    final Uri sourceUri = resourceApi.remapUri(
        tmpSrc.getScheme() != null ? tmpSrc : Uri.fromFile(new File(source)));
    int uriType = CordovaResourceApi.getUriType(targetUri);
    final boolean useHttps = uriType == CordovaResourceApi.URI_TYPE_HTTPS;
    // Only http(s) targets are supported; anything else fails fast.
    if (uriType != CordovaResourceApi.URI_TYPE_HTTP && !useHttps) {
        JSONObject error = createFileTransferError(INVALID_URL_ERR, source, target, null, 0, null);
        Log.e(LOG_TAG, "Unsupported URI: " + targetUri);
        callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.IO_EXCEPTION, error));
        return;
    }
    // Register the transfer so abort() can find it by objectId.
    final RequestContext context = new RequestContext(source, target, callbackContext);
    synchronized (activeRequests) {
        activeRequests.put(objectId, context);
    }
    // Perform the network work off the WebView thread.
    cordova.getThreadPool().execute(new Runnable() {
        public void run() {
            if (context.aborted) {
                return;
            }
            HttpURLConnection conn = null;
            HostnameVerifier oldHostnameVerifier = null;
            SSLSocketFactory oldSocketFactory = null;
            int totalBytes = 0;
            int fixedLength = -1;
            try {
                // Create return object
                FileUploadBCSResult result = new FileUploadBCSResult();
                FileProgressBCSResult progress = new FileProgressBCSResult();
                //------------------ CLIENT REQUEST
                // Open a HTTP connection to the URL based on protocol
                conn = resourceApi.createHttpConnection(targetUri);
                if (useHttps && trustEveryone) {
                    // Setup the HTTPS connection class to trust everyone
                    HttpsURLConnection https = (HttpsURLConnection)conn;
                    oldSocketFactory = trustAllHosts(https);
                    // Save the current hostnameVerifier
                    oldHostnameVerifier = https.getHostnameVerifier();
                    // Setup the connection not to verify hostnames
                    https.setHostnameVerifier(DO_NOT_VERIFY);
                }
                // Allow Inputs
                conn.setDoInput(true);
                // Allow Outputs
                conn.setDoOutput(true);
                // Don't use a cached copy.
                conn.setUseCaches(false);
                // Use a post method.
                conn.setRequestMethod(httpMethod);
                //conn.setRequestProperty("Content-Type", "multipart/form-data; boundary=" + BOUNDARY);
                // Set the cookies on the response
                String cookie = mCookieManager.getCookie(target);
                if (cookie != null) {
                    conn.setRequestProperty("Cookie", cookie);
                }
                // Handle the other headers
                if (headers != null) {
                    addHeadersToRequest(conn, headers);
                }
                /*
                 * Store the non-file portions of the multipart data as a string, so that we can add it
                 * to the contentSize, since it is part of the body of the HTTP request.
                 *
                 * NOTE(review): the multipart framing below is commented out, so the
                 * body sent is just the param values followed by raw file bytes —
                 * presumably the format this BCS endpoint expects; confirm before
                 * re-enabling any of the framing lines.
                 */
                StringBuilder beforeData = new StringBuilder();
                try {
                    for (Iterator<?> iter = params.keys(); iter.hasNext();) {
                        Object key = iter.next();
                        if(!String.valueOf(key).equals("headers"))
                        {
                            //beforeData.append(LINE_START).append(BOUNDARY).append(LINE_END);
                            //beforeData.append("Content-Disposition: form-data; name=\"").append(key.toString()).append('"');
                            //beforeData.append(LINE_END).append(LINE_END);
                            beforeData.append(params.getString(key.toString()));
                            //beforeData.append(LINE_END);
                        }
                    }
                } catch (JSONException e) {
                    Log.e(LOG_TAG, e.getMessage(), e);
                }
                //beforeData.append(LINE_START).append(BOUNDARY).append(LINE_END);
                //beforeData.append("Content-Disposition: form-data; name=\"").append(fileKey).append("\";");
                //beforeData.append(" filename=\"").append(fileName).append('"').append(LINE_END);
                //beforeData.append("Content-Type: ").append(mimeType).append(LINE_END).append(LINE_END);
                byte[] beforeDataBytes = beforeData.toString().getBytes("UTF-8");
                //byte[] tailParamsBytes = (LINE_END + LINE_START + BOUNDARY + LINE_START + LINE_END).getBytes("UTF-8");
                // Get a input stream of the file on the phone
                OpenForReadResult readResult = resourceApi.openForRead(sourceUri);
                //int stringLength = beforeDataBytes.length + tailParamsBytes.length;
                if (readResult.length >= 0) {
                    fixedLength = (int)readResult.length;
                    progress.setLengthComputable(true);
                    progress.setTotal(fixedLength);
                }
                Log.d(LOG_TAG, "Content Length: " + fixedLength);
                // setFixedLengthStreamingMode causes and OutOfMemoryException on pre-Froyo devices.
                // http://code.google.com/p/android/issues/detail?id=3164
                // It also causes OOM if HTTPS is used, even on newer devices.
                boolean useChunkedMode = chunkedMode && (Build.VERSION.SDK_INT < Build.VERSION_CODES.FROYO || useHttps);
                useChunkedMode = useChunkedMode || (fixedLength == -1);
                if (useChunkedMode) {
                    conn.setChunkedStreamingMode(MAX_BUFFER_SIZE);
                    // Although setChunkedStreamingMode sets this header, setting it explicitly here works
                    // around an OutOfMemoryException when using https.
                    conn.setRequestProperty("Transfer-Encoding", "chunked");
                } else {
                    conn.setFixedLengthStreamingMode(fixedLength);
                }
                conn.connect();
                OutputStream sendStream = null;
                try {
                    sendStream = conn.getOutputStream();
                    // Publish the connection only if not aborted, so abort()
                    // can disconnect us; check and publish atomically.
                    synchronized (context) {
                        if (context.aborted) {
                            return;
                        }
                        context.connection = conn;
                    }
                    //We don't want to change encoding, we just want this to write for all Unicode.
                    sendStream.write(beforeDataBytes);
                    totalBytes += beforeDataBytes.length;
                    // create a buffer of maximum size
                    int bytesAvailable = readResult.inputStream.available();
                    int bufferSize = Math.min(bytesAvailable, MAX_BUFFER_SIZE);
                    byte[] buffer = new byte[bufferSize];
                    // read file and write it into form...
                    int bytesRead = readResult.inputStream.read(buffer, 0, bufferSize);
                    long prevBytesRead = 0;
                    while (bytesRead > 0) {
                        result.setBytesSent(totalBytes);
                        sendStream.write(buffer, 0, bytesRead);
                        totalBytes += bytesRead;
                        // Throttle the debug log to roughly every 100 KiB.
                        if (totalBytes > prevBytesRead + 102400) {
                            prevBytesRead = totalBytes;
                            Log.d(LOG_TAG, "Uploaded " + totalBytes + " of " + fixedLength + " bytes");
                        }
                        bytesAvailable = readResult.inputStream.available();
                        bufferSize = Math.min(bytesAvailable, MAX_BUFFER_SIZE);
                        bytesRead = readResult.inputStream.read(buffer, 0, bufferSize);
                        // Send a progress event.
                        progress.setLoaded(totalBytes);
                        PluginResult progressResult = new PluginResult(PluginResult.Status.OK, progress.toJSONObject());
                        progressResult.setKeepCallback(true);
                        context.sendPluginResult(progressResult);
                    }
                    // send multipart form data necessary after file data...
                    // sendStream.write(tailParamsBytes);
                    // totalBytes += tailParamsBytes.length;
                    sendStream.flush();
                } finally {
                    safeClose(readResult.inputStream);
                    safeClose(sendStream);
                }
                synchronized (context) {
                    context.connection = null;
                }
                Log.d(LOG_TAG, "Sent " + totalBytes + " of " + fixedLength);
                //------------------ read the SERVER RESPONSE
                String responseString;
                int responseCode = conn.getResponseCode();
                Log.d(LOG_TAG, "response code: " + responseCode);
                Log.d(LOG_TAG, "response headers: " + conn.getHeaderFields());
                TrackingInputStream inStream = null;
                try {
                    inStream = getInputStream(conn);
                    synchronized (context) {
                        if (context.aborted) {
                            return;
                        }
                        context.connection = conn;
                    }
                    ByteArrayOutputStream out = new ByteArrayOutputStream(Math.max(1024, conn.getContentLength()));
                    byte[] buffer = new byte[1024];
                    int bytesRead = 0;
                    // write bytes to file
                    while ((bytesRead = inStream.read(buffer)) > 0) {
                        out.write(buffer, 0, bytesRead);
                    }
                    responseString = out.toString("UTF-8");
                } finally {
                    synchronized (context) {
                        context.connection = null;
                    }
                    safeClose(inStream);
                }
                Log.d(LOG_TAG, "got response from server");
                Log.d(LOG_TAG, responseString.substring(0, Math.min(256, responseString.length())));
                // send request and retrieve response
                result.setResponseCode(responseCode);
                result.setResponse(responseString);
                context.sendPluginResult(new PluginResult(PluginResult.Status.OK, result.toJSONObject()));
            } catch (FileNotFoundException e) {
                JSONObject error = createFileTransferError(FILE_NOT_FOUND_ERR, source, target, conn, e);
                Log.e(LOG_TAG, error.toString(), e);
                context.sendPluginResult(new PluginResult(PluginResult.Status.IO_EXCEPTION, error));
            } catch (IOException e) {
                JSONObject error = createFileTransferError(CONNECTION_ERR, source, target, conn, e);
                Log.e(LOG_TAG, error.toString(), e);
                Log.e(LOG_TAG, "Failed after uploading " + totalBytes + " of " + fixedLength + " bytes.");
                context.sendPluginResult(new PluginResult(PluginResult.Status.IO_EXCEPTION, error));
            } catch (JSONException e) {
                Log.e(LOG_TAG, e.getMessage(), e);
                context.sendPluginResult(new PluginResult(PluginResult.Status.JSON_EXCEPTION));
            } catch (Throwable t) {
                // Shouldn't happen, but will
                JSONObject error = createFileTransferError(CONNECTION_ERR, source, target, conn, t);
                Log.e(LOG_TAG, error.toString(), t);
                context.sendPluginResult(new PluginResult(PluginResult.Status.IO_EXCEPTION, error));
            } finally {
                synchronized (activeRequests) {
                    activeRequests.remove(objectId);
                }
                if (conn != null) {
                    // Revert back to the proper verifier and socket factories
                    if (trustEveryone && useHttps) {
                        HttpsURLConnection https = (HttpsURLConnection) conn;
                        https.setHostnameVerifier(oldHostnameVerifier);
                        https.setSSLSocketFactory(oldSocketFactory);
                    }
                }
            }
        }
    });
}
/**
 * Closes a stream, tolerating both null streams and close failures.
 * Intended for finally blocks where a close error must not mask the
 * primary result of the transfer.
 *
 * @param stream the stream to close; may be null
 */
private static void safeClose(Closeable stream) {
    if (stream == null) {
        return;
    }
    try {
        stream.close();
    } catch (IOException ignored) {
        // Best-effort close: failures here are deliberately ignored.
    }
}
/**
 * Wraps a connection's response body in a byte-count-tracking stream,
 * transparently handling gzip-encoded responses so the tracked count
 * stays comparable to the Content-Length header.
 *
 * @param conn an open connection whose response body is about to be read
 * @return a TrackingInputStream over the (possibly gzip) response body
 * @throws IOException if the connection's input stream cannot be opened
 */
private static TrackingInputStream getInputStream(URLConnection conn) throws IOException {
    final String encoding = conn.getContentEncoding();
    final boolean gzipped = encoding != null && encoding.equalsIgnoreCase("gzip");
    if (!gzipped) {
        return new SimpleTrackingInputStream(conn.getInputStream());
    }
    return new TrackingGZIPInputStream(new ExposedGZIPInputStream(conn.getInputStream()));
}
// Hostname verifier that accepts every hostname without checking it against
// the certificate. SECURITY: installed only when the caller opts in via
// trustEveryone; this removes MITM protection and must never be enabled for
// production traffic.
private static final HostnameVerifier DO_NOT_VERIFY = new HostnameVerifier() {
    public boolean verify(String hostname, SSLSession session) {
        return true;
    }
};
// Trust manager that accepts every certificate chain without validation.
// SECURITY: intended only for development against self-signed certificates
// (see trustAllHosts); using it in production removes all TLS authentication.
private static final TrustManager[] trustAllCerts = new TrustManager[] { new X509TrustManager() {
    public java.security.cert.X509Certificate[] getAcceptedIssuers() {
        return new java.security.cert.X509Certificate[] {};
    }
    public void checkClientTrusted(X509Certificate[] chain,
            String authType) throws CertificateException {
    }
    public void checkServerTrusted(X509Certificate[] chain,
            String authType) throws CertificateException {
    }
} };
/**
 * Installs an SSLSocketFactory on the given connection that blindly trusts
 * all SSL certificates. This exists so developers can work against web
 * servers with self-signed certificates, which the standard
 * HttpsURLConnection would otherwise reject with an exception.
 *
 * @param connection the HTTPS connection to reconfigure
 * @return the socket factory that was installed before the swap, so the
 *         caller can restore it once the transfer completes
 */
private static SSLSocketFactory trustAllHosts(HttpsURLConnection connection) {
    SSLSocketFactory previousFactory = connection.getSSLSocketFactory();
    try {
        // Build a context backed by the all-trusting trust manager.
        SSLContext sslContext = SSLContext.getInstance("TLS");
        sslContext.init(null, trustAllCerts, new java.security.SecureRandom());
        connection.setSSLSocketFactory(sslContext.getSocketFactory());
    } catch (Exception e) {
        // On failure the connection keeps its original factory.
        Log.e(LOG_TAG, e.getMessage(), e);
    }
    return previousFactory;
}
/**
 * Builds a FileTransfer error object, harvesting the HTTP status code and the
 * error-response body from the connection when one is available.
 * <p>
 * Any failure while probing the connection is swallowed (an IOException can
 * leave the connection object in a bad state), so this method never throws.
 *
 * @param errorCode  plugin error code (e.g. FILE_NOT_FOUND_ERR, CONNECTION_ERR)
 * @param source     the transfer source URL/path
 * @param target     the transfer target URL/path
 * @param connection connection the failure occurred on; may be null
 * @param throwable  underlying cause; may be null
 * @return JSON object describing the failure
 */
private static JSONObject createFileTransferError(int errorCode, String source, String target, URLConnection connection, Throwable throwable) {
    int httpStatus = 0;
    String body = null;
    if (connection != null) {
        try {
            if (connection instanceof HttpURLConnection) {
                HttpURLConnection http = (HttpURLConnection) connection;
                httpStatus = http.getResponseCode();
                InputStream err = http.getErrorStream();
                if (err != null) {
                    BufferedReader reader = new BufferedReader(new InputStreamReader(err, "UTF-8"));
                    try {
                        // Join the response lines with '\n', without a trailing newline.
                        StringBuilder bodyBuilder = new StringBuilder();
                        String line;
                        while ((line = reader.readLine()) != null) {
                            if (bodyBuilder.length() > 0) {
                                bodyBuilder.append('\n');
                            }
                            bodyBuilder.append(line);
                        }
                        body = bodyBuilder.toString();
                    } finally {
                        reader.close();
                    }
                }
            }
        } catch (Throwable e) {
            Log.w(LOG_TAG, "Error getting HTTP status code from connection.", e);
        }
    }
    return createFileTransferError(errorCode, source, target, body, httpStatus, throwable);
}
/**
 * Builds the JSON error payload delivered to the JavaScript side.
 *
 * @param errorCode  plugin error code
 * @param source     the transfer source URL/path
 * @param target     the transfer target URL/path
 * @param body       HTTP error-response body, or null if none was read
 * @param httpStatus HTTP status code, or null when not applicable
 * @param throwable  underlying cause, or null
 * @return JSONObject containing the error fields (possibly partially filled
 *         if serialization failed mid-way), or null if construction failed
 */
private static JSONObject createFileTransferError(int errorCode, String source, String target, String body, Integer httpStatus, Throwable throwable) {
    JSONObject result = null;
    try {
        result = new JSONObject();
        result.put("code", errorCode);
        result.put("source", source);
        result.put("target", target);
        if (body != null) {
            result.put("body", body);
        }
        if (httpStatus != null) {
            result.put("http_status", httpStatus);
        }
        if (throwable != null) {
            // Fall back to toString() when the throwable carries no message.
            String msg = throwable.getMessage();
            if (msg == null || "".equals(msg)) {
                msg = throwable.toString();
            }
            result.put("exception", msg);
        }
    } catch (JSONException e) {
        Log.e(LOG_TAG, e.getMessage(), e);
    }
    return result;
}
/**
 * Reads an optional string argument from the plugin's JSON args.
 * A missing entry, a JSON null, or the literal string "null" all fall back
 * to the supplied default.
 *
 * @param args          the args passed to the Plugin
 * @param position      index of the argument to read
 * @param defaultString value used when the argument is absent or null
 * @return the argument value, or {@code defaultString}
 */
private static String getArgument(JSONArray args, int position, String defaultString) {
    if (args.length() <= position) {
        return defaultString;
    }
    String value = args.optString(position);
    if (value == null || "null".equals(value)) {
        return defaultString;
    }
    return value;
}
/**
 * Downloads a file from the given URL (or copies a local resource) and saves
 * it at the specified target, streaming progress events back to the webview
 * as bytes are written. The transfer runs on a background thread; the final
 * PluginResult is either a FileEntry JSON object (success) or a
 * file-transfer error object.
 *
 * @param source          URL or local URI of the resource to fetch
 * @param target          full path (or URI) of the destination file
 * @param args            raw plugin arguments: [2] = trustEveryone flag,
 *                        [3] = abort handle (objectId), [4] = optional headers
 * @param callbackContext callback used for progress and the final result
 * @throws JSONException if the required objectId argument is missing
 */
private void download(final String source, final String target, JSONArray args, CallbackContext callbackContext) throws JSONException {
    Log.d(LOG_TAG, "download " + source + " to " + target);
    final CordovaResourceApi resourceApi = webView.getResourceApi();
    // Optional args: trust self-signed certs, handle used by abort(), extra request headers.
    final boolean trustEveryone = args.optBoolean(2);
    final String objectId = args.getString(3);
    final JSONObject headers = args.optJSONObject(4);
    final Uri sourceUri = resourceApi.remapUri(Uri.parse(source));
    // Accept a path or a URI for the source.
    Uri tmpTarget = Uri.parse(target);
    final Uri targetUri = resourceApi.remapUri(
        tmpTarget.getScheme() != null ? tmpTarget : Uri.fromFile(new File(target)));
    int uriType = CordovaResourceApi.getUriType(sourceUri);
    final boolean useHttps = uriType == CordovaResourceApi.URI_TYPE_HTTPS;
    // Anything that is not http(s) is treated as a local (file/content/asset) copy.
    final boolean isLocalTransfer = !useHttps && uriType != CordovaResourceApi.URI_TYPE_HTTP;
    if (uriType == CordovaResourceApi.URI_TYPE_UNKNOWN) {
        JSONObject error = createFileTransferError(INVALID_URL_ERR, source, target, null, 0, null);
        Log.e(LOG_TAG, "Unsupported URI: " + targetUri);
        callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.IO_EXCEPTION, error));
        return;
    }
    // TODO: refactor to also allow resources & content:
    /*
    if (!isLocalTransfer) {
        Log.w(LOG_TAG, "Source URL is not in white list: '" + source + "'");
        JSONObject error = createFileTransferError(CONNECTION_ERR, source, target, null, 401, null);
        callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.IO_EXCEPTION, error));
        return;
    }
    */
    // Register the transfer so abort(objectId) can find and cancel it.
    final RequestContext context = new RequestContext(source, target, callbackContext);
    synchronized (activeRequests) {
        activeRequests.put(objectId, context);
    }
    cordova.getThreadPool().execute(new Runnable() {
        public void run() {
            // The transfer may already have been aborted before this task ran.
            if (context.aborted) {
                return;
            }
            HttpURLConnection connection = null;
            HostnameVerifier oldHostnameVerifier = null;
            SSLSocketFactory oldSocketFactory = null;
            File file = null;
            PluginResult result = null;
            TrackingInputStream inputStream = null;
            boolean cached = false;
            OutputStream outputStream = null;
            try {
                OpenForReadResult readResult = null;
                file = resourceApi.mapUriToFile(targetUri);
                context.targetFile = file;
                Log.d(LOG_TAG, "Download file:" + sourceUri);
                FileProgressBCSResult progress = new FileProgressBCSResult();
                if (isLocalTransfer) {
                    // Local copy: no HTTP involved, just open the source for reading.
                    readResult = resourceApi.openForRead(sourceUri);
                    if (readResult.length != -1) {
                        progress.setLengthComputable(true);
                        progress.setTotal(readResult.length);
                    }
                    inputStream = new SimpleTrackingInputStream(readResult.inputStream);
                } else {
                    // connect to server
                    // Open a HTTP connection to the URL based on protocol
                    connection = resourceApi.createHttpConnection(sourceUri);
                    if (useHttps && trustEveryone) {
                        // Setup the HTTPS connection class to trust everyone
                        HttpsURLConnection https = (HttpsURLConnection)connection;
                        oldSocketFactory = trustAllHosts(https);
                        // Save the current hostnameVerifier
                        oldHostnameVerifier = https.getHostnameVerifier();
                        // Setup the connection not to verify hostnames
                        https.setHostnameVerifier(DO_NOT_VERIFY);
                    }
                    connection.setRequestMethod("GET");
                    // TODO: Make OkHttp use this CookieManager by default.
                    String cookie = mCookieManager.getCookie(sourceUri.toString());
                    if(cookie != null)
                    {
                        connection.setRequestProperty("cookie", cookie);
                    }
                    // This must be explicitly set for gzip progress tracking to work.
                    connection.setRequestProperty("Accept-Encoding", "gzip");
                    // Handle the other headers
                    if (headers != null) {
                        addHeadersToRequest(connection, headers);
                    }
                    connection.connect();
                    if (connection.getResponseCode() == HttpURLConnection.HTTP_NOT_MODIFIED) {
                        cached = true;
                        connection.disconnect();
                        Log.d(LOG_TAG, "Resource not modified: " + source);
                        // NOTE(review): createFileTransferError probes the connection
                        // after disconnect(); the response code is typically still
                        // cached by HttpURLConnection, but confirm.
                        JSONObject error = createFileTransferError(NOT_MODIFIED_ERR, source, target, connection, null);
                        result = new PluginResult(PluginResult.Status.ERROR, error);
                    } else {
                        if (connection.getContentEncoding() == null || connection.getContentEncoding().equalsIgnoreCase("gzip")) {
                            // Only trust content-length header if we understand
                            // the encoding -- identity or gzip
                            if (connection.getContentLength() != -1) {
                                progress.setLengthComputable(true);
                                progress.setTotal(connection.getContentLength());
                            }
                        }
                        inputStream = getInputStream(connection);
                    }
                }
                if (!cached) {
                    try {
                        // Publish the connection so abort() can disconnect it,
                        // unless the transfer was aborted in the meantime.
                        synchronized (context) {
                            if (context.aborted) {
                                return;
                            }
                            context.connection = connection;
                        }
                        // write bytes to file
                        byte[] buffer = new byte[MAX_BUFFER_SIZE];
                        int bytesRead = 0;
                        outputStream = resourceApi.openOutputStream(targetUri);
                        while ((bytesRead = inputStream.read(buffer)) > 0) {
                            outputStream.write(buffer, 0, bytesRead);
                            // Send a progress event.
                            progress.setLoaded(inputStream.getTotalRawBytesRead());
                            PluginResult progressResult = new PluginResult(PluginResult.Status.OK, progress.toJSONObject());
                            progressResult.setKeepCallback(true);
                            context.sendPluginResult(progressResult);
                        }
                    } finally {
                        synchronized (context) {
                            context.connection = null;
                        }
                        safeClose(inputStream);
                        safeClose(outputStream);
                    }
                    Log.d(LOG_TAG, "Saved file: " + target);
                    // create FileEntry object
                    // Look up the PluginManager reflectively to stay compatible with
                    // webview implementations that expose it as a method or a field.
                    Class webViewClass = webView.getClass();
                    PluginManager pm = null;
                    try {
                        Method gpm = webViewClass.getMethod("getPluginManager");
                        pm = (PluginManager) gpm.invoke(webView);
                    } catch (NoSuchMethodException e) {
                    } catch (IllegalAccessException e) {
                    } catch (InvocationTargetException e) {
                    }
                    if (pm == null) {
                        try {
                            Field pmf = webViewClass.getField("pluginManager");
                            pm = (PluginManager)pmf.get(webView);
                        } catch (NoSuchFieldException e) {
                        } catch (IllegalAccessException e) {
                        }
                    }
                    // NOTE(review): if both lookups failed, pm is still null and the
                    // getPlugin call below throws an NPE that is swallowed by the
                    // generic Throwable handler — confirm this is intended.
                    file = resourceApi.mapUriToFile(targetUri);
                    context.targetFile = file;
                    FileUtils filePlugin = (FileUtils) pm.getPlugin("File");
                    if (filePlugin != null) {
                        JSONObject fileEntry = filePlugin.getEntryForFile(file);
                        if (fileEntry != null) {
                            result = new PluginResult(PluginResult.Status.OK, fileEntry);
                        } else {
                            JSONObject error = createFileTransferError(CONNECTION_ERR, source, target, connection, null);
                            Log.e(LOG_TAG, "File plugin cannot represent download path");
                            result = new PluginResult(PluginResult.Status.IO_EXCEPTION, error);
                        }
                    } else {
                        Log.e(LOG_TAG, "File plugin not found; cannot save downloaded file");
                        result = new PluginResult(PluginResult.Status.ERROR, "File plugin not found; cannot save downloaded file");
                    }
                }
            } catch (FileNotFoundException e) {
                JSONObject error = createFileTransferError(FILE_NOT_FOUND_ERR, source, target, connection, e);
                Log.e(LOG_TAG, error.toString(), e);
                result = new PluginResult(PluginResult.Status.IO_EXCEPTION, error);
            } catch (IOException e) {
                JSONObject error = createFileTransferError(CONNECTION_ERR, source, target, connection, e);
                Log.e(LOG_TAG, error.toString(), e);
                result = new PluginResult(PluginResult.Status.IO_EXCEPTION, error);
            } catch (JSONException e) {
                Log.e(LOG_TAG, e.getMessage(), e);
                result = new PluginResult(PluginResult.Status.JSON_EXCEPTION);
            } catch (Throwable e) {
                JSONObject error = createFileTransferError(CONNECTION_ERR, source, target, connection, e);
                Log.e(LOG_TAG, error.toString(), e);
                result = new PluginResult(PluginResult.Status.IO_EXCEPTION, error);
            } finally {
                synchronized (activeRequests) {
                    activeRequests.remove(objectId);
                }
                if (connection != null) {
                    // Revert back to the proper verifier and socket factories
                    if (trustEveryone && useHttps) {
                        HttpsURLConnection https = (HttpsURLConnection) connection;
                        https.setHostnameVerifier(oldHostnameVerifier);
                        https.setSSLSocketFactory(oldSocketFactory);
                    }
                }
                if (result == null) {
                    result = new PluginResult(PluginResult.Status.ERROR, createFileTransferError(CONNECTION_ERR, source, target, connection, null));
                }
                // Remove incomplete download.
                if (!cached && result.getStatus() != PluginResult.Status.OK.ordinal() && file != null) {
                    file.delete();
                }
                context.sendPluginResult(result);
            }
        }
    });
}
/**
 * Aborts an ongoing upload or download identified by its objectId.
 * Deletes any partially written target file, delivers an ABORTED_ERR result
 * to the transfer's callback, and disconnects its network connection.
 */
private void abort(String objectId) {
    final RequestContext context;
    synchronized (activeRequests) {
        // Remove first so the transfer's own cleanup cannot double-handle it.
        context = activeRequests.remove(objectId);
    }
    if (context != null) {
        // Closing the streams can block, so execute on a background thread.
        cordova.getThreadPool().execute(new Runnable() {
            public void run() {
                synchronized (context) {
                    // NOTE(review): the transfer thread may still be writing and
                    // could recreate the file after this delete — confirm acceptable.
                    File file = context.targetFile;
                    if (file != null) {
                        file.delete();
                    }
                    // Trigger the abort callback immediately to minimize latency between it and abort() being called.
                    JSONObject error = createFileTransferError(ABORTED_ERR, context.source, context.target, null, -1, null);
                    context.sendPluginResult(new PluginResult(PluginResult.Status.ERROR, error));
                    context.aborted = true;
                    if (context.connection != null) {
                        // Break any network I/O the transfer thread is blocked on.
                        context.connection.disconnect();
                    }
                }
            }
        });
    }
}
}
| |
/*
* MSWDCoordinates.java
*
* Copyright 2006-2015 James F. Bowring and www.Earth-Time.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.earthtime.dataDictionaries;
/**
 * Lookup table of MSWD (Mean Square of Weighted Deviates) limit values,
 * indexed by data-point count (0 through 200).
 *
 * @author James F. Bowring
 */
public class MSWDCoordinates {
    // Table produced by Noah McClean for the ... (original comment truncated;
    // TODO confirm provenance and the producing computation).
    /**
     * Row i corresponds to i data points: column 0 repeats the point count,
     * and the remaining five columns are unlabeled here.
     * NOTE(review): they appear to be MSWD-related bounds/percentiles for that
     * point count — confirm the meaning of each column against the producing
     * code before relying on a specific one.
     */
    public final static double[][] valuesByPointCount = new double[][]{
        {0, 0, 0, 0, 0, 0},
        {1, 0, 0, 0, 0, 0},
        {2, 0, 0, 2.995732274, 6.907755279, 0.75},
        {3, 0, 0.0010541, 2.60525, 5.422078732, 0.725912174},
        {4, 0, 0.021182, 2.382574589, 4.616706738, 0.735758882},
        {5, 0, 0.059248, 2.238288991, 4.10300113, 0.770901649},
        {6, 0, 0.101167, 2.133739487, 3.742957414, 0.812011699},
        {7, 0, 0.141319, 2.052654134, 3.474555193, 0.854290649},
        {8, 0, 0.178126, 1.987069967, 3.265560195, 0.896167231},
        {9, 0, 0.211399, 1.932471747, 3.097462763, 0.937077973},
        {10, 0, 0.241393, 1.88603946, 2.958829845, 0.976834074},
        {11, 0, 0.268475, 1.845900814, 2.842193965, 1.015399783},
        {12, 0, 0.293014, 1.810738905, 2.742457534, 1.052804219},
        {13, 0, 0.315342, 1.779599044, 2.656013767, 1.089103221},
        {14, 0, 0.335748, 1.751762345, 2.580233834, 1.124361987},
        {15, 0, 0.354476, 1.726684904, 2.513153215, 1.158646987},
        {16, 0, 0.371733, 1.703940639, 2.453272174, 1.192022237},
        {17, 0, 0.387697, 1.683183648, 2.399424512, 1.224547711},
        {18, 0, 0.402515, 1.664146228, 2.350688685, 1.256278788},
        {19, 0, 0.416316, 1.646599909, 2.306326103, 1.287266229},
        {20, 0, 0.429209, 1.630359852, 2.265737331, 1.3175564},
        {21, 0.306984598, 0.441286, 1.615276068, 2.228430383, 1.347191594},
        {22, 0.317407656, 0.452631, 1.601211574, 2.193997377, 1.376210393},
        {23, 0.327358251, 0.463312, 1.588061878, 2.162097064, 1.40464803},
        {24, 0.336870066, 0.473391, 1.575731346, 2.132441574, 1.432536723},
        {25, 0.345973745, 0.482923, 1.564136294, 2.104786231, 1.459905984},
        {26, 0.354697186, 0.491954, 1.55320959, 2.07892163, 1.486782902},
        {27, 0.363065812, 0.500528, 1.542885505, 2.054667415, 1.513192388},
        {28, 0.371102826, 0.50868, 1.533115153, 2.031867335, 1.539157399},
        {29, 0.378829431, 0.516444, 1.523849471, 2.010385293, 1.564699138},
        {30, 0.386265035, 0.523851, 1.515043921, 1.990102143, 1.589837219},
        {31, 0.393427431, 0.530926, 1.506664707, 1.970913099, 1.61458983},
        {32, 0.400332956, 0.537693, 1.498678584, 1.952725596, 1.638973867},
        {33, 0.406996637, 0.544175, 1.491053181, 1.935457531, 1.66300505},
        {34, 0.413432317, 0.550389, 1.483766942, 1.919035808, 1.686698038},
        {35, 0.41965277, 0.556356, 1.476789573, 1.90339511, 1.710066519},
        {36, 0.4256698, 0.562089, 1.470105527, 1.888476879, 1.7331233},
        {37, 0.431494332, 0.567605, 1.463690835, 1.874228446, 1.755880379},
        {38, 0.437136496, 0.572917, 1.457528592, 1.8606023, 1.778349013},
        {39, 0.442605692, 0.578036, 1.451605064, 1.84755546, 1.800539784},
        {40, 0.447910663, 0.582974, 1.445904148, 1.835048938, 1.822462649},
        {41, 0.453059546, 0.587742, 1.440411216, 1.823047278, 1.844126992},
        {42, 0.458059925, 0.592349, 1.435114932, 1.81151816, 1.865541665},
        {43, 0.462918883, 0.596805, 1.430001825, 1.800432052, 1.886715036},
        {44, 0.467643035, 0.601116, 1.425065322, 1.789761914, 1.907655015},
        {45, 0.472238575, 0.605291, 1.420293185, 1.779482934, 1.928369098},
        {46, 0.476711304, 0.609337, 1.415676534, 1.769572297, 1.948864388},
        {47, 0.481066663, 0.61326, 1.411208027, 1.76000899, 1.969147629},
        {48, 0.485309762, 0.617067, 1.406878383, 1.750773619, 1.989225225},
        {49, 0.489445403, 0.620763, 1.402681641, 1.741848257, 2.00910327},
        {50, 0.493478105, 0.624353, 1.398611663, 1.733216304, 2.028787563},
        {51, 0.497412127, 0.627842, 1.394662164, 1.724862362, 2.048283629},
        {52, 0.501251482, 0.631235, 1.390826736, 1.716772131, 2.067596738},
        {53, 0.504999959, 0.634537, 1.387098876, 1.708932308, 2.08673192},
        {54, 0.508661136, 0.637751, 1.383475354, 1.701330498, 2.105693978},
        {55, 0.512238398, 0.640881, 1.379951141, 1.693955141, 2.124487507},
        {56, 0.515734946, 0.643931, 1.376521129, 1.68679544, 2.143116901},
        {57, 0.519153813, 0.646903, 1.373183452, 1.679841295, 2.161586369},
        {58, 0.522497876, 0.649803, 1.369929546, 1.673083251, 2.179899942},
        {59, 0.525769862, 0.652631, 1.366760696, 1.666512443, 2.198061487},
        {60, 0.52897236, 0.655392, 1.363669882, 1.660120551, 2.216074714},
        {61, 0.532107831, 0.658087, 1.360656588, 1.653899759, 2.233943185},
        {62, 0.535178616, 0.66072, 1.357715345, 1.647842715, 2.251670321},
        {63, 0.538186941, 0.663292, 1.354845513, 1.641942497, 2.269259412},
        {64, 0.541134925, 0.665807, 1.352041554, 1.636192582, 2.286713625},
        {65, 0.544024589, 0.668265, 1.34930433, 1.630586817, 2.304036004},
        {66, 0.546857859, 0.67067, 1.346628238, 1.625119391, 2.321229483},
        {67, 0.549636573, 0.673023, 1.344012453, 1.619784813, 2.338296889},
        {68, 0.552362485, 0.675326, 1.341454509, 1.614577889, 2.355240948},
        {69, 0.555037271, 0.67758, 1.338953494, 1.609493704, 2.372064287},
        {70, 0.557662534, 0.679788, 1.336505306, 1.604527598, 2.388769444},
        {71, 0.560239806, 0.681951, 1.334108979, 1.599675154, 2.405358868},
        {72, 0.562770552, 0.68407, 1.33176351, 1.594932182, 2.421834926},
        {73, 0.565256177, 0.686148, 1.32946475, 1.5902947, 2.438199905},
        {74, 0.567698025, 0.688184, 1.327214765, 1.585758927, 2.454456017},
        {75, 0.570097386, 0.690181, 1.32500937, 1.581321264, 2.470605399},
        {76, 0.572455496, 0.69214, 1.322847475, 1.576978288, 2.486650123},
        {77, 0.574773541, 0.694063, 1.320726433, 1.572726739, 2.502592193},
        {78, 0.57705266, 0.695949, 1.318648189, 1.56856351, 2.51843355},
        {79, 0.579293945, 0.697801, 1.316608527, 1.564485639, 2.534176075},
        {80, 0.581498449, 0.699619, 1.314607817, 1.5604903, 2.549821591},
        {81, 0.583667181, 0.701404, 1.31264488, 1.556574794, 2.565371867},
        {82, 0.585801111, 0.703158, 1.310717008, 1.552736544, 2.580828616},
        {83, 0.587901176, 0.704881, 1.308824522, 1.548973085, 2.596193505},
        {84, 0.589968274, 0.706574, 1.306966208, 1.545282062, 2.611468147},
        {85, 0.592003271, 0.708239, 1.30513934, 1.541661221, 2.626654113},
        {86, 0.594007002, 0.709875, 1.303345699, 1.538108402, 2.641752926},
        {87, 0.595980272, 0.711483, 1.301584038, 1.534621538, 2.656766067},
        {88, 0.597923856, 0.713065, 1.299851611, 1.531198648, 2.671694976},
        {89, 0.599838501, 0.714621, 1.298148658, 1.527837832, 2.686541052},
        {90, 0.601724928, 0.716152, 1.296473918, 1.524537268, 2.701305657},
        {91, 0.603583836, 0.717659, 1.294826125, 1.521295206, 2.715990116},
        {92, 0.605415895, 0.719141, 1.293206969, 1.518109968, 2.730595718},
        {93, 0.607221755, 0.720601, 1.291612207, 1.514979938, 2.745123717},
        {94, 0.609002045, 0.722037, 1.290044985, 1.511903568, 2.759575337},
        {95, 0.610757369, 0.723452, 1.288501061, 1.508879366, 2.773951768},
        {96, 0.612488316, 0.724845, 1.286982086, 1.505905898, 2.788254169},
        {97, 0.614195451, 0.726217, 1.285486755, 1.502981784, 2.80248367},
        {98, 0.615879325, 0.727569, 1.284013767, 1.500105697, 2.816641375},
        {99, 0.617540467, 0.728901, 1.282563278, 1.497276355, 2.830728358},
        {100, 0.619179392, 0.730213, 1.281135435, 1.494492528, 2.844745665},
        {101, 0.620796598, 0.731506, 1.27972892, 1.491753026, 2.858694321},
        {102, 0.622392568, 0.732781, 1.278342419, 1.489056706, 2.872575323},
        {103, 0.623967768, 0.734038, 1.276976067, 1.486402461, 2.886389644},
        {104, 0.625522652, 0.735277, 1.275629989, 1.483789226, 2.900138235},
        {105, 0.627057659, 0.736498, 1.274304302, 1.481215973, 2.913822025},
        {106, 0.628573214, 0.737703, 1.272996238, 1.478681708, 2.92744192},
        {107, 0.630069731, 0.738892, 1.271705915, 1.476185471, 2.940998806},
        {108, 0.631547611, 0.740064, 1.270434877, 1.473726336, 2.954493549},
        {109, 0.633007242, 0.741221, 1.269180358, 1.471303407, 2.967926994},
        {110, 0.634449003, 0.742362, 1.267943892, 1.468915817, 2.981299969},
        {111, 0.63587326, 0.743488, 1.266724143, 1.466562729, 2.994613282},
        {112, 0.63728037, 0.744599, 1.265521204, 1.464243332, 3.007867723},
        {113, 0.638670678, 0.745696, 1.26433374, 1.461956842, 3.021064067},
        {114, 0.640044521, 0.746779, 1.26316184, 1.459702501, 3.034203069},
        {115, 0.641402226, 0.747849, 1.26200417, 1.457479573, 3.047285471},
        {116, 0.642744111, 0.748904, 1.260863646, 1.455287346, 3.060311996},
        {117, 0.644070485, 0.749947, 1.259736095, 1.45312513, 3.073283354},
        {118, 0.645381648, 0.750976, 1.258624423, 1.450992258, 3.086200239},
        {119, 0.646677892, 0.751993, 1.257525876, 1.448888081, 3.099063331},
        {120, 0.647959504, 0.752997, 1.256441937, 1.44681197, 3.111873297},
        {121, 0.649226758, 0.753989, 1.255371264, 1.444763318, 3.124630788},
        {122, 0.650479926, 0.75497, 1.25431252, 1.442741532, 3.137336444},
        {123, 0.65171927, 0.7563, 1.25276148, 1.440746039, 3.149990891},
        {124, 0.652945045, 0.756895, 1.252236687, 1.438776283, 3.162594743},
        {125, 0.654157501, 0.757841, 1.251216915, 1.436831722, 3.1751486},
        {126, 0.655356881, 0.758776, 1.250209319, 1.434911832, 3.187653054},
        {127, 0.65654342, 0.7597, 1.249213954, 1.433016104, 3.200108682},
        {128, 0.657717349, 0.760614, 1.248229481, 1.431144042, 3.212516051},
        {129, 0.658878894, 0.761517, 1.247257345, 1.429295164, 3.224875718},
        {130, 0.660028273, 0.76241, 1.246296206, 1.427469003, 3.237188228},
        {131, 0.6611657, 0.763293, 1.245346113, 1.425665104, 3.249454117},
        {132, 0.662291384, 0.764166, 1.244407114, 1.423883025, 3.261673909},
        {133, 0.663405527, 0.765029, 1.243479254, 1.422122335, 3.273848121},
        {134, 0.664508329, 0.765883, 1.242561194, 1.420382617, 3.285977258},
        {135, 0.665599982, 0.766728, 1.241652981, 1.418663463, 3.298061816},
        {136, 0.666680677, 0.767563, 1.240756035, 1.416964476, 3.310102284},
        {137, 0.667750597, 0.76839, 1.239867641, 1.415285271, 3.32209914},
        {138, 0.668809923, 0.769208, 1.238989218, 1.413625472, 3.334052855},
        {139, 0.66985883, 0.770017, 1.238120803, 1.411984714, 3.345963889},
        {140, 0.670897491, 0.770817, 1.237262431, 1.410362641, 3.357832698},
        {141, 0.671926073, 0.77161, 1.236411398, 1.408758904, 3.369659726},
        {142, 0.672944741, 0.772394, 1.235570481, 1.407173167, 3.381445412},
        {143, 0.673953654, 0.77317, 1.234738344, 1.405605099, 3.393190185},
        {144, 0.67495297, 0.773938, 1.23391502, 1.404054379, 3.404894469},
        {145, 0.675942842, 0.774698, 1.233100542, 1.402520694, 3.416558679},
        {146, 0.676923419, 0.775451, 1.232293579, 1.401003739, 3.428183224},
        {147, 0.677894848, 0.776196, 1.231495524, 1.399503215, 3.439768505},
        {148, 0.678857271, 0.776934, 1.230705045, 1.398018833, 3.451314918},
        {149, 0.67981083, 0.777664, 1.229923533, 1.396550308, 3.462822851},
        {150, 0.68075566, 0.778387, 1.229149654, 1.395097365, 3.474292685},
        {151, 0.681691897, 0.779103, 1.228383438, 1.393659733, 3.485724797},
        {152, 0.682619671, 0.779813, 1.227623558, 1.392237149, 3.497119556},
        {153, 0.68353911, 0.780515, 1.226872748, 1.390829356, 3.508477326},
        {154, 0.68445034, 0.781211, 1.226128328, 1.389436101, 3.519798465},
        {155, 0.685353485, 0.7819, 1.225391673, 1.388057142, 3.531083323},
        {156, 0.686248663, 0.782583, 1.224661459, 1.386692236, 3.542332249},
        {157, 0.687135994, 0.783259, 1.223939058, 1.385341152, 3.553545582},
        {158, 0.688015593, 0.783929, 1.223223144, 1.384003659, 3.564723658},
        {159, 0.688887572, 0.784593, 1.222513742, 1.382679535, 3.575866809},
        {160, 0.689752043, 0.78525, 1.221812217, 1.381368562, 3.586975357},
        {161, 0.690609114, 0.785902, 1.221115902, 1.380070525, 3.598049625},
        {162, 0.691458891, 0.786547, 1.220427504, 1.378785217, 3.609089927},
        {163, 0.692301479, 0.787187, 1.21974436, 1.377512432, 3.620096574},
        {164, 0.693136979, 0.787821, 1.219067831, 1.376251973, 3.631069872},
        {165, 0.693965491, 0.78845, 1.218396599, 1.375003643, 3.642010121},
        {166, 0.694787114, 0.789073, 1.217732019, 1.373767252, 3.652917619},
        {167, 0.695601943, 0.78969, 1.21707411, 1.372542614, 3.663792657},
        {168, 0.696410073, 0.790302, 1.216421553, 1.371329545, 3.674635524},
        {169, 0.697211597, 0.790909, 1.215774367, 1.370127866, 3.685446503},
        {170, 0.698006604, 0.79151, 1.215133902, 1.368937404, 3.696225874},
        {171, 0.698795185, 0.792107, 1.214497511, 1.367757986, 3.706973913},
        {172, 0.699577425, 0.792697, 1.213869202, 1.366589446, 3.71769089},
        {173, 0.70035341, 0.793283, 1.213245001, 1.365431619, 3.728377075},
        {174, 0.701123225, 0.793864, 1.212626254, 1.364284344, 3.739032729},
        {175, 0.701886952, 0.79444, 1.212012974, 1.363147464, 3.749658114},
        {176, 0.702644671, 0.795012, 1.211403853, 1.362020826, 3.760253487},
        {177, 0.703396462, 0.795578, 1.210801554, 1.360904277, 3.770819098},
        {178, 0.704142402, 0.79614, 1.210203443, 1.359797671, 3.781355199},
        {179, 0.704882569, 0.796697, 1.209610857, 1.358700862, 3.791862035},
        {180, 0.705617036, 0.79725, 1.209022487, 1.357613708, 3.802339848},
        {181, 0.706345879, 0.797798, 1.208439669, 1.356536071, 3.812788877},
        {182, 0.707069167, 0.798342, 1.207861095, 1.355467814, 3.823209358},
        {183, 0.707786974, 0.798881, 1.207288097, 1.354408804, 3.833601524},
        {184, 0.708499369, 0.799416, 1.206719368, 1.353358908, 3.843965605},
        {185, 0.709206419, 0.799947, 1.206154923, 1.352318, 3.854301826},
        {186, 0.709908193, 0.800474, 1.205594772, 1.351285953, 3.864610412},
        {187, 0.710604756, 0.800996, 1.205040242, 1.350262643, 3.874891581},
        {188, 0.711296174, 0.801514, 1.20449003, 1.34924795, 3.885145553},
        {189, 0.71198251, 0.802029, 1.203942835, 1.348241755, 3.895372542},
        {190, 0.712663826, 0.802539, 1.203401292, 1.34724394, 3.905572759},
        {191, 0.713340185, 0.803045, 1.202864099, 1.346254393, 3.915746414},
        {192, 0.714011646, 0.803548, 1.202329958, 1.345273, 3.925893712},
        {193, 0.71467827, 0.804046, 1.201801497, 1.344299652, 3.936014859},
        {194, 0.715340115, 0.804541, 1.201276109, 1.34333424, 3.946110054},
        {195, 0.715997238, 0.805032, 1.200755112, 1.342376659, 3.956179496},
        {196, 0.716649697, 0.805519, 1.200238515, 1.341426805, 3.966223382},
        {197, 0.717297545, 0.806003, 1.199725022, 1.340484575, 3.976241904},
        {198, 0.717940839, 0.806483, 1.199215947, 1.33954987, 3.986235255},
        {199, 0.718579631, 0.80696, 1.198709997, 1.33862259, 3.996203623},
        {200, 0.719213975, 0.807433, 1.198208482, 1.337702639, 4.006147193}};
}
| |
package com.voidedtech.notifytosms;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.telephony.TelephonyManager;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Snapshot of the application's settings, read once from the default
 * SharedPreferences (plus the device's line-1 phone number) at construction
 * time. All getters return the values captured at construction; the object
 * is never refreshed.
 */
class Settings {
    /** Notification filters to apply. */
    private final List<String> filters;
    /** Whether the notification service is enabled. */
    private boolean enabled;
    /** Whether to notify while the screen is on. */
    private boolean notifyOn;
    /** Whether the message should always be prepared for sending. */
    private boolean sendAlways;
    /** Whether the date should be included in email subjects. */
    private boolean includeDateInSubject;
    /** Message type to send; defaults to SMS. */
    private MessageFactory.Types type = MessageFactory.Types.SMS;
    /** SMTP port. */
    private String smtpPort = "";
    /** SMTP server. */
    private String smtpServer = "";
    /** Email user name. */
    private String userName = "";
    /** Email password. */
    private String password = "";
    /** Device phone number; "" when unavailable. */
    private String phone = "";
    /** Delay value. */
    private String delayValue = "";

    /**
     * Creates an empty, disabled settings object (no filters, no context).
     */
    public Settings(){
        this(new ArrayList<String>(), null);
    }

    /**
     * Loads settings from the default shared preferences.
     *
     * @param filter  filters to apply; {@code null} is treated as an empty list
     * @param context context used to read preferences and the phone number;
     *                when {@code null} all fields keep their defaults and the
     *                service is disabled
     */
    public Settings(List<String> filter, Context context){
        if (filter == null){
            this.filters = new ArrayList<>();
        } else {
            this.filters = filter;
        }
        if (context != null){
            // BUGFIX: getLine1Number() may return null (no SIM, or the carrier
            // does not expose the number); normalize to "" so getPhone() keeps
            // the empty-string default documented on the field.
            String line1 = ((TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE)).getLine1Number();
            this.phone = line1 != null ? line1 : "";
            SharedPreferences pref = PreferenceManager.getDefaultSharedPreferences(context);
            // Iterate over the stored keys so absent preferences keep defaults.
            for(Map.Entry<String,?> entry : pref.getAll().entrySet()){
                String key = entry.getKey();
                Logger.debug(Settings.class, key);
                switch (key){
                    case SettingsActivity.ENABLED_ITEM:
                        this.enabled = pref.getBoolean(key, false);
                        break;
                    case SettingsActivity.NOTIFY_ITEM:
                        this.notifyOn = pref.getBoolean(key, false);
                        break;
                    case SettingsActivity.PASS_TEXT:
                        this.password = pref.getString(key, "");
                        break;
                    case SettingsActivity.USER_TEXT:
                        this.userName = pref.getString(key, "");
                        break;
                    case SettingsActivity.SMTP_TEXT:
                        this.smtpServer = pref.getString(key, "");
                        break;
                    case SettingsActivity.PORT_TEXT:
                        this.smtpPort = pref.getString(key, "");
                        break;
                    case SettingsActivity.METHOD_ITEM:
                        this.type = MessageFactory.from(pref.getString(key, "1"));
                        break;
                    case SettingsActivity.ALWAYS_SEND:
                        this.sendAlways = pref.getBoolean(key, false);
                        break;
                    case SettingsActivity.INCLUDE_DATE:
                        this.includeDateInSubject = pref.getBoolean(key, false);
                        break;
                    case SettingsActivity.DELAY_ITEM:
                        this.delayValue = pref.getString(key, Delay.DEFAULT_VALUE);
                        break;
                }
            }
        }else{
            this.enabled = false;
        }
    }

    /**
     * Gets the SMTP port.
     * @return port number (as configured text)
     */
    public String getSmtpPort(){
        return this.smtpPort;
    }

    /**
     * Gets the SMTP server.
     * @return server to use
     */
    public String getSmtpServer(){
        return this.smtpServer;
    }

    /**
     * Gets the device phone number.
     * @return phone number, or "" when unavailable
     */
    public String getPhone(){
        return this.phone;
    }

    /**
     * Gets the email user name.
     * @return email user name
     */
    public String getUserName(){
        return this.userName;
    }

    /**
     * Gets the email password.
     * @return user password
     */
    public String getPassword(){
        return this.password;
    }

    /**
     * Gets whether to display a notification when the screen is on.
     * @return true if display should happen
     */
    public boolean isNotifyPower(){
        return this.notifyOn;
    }

    /**
     * Gets the configured messaging type.
     * @return messaging type
     */
    public MessageFactory.Types getType(){
        return this.type;
    }

    /**
     * Gets whether the notification service should be enabled.
     * @return true if enabled
     */
    public boolean isEnabled(){
        return this.enabled;
    }

    /**
     * Gets a value indicating whether to always send the message.
     * @return true if the message should always be prepared for sending
     */
    public boolean isSendAlways(){
        return this.sendAlways;
    }

    /**
     * Indicates if the date should be included in email subjects.
     * @return true if it should be included
     */
    public boolean isIncludeDateInSubject(){
        return this.includeDateInSubject;
    }

    /**
     * Gets the filters to use.
     * NOTE(review): returns the internal (mutable) list; callers can modify it.
     * @return filter set
     */
    public List<String> getFilters(){
        return this.filters;
    }

    /**
     * Gets the delay setting.
     * @return delay setting
     */
    public String getDelayValue(){
        return this.delayValue;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.common.utils;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.concurrent.TimeUnit;
import org.I0Itec.zkclient.ZkClient;
import org.apache.zookeeper.server.ServerConfig;
import org.apache.zookeeper.server.ZooKeeperServerMain;
import org.apache.zookeeper.server.admin.AdminServer;
import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ZkStarter {
private static final Logger LOGGER = LoggerFactory.getLogger(ZkStarter.class);
/** Default client port used by the embedded test ZooKeeper server. */
public static final int DEFAULT_ZK_TEST_PORT = 2191;
/** Connect string ("host:port") for the default embedded test server. */
public static final String DEFAULT_ZK_STR = "localhost:" + DEFAULT_ZK_TEST_PORT;
// Retry budget for ZK client connection attempts; presumably used when
// waiting for the embedded server to come up — TODO confirm against usage.
private static final int DEFAULT_ZK_CLIENT_RETRIES = 10;
public static class ZookeeperInstance {
private PublicZooKeeperServerMain _serverMain;
private String _dataDirPath;
private ZookeeperInstance(PublicZooKeeperServerMain serverMain, String dataDirPath) {
_serverMain = serverMain;
_dataDirPath = dataDirPath;
}
}
/**
* Silly class to make protected methods public.
*/
static class PublicZooKeeperServerMain extends ZooKeeperServerMain {
@Override
public void initializeAndRun(String[] args)
throws QuorumPeerConfig.ConfigException, IOException, AdminServer.AdminServerException {
// org.apache.log4j.jmx.* is not compatible under log4j-1.2-api, which provides the backward compatibility for
// log4j 1.* api for log4j2. In order to avoid 'class not found error', the following line disables log4j jmx
// bean registration for local zookeeper instance
System.setProperty("zookeeper.jmx.log4j.disable", "true");
System.setProperty("zookeeper.admin.enableServer", "false");
super.initializeAndRun(args);
}
@Override
public void runFromConfig(final ServerConfig config)
throws IOException, AdminServer.AdminServerException {
ServerConfig newServerConfig = new ServerConfig() {
public void parse(String[] args) {
config.parse(args);
}
public void parse(String path)
throws QuorumPeerConfig.ConfigException {
config.parse(path);
}
public void readFrom(QuorumPeerConfig otherConfig) {
config.readFrom(otherConfig);
}
public InetSocketAddress getClientPortAddress() {
return config.getClientPortAddress();
}
public File getDataDir() {
return config.getDataDir();
}
public File getDataLogDir() {
return config.getDataLogDir();
}
public int getTickTime() {
return config.getTickTime();
}
public int getMaxClientCnxns() {
dataDir = getDataDir();
dataLogDir = getDataLogDir();
tickTime = getTickTime();
minSessionTimeout = getMinSessionTimeout();
maxSessionTimeout = getMaxSessionTimeout();
maxClientCnxns = 0;
return 0;
}
public int getMinSessionTimeout() {
return config.getMinSessionTimeout();
}
public int getMaxSessionTimeout() {
return config.getMaxSessionTimeout();
}
};
newServerConfig.getMaxClientCnxns();
super.runFromConfig(newServerConfig);
}
@Override
public void shutdown() {
super.shutdown();
}
}
/**
* Starts an empty local Zk instance on the default port
*/
public static ZookeeperInstance startLocalZkServer() {
return startLocalZkServer(DEFAULT_ZK_TEST_PORT);
}
/**
* Starts a local Zk instance with a generated empty data directory
* @param port The port to listen on
*/
public static ZookeeperInstance startLocalZkServer(final int port) {
return startLocalZkServer(port,
org.apache.commons.io.FileUtils.getTempDirectoryPath() + File.separator + "test-" + System.currentTimeMillis());
}
/**
* Starts a local Zk instance
* @param port The port to listen on
* @param dataDirPath The path for the Zk data directory
*/
public synchronized static ZookeeperInstance startLocalZkServer(final int port, final String dataDirPath) {
// Start the local ZK server
try {
final PublicZooKeeperServerMain zookeeperServerMain = new PublicZooKeeperServerMain();
final String[] args = new String[]{Integer.toString(port), dataDirPath};
new Thread() {
@Override
public void run() {
try {
zookeeperServerMain.initializeAndRun(args);
} catch (Exception e) {
LOGGER.warn("Caught exception while starting ZK", e);
}
}
}.start();
// Wait until the ZK server is started
for (int retry = 0; retry < DEFAULT_ZK_CLIENT_RETRIES; retry++) {
try {
Thread.sleep(1000L);
ZkClient client = new ZkClient("localhost:" + port, 1000 * (DEFAULT_ZK_CLIENT_RETRIES - retry));
client.waitUntilConnected(DEFAULT_ZK_CLIENT_RETRIES - retry, TimeUnit.SECONDS);
client.close();
break;
} catch (Exception e) {
if (retry < DEFAULT_ZK_CLIENT_RETRIES - 1) {
LOGGER.warn("Failed to connect to zk server, retry: {}", retry, e);
} else {
LOGGER.warn("Failed to connect to zk server.", e);
throw e;
}
}
}
return new ZookeeperInstance(zookeeperServerMain, dataDirPath);
} catch (Exception e) {
LOGGER.warn("Caught exception while starting ZK", e);
throw new RuntimeException(e);
}
}
/**
* Stops a local Zk instance, deleting its data directory
*/
public static void stopLocalZkServer(final ZookeeperInstance instance) {
stopLocalZkServer(instance, true);
}
/**
* Stops a local Zk instance.
* @param deleteDataDir Whether or not to delete the data directory
*/
public synchronized static void stopLocalZkServer(final ZookeeperInstance instance, final boolean deleteDataDir) {
if (instance._serverMain != null) {
try {
// Shut down ZK
instance._serverMain.shutdown();
instance._serverMain = null;
// Delete the data dir
if (deleteDataDir) {
org.apache.commons.io.FileUtils.deleteDirectory(new File(instance._dataDirPath));
}
} catch (Exception e) {
LOGGER.warn("Caught exception while stopping ZK server", e);
throw new RuntimeException(e);
}
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.