gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.spi.discovery.tcp;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import javax.cache.Cache;
import javax.cache.event.CacheEntryEvent;
import javax.cache.event.CacheEntryUpdatedListener;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteClientDisconnectedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cache.query.ContinuousQuery;
import org.apache.ignite.cache.query.QueryCursor;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.events.DiscoveryEvent;
import org.apache.ignite.events.Event;
import org.apache.ignite.internal.IgniteClientDisconnectedCheckedException;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.IgniteKernal;
import org.apache.ignite.internal.client.util.GridConcurrentHashSet;
import org.apache.ignite.internal.cluster.ClusterTopologyCheckedException;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteInClosure;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.spi.IgniteSpiException;
import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.GridTestUtils.SF;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Ignore;
import org.junit.Test;
import static org.apache.ignite.events.EventType.EVT_JOB_MAPPED;
import static org.apache.ignite.events.EventType.EVT_NODE_FAILED;
import static org.apache.ignite.events.EventType.EVT_NODE_LEFT;
import static org.apache.ignite.events.EventType.EVT_TASK_FAILED;
import static org.apache.ignite.events.EventType.EVT_TASK_FINISHED;
/**
* Test for {@link TcpDiscoverySpi}.
*/
/**
 * Test for {@link TcpDiscoverySpi}: starts and stops many server and client nodes
 * concurrently and verifies that discovery survives continuous topology churn.
 */
public class TcpDiscoveryMultiThreadedTest extends GridCommonAbstractTest {
    /** Number of server nodes started by the tests. */
    private static final int GRID_CNT = 5;

    /** Number of client nodes started by the tests. */
    private static final int CLIENT_GRID_CNT = 5;

    /** Per-thread client-mode override; takes precedence over {@link #clientFlagGlobal}. */
    private static final ThreadLocal<Boolean> clientFlagPerThread = new ThreadLocal<>();

    /** Per-thread node ID override, consumed (and cleared) by {@link #getConfiguration(String)}. */
    private static final ThreadLocal<UUID> nodeId = new ThreadLocal<>();

    /** Default client-mode flag for threads that did not set {@link #clientFlagPerThread}. */
    private static volatile boolean clientFlagGlobal;

    /** IDs of nodes reported as failed or left via discovery events. */
    private static GridConcurrentHashSet<UUID> failedNodes = new GridConcurrentHashSet<>();

    /**
     * @return Client node flag for the current thread.
     */
    private static boolean client() {
        Boolean client = clientFlagPerThread.get();

        return client != null ? client : clientFlagGlobal;
    }

    /**
     * @throws Exception If fails.
     */
    public TcpDiscoveryMultiThreadedTest() throws Exception {
        super(false);
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        cfg.setConsistentId(igniteInstanceName);

        // Apply (and consume) a one-shot node ID set by the starting thread.
        UUID id = nodeId.get();

        if (id != null) {
            cfg.setNodeId(id);

            nodeId.set(null);
        }

        if (client())
            cfg.setClientMode(true);

        ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setJoinTimeout(60_000);
        ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setNetworkTimeout(10_000);

        // Record every failed/left node so start failures of already-failed nodes can be tolerated.
        int[] evts = {EVT_NODE_FAILED, EVT_NODE_LEFT};

        Map<IgnitePredicate<? extends Event>, int[]> lsnrs = new HashMap<>();

        lsnrs.put(new IgnitePredicate<Event>() {
            @Override public boolean apply(Event evt) {
                DiscoveryEvent discoveryEvt = (DiscoveryEvent)evt;

                failedNodes.add(discoveryEvt.eventNode().id());

                return true;
            }
        }, evts);

        cfg.setLocalEventListeners(lsnrs);

        cfg.setCacheConfiguration();

        cfg.setIncludeEventTypes(EVT_TASK_FAILED, EVT_TASK_FINISHED, EVT_JOB_MAPPED);

        cfg.setIncludeProperties();

        ((TcpCommunicationSpi)cfg.getCommunicationSpi()).setSharedMemoryPort(-1);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();

        super.afterTest();

        failedNodes.clear();
    }

    /** {@inheritDoc} */
    @Override protected long getTestTimeout() {
        return 5 * 60 * 1000;
    }

    /**
     * Continuously restarts client nodes against a stable set of servers.
     *
     * @throws Exception If any error occurs.
     */
    @Test
    public void testMultiThreadedClientsRestart() throws Exception {
        final AtomicBoolean done = new AtomicBoolean();

        try {
            clientFlagGlobal = false;

            info("Test timeout: " + (getTestTimeout() / (60 * 1000)) + " min.");

            startGridsMultiThreaded(GRID_CNT);

            clientFlagGlobal = true;

            startGridsMultiThreaded(GRID_CNT, CLIENT_GRID_CNT);

            final AtomicInteger clientIdx = new AtomicInteger(GRID_CNT);

            IgniteInternalFuture<?> fut1 = multithreadedAsync(
                new Callable<Object>() {
                    @Override public Object call() throws Exception {
                        clientFlagPerThread.set(true);

                        int idx = clientIdx.getAndIncrement();

                        while (!done.get()) {
                            stopGrid(idx, true);

                            startGrid(idx);
                        }

                        return null;
                    }
                },
                CLIENT_GRID_CNT,
                "client-restart");

            Thread.sleep(SF.applyLB(10_000, 30_000));

            done.set(true);

            fut1.get();
        }
        finally {
            // Make sure restart threads stop even if the test body throws.
            done.set(true);
        }
    }

    /**
     * @throws Exception If any error occurs.
     */
    @Ignore("https://issues.apache.org/jira/browse/IGNITE-1123")
    @Test
    public void testMultiThreadedClientsServersRestart() throws Throwable {
        multiThreadedClientsServersRestart(GRID_CNT, CLIENT_GRID_CNT);
    }

    /**
     * @throws Exception If any error occurs.
     */
    @Ignore("https://issues.apache.org/jira/browse/IGNITE-1123")
    @Test
    public void testMultiThreadedServersRestart() throws Throwable {
        multiThreadedClientsServersRestart(GRID_CNT * 2, 0);
    }

    /**
     * Concurrently restarts both servers and (optionally) clients until the time budget expires,
     * failing fast if any restart thread records an unexpected error.
     *
     * @param srvs Number of servers.
     * @param clients Number of clients.
     * @throws Throwable If any error occurs.
     */
    private void multiThreadedClientsServersRestart(int srvs, int clients) throws Throwable {
        final AtomicBoolean done = new AtomicBoolean();

        try {
            clientFlagGlobal = false;

            info("Test timeout: " + (getTestTimeout() / (60 * 1000)) + " min.");

            startGridsMultiThreaded(srvs);

            IgniteInternalFuture<?> clientFut = null;

            final AtomicReference<Throwable> error = new AtomicReference<>();

            if (clients > 0) {
                clientFlagGlobal = true;

                startGridsMultiThreaded(srvs, clients);

                final BlockingQueue<Integer> clientStopIdxs = new LinkedBlockingQueue<>();

                for (int i = srvs; i < srvs + clients; i++)
                    clientStopIdxs.add(i);

                final AtomicInteger clientStartIdx = new AtomicInteger(9000);

                clientFut = multithreadedAsync(
                    new Callable<Object>() {
                        @Override public Object call() throws Exception {
                            try {
                                clientFlagPerThread.set(true);

                                while (!done.get() && error.get() == null) {
                                    Integer stopIdx = clientStopIdxs.take();

                                    log.info("Stop client: " + stopIdx);

                                    stopGrid(stopIdx);

                                    while (!done.get() && error.get() == null) {
                                        // Generate unique name to simplify debugging.
                                        int startIdx = clientStartIdx.getAndIncrement();

                                        log.info("Start client: " + startIdx);

                                        UUID id = UUID.randomUUID();

                                        nodeId.set(id);

                                        try {
                                            Ignite ignite = startGrid(startIdx);

                                            assertTrue(ignite.configuration().isClientMode());

                                            clientStopIdxs.add(startIdx);

                                            break;
                                        }
                                        catch (Exception e) {
                                            // Disconnects and topology races are expected during churn;
                                            // anything else is a test failure.
                                            if (X.hasCause(e, IgniteClientDisconnectedCheckedException.class) ||
                                                X.hasCause(e, IgniteClientDisconnectedException.class))
                                                log.info("Client disconnected: " + e);
                                            else if (X.hasCause(e, ClusterTopologyCheckedException.class))
                                                log.info("Client failed to start: " + e);
                                            else {
                                                if (failedNodes.contains(id) && X.hasCause(e, IgniteSpiException.class))
                                                    log.info("Client failed: " + e);
                                                else
                                                    throw e;
                                            }
                                        }
                                    }
                                }
                            }
                            catch (Throwable e) {
                                log.error("Unexpected error: " + e, e);

                                error.compareAndSet(null, e);

                                return null;
                            }

                            return null;
                        }
                    },
                    clients,
                    "client-restart");
            }

            final BlockingQueue<Integer> srvStopIdxs = new LinkedBlockingQueue<>();

            for (int i = 0; i < srvs; i++)
                srvStopIdxs.add(i);

            final AtomicInteger srvStartIdx = new AtomicInteger(srvs + clients);

            IgniteInternalFuture<?> srvFut = multithreadedAsync(
                new Callable<Object>() {
                    @Override public Object call() throws Exception {
                        try {
                            clientFlagPerThread.set(false);

                            while (!done.get() && error.get() == null) {
                                int stopIdx = srvStopIdxs.take();

                                U.sleep(50);

                                Thread.currentThread().setName("stop-server-" + getTestIgniteInstanceName(stopIdx));

                                log.info("Stop server: " + stopIdx);

                                stopGrid(stopIdx);

                                // Generate unique name to simplify debugging.
                                int startIdx = srvStartIdx.getAndIncrement();

                                Thread.currentThread().setName("start-server-" + getTestIgniteInstanceName(startIdx));

                                log.info("Start server: " + startIdx);

                                try {
                                    Ignite ignite = startGrid(startIdx);

                                    assertFalse(ignite.configuration().isClientMode());

                                    srvStopIdxs.add(startIdx);
                                }
                                catch (IgniteCheckedException e) {
                                    log.info("Failed to start: " + e);
                                }
                            }
                        }
                        catch (Throwable e) {
                            log.error("Unexpected error: " + e, e);

                            error.compareAndSet(null, e);

                            return null;
                        }

                        return null;
                    }
                },
                srvs - 1,
                "server-restart");

            final long endTime = System.currentTimeMillis() + SF.applyLB(10_000, 30_000);

            while (System.currentTimeMillis() < endTime) {
                Thread.sleep(1000);

                if (error.get() != null) {
                    Throwable err = error.get();

                    U.error(log, "Test failed: " + err.getMessage());

                    done.set(true);

                    if (clientFut != null)
                        clientFut.cancel();

                    srvFut.cancel();

                    throw err;
                }
            }

            log.info("Stop test.");

            done.set(true);

            if (clientFut != null)
                clientFut.get();

            srvFut.get();
        }
        finally {
            done.set(true);
        }
    }

    /**
     * Verifies that all nodes agree on the same topology version.
     *
     * @throws Exception If any error occurs.
     */
    @Test
    public void testTopologyVersion() throws Exception {
        clientFlagGlobal = false;

        startGridsMultiThreaded(GRID_CNT);

        long prev = 0;

        for (Ignite g : G.allGrids()) {
            IgniteKernal kernal = (IgniteKernal)g;

            long ver = kernal.context().discovery().topologyVersion();

            info("Top ver: " + ver);

            if (prev == 0)
                prev = ver;
            else
                // Every node must report the same topology version as the first one.
                assertEquals(prev, ver);
        }

        info("Test finished.");
    }

    /**
     * Starts many nodes while concurrently stopping the coordinator candidates.
     *
     * @throws Exception If any error occurs.
     */
    @Test
    public void testMultipleStartOnCoordinatorStop() throws Exception {
        for (int k = 0; k < 3; k++) {
            log.info("Iteration: " + k);

            clientFlagGlobal = false;

            final int START_NODES = 5;
            final int JOIN_NODES = 10;

            startGrids(START_NODES);

            final CyclicBarrier barrier = new CyclicBarrier(JOIN_NODES + 1);

            final AtomicInteger startIdx = new AtomicInteger(START_NODES);

            IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new Callable<Object>() {
                @Override public Object call() throws Exception {
                    int idx = startIdx.getAndIncrement();

                    Thread.currentThread().setName("start-thread-" + idx);

                    barrier.await();

                    Ignite ignite = startGrid(idx);

                    assertFalse(ignite.configuration().isClientMode());

                    log.info("Started node: " + ignite.name());

                    return null;
                }
            }, JOIN_NODES, "start-thread");

            barrier.await();

            // Random delay so coordinator stop races with joins at varying points.
            U.sleep(ThreadLocalRandom.current().nextInt(10, 100));

            for (int i = 0; i < START_NODES; i++)
                stopGrid(i);

            fut.get();

            stopAllGrids();
        }
    }

    /**
     * Joins nodes while custom discovery events (cache create/destroy) flow and coordinators fail.
     *
     * @throws Exception If failed.
     */
    @Ignore("https://issues.apache.org/jira/browse/IGNITE-10198")
    @Test
    public void testCustomEventOnJoinCoordinatorStop() throws Exception {
        for (int k = 0; k < 10; k++) {
            log.info("Iteration: " + k);

            clientFlagGlobal = false;

            final int START_NODES = 5;
            final int JOIN_NODES = 5;

            startGrids(START_NODES);

            final AtomicInteger startIdx = new AtomicInteger(START_NODES);

            final AtomicBoolean stop = new AtomicBoolean();

            IgniteInternalFuture<?> fut1 = GridTestUtils.runAsync(new Callable<Void>() {
                @Override public Void call() throws Exception {
                    String cacheName = DEFAULT_CACHE_NAME + "-tmp";

                    Ignite ignite = ignite(START_NODES - 1);

                    // Keep custom discovery messages flowing during joins.
                    while (!stop.get()) {
                        CacheConfiguration ccfg = new CacheConfiguration(cacheName);

                        ignite.createCache(ccfg);

                        ignite.destroyCache(ccfg.getName());
                    }

                    return null;
                }
            });

            try {
                final CyclicBarrier barrier = new CyclicBarrier(JOIN_NODES + 1);

                IgniteInternalFuture<?> fut2 = GridTestUtils.runMultiThreadedAsync(new Callable<Object>() {
                    @Override public Object call() throws Exception {
                        int idx = startIdx.getAndIncrement();

                        Thread.currentThread().setName("start-thread-" + idx);

                        barrier.await();

                        Ignite ignite = startGrid(idx);

                        assertFalse(ignite.configuration().isClientMode());

                        log.info("Started node: " + ignite.name());

                        IgniteCache<Object, Object> cache = ignite.getOrCreateCache(DEFAULT_CACHE_NAME);

                        ContinuousQuery<Object, Object> qry = new ContinuousQuery<>();

                        qry.setLocalListener(new CacheEntryUpdatedListener<Object, Object>() {
                            @Override public void onUpdated(Iterable<CacheEntryEvent<?, ?>> evts) {
                                // No-op.
                            }
                        });

                        QueryCursor<Cache.Entry<Object, Object>> cur = cache.query(qry);

                        cur.close();

                        return null;
                    }
                }, JOIN_NODES, "start-thread");

                barrier.await();

                U.sleep(ThreadLocalRandom.current().nextInt(10, 100));

                for (int i = 0; i < START_NODES - 1; i++) {
                    GridTestUtils.invoke(ignite(i).configuration().getDiscoverySpi(), "simulateNodeFailure");

                    stopGrid(i);
                }

                stop.set(true);

                fut1.get();
                fut2.get();
            }
            finally {
                stop.set(true);

                fut1.get();
            }

            stopAllGrids();
        }
    }

    /**
     * Starts client nodes registering continuous queries while coordinators fail.
     *
     * @throws Exception If failed.
     */
    @Ignore("https://issues.apache.org/jira/browse/IGNITE-10198")
    @Test
    public void testClientContinuousQueryCoordinatorStop() throws Exception {
        for (int k = 0; k < 10; k++) {
            log.info("Iteration: " + k);

            clientFlagGlobal = false;

            final int START_NODES = 5;
            final int JOIN_NODES = 5;

            startGrids(START_NODES);

            ignite(0).createCache(new CacheConfiguration<>(DEFAULT_CACHE_NAME));

            final AtomicInteger startIdx = new AtomicInteger(START_NODES);

            final CyclicBarrier barrier = new CyclicBarrier(JOIN_NODES + 1);

            clientFlagGlobal = true;

            IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new Callable<Object>() {
                @Override public Object call() throws Exception {
                    int idx = startIdx.getAndIncrement();

                    Thread.currentThread().setName("start-thread-" + idx);

                    barrier.await();

                    Ignite ignite = startGrid(idx);

                    assertTrue(ignite.configuration().isClientMode());

                    log.info("Started node: " + ignite.name());

                    IgniteCache<Object, Object> cache = ignite.getOrCreateCache(DEFAULT_CACHE_NAME);

                    for (int i = 0; i < 10; i++) {
                        ContinuousQuery<Object, Object> qry = new ContinuousQuery<>();

                        qry.setLocalListener(new CacheEntryUpdatedListener<Object, Object>() {
                            @Override public void onUpdated(Iterable<CacheEntryEvent<?, ?>> evts) {
                                // No-op.
                            }
                        });

                        cache.query(qry);
                    }

                    return null;
                }
            }, JOIN_NODES, "start-thread");

            barrier.await();

            U.sleep(ThreadLocalRandom.current().nextInt(100, 500));

            for (int i = 0; i < START_NODES - 1; i++) {
                GridTestUtils.invoke(ignite(i).configuration().getDiscoverySpi(), "simulateNodeFailure");

                stopGrid(i);
            }

            fut.get();

            stopAllGrids();
        }
    }

    /**
     * Restarts nodes that register/close continuous queries and then simulate failure.
     *
     * @throws Exception If failed.
     */
    @Ignore("https://issues.apache.org/jira/browse/IGNITE-10249")
    @Test
    public void testCustomEventNodeRestart() throws Exception {
        clientFlagGlobal = false;

        Ignite ignite = startGrid(0);

        ignite.getOrCreateCache(new CacheConfiguration<>(DEFAULT_CACHE_NAME));

        final long stopTime = System.currentTimeMillis() + 60_000;

        GridTestUtils.runMultiThreaded(new IgniteInClosure<Integer>() {
            @Override public void apply(Integer idx) {
                try {
                    while (System.currentTimeMillis() < stopTime) {
                        Ignite ignite = startGrid(idx + 1);

                        IgniteCache<Object, Object> cache = ignite.cache(DEFAULT_CACHE_NAME);

                        int qryCnt = ThreadLocalRandom.current().nextInt(10) + 1;

                        for (int i = 0; i < qryCnt; i++) {
                            ContinuousQuery<Object, Object> qry = new ContinuousQuery<>();

                            qry.setLocalListener(new CacheEntryUpdatedListener<Object, Object>() {
                                @Override public void onUpdated(Iterable<CacheEntryEvent<?, ?>> evts) {
                                    // No-op.
                                }
                            });

                            QueryCursor<Cache.Entry<Object, Object>> cur = cache.query(qry);

                            cur.close();
                        }

                        GridTestUtils.invoke(ignite.configuration().getDiscoverySpi(), "simulateNodeFailure");

                        ignite.close();
                    }
                }
                catch (Exception e) {
                    log.error("Unexpected error: " + e, e);

                    throw new IgniteException(e);
                }
            }
        }, 5, "node-restart");
    }
}
| |
package com.example.Dare;
import android.app.Service;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.os.IBinder;
import android.support.v7.media.MediaRouteSelector;
import android.support.v7.media.MediaRouter;
import android.util.Log;
import android.widget.Toast;
import com.google.android.gms.cast.ApplicationMetadata;
import com.google.android.gms.cast.Cast;
import com.google.android.gms.cast.CastDevice;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.common.api.Status;
import com.ludum.dare.R;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.util.ArrayList;
/**
 * Background {@link Service} that connects to a selected Google Cast device, launches the
 * receiver application, and exchanges JSON command messages with it over a custom channel.
 */
public class SimpleService extends Service {
    /** Cast device chosen by the launching activity (passed via the "device" intent extra). */
    private CastDevice mSelectedDevice;

    /** Play Services client used for all Cast API calls; null when torn down. */
    private GoogleApiClient mApiClient;

    /** Player name passed via the "username" intent extra; may be null if never delivered. */
    private String username;

    private Cast.Listener mCastListener;
    private GoogleApiClient.ConnectionCallbacks mConnectionCallbacks;
    private ConnectionFailedListener mConnectionFailedListener;
    private HelloWorldChannel mHelloWorldChannel;
    private boolean mApplicationStarted;
    private boolean mWaitingForReconnect;
    private String mSessionId;

    /** Guards against registering {@link #receiver} more than once across restarts. */
    private boolean mReceiverRegistered;

    public SimpleService() {
    }

    /** Binding is not supported; this service is start-only. */
    @Override
    public IBinder onBind(Intent intent) {
        throw new UnsupportedOperationException("Not yet implemented");
    }

    @Override
    public void onCreate() {
        Toast.makeText(this, "The new Service was Created", Toast.LENGTH_LONG).show();
    }

    /**
     * Reads the target device and username from the intent, connects to the Cast device,
     * and registers the broadcast receiver that triggers the "startGame" command.
     */
    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        if (intent != null) {
            if (intent.hasExtra("device")) {
                mSelectedDevice = (CastDevice) intent.getExtras().get("device");
            }
            if (intent.hasExtra("username")) {
                username = (String) intent.getExtras().get("username");
            }
            launchReceiver();
        }

        // START_STICKY restarts deliver onStartCommand again; register only once so
        // each broadcast triggers a single onReceive.
        if (!mReceiverRegistered) {
            registerReceiver(receiver, new IntentFilter("com.example.Dare.service.receiver"));
            mReceiverRegistered = true;
        }

        return START_STICKY;
    }

    @Override
    public void onDestroy() {
        teardown();
        if (mReceiverRegistered) {
            unregisterReceiver(receiver);
            mReceiverRegistered = false;
        }
        // Required by the Service lifecycle contract.
        super.onDestroy();
    }

    /**
     * Start the receiver app: builds the Cast API client for the selected device and connects.
     * Application launch happens in {@link ConnectionCallbacks#onConnected(Bundle)}.
     */
    private void launchReceiver() {
        try {
            mCastListener = new Cast.Listener() {
                @Override
                public void onApplicationDisconnected(int errorCode) {
                    Log.d("Application Stopped", "application has stopped");
                    teardown();
                }
            };
            // Connect to Google Play services
            mConnectionCallbacks = new ConnectionCallbacks();
            mConnectionFailedListener = new ConnectionFailedListener();
            Cast.CastOptions.Builder apiOptionsBuilder = Cast.CastOptions.builder(mSelectedDevice, mCastListener);
            mApiClient = new GoogleApiClient.Builder(this)
                    .addApi(Cast.API, apiOptionsBuilder.build())
                    .addConnectionCallbacks(mConnectionCallbacks)
                    .addOnConnectionFailedListener(mConnectionFailedListener)
                    .build();
            mApiClient.connect();
        } catch (Exception e) {
            Log.e("Failed", "Failed launchReceiver", e);
        }
    }

    /**
     * Send a text message to the receiver. No-op (besides logging) when the API client
     * or the message channel is not ready.
     *
     * @param message JSON payload produced by {@link #createJSONString(String, String)}.
     */
    private void sendMessage(String message) {
        if (mApiClient != null && mHelloWorldChannel != null) {
            try {
                Cast.CastApi.sendMessage(mApiClient, mHelloWorldChannel.getNamespace(), message).setResultCallback(new ResultCallback<Status>() {
                    @Override
                    public void onResult(Status result) {
                        if (!result.isSuccess()) {
                            Log.e("Bad Message", "Sending message failed");
                        }
                    }
                });
            } catch (Exception e) {
                Log.e("Exception", "Exception while sending message", e);
            }
        } else {
            //Toast.makeText(MainActivity.this, message, Toast.LENGTH_SHORT).show();
        }
    }

    /**
     * Custom message channel: relays messages from the receiver app to local components
     * via a broadcast.
     */
    public class HelloWorldChannel implements Cast.MessageReceivedCallback {
        /**
         * @return custom namespace
         */
        public String getNamespace() {
            return getString(R.string.namespace);
        }

        /*
         * Receive message from the receiver app
         */
        @Override
        public void onMessageReceived(CastDevice castDevice, String namespace, String message) {
            Log.d("MEssage", "onMessageReceived: " + message);
            transferMessage(message);
        }

        /** Forwards the raw JSON message to listeners of the lobby retriever broadcast. */
        public void transferMessage(String message) {
            Intent intent = new Intent("com.example.Dare.lobby.retriever");
            intent.putExtra("json", message);
            sendBroadcast(intent);
        }
    }

    /** Reacts to the service-level broadcast by telling the receiver to start the game. */
    private BroadcastReceiver receiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            sendMessage(createJSONString(mSessionId, "startGame"));
        }
    };

    /**
     * Notifies the receiver that the player is leaving, removes the message channel,
     * disconnects the API client, and resets all session state.
     */
    private void teardown() {
        Log.d("CLOSING", "teardown");
        if (mApiClient != null) {
            if (mApplicationStarted) {
                if (mApiClient.isConnected() || mApiClient.isConnecting()) {
                    try {
                        sendMessage(createJSONString(mSessionId, "leave"));
                        //Closes the apps
                        //Cast.CastApi.stopApplication(mApiClient, mSessionId);
                        if (mHelloWorldChannel != null) {
                            Cast.CastApi.removeMessageReceivedCallbacks(mApiClient,
                                    mHelloWorldChannel.getNamespace());
                            mHelloWorldChannel = null;
                        }
                    } catch (IOException e) {
                        Log.e("Error", "Exception while removing channel", e);
                    }
                    mApiClient.disconnect();
                }
                mApplicationStarted = false;
            }
            mApiClient = null;
        }
        mSelectedDevice = null;
        mWaitingForReconnect = false;
        mSessionId = null;
    }

    /**
     * Google Play services callbacks
     */
    private class ConnectionFailedListener implements
            GoogleApiClient.OnConnectionFailedListener {
        @Override
        public void onConnectionFailed(ConnectionResult result) {
            Log.e("Failed", "onConnectionFailed ");
            teardown();
        }
    }

    /**
     * Builds the one-object JSON command payload sent to the receiver.
     *
     * @param sessionId current Cast session ID (may be null before launch completes).
     * @param action command name, e.g. "join", "leave", "startGame".
     * @return serialized JSON object with "id", "name" and "command" fields.
     */
    private String createJSONString(String sessionId, String action) {
        JSONObject player = new JSONObject();
        try {
            player.put("id", sessionId);
            // Fall back to a default name when no username was ever delivered
            // (previously this NPE'd on a null username).
            if (username != null && !username.equals("")) {
                player.put("name", username);
            }
            else {
                player.put("name", "player");
            }
            player.put("command", action);
        }
        catch (JSONException ex) {
            ex.printStackTrace();
        }
        return player.toString();
    }

    /** Handles Cast connection lifecycle: launches the receiver app on first connect. */
    private class ConnectionCallbacks implements GoogleApiClient.ConnectionCallbacks {
        @Override
        public void onConnected(Bundle connectionHint) {
            Log.d("Connected", "onConnected");
            if (mApiClient == null) {
                // We got disconnected while this runnable was pending
                // execution.
                Log.d("Disconnected", "Disconnected From Route");
                return;
            }
            try {
                if (mWaitingForReconnect) {
                    mWaitingForReconnect = false;
                    // Check if the receiver app is still running
                    if ((connectionHint != null) && connectionHint.getBoolean(Cast.EXTRA_APP_NO_LONGER_RUNNING)) {
                        Log.d("App Closed", "App is no longer running");
                        teardown();
                    } else {
                        // Re-create the custom message channel
                        try {
                            Cast.CastApi.setMessageReceivedCallbacks(
                                    mApiClient,
                                    mHelloWorldChannel.getNamespace(),
                                    mHelloWorldChannel);
                            Log.d("Set Callbacks", "Here");
                        } catch (IOException e) {
                            Log.e("Bad Channel", "Exception while creating channel", e);
                        }
                    }
                } else {
                    // Launch the receiver app exactly once (previously this branch launched
                    // it twice, registering two result callbacks and two message channels).
                    Cast.CastApi.launchApplication(mApiClient, getString(R.string.app_id), false)
                            .setResultCallback(
                                    new ResultCallback<Cast.ApplicationConnectionResult>() {
                                        @Override
                                        public void onResult(
                                                Cast.ApplicationConnectionResult result) {
                                            Status status = result.getStatus();
                                            Log.d("Application", "ApplicationConnectionResultCallback.onResult: statusCode" + status.getStatusCode());
                                            if (status.isSuccess()) {
                                                ApplicationMetadata applicationMetadata = result
                                                        .getApplicationMetadata();
                                                mSessionId = result.getSessionId();
                                                String applicationStatus = result.getApplicationStatus();
                                                boolean wasLaunched = result.getWasLaunched();
                                                Log.d("Name", "application name: " + applicationMetadata.getName()
                                                        + ", status: "
                                                        + applicationStatus
                                                        + ", sessionId: "
                                                        + mSessionId
                                                        + ", wasLaunched: "
                                                        + wasLaunched);
                                                mApplicationStarted = true;
                                                // Create the custom message
                                                // channel
                                                mHelloWorldChannel = new HelloWorldChannel();
                                                try {
                                                    Cast.CastApi
                                                            .setMessageReceivedCallbacks(
                                                                    mApiClient,
                                                                    mHelloWorldChannel
                                                                            .getNamespace(),
                                                                    mHelloWorldChannel);
                                                } catch (IOException e) {
                                                    Log.e("Exception", "Exception while creating channel", e);
                                                }
                                                // set the initial instructions
                                                // on the receiver
                                                Log.d("JSON OBJECT", createJSONString(mSessionId, "join"));
                                                Log.d("INFO", String.valueOf(mSelectedDevice.getFriendlyName()));
                                                sendMessage(createJSONString(mSessionId, "join"));
                                                //startNewService();
                                                //Intent i = new Intent(getApplicationContext(), LobbyActivity.class);
                                                //startActivity(i);
                                            } else {
                                                Log.e("Failed Launch", "application could not launch");
                                                teardown();
                                            }
                                        }
                                    });
                }
            } catch (Exception e) {
                Log.e("Fail Launch", "Failed to launch application", e);
            }
        }

        @Override
        public void onConnectionSuspended(int cause) {
            Log.d("Suspended", "onConnectionSuspended");
            mWaitingForReconnect = true;
        }
    }
}
| |
/*
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.cloud.ExceptionHandler.Interceptor;
import com.google.cloud.ExceptionHandler.Interceptor.RetryResult;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.channels.ClosedByInterruptException;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicReference;
/**
* Tests for {@link ExceptionHandler}.
*/
/**
 * Tests for {@link ExceptionHandler}: caller verification against declared checked
 * exceptions, and retry/abort decisions with and without interceptors.
 */
public class ExceptionHandlerTest {
    @Rule
    public ExpectedException thrown = ExpectedException.none();

    @Test
    public void testVerifyCaller() {
        // Hierarchy of callables whose declared exceptions progressively narrow;
        // verifyCaller must accept a callable only if every exception it can throw
        // is retryable under the handler's configuration.
        class A implements Callable<Object> {
            @Override
            public Object call() throws IOException, InterruptedException {
                return null;
            }
        }
        // B inherits A's throws clause unchanged.
        class B extends A {
        }
        // C narrows to FileNotFoundException (a subtype of IOException).
        class C extends A {
            @Override
            public Object call() throws FileNotFoundException {
                return "c";
            }
        }
        // D throws only an unchecked exception.
        class D extends C {
            @Override
            public Object call() throws IllegalArgumentException {
                return "d";
            }
        }
        // E narrows the return type and throws only an unchecked exception.
        class E extends A {
            @Override
            public String call() throws NullPointerException {
                return "e";
            }
        }
        // F declares Error, which no handler configuration retries.
        class F extends A {
            @Override
            public Object call() throws Error {
                return "f";
            }
        }
        // using default exception handler (retry upon any non-runtime exceptions)
        ExceptionHandler handler = ExceptionHandler.getDefaultInstance();
        assertValidCallable(new A(), handler);
        assertValidCallable(new B(), handler);
        assertValidCallable(new C(), handler);
        assertValidCallable(new D(), handler);
        assertValidCallable(new E(), handler);
        assertInvalidCallable(new F(), handler);
        // With an explicit retry list, only callables whose declared checked
        // exceptions are covered by that list remain valid.
        handler = ExceptionHandler.newBuilder()
            .retryOn(FileNotFoundException.class, NullPointerException.class)
            .build();
        assertInvalidCallable(new A(), handler);
        assertInvalidCallable(new B(), handler);
        assertValidCallable(new C(), handler);
        assertInvalidCallable(new D(), handler);
        assertValidCallable(new E(), handler);
        assertInvalidCallable(new F(), handler);
    }

    /** Asserts that {@code verifyCaller} accepts the callable (no exception thrown). */
    private static <T> void assertValidCallable(Callable<T> callable, ExceptionHandler handler) {
        handler.verifyCaller(callable);
    }

    /** Asserts that {@code verifyCaller} rejects the callable with IllegalArgumentException. */
    private static <T> void assertInvalidCallable(Callable<T> callable, ExceptionHandler handler) {
        try {
            handler.verifyCaller(callable);
            fail("Expected RetryHelper constructor to fail");
        } catch (IllegalArgumentException ex) {
            // expected
        }
    }

    @Test
    public void testShouldTry() {
        // retryOn matches subtypes too: ClosedByInterruptException extends IOException.
        ExceptionHandler handler = ExceptionHandler.newBuilder().retryOn(IOException.class).build();
        assertTrue(handler.accept(new IOException()));
        assertTrue(handler.accept(new ClosedByInterruptException()));
        assertFalse(handler.accept(new RuntimeException()));
        // abortOn overrides retryOn for the more specific match.
        ExceptionHandler.Builder builder = ExceptionHandler.newBuilder()
            .retryOn(IOException.class, NullPointerException.class)
            .abortOn(RuntimeException.class, ClosedByInterruptException.class,
                InterruptedException.class);
        handler = builder.build();
        assertTrue(handler.accept(new IOException()));
        assertFalse(handler.accept(new ClosedByInterruptException()));
        assertFalse(handler.accept(new InterruptedException()));
        assertFalse(handler.accept(new RuntimeException()));
        assertTrue(handler.accept(new NullPointerException()));
        // Interceptor whose beforeEval result is controlled by `before`, and whose
        // afterEval inverts the evaluated result (NO_RETRY <-> RETRY).
        final AtomicReference<RetryResult> before = new AtomicReference<>(RetryResult.NO_RETRY);
        @SuppressWarnings("serial")
        Interceptor interceptor = new Interceptor() {
            @Override
            public RetryResult afterEval(Exception exception, RetryResult retryResult) {
                return retryResult == RetryResult.NO_RETRY ? RetryResult.RETRY : RetryResult.NO_RETRY;
            }
            @Override
            public RetryResult beforeEval(Exception exception) {
                return before.get();
            }
        };
        builder.addInterceptors(interceptor);
        handler = builder.build();
        // beforeEval = NO_RETRY short-circuits evaluation; afterEval is not consulted.
        assertFalse(handler.accept(new IOException()));
        assertFalse(handler.accept(new ClosedByInterruptException()));
        assertFalse(handler.accept(new InterruptedException()));
        assertFalse(handler.accept(new RuntimeException()));
        assertFalse(handler.accept(new NullPointerException()));
        // beforeEval = RETRY likewise decides without class-based evaluation.
        before.set(RetryResult.RETRY);
        assertTrue(handler.accept(new IOException()));
        assertTrue(handler.accept(new ClosedByInterruptException()));
        assertTrue(handler.accept(new InterruptedException()));
        assertTrue(handler.accept(new RuntimeException()));
        assertTrue(handler.accept(new NullPointerException()));
        // beforeEval = CONTINUE_EVALUATION: the class rules run, then afterEval
        // inverts each outcome (so retryable IOException/NPE become false, etc.).
        before.set(RetryResult.CONTINUE_EVALUATION);
        assertFalse(handler.accept(new IOException()));
        assertTrue(handler.accept(new ClosedByInterruptException()));
        assertTrue(handler.accept(new InterruptedException()));
        assertTrue(handler.accept(new RuntimeException()));
        assertFalse(handler.accept(new NullPointerException()));
    }

    @Test
    public void testNullRetryResultFromBeforeEval() {
        // A null from beforeEval must be rejected with NullPointerException.
        @SuppressWarnings("serial")
        Interceptor interceptor = new Interceptor() {
            @Override
            public RetryResult beforeEval(Exception exception) {
                return null;
            }
            @Override
            public RetryResult afterEval(Exception exception, RetryResult retryResult) {
                return RetryResult.CONTINUE_EVALUATION;
            }
        };
        ExceptionHandler handler = ExceptionHandler.newBuilder().addInterceptors(interceptor).build();
        thrown.expect(NullPointerException.class);
        handler.accept(new Exception());
    }

    @Test
    public void testNullRetryResultFromAfterEval() {
        // A null from afterEval must likewise be rejected with NullPointerException.
        @SuppressWarnings("serial")
        Interceptor interceptor = new Interceptor() {
            @Override
            public RetryResult beforeEval(Exception exception) {
                return RetryResult.CONTINUE_EVALUATION;
            }
            @Override
            public RetryResult afterEval(Exception exception, RetryResult retryResult) {
                return null;
            }
        };
        ExceptionHandler handler = ExceptionHandler.newBuilder().addInterceptors(interceptor).build();
        thrown.expect(NullPointerException.class);
        handler.accept(new Exception());
    }
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.hamcrest.junit.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import com.facebook.buck.jvm.java.ExtraClasspathFromContextFunction;
import com.facebook.buck.jvm.java.JavaBuckConfig;
import com.facebook.buck.jvm.java.JavaLibrary;
import com.facebook.buck.jvm.java.JavaLibraryBuilder;
import com.facebook.buck.jvm.java.JavaLibraryDescription;
import com.facebook.buck.jvm.java.JavaLibraryDescriptionArg;
import com.facebook.buck.jvm.java.JavacOptions;
import com.facebook.buck.jvm.java.testutil.AbiCompilationModeTest;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.FakeBuildContext;
import com.facebook.buck.rules.FakeBuildRule;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TargetNode;
import com.facebook.buck.rules.query.Query;
import com.facebook.buck.testutil.TargetGraphFactory;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedSet;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Optional;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests for {@code AndroidLibraryDescription}: propagation of exported deps (from both
 * {@code deps} and {@code provided_deps}), dep-query filtering, bootclasspath injection
 * from the Android platform target, and classpath construction.
 */
public class AndroidLibraryDescriptionTest extends AbiCompilationModeTest {

  private JavaBuckConfig javaBuckConfig;

  @Before
  public void setUp() {
    javaBuckConfig = getJavaBuckConfigWithCompilationMode();
  }

  @Test
  public void rulesExportedFromDepsBecomeFirstOrderDeps() throws Exception {
    assertExportedRulesBecomeFirstOrderDeps(/* provided= */ false);
  }

  @Test
  public void rulesExportedFromProvidedDepsBecomeFirstOrderDeps() throws Exception {
    assertExportedRulesBecomeFirstOrderDeps(/* provided= */ true);
  }

  /**
   * Shared scenario for the two exported-deps tests. Builds the chain
   * {@code //:rule -> //:exporting_rule -> //:exported_rule -> //:transitive_exported_rule}
   * where each inner java_library re-exports its dependency, then asserts that both
   * exported rules surface as first-order build deps of the android_library.
   *
   * @param provided if true the android_library reaches the exporting rule via
   *     provided_deps instead of deps; exported rules must surface either way
   */
  private void assertExportedRulesBecomeFirstOrderDeps(boolean provided) throws Exception {
    TargetNode<?, ?> transitiveExportedNode =
        JavaLibraryBuilder.createBuilder(
                BuildTargetFactory.newInstance("//:transitive_exported_rule"), javaBuckConfig)
            .addSrc(Paths.get("java/src/com/transitive/hi.java"))
            .build();
    TargetNode<?, ?> exportedNode =
        JavaLibraryBuilder.createBuilder(
                BuildTargetFactory.newInstance("//:exported_rule"), javaBuckConfig)
            .addSrc(Paths.get("java/src/com/exported_rule/foo.java"))
            .addExportedDep(transitiveExportedNode.getBuildTarget())
            .build();
    TargetNode<?, ?> exportingNode =
        JavaLibraryBuilder.createBuilder(
                BuildTargetFactory.newInstance("//:exporting_rule"), javaBuckConfig)
            .addSrc(Paths.get("java/src/com/exporting_rule/bar.java"))
            .addExportedDep(exportedNode.getBuildTarget())
            .build();
    AndroidLibraryBuilder androidLibBuilder =
        AndroidLibraryBuilder.createBuilder(
            BuildTargetFactory.newInstance("//:rule"), javaBuckConfig);
    // The only difference between the two tests: which dep list reaches the exporting rule.
    if (provided) {
      androidLibBuilder.addProvidedDep(exportingNode.getBuildTarget());
    } else {
      androidLibBuilder.addDep(exportingNode.getBuildTarget());
    }
    TargetNode<?, ?> androidLibNode = androidLibBuilder.build();
    TargetGraph targetGraph =
        TargetGraphFactory.newInstance(
            transitiveExportedNode, exportedNode, exportingNode, androidLibNode);
    BuildRuleResolver resolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    BuildRule androidLibRule = resolver.requireRule(androidLibNode.getBuildTarget());
    BuildRule exportedRule = resolver.requireRule(exportedNode.getBuildTarget());
    BuildRule transitiveExportedRule =
        resolver.requireRule(transitiveExportedNode.getBuildTarget());
    // First order deps should become CalculateAbi rules if we're compiling against ABIs
    if (compileAgainstAbis.equals(TRUE)) {
      exportedRule = resolver.getRule(((JavaLibrary) exportedRule).getAbiJar().get());
      transitiveExportedRule =
          resolver.getRule(((JavaLibrary) transitiveExportedRule).getAbiJar().get());
    }
    assertThat(
        androidLibRule.getBuildDeps(),
        Matchers.allOf(Matchers.hasItem(exportedRule), Matchers.hasItem(transitiveExportedRule)));
  }

  @Test
  public void rulesMatchingDepQueryBecomeFirstOrderDeps() throws Exception {
    // Set up target graph: rule -> lib -> sublib -> bottom
    TargetNode<JavaLibraryDescriptionArg, JavaLibraryDescription> bottomNode =
        JavaLibraryBuilder.createBuilder(
                BuildTargetFactory.newInstance("//:bottom"), javaBuckConfig)
            .build();
    TargetNode<JavaLibraryDescriptionArg, JavaLibraryDescription> sublibNode =
        JavaLibraryBuilder.createBuilder(
                BuildTargetFactory.newInstance("//:sublib"), javaBuckConfig)
            .addDep(bottomNode.getBuildTarget())
            .build();
    TargetNode<JavaLibraryDescriptionArg, JavaLibraryDescription> libNode =
        JavaLibraryBuilder.createBuilder(BuildTargetFactory.newInstance("//:lib"), javaBuckConfig)
            .addDep(sublibNode.getBuildTarget())
            .build();
    BuildTarget target = BuildTargetFactory.newInstance("//:rule");
    AndroidLibraryBuilder ruleBuilder =
        AndroidLibraryBuilder.createBuilder(target, javaBuckConfig)
            .addDep(libNode.getBuildTarget())
            .setDepsQuery(Query.of("filter('.*lib', deps($declared_deps))"));
    TargetNode<AndroidLibraryDescriptionArg, AndroidLibraryDescription> rule = ruleBuilder.build();
    TargetGraph targetGraph = TargetGraphFactory.newInstance(bottomNode, libNode, sublibNode, rule);
    BuildRuleResolver resolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    // Pre-populate the resolver with fake rules so the dep query resolves against them.
    FakeBuildRule bottomRule = resolver.addToIndex(new FakeBuildRule(bottomNode.getBuildTarget()));
    FakeBuildRule sublibRule =
        resolver.addToIndex(
            new FakeBuildRule(sublibNode.getBuildTarget(), ImmutableSortedSet.of(bottomRule)));
    FakeBuildRule libRule =
        resolver.addToIndex(
            new FakeBuildRule(libNode.getBuildTarget(), ImmutableSortedSet.of(sublibRule)));
    BuildRule javaLibrary = ruleBuilder.build(resolver, targetGraph);
    assertThat(javaLibrary.getBuildDeps(), Matchers.hasItems(libRule, sublibRule));
    // The bottom rule should be filtered since it does not match the regex
    assertThat(javaLibrary.getBuildDeps(), Matchers.not(Matchers.hasItem(bottomRule)));
  }

  @Test
  public void androidClasspathFromContextFunctionAddsLibsFromAndroidPlatformTarget() {
    AndroidPlatformTarget androidPlatformTarget = createMock(AndroidPlatformTarget.class);
    List<Path> entries =
        ImmutableList.of(
            Paths.get("add-ons/addon-google_apis-google-15/libs/effects.jar"),
            Paths.get("add-ons/addon-google_apis-google-15/libs/maps.jar"),
            Paths.get("add-ons/addon-google_apis-google-15/libs/usb.jar"));
    expect(androidPlatformTarget.getBootclasspathEntries()).andReturn(entries);
    replay(androidPlatformTarget);
    ExtraClasspathFromContextFunction extraClasspathFromContextFunction =
        AndroidClasspathFromContextFunction.INSTANCE;
    JavacOptions options =
        JavacOptions.builder().setSourceLevel("1.7").setTargetLevel("1.7").build();
    JavacOptions updated =
        options.withBootclasspathFromContext(
            extraClasspathFromContextFunction,
            FakeBuildContext.NOOP_CONTEXT.withAndroidPlatformTargetSupplier(
                Suppliers.ofInstance(androidPlatformTarget)));
    // The bootclasspath should be the platform entries joined with the platform's
    // path separator and using platform-specific file separators.
    assertEquals(
        Optional.of(
            ("add-ons/addon-google_apis-google-15/libs/effects.jar"
                    + File.pathSeparatorChar
                    + "add-ons/addon-google_apis-google-15/libs/maps.jar"
                    + File.pathSeparatorChar
                    + "add-ons/addon-google_apis-google-15/libs/usb.jar")
                .replace("/", File.separator)),
        updated.getBootclasspath());
    verify(androidPlatformTarget);
  }

  @Test
  public void testClasspathContainsOnlyJavaTargets() throws Exception {
    TargetNode<AndroidResourceDescriptionArg, AndroidResourceDescription> resourceRule =
        AndroidResourceBuilder.createBuilder(BuildTargetFactory.newInstance("//:res")).build();
    TargetGraph targetGraph = TargetGraphFactory.newInstance(resourceRule);
    BuildRuleResolver resolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    resolver.addToIndex(new FakeBuildRule(resourceRule.getBuildTarget()));
    AndroidLibrary androidLibrary =
        AndroidLibraryBuilder.createBuilder(BuildTargetFactory.newInstance("//:android_lib"))
            .addDep(resourceRule.getBuildTarget())
            .build(resolver, targetGraph);
    // Non-Java deps (android_resource) must not leak into the compile-time classpath.
    assertThat(androidLibrary.getCompileTimeClasspathSourcePaths(), Matchers.empty());
  }
}
| |
package com.giikey.cdb;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Insets;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.image.BufferStrategy;
import java.util.Timer;
import java.util.TimerTask;
import javax.swing.JFrame;
import javax.swing.UIManager;
import com.giikey.cdb.game.Manage;
import com.giikey.cdb.util.Key;
/**
 * Entry point for the "Crazy Dancing Ball" game. Creates the window, wires up a
 * keyboard listener that mirrors key events into a shared {@link Key} state object,
 * and drives the render/update loop from a fixed-rate {@link Timer}.
 */
public class Main {

    /** Top-level game window. */
    public JFrame f;
    /** Double-buffered drawing strategy obtained from the frame. */
    public BufferStrategy bs;
    private Manage manage;
    /** Playfield height/width in pixels and UI font size. */
    public static final int h = 480, w = 640, FONTSIZE = 15;
    /** Shared keyboard state, polled by the game loop each tick. */
    public static Key k;
    // True while the game is paused; toggled with ESC.
    private boolean stop = false;
    public static final String name = "Crazy Dancing Ball";

    public static void main(String[] args) {
        new Main();
    }

    /**
     * Builds the window, the game manager and the key state, then starts the
     * fixed-rate loop (one tick every 10 ms after a 5 ms initial delay).
     */
    public Main() {
        try {
            UIManager.setLookAndFeel("com.sun.java.swing.plaf.windows.WindowsLookAndFeel");
        } catch (Exception e) {
            // Non-fatal: fall back to the default look-and-feel.
            e.printStackTrace();
        }
        f = new JFrame();
        manage = new Manage();
        k = new Key();
        f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        f.setBackground(new Color(30, 35, 30));
        f.addKeyListener(new KL());
        f.setResizable(false);
        f.setVisible(true);
        // Size the frame so the client area is exactly w x h; the insets (window
        // decorations) are only known once the frame is visible.
        Insets insets = f.getInsets();
        f.setSize(w + insets.left + insets.right, h + insets.top + insets.bottom);
        f.setLocationRelativeTo(null);
        // Active rendering: we paint via the BufferStrategy, not Swing's repaint.
        f.setIgnoreRepaint(true);
        f.createBufferStrategy(2);
        bs = f.getBufferStrategy();
        Timer t = new Timer();
        t.schedule(new TT(), 5, 10);
    }

    /** One frame of the render/update loop, scheduled on the timer. */
    public class TT extends TimerTask {
        @Override
        public void run() {
            Graphics g = bs.getDrawGraphics();
            Insets in = f.getInsets();
            // Shift the origin past the window decorations, then clear the playfield.
            g.translate(in.left, in.top);
            g.setColor(new Color(30, 35, 30));
            g.fillRect(0, 0, w, h);
            if (k.esc) {
                stop = !stop; // ESC toggles pause
                k.offAll();
            }
            if (!stop) {
                manage.loop(g, k);
            }
            bs.show();
            g.dispose();
        }
    }

    /** Mirrors key presses/releases into the shared {@link Key} state flags. */
    public class KL extends KeyAdapter {
        @Override
        public void keyPressed(KeyEvent ke) {
            setKey(ke.getKeyCode(), Key.ON);
        }

        @Override
        public void keyReleased(KeyEvent ke) {
            setKey(ke.getKeyCode(), Key.OFF);
        }

        // Single key-code -> flag mapping shared by press and release, so the
        // two handlers cannot drift out of sync.
        private void setKey(int code, boolean state) {
            switch (code) {
                case KeyEvent.VK_W:
                case KeyEvent.VK_UP:
                    k.w = state;
                    break;
                case KeyEvent.VK_A:
                case KeyEvent.VK_LEFT:
                    k.a = state;
                    break;
                case KeyEvent.VK_S:
                case KeyEvent.VK_DOWN:
                    k.s = state;
                    break;
                case KeyEvent.VK_D:
                case KeyEvent.VK_RIGHT:
                    k.d = state;
                    break;
                case KeyEvent.VK_SPACE:
                case KeyEvent.VK_ENTER:
                case KeyEvent.VK_Z:
                case KeyEvent.VK_SHIFT:
                    k.space = state;
                    break;
                case KeyEvent.VK_Q:
                    k.q = state;
                    break;
                case KeyEvent.VK_R:
                    k.r = state;
                    break;
                case KeyEvent.VK_ESCAPE:
                    k.esc = state;
                    break;
                default:
                    // Unmapped key: ignore.
                    break;
            }
        }
    }

    /** Clears all key flags (delegates to {@link Key#offAll()}). */
    public static void offKeys() {
        k.offAll();
    }
}
| |
/* Copyright (C) 2005-2011 Fabio Riccardi */
package com.lightcrafts.ui.operation;
import com.lightcrafts.ui.operation.drag.StackableComponent;
import com.lightcrafts.ui.LightZoneSkin;
import com.lightcrafts.app.ComboFrame;
import javax.swing.border.Border;
import javax.swing.*;
import java.awt.*;
/**
 * A stackable tool control: a fixed-height {@link SelectableTitle} bar above an
 * optional content component, with a border that switches between a selected and an
 * unselected style. Subclasses supply their UI via {@link #setContent(JComponent)}.
 */
public class SelectableControl
    extends JLayeredPane implements StackableComponent
{
    public final static Color Background = LightZoneSkin.Colors.ToolsBackground;
    public final static Font ControlFont = LightZoneSkin.fontSet.getSmallFont();

    private final static Border ControlBorder;
    private final static Border SelectedBorder;

    // Fixed pixel height reserved for the title bar in layout and preferred size.
    private final static int TitleHeight = 24;

    static {
        Border thinPadding = BorderFactory.createEmptyBorder(1, 1, 1, 1);
        Border thickPadding = BorderFactory.createEmptyBorder(2, 2, 2, 2);
        Border selectedBorder = BorderFactory.createLineBorder(LightZoneSkin.Colors.SelectedToolBorder, 2);
        Border unselectedBorder = BorderFactory.createLineBorder(Color.darkGray, 1);
        // Thin padding + 2px line and thick padding + 1px line have the same total
        // thickness, so swapping borders in setSelected() does not shift the layout.
        SelectedBorder = BorderFactory.createCompoundBorder(thinPadding, selectedBorder);
        ControlBorder = BorderFactory.createCompoundBorder(thickPadding, unselectedBorder);
    }

    SelectableTitle title;
    JComponent content;
    boolean isContentVisible;

    @SuppressWarnings({"OverridableMethodCallInConstructor"})
    public SelectableControl() {
        setBackground(Background);
        title = new SelectableTitle(this);
        title.setBackground(Background);
        title.setFont(ControlFont);
        add(title);
        setContent(new JLabel("Default Control"));
        setShowContent(true);
        setOpaque(false);
        setFont(ControlFont);
        setBorder(ControlBorder);
        setTitle("Default Control");
        // Enable mouse events, for the global AWTEventListener in OpStack.
        enableEvents(AWTEvent.MOUSE_EVENT_MASK);
    }

    /** Returns the enclosing {@link ComboFrame}, or null if not inside one. */
    public ComboFrame getComboFrame() {
        return (ComboFrame)SwingUtilities.getAncestorOfClass(
            ComboFrame.class, this
        );
    }

    @Override
    public boolean isFocusCycleRoot() {
        return true;
    }

    /** Sets the text shown in the title bar. */
    public void setTitle(String s) {
        title.setTitleText(s);
    }

    /** The title bar is the drag handle for stack reordering. */
    public JComponent getDraggableComponent() {
        return title;
    }

    public boolean isSwappable() {
        return false;
    }

    /** Shows or hides the content component below the title bar. */
    void setShowContent(boolean visible) {
        if (isContentVisible != visible) {
            isContentVisible = visible;
            if (visible) {
                add(content);
            }
            else {
                remove(content);
            }
            revalidate();
        }
    }

    boolean isContentShown() {
        return isContentVisible;
    }

    /**
     * Derived classes provide the display for an SelectedControl as a
     * JComponent.
     */
    protected void setContent(JComponent c) {
        if (isContentVisible) {
            remove(content);
        }
        content = c;
        if (isContentVisible) {
            add(content);
            content.setBackground(Background);
            content.setFont(ControlFont);
        }
    }

    /**
     * An accessor for the content lets class hierarchies supplement the
     * display from base classes.
     */
    protected JComponent getContent() {
        return content;
    }

    public void setRegionIndicator(boolean hasRegion) {
        title.setRegionIndicator(hasRegion);
    }

    /** Switches the border (and the title's highlight) for selection state. */
    void setSelected(boolean selected) {
        title.setSelected(selected);
        if (selected) {
            setBorder(SelectedBorder);
        }
        else {
            setBorder(ControlBorder);
        }
    }

    @Override
    protected void paintComponent(Graphics g) {
        super.paintComponent(g);
        // Paint the Background Color, but only inside the ControlBorder:
        Insets insets = getInsets();
        Dimension size = getSize();
        Color oldColor = g.getColor();
        g.setColor(Background);
        g.fillRect(
            insets.left,
            insets.top,
            size.width - insets.right - insets.left,
            size.height - insets.bottom - insets.top
        );
        g.setColor(oldColor);
    }

    @Override
    public Dimension getPreferredSize() {
        // Width is unconstrained; height is title + optional content + separator + insets.
        int height = TitleHeight;
        if (isContentVisible) {
            Dimension contentSize = content.getPreferredSize();
            height += contentSize.height;
            height += SelectableTitleSeparator.Height;
        }
        Insets insets = getInsets();
        height += insets.top + insets.bottom;
        return new Dimension(Integer.MAX_VALUE, height);
    }

    @Override
    public void doLayout() {
        Dimension size = getSize();
        Insets insets = getInsets();
        int minX = insets.left;
        int maxX = size.width - insets.right;
        int minY = insets.top;
        int maxY = size.height - insets.bottom;
        int width = maxX - minX;
        int height = maxY - minY;
        // The title gets fixed height and full width:
        title.setLocation(minX, minY);
        title.setSize(width, TitleHeight);
        if (! isContentVisible) {
            return;
        }
        // The content is centered in the remaining height:
        Dimension contentSize = content.getPreferredSize();
        int x = Math.max((width - contentSize.width) / 2, 0) + minX;
        int y = minY + TitleHeight + SelectableTitleSeparator.Height;
        int h = height - TitleHeight - SelectableTitleSeparator.Height;
        int w = Math.min(contentSize.width, width);
        content.setLocation(x, y);
        content.setSize(w, h);
    }

    /** Subclasses override to supply a help topic; null means no help. */
    protected String getHelpTopic() {
        return null;
    }
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.experiment.catalog.resources;
import org.apache.airavata.common.exception.ApplicationSettingsException;
import org.apache.airavata.common.utils.SecurityUtil;
import org.apache.airavata.common.utils.ServerSettings;
import org.apache.airavata.registry.core.experiment.catalog.ExpCatResourceUtils;
import org.apache.airavata.registry.core.experiment.catalog.ExperimentCatResource;
import org.apache.airavata.registry.core.experiment.catalog.ResourceType;
import org.apache.airavata.registry.core.experiment.catalog.model.Gateway;
import org.apache.airavata.registry.core.experiment.catalog.model.UserPK;
import org.apache.airavata.registry.core.experiment.catalog.model.Users;
import org.apache.airavata.registry.cpi.RegistryException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.EntityManager;
import java.security.NoSuchAlgorithmException;
import java.util.List;
/**
 * Experiment-catalog resource representing a gateway user. Only {@link #save()} is
 * supported; child-resource operations are not applicable to users and throw
 * {@link UnsupportedOperationException}.
 */
public class UserResource extends AbstractExpCatResource {
    private final static Logger logger = LoggerFactory.getLogger(UserResource.class);

    private String userName;
    private String password;
    private String gatewayId;

    public String getGatewayId() {
        return gatewayId;
    }

    public void setGatewayId(String gatewayId) {
        this.gatewayId = gatewayId;
    }

    public UserResource() {
    }

    /**
     * @param userName user name
     */
    public void setUserName(String userName) {
        this.userName = userName;
    }

    /**
     * @return user name
     */
    public String getUserName() {
        return userName;
    }

    /**
     * User is a hypothical data structure.
     * @param type child resource type
     * @return child resource
     */
    public ExperimentCatResource create(ResourceType type) throws RegistryException {
        logger.error("Unsupported resource type for user resource.", new UnsupportedOperationException());
        throw new UnsupportedOperationException();
    }

    /**
     * @param type child resource type
     * @param name child resource name
     */
    public void remove(ResourceType type, Object name) throws RegistryException{
        logger.error("Unsupported resource type for user resource.", new UnsupportedOperationException());
        throw new UnsupportedOperationException();
    }

    /**
     * @param type child resource type
     * @param name child resource name
     * @return UnsupportedOperationException
     */
    public ExperimentCatResource get(ResourceType type, Object name) throws RegistryException {
        logger.error("Unsupported resource type for user resource.", new UnsupportedOperationException());
        throw new UnsupportedOperationException();
    }

    /**
     * @param type child resource type
     * @return UnsupportedOperationException
     */
    public List<ExperimentCatResource> get(ResourceType type) throws RegistryException{
        logger.error("Unsupported resource type for user resource.", new UnsupportedOperationException());
        throw new UnsupportedOperationException();
    }

    /**
     * Saves this user to the database. If a user with the same (gatewayId, userName)
     * primary key exists, its password and gateway association are updated (only when
     * a non-empty password is set, matching the original behavior); otherwise a new
     * row is inserted. Non-empty passwords are stored hashed.
     *
     * @throws RegistryException wrapping any persistence or configuration failure
     */
    public void save() throws RegistryException {
        EntityManager em = null;
        try {
            // First entity manager: look up the existing user and the gateway.
            em = ExpCatResourceUtils.getEntityManager();
            UserPK userPK = new UserPK();
            userPK.setGatewayId(gatewayId);
            userPK.setUserName(userName);
            Users existingUser = em.find(Users.class, userPK);
            Gateway gateway = em.find(Gateway.class, gatewayId);
            em.close();

            // Second entity manager: perform the actual insert/update in a transaction.
            em = ExpCatResourceUtils.getEntityManager();
            em.getTransaction().begin();
            if (existingUser != null) {
                // NOTE(review): the gateway association is only refreshed when a
                // non-empty password is supplied — preserved from the original code.
                if (password != null && !password.isEmpty()) {
                    existingUser.setPassword(hashPassword());
                    existingUser.setGatewayId(gateway.getGatewayId());
                    existingUser.setGateway(gateway);
                }
                em.merge(existingUser);
            } else {
                Users user = new Users();
                user.setAiravataInternalUserId(userName + "@" + gatewayId);
                user.setUserName(userName);
                user.setGatewayId(gateway.getGatewayId());
                user.setGateway(gateway);
                if (password != null && !password.isEmpty()) {
                    user.setPassword(hashPassword());
                }
                em.persist(user);
            }
            em.getTransaction().commit();
            em.close();
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new RegistryException(e);
        } finally {
            // Roll back and release the entity manager on any failure path.
            if (em != null && em.isOpen()) {
                if (em.getTransaction().isActive()){
                    em.getTransaction().rollback();
                }
                em.close();
            }
        }
    }

    /**
     * Hashes the current password with the configured digest algorithm.
     * Extracted so the insert and update paths share one implementation.
     */
    private String hashPassword() {
        try {
            return SecurityUtil.digestString(password,
                    ServerSettings.getSetting("default.registry.password.hash.method"));
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException("Error hashing default admin password. Invalid hash algorithm.", e);
        } catch (ApplicationSettingsException e) {
            throw new RuntimeException("Error reading hash algorithm from configurations", e);
        }
    }

    /**
     * @param type child resource type
     * @param name child resource name
     * @return UnsupportedOperationException
     */
    public boolean isExists(ResourceType type, Object name) throws RegistryException{
        logger.error("Unsupported resource type for user resource.", new UnsupportedOperationException());
        throw new UnsupportedOperationException();
    }

    /**
     * @return password
     */
    public String getPassword() {
        return password;
    }

    /**
     * @param password password
     */
    public void setPassword(String password) {
        this.password = password;
    }
}
| |
package org.spongycastle.jcajce.provider.asymmetric.ecgost;
import java.math.BigInteger;
import java.security.InvalidKeyException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.SignatureException;
import java.security.spec.AlgorithmParameterSpec;
import org.spongycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.spongycastle.asn1.x509.SubjectPublicKeyInfo;
import org.spongycastle.asn1.x509.X509ObjectIdentifiers;
import org.spongycastle.crypto.CipherParameters;
import org.spongycastle.crypto.DSA;
import org.spongycastle.crypto.Digest;
import org.spongycastle.crypto.digests.GOST3411Digest;
import org.spongycastle.crypto.params.ParametersWithRandom;
import org.spongycastle.crypto.signers.ECGOST3410Signer;
import org.spongycastle.jcajce.provider.asymmetric.util.ECUtil;
import org.spongycastle.jce.interfaces.ECKey;
import org.spongycastle.jce.interfaces.ECPublicKey;
import org.spongycastle.jce.interfaces.GOST3410Key;
import org.spongycastle.jce.provider.BouncyCastleProvider;
import org.spongycastle.jcajce.provider.asymmetric.util.GOST3410Util;
/**
 * JCA Signature implementation for ECGOST3410: GOST R 34.11-94 digest combined with
 * the GOST R 34.10-2001 elliptic-curve signer. Signatures are encoded as a raw
 * 64-byte value: the 32-byte big-endian {@code s} component followed by the 32-byte
 * big-endian {@code r} component.
 */
public class SignatureSpi
    extends java.security.Signature
    implements PKCSObjectIdentifiers, X509ObjectIdentifiers
{
    private Digest digest;
    private DSA signer;

    public SignatureSpi()
    {
        super("ECGOST3410");

        this.digest = new GOST3411Digest();
        this.signer = new ECGOST3410Signer();
    }

    /**
     * Initializes for verification. Accepts EC and GOST3410 keys directly; any other
     * key type is re-parsed from its encoded form as a last resort.
     */
    @Override
    protected void engineInitVerify(
        PublicKey publicKey)
        throws InvalidKeyException
    {
        CipherParameters param;

        if (publicKey instanceof ECPublicKey)
        {
            param = ECUtil.generatePublicKeyParameter(publicKey);
        }
        else if (publicKey instanceof GOST3410Key)
        {
            param = GOST3410Util.generatePublicKeyParameter(publicKey);
        }
        else
        {
            try
            {
                // Foreign key implementation: rebuild a usable key from its encoding.
                byte[] bytes = publicKey.getEncoded();

                publicKey = BouncyCastleProvider.getPublicKey(SubjectPublicKeyInfo.getInstance(bytes));
                param = ECUtil.generatePublicKeyParameter(publicKey);
            }
            catch (Exception e)
            {
                // Preserve the underlying cause for diagnosis.
                throw new InvalidKeyException("can't recognise key type in DSA based signer", e);
            }
        }

        digest.reset();
        signer.init(false, param);
    }

    /** Initializes for signing, threading through the app-supplied RNG if present. */
    @Override
    protected void engineInitSign(
        PrivateKey privateKey)
        throws InvalidKeyException
    {
        CipherParameters param;

        if (privateKey instanceof ECKey)
        {
            param = ECUtil.generatePrivateKeyParameter(privateKey);
        }
        else
        {
            param = GOST3410Util.generatePrivateKeyParameter(privateKey);
        }

        digest.reset();

        if (appRandom != null)
        {
            signer.init(true, new ParametersWithRandom(param, appRandom));
        }
        else
        {
            signer.init(true, param);
        }
    }

    @Override
    protected void engineUpdate(
        byte b)
        throws SignatureException
    {
        digest.update(b);
    }

    @Override
    protected void engineUpdate(
        byte[] b,
        int off,
        int len)
        throws SignatureException
    {
        digest.update(b, off, len);
    }

    /**
     * Finalizes the digest and signs it, packing (s, r) into a fixed 64-byte array:
     * s right-aligned in bytes 0..31, r right-aligned in bytes 32..63. A leading zero
     * sign byte produced by BigInteger.toByteArray() is stripped.
     */
    @Override
    protected byte[] engineSign()
        throws SignatureException
    {
        byte[] hash = new byte[digest.getDigestSize()];

        digest.doFinal(hash, 0);

        try
        {
            byte[] sigBytes = new byte[64];
            BigInteger[] sig = signer.generateSignature(hash);
            byte[] r = sig[0].toByteArray();
            byte[] s = sig[1].toByteArray();

            if (s[0] != 0)
            {
                System.arraycopy(s, 0, sigBytes, 32 - s.length, s.length);
            }
            else
            {
                // Drop the zero sign byte that toByteArray() prepends.
                System.arraycopy(s, 1, sigBytes, 32 - (s.length - 1), s.length - 1);
            }

            if (r[0] != 0)
            {
                System.arraycopy(r, 0, sigBytes, 64 - r.length, r.length);
            }
            else
            {
                System.arraycopy(r, 1, sigBytes, 64 - (r.length - 1), r.length - 1);
            }

            return sigBytes;
        }
        catch (Exception e)
        {
            // Preserve the underlying cause for diagnosis.
            throw new SignatureException(e.toString(), e);
        }
    }

    /**
     * Finalizes the digest and verifies the 64-byte (s || r) signature against it.
     */
    @Override
    protected boolean engineVerify(
        byte[] sigBytes)
        throws SignatureException
    {
        byte[] hash = new byte[digest.getDigestSize()];

        digest.doFinal(hash, 0);

        BigInteger[] sig;

        try
        {
            // Layout mirrors engineSign(): s occupies bytes 0..31, r bytes 32..63.
            byte[] r = new byte[32];
            byte[] s = new byte[32];

            System.arraycopy(sigBytes, 0, s, 0, 32);

            System.arraycopy(sigBytes, 32, r, 0, 32);

            sig = new BigInteger[2];
            sig[0] = new BigInteger(1, r);
            sig[1] = new BigInteger(1, s);
        }
        catch (Exception e)
        {
            throw new SignatureException("error decoding signature bytes.");
        }

        return signer.verifySignature(hash, sig[0], sig[1]);
    }

    @Override
    protected void engineSetParameter(
        AlgorithmParameterSpec params)
    {
        throw new UnsupportedOperationException("engineSetParameter unsupported");
    }

    /**
     * @deprecated replaced with
     *     {@link #engineSetParameter(java.security.spec.AlgorithmParameterSpec)}
     */
    @Override
    protected void engineSetParameter(
        String param,
        Object value)
    {
        throw new UnsupportedOperationException("engineSetParameter unsupported");
    }

    /**
     * @deprecated
     */
    @Override
    protected Object engineGetParameter(
        String param)
    {
        // Fixed copy-paste message: this is engineGetParameter, not engineSetParameter.
        throw new UnsupportedOperationException("engineGetParameter unsupported");
    }
}
| |
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.session;
import static androidx.media3.common.Player.COMMAND_ADJUST_DEVICE_VOLUME;
import static androidx.media3.common.Player.COMMAND_CHANGE_MEDIA_ITEMS;
import static androidx.media3.common.Player.COMMAND_PLAY_PAUSE;
import static androidx.media3.common.Player.COMMAND_PREPARE;
import static androidx.media3.common.Player.COMMAND_SEEK_BACK;
import static androidx.media3.common.Player.COMMAND_SEEK_FORWARD;
import static androidx.media3.common.Player.COMMAND_SEEK_IN_CURRENT_MEDIA_ITEM;
import static androidx.media3.common.Player.COMMAND_SEEK_TO_DEFAULT_POSITION;
import static androidx.media3.common.Player.COMMAND_SEEK_TO_MEDIA_ITEM;
import static androidx.media3.common.Player.COMMAND_SEEK_TO_NEXT;
import static androidx.media3.common.Player.COMMAND_SEEK_TO_NEXT_MEDIA_ITEM;
import static androidx.media3.common.Player.COMMAND_SEEK_TO_PREVIOUS;
import static androidx.media3.common.Player.COMMAND_SEEK_TO_PREVIOUS_MEDIA_ITEM;
import static androidx.media3.common.Player.COMMAND_SET_DEVICE_VOLUME;
import static androidx.media3.common.Player.COMMAND_SET_MEDIA_ITEMS_METADATA;
import static androidx.media3.common.Player.COMMAND_SET_REPEAT_MODE;
import static androidx.media3.common.Player.COMMAND_SET_SHUFFLE_MODE;
import static androidx.media3.common.Player.COMMAND_SET_SPEED_AND_PITCH;
import static androidx.media3.common.Player.COMMAND_SET_TRACK_SELECTION_PARAMETERS;
import static androidx.media3.common.Player.COMMAND_SET_VIDEO_SURFACE;
import static androidx.media3.common.Player.COMMAND_SET_VOLUME;
import static androidx.media3.common.Player.COMMAND_STOP;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.common.util.Util.postOrRun;
import static androidx.media3.session.SessionCommand.COMMAND_CODE_CUSTOM;
import static androidx.media3.session.SessionCommand.COMMAND_CODE_LIBRARY_GET_CHILDREN;
import static androidx.media3.session.SessionCommand.COMMAND_CODE_LIBRARY_GET_ITEM;
import static androidx.media3.session.SessionCommand.COMMAND_CODE_LIBRARY_GET_LIBRARY_ROOT;
import static androidx.media3.session.SessionCommand.COMMAND_CODE_LIBRARY_GET_SEARCH_RESULT;
import static androidx.media3.session.SessionCommand.COMMAND_CODE_LIBRARY_SEARCH;
import static androidx.media3.session.SessionCommand.COMMAND_CODE_LIBRARY_SUBSCRIBE;
import static androidx.media3.session.SessionCommand.COMMAND_CODE_LIBRARY_UNSUBSCRIBE;
import static androidx.media3.session.SessionCommand.COMMAND_CODE_SESSION_SET_MEDIA_URI;
import static androidx.media3.session.SessionCommand.COMMAND_CODE_SESSION_SET_RATING;
import android.net.Uri;
import android.os.Binder;
import android.os.Bundle;
import android.os.IBinder;
import android.os.RemoteException;
import android.text.TextUtils;
import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.core.util.ObjectsCompat;
import androidx.media.MediaSessionManager;
import androidx.media3.common.BundleListRetriever;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MediaLibraryInfo;
import androidx.media3.common.MediaMetadata;
import androidx.media3.common.PlaybackParameters;
import androidx.media3.common.Player;
import androidx.media3.common.Rating;
import androidx.media3.common.TrackSelectionParameters;
import androidx.media3.common.util.BundleableUtil;
import androidx.media3.common.util.Log;
import androidx.media3.common.util.Util;
import androidx.media3.session.MediaLibraryService.LibraryParams;
import androidx.media3.session.MediaLibraryService.MediaLibrarySession;
import androidx.media3.session.MediaSession.ControllerCb;
import androidx.media3.session.MediaSession.ControllerInfo;
import androidx.media3.session.SessionCommand.CommandCode;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import java.lang.ref.WeakReference;
import java.util.Collections;
import java.util.Deque;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
/**
* Class that handles incoming commands from {@link MediaController} and {@link MediaBrowser} to
* both {@link MediaSession} and {@link MediaLibrarySession}.
*/
// We cannot create a subclass for library-service-specific functionality because AIDL doesn't
// support subclassing, and its generated stub class is an abstract class.
/* package */ final class MediaSessionStub extends IMediaSession.Stub {
private static final String TAG = "MediaSessionStub";
// The session implementation. Held via a WeakReference so this Binder stub (which remote
// processes may keep a reference to) does not keep a released session alive.
private final WeakReference<MediaSessionImpl> sessionImpl;
// Used to decide whether a connecting controller is trusted for media control.
private final MediaSessionManager sessionManager;
// Tracks connected controllers together with their granted commands and command queues.
private final ConnectedControllersManager<IBinder> connectedControllersManager;
// Controllers whose connection request was received but not yet handled on the application
// thread. Backed by a ConcurrentHashMap because connect() is entered from binder threads.
private final Set<ControllerInfo> pendingControllers;
/**
 * Creates the stub that forwards incoming AIDL calls to the given session implementation.
 *
 * @param sessionImpl The session implementation that handles the incoming calls.
 */
public MediaSessionStub(MediaSessionImpl sessionImpl) {
  this.sessionImpl = new WeakReference<>(sessionImpl);
  sessionManager = MediaSessionManager.getSessionManager(sessionImpl.getContext());
  connectedControllersManager = new ConnectedControllersManager<>(sessionImpl);
  pendingControllers = Collections.newSetFromMap(new ConcurrentHashMap<>());
}
/** Returns the manager that tracks connected controllers and their available commands. */
public ConnectedControllersManager<IBinder> getConnectedControllersManager() {
  return connectedControllersManager;
}
/** Sends a {@link SessionResult} with the given result code back to the controller. */
private static void sendSessionResult(
    ControllerInfo controller, int seq, @SessionResult.Code int resultCode) {
  SessionResult result = new SessionResult(resultCode);
  sendSessionResult(controller, seq, result);
}
/** Sends the result for the call with sequence number {@code seq} back to the controller. */
private static void sendSessionResult(ControllerInfo controller, int seq, SessionResult result) {
  ControllerCb cb = checkStateNotNull(controller.getControllerCb());
  try {
    cb.onSessionResult(seq, result);
  } catch (RemoteException e) {
    // The controller's process may already be gone; nothing more we can do.
    Log.w(TAG, "Failed to send result to controller " + controller, e);
  }
}
/**
 * Sends the {@link SessionResult} of {@code future} to the controller once the future completes.
 *
 * <p>A cancelled future is reported as {@link SessionResult#RESULT_INFO_SKIPPED}; a failed or
 * interrupted one as {@link SessionResult#RESULT_ERROR_UNKNOWN}.
 */
private static void sendSessionResultWhenReady(
    ControllerInfo controller, int seq, ListenableFuture<SessionResult> future) {
  future.addListener(
      () -> {
        SessionResult result;
        try {
          // get() cannot block here: the listener only runs after the future is done.
          result = checkNotNull(future.get(), "SessionResult must not be null");
        } catch (CancellationException unused) {
          result = new SessionResult(SessionResult.RESULT_INFO_SKIPPED);
        } catch (ExecutionException | InterruptedException unused) {
          result = new SessionResult(SessionResult.RESULT_ERROR_UNKNOWN);
        }
        sendSessionResult(controller, seq, result);
      },
      MoreExecutors.directExecutor());
}
/** Sends a {@link LibraryResult} for the call with sequence number {@code seq} to the browser. */
private static void sendLibraryResult(
    ControllerInfo controller, int seq, LibraryResult<?> result) {
  ControllerCb cb = checkStateNotNull(controller.getControllerCb());
  try {
    cb.onLibraryResult(seq, result);
  } catch (RemoteException e) {
    // The browser's process may already be gone; nothing more we can do.
    Log.w(TAG, "Failed to send result to browser " + controller, e);
  }
}
/**
 * Sends the {@link LibraryResult} of {@code future} to the browser once the future completes.
 *
 * <p>A cancelled future is reported as {@link LibraryResult#RESULT_INFO_SKIPPED}; a failed or
 * interrupted one as {@link LibraryResult#RESULT_ERROR_UNKNOWN}.
 */
private static <V> void sendLibraryResultWhenReady(
    ControllerInfo controller, int seq, ListenableFuture<LibraryResult<V>> future) {
  future.addListener(
      () -> {
        LibraryResult<V> result;
        try {
          // get() cannot block here: the listener only runs after the future is done.
          result = checkNotNull(future.get(), "LibraryResult must not be null");
        } catch (CancellationException unused) {
          result = LibraryResult.ofError(LibraryResult.RESULT_INFO_SKIPPED);
        } catch (ExecutionException | InterruptedException unused) {
          result = LibraryResult.ofError(LibraryResult.RESULT_ERROR_UNKNOWN);
        }
        sendLibraryResult(controller, seq, result);
      },
      MoreExecutors.directExecutor());
}
/**
 * Dispatches {@code task} for a controller call that requires the given {@link Player.Command}.
 *
 * <p>The binder calling identity is cleared while the session and controller are looked up so
 * those lookups run with the session's own identity. The task itself is executed on the
 * session's application thread via {@link #getSessionTaskWithPlayerCommandRunnable}.
 */
private <T, K extends MediaSessionImpl> void dispatchSessionTaskWithPlayerCommand(
    IMediaController caller,
    int seq,
    @Player.Command int command,
    SessionTask<T, K> task,
    PostSessionTask<T> postTask) {
  long token = Binder.clearCallingIdentity();
  try {
    @SuppressWarnings({"unchecked", "cast.unsafe"})
    @Nullable
    K sessionImpl = (K) this.sessionImpl.get();
    if (sessionImpl == null || sessionImpl.isReleased()) {
      // Session already released or garbage collected; silently drop the call.
      return;
    }
    @Nullable
    ControllerInfo controller = connectedControllersManager.getController(caller.asBinder());
    if (controller == null) {
      // Caller is not a connected controller; silently drop the call.
      return;
    }
    if (command == COMMAND_SET_VIDEO_SURFACE) {
      // Surface changes bypass the per-controller command queue and are posted directly.
      // NOTE(review): presumably to avoid delaying surface updates behind queued commands —
      // confirm against ConnectedControllersManager's queueing semantics.
      postOrRun(
          sessionImpl.getApplicationHandler(),
          getSessionTaskWithPlayerCommandRunnable(
              controller, seq, command, sessionImpl, task, postTask));
    } else {
      connectedControllersManager.addToCommandQueue(
          controller,
          getSessionTaskWithPlayerCommandRunnable(
              controller, seq, command, sessionImpl, task, postTask));
    }
  } finally {
    Binder.restoreCallingIdentity(token);
  }
}
/**
 * Returns a runnable that checks the controller's player-command permission, lets the session
 * approve or reject the command via {@code onPlayerCommandRequestOnHandler}, then runs
 * {@code task} and forwards its result to {@code postTask}. Intended to run on the session's
 * application thread.
 */
private <T, K extends MediaSessionImpl> Runnable getSessionTaskWithPlayerCommandRunnable(
    ControllerInfo controller,
    int seq,
    @Player.Command int command,
    K sessionImpl,
    SessionTask<T, K> task,
    PostSessionTask<T> postTask) {
  return () -> {
    if (!connectedControllersManager.isPlayerCommandAvailable(controller, command)) {
      // The controller was never granted this player command.
      sendSessionResult(
          controller, seq, new SessionResult(SessionResult.RESULT_ERROR_PERMISSION_DENIED));
      return;
    }
    @SessionResult.Code
    int resultCode = sessionImpl.onPlayerCommandRequestOnHandler(controller, command);
    if (resultCode != SessionResult.RESULT_SUCCESS) {
      // Don't run rejected command.
      sendSessionResult(controller, seq, new SessionResult(resultCode));
      return;
    }
    T result = task.run(sessionImpl, controller);
    postTask.run(controller, seq, result);
  };
}
/** Dispatches a library-session task guarded by the given predefined library command code. */
private <T> void dispatchSessionTaskWithLibrarySessionCommand(
    IMediaController caller,
    int seq,
    @CommandCode int commandCode,
    SessionTask<T, MediaLibrarySessionImpl> task,
    PostSessionTask<T> postTask) {
  dispatchSessionTaskWithSessionCommandInternal(
      caller, seq, /* sessionCommand= */ null, commandCode, task, postTask);
}
/** Dispatches a session task guarded by the given predefined session command code. */
private <T, K extends MediaSessionImpl> void dispatchSessionTaskWithSessionCommand(
    IMediaController caller,
    int seq,
    @CommandCode int commandCode,
    SessionTask<T, K> task,
    PostSessionTask<T> postTask) {
  dispatchSessionTaskWithSessionCommandInternal(
      caller, seq, /* sessionCommand= */ null, commandCode, task, postTask);
}
/** Dispatches a session task guarded by the given custom {@link SessionCommand}. */
private <T, K extends MediaSessionImpl> void dispatchSessionTaskWithSessionCommand(
    IMediaController caller,
    int seq,
    SessionCommand sessionCommand,
    SessionTask<T, K> task,
    PostSessionTask<T> postTask) {
  dispatchSessionTaskWithSessionCommandInternal(
      caller, seq, sessionCommand, COMMAND_CODE_CUSTOM, task, postTask);
}
/**
 * Dispatches {@code task} for a controller call guarded by a session command.
 *
 * <p>When {@code sessionCommand} is non-null (custom commands) it is used for the availability
 * check; otherwise {@code commandCode} is used (predefined commands). The calling identity is
 * cleared during the lookups and the task itself runs on the session's application thread.
 */
private <T, K extends MediaSessionImpl> void dispatchSessionTaskWithSessionCommandInternal(
    IMediaController caller,
    int seq,
    @Nullable SessionCommand sessionCommand,
    @CommandCode int commandCode,
    SessionTask<T, K> task,
    PostSessionTask<T> postTask) {
  long token = Binder.clearCallingIdentity();
  try {
    @SuppressWarnings({"unchecked", "cast.unsafe"})
    @Nullable
    K sessionImpl = (K) this.sessionImpl.get();
    if (sessionImpl == null || sessionImpl.isReleased()) {
      // Session already released or garbage collected; silently drop the call.
      return;
    }
    @Nullable
    ControllerInfo controller = connectedControllersManager.getController(caller.asBinder());
    if (controller == null) {
      // Caller is not a connected controller; silently drop the call.
      return;
    }
    postOrRun(
        sessionImpl.getApplicationHandler(),
        () -> {
          if (!connectedControllersManager.isConnected(controller)) {
            // The controller disconnected before the task could run.
            return;
          }
          if (sessionCommand != null) {
            if (!connectedControllersManager.isSessionCommandAvailable(
                controller, sessionCommand)) {
              sendSessionResult(
                  controller,
                  seq,
                  new SessionResult(SessionResult.RESULT_ERROR_PERMISSION_DENIED));
              return;
            }
          } else {
            if (!connectedControllersManager.isSessionCommandAvailable(controller, commandCode)) {
              sendSessionResult(
                  controller,
                  seq,
                  new SessionResult(SessionResult.RESULT_ERROR_PERMISSION_DENIED));
              return;
            }
          }
          T result = task.run(sessionImpl, controller);
          postTask.run(controller, seq, result);
        });
  } finally {
    Binder.restoreCallingIdentity(token);
  }
}
/**
 * Handles a connection request from a controller.
 *
 * <p>The controller is tracked in {@link #pendingControllers} until the request is handled on
 * the application thread. On acceptance the full {@link ConnectionState} is sent back via
 * {@code onConnected}; otherwise (or on any failure along the way) the controller is notified
 * via {@code onDisconnected}.
 */
public void connect(
    IMediaController caller,
    int controllerVersion,
    String callingPackage,
    int pid,
    int uid,
    Bundle connectionHints) {
  MediaSessionManager.RemoteUserInfo remoteUserInfo =
      new MediaSessionManager.RemoteUserInfo(callingPackage, pid, uid);
  ControllerInfo controllerInfo =
      new ControllerInfo(
          remoteUserInfo,
          controllerVersion,
          sessionManager.isTrustedForMediaControl(remoteUserInfo),
          new Controller2Cb(caller),
          connectionHints);
  @Nullable MediaSessionImpl sessionImpl = this.sessionImpl.get();
  if (sessionImpl == null || sessionImpl.isReleased()) {
    try {
      caller.onDisconnected(/* seq= */ 0);
    } catch (RemoteException e) {
      // Controller may have died prematurely.
      // Not an issue because we'll ignore it anyway.
    }
    return;
  }
  pendingControllers.add(controllerInfo);
  postOrRun(
      sessionImpl.getApplicationHandler(),
      () -> {
        boolean connected = false;
        try {
          pendingControllers.remove(controllerInfo);
          if (sessionImpl.isReleased()) {
            return;
          }
          IBinder callbackBinder =
              checkStateNotNull((Controller2Cb) controllerInfo.getControllerCb())
                  .getCallbackBinder();
          MediaSession.ConnectionResult connectionResult =
              sessionImpl.onConnectOnHandler(controllerInfo);
          // Don't reject connection for the request from trusted app.
          // Otherwise server will fail to retrieve session's information to dispatch
          // media keys to.
          if (!connectionResult.isAccepted && !controllerInfo.isTrusted()) {
            return;
          }
          if (!connectionResult.isAccepted) {
            // Trusted controllers are never rejected: accept with empty command sets so the
            // connection is kept alive.
            connectionResult =
                MediaSession.ConnectionResult.accept(
                    SessionCommands.EMPTY, Player.Commands.EMPTY);
          }
          SequencedFutureManager sequencedFutureManager;
          if (connectedControllersManager.isConnected(controllerInfo)) {
            Log.w(
                TAG,
                "Controller "
                    + controllerInfo
                    + " has sent connection"
                    + " request multiple times");
          }
          connectedControllersManager.addController(
              callbackBinder,
              controllerInfo,
              connectionResult.availableSessionCommands,
              connectionResult.availablePlayerCommands);
          sequencedFutureManager =
              checkStateNotNull(
                  connectedControllersManager.getSequencedFutureManager(controllerInfo));
          // If connection is accepted, notify the current state to the controller.
          // It's needed because we cannot call synchronous calls between
          // session/controller.
          PlayerWrapper playerWrapper = sessionImpl.getPlayerWrapper();
          PlayerInfo playerInfo = playerWrapper.createPlayerInfoForBundling();
          ConnectionState state =
              new ConnectionState(
                  MediaLibraryInfo.VERSION_INT,
                  MediaSessionStub.this,
                  sessionImpl.getSessionActivity(),
                  connectionResult.availableSessionCommands,
                  connectionResult.availablePlayerCommands,
                  playerWrapper.getAvailableCommands(),
                  sessionImpl.getToken().getExtras(),
                  playerInfo);
          // Double check if session is still there, because release() can be called in
          // another thread.
          if (sessionImpl.isReleased()) {
            return;
          }
          try {
            caller.onConnected(
                sequencedFutureManager.obtainNextSequenceNumber(), state.toBundle());
            connected = true;
          } catch (RemoteException e) {
            // Controller may have died prematurely.
          }
          sessionImpl.onPostConnectOnHandler(controllerInfo);
        } finally {
          if (!connected) {
            // Any early return above lands here: tell the controller it is disconnected.
            try {
              caller.onDisconnected(/* seq= */ 0);
            } catch (RemoteException e) {
              // Controller may have died prematurely.
              // Not an issue because we'll ignore it anyway.
            }
          }
        }
      });
}
/**
 * Releases the stub: notifies every connected controller, and every controller whose connection
 * request is still pending, that the session is going away.
 */
public void release() {
  for (ControllerInfo controller : connectedControllersManager.getConnectedControllers()) {
    notifyDisconnectedIgnoringErrors(controller);
  }
  for (ControllerInfo controller : pendingControllers) {
    notifyDisconnectedIgnoringErrors(controller);
  }
}

/** Sends {@code onDisconnected} to the controller, ignoring failures since we're releasing. */
private static void notifyDisconnectedIgnoringErrors(ControllerInfo controller) {
  ControllerCb cb = controller.getControllerCb();
  if (cb != null) {
    try {
      cb.onDisconnected(/* seq= */ 0);
    } catch (RemoteException e) {
      // Ignore. We're releasing.
    }
  }
}
//////////////////////////////////////////////////////////////////////////////////////////////
// AIDL methods for session overrides
//////////////////////////////////////////////////////////////////////////////////////////////
/**
 * AIDL entry point for a connection request; unbundles the request and delegates to
 * {@link #connect(IMediaController, int, String, int, int, Bundle)}.
 */
@Override
public void connect(
    @Nullable IMediaController caller, int seq, @Nullable Bundle connectionRequestBundle)
    throws RuntimeException {
  if (caller == null || connectionRequestBundle == null) {
    return;
  }
  ConnectionRequest request;
  try {
    request = ConnectionRequest.CREATOR.fromBundle(connectionRequestBundle);
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for ConnectionRequest", e);
    return;
  }
  // Read the caller's uid/pid before clearing the calling identity below.
  int uid = Binder.getCallingUid();
  int callingPid = Binder.getCallingPid();
  long token = Binder.clearCallingIdentity();
  // Binder.getCallingPid() can be 0 for an oneway call from the remote process.
  // If it's the case, use PID from the ConnectionRequest.
  int pid = (callingPid != 0) ? callingPid : request.pid;
  try {
    connect(caller, request.version, request.packageName, pid, uid, request.connectionHints);
  } finally {
    Binder.restoreCallingIdentity(token);
  }
}
/** Handles {@code stop()} from a controller by stopping the underlying player. */
@Override
public void stop(@Nullable IMediaController caller, int seq) throws RemoteException {
  if (caller == null) {
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_STOP,
      (impl, controllerInfo) -> {
        impl.getPlayerWrapper().stop();
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/** Handles {@code release()} from a controller by removing it from the connected set. */
@Override
public void release(@Nullable IMediaController caller, int seq) throws RemoteException {
  if (caller == null) {
    return;
  }
  // Clear the caller's identity so the removal runs with the session's own identity.
  long token = Binder.clearCallingIdentity();
  try {
    IBinder callerBinder = caller.asBinder();
    connectedControllersManager.removeController(callerBinder);
  } finally {
    Binder.restoreCallingIdentity(token);
  }
}
/**
 * Receives the result of a session-to-controller call and completes the pending future that
 * was registered under the same sequence number.
 */
@Override
public void onControllerResult(
    @Nullable IMediaController caller, int seq, @Nullable Bundle sessionResultBundle) {
  if (caller == null || sessionResultBundle == null) {
    return;
  }
  SessionResult result;
  try {
    result = SessionResult.CREATOR.fromBundle(sessionResultBundle);
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for SessionResult", e);
    return;
  }
  long token = Binder.clearCallingIdentity();
  try {
    @Nullable
    SequencedFutureManager manager =
        connectedControllersManager.getSequencedFutureManager(caller.asBinder());
    if (manager == null) {
      // Caller is not a connected controller; drop the result.
      return;
    }
    manager.setFutureResult(seq, result);
  } finally {
    Binder.restoreCallingIdentity(token);
  }
}
/** Handles {@code play()} from a controller by starting playback on the player. */
@Override
public void play(@Nullable IMediaController caller, int seq) throws RuntimeException {
  if (caller == null) {
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_PLAY_PAUSE,
      (impl, controllerInfo) -> {
        impl.getPlayerWrapper().play();
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/** Handles {@code pause()} from a controller by pausing playback on the player. */
@Override
public void pause(@Nullable IMediaController caller, int seq) throws RuntimeException {
  if (caller == null) {
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_PLAY_PAUSE,
      (impl, controllerInfo) -> {
        impl.getPlayerWrapper().pause();
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/** Handles {@code prepare()} from a controller by preparing the player. */
@Override
public void prepare(@Nullable IMediaController caller, int seq) throws RuntimeException {
  if (caller == null) {
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_PREPARE,
      (impl, controllerInfo) -> {
        impl.getPlayerWrapper().prepare();
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/**
 * Handles {@code seekToDefaultPosition()} from a controller.
 *
 * <p>Fix: {@code caller} is now annotated {@code @Nullable} for consistency with the other AIDL
 * overrides; the body already null-checks it.
 */
@Override
public void seekToDefaultPosition(@Nullable IMediaController caller, int seq) {
  if (caller == null) {
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_SEEK_TO_DEFAULT_POSITION,
      (sessionImpl, controller) -> {
        sessionImpl.getPlayerWrapper().seekToDefaultPosition();
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/**
 * Handles {@code seekToDefaultPosition(mediaItemIndex)} from a controller.
 *
 * <p>Fix: {@code caller} is now annotated {@code @Nullable} for consistency with the other AIDL
 * overrides; the body already null-checks it.
 */
@Override
public void seekToDefaultPositionWithMediaItemIndex(
    @Nullable IMediaController caller, int seq, int mediaItemIndex) throws RemoteException {
  if (caller == null) {
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_SEEK_TO_MEDIA_ITEM,
      (sessionImpl, controller) -> {
        sessionImpl.getPlayerWrapper().seekToDefaultPosition(mediaItemIndex);
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/** Handles {@code seekTo(positionMs)} within the current media item. */
@Override
public void seekTo(@Nullable IMediaController caller, int seq, long positionMs)
    throws RuntimeException {
  if (caller == null) {
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_SEEK_IN_CURRENT_MEDIA_ITEM,
      (impl, controllerInfo) -> {
        impl.getPlayerWrapper().seekTo(positionMs);
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/**
 * Handles {@code seekTo(mediaItemIndex, positionMs)} from a controller.
 *
 * <p>Fix: {@code caller} is now annotated {@code @Nullable} for consistency with the other AIDL
 * overrides; the body already null-checks it.
 */
@Override
public void seekToWithMediaItemIndex(
    @Nullable IMediaController caller, int seq, int mediaItemIndex, long positionMs)
    throws RemoteException {
  if (caller == null) {
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_SEEK_TO_MEDIA_ITEM,
      (sessionImpl, controller) -> {
        sessionImpl.getPlayerWrapper().seekTo(mediaItemIndex, positionMs);
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/**
 * Handles {@code seekBack()} from a controller.
 *
 * <p>Fix: {@code caller} is now annotated {@code @Nullable} for consistency with the other AIDL
 * overrides; the body already null-checks it.
 */
@Override
public void seekBack(@Nullable IMediaController caller, int seq) {
  if (caller == null) {
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_SEEK_BACK,
      (sessionImpl, controller) -> {
        sessionImpl.getPlayerWrapper().seekBack();
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/**
 * Handles {@code seekForward()} from a controller.
 *
 * <p>Fix: {@code caller} is now annotated {@code @Nullable} for consistency with the other AIDL
 * overrides; the body already null-checks it.
 */
@Override
public void seekForward(@Nullable IMediaController caller, int seq) {
  if (caller == null) {
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_SEEK_FORWARD,
      (sessionImpl, controller) -> {
        sessionImpl.getPlayerWrapper().seekForward();
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/** Handles a custom session command sent by a controller. */
@Override
public void onCustomCommand(
    @Nullable IMediaController caller,
    int seq,
    @Nullable Bundle commandBundle,
    @Nullable Bundle args) {
  if (caller == null || commandBundle == null || args == null) {
    return;
  }
  SessionCommand customCommand;
  try {
    customCommand = SessionCommand.CREATOR.fromBundle(commandBundle);
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for SessionCommand", e);
    return;
  }
  dispatchSessionTaskWithSessionCommand(
      caller,
      seq,
      customCommand,
      (impl, controllerInfo) ->
          impl.onCustomCommandOnHandler(controllerInfo, customCommand, args),
      MediaSessionStub::sendSessionResultWhenReady);
}
/** Handles {@code setRating(mediaId, rating)} from a controller. */
@Override
public void setRatingWithMediaId(
    @Nullable IMediaController caller, int seq, String mediaId, @Nullable Bundle ratingBundle) {
  if (caller == null || ratingBundle == null) {
    return;
  }
  if (TextUtils.isEmpty(mediaId)) {
    Log.w(TAG, "setRatingWithMediaId(): Ignoring empty mediaId");
    return;
  }
  Rating unbundledRating;
  try {
    unbundledRating = Rating.CREATOR.fromBundle(ratingBundle);
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for Rating", e);
    return;
  }
  dispatchSessionTaskWithSessionCommand(
      caller,
      seq,
      COMMAND_CODE_SESSION_SET_RATING,
      (impl, controllerInfo) ->
          impl.onSetRatingOnHandler(controllerInfo, mediaId, unbundledRating),
      MediaSessionStub::sendSessionResultWhenReady);
}
/** Handles {@code setRating(rating)} (current item) from a controller. */
@Override
public void setRating(@Nullable IMediaController caller, int seq, @Nullable Bundle ratingBundle) {
  if (caller == null || ratingBundle == null) {
    return;
  }
  Rating unbundledRating;
  try {
    unbundledRating = Rating.CREATOR.fromBundle(ratingBundle);
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for Rating", e);
    return;
  }
  dispatchSessionTaskWithSessionCommand(
      caller,
      seq,
      COMMAND_CODE_SESSION_SET_RATING,
      (impl, controllerInfo) -> impl.onSetRatingOnHandler(controllerInfo, unbundledRating),
      MediaSessionStub::sendSessionResultWhenReady);
}
/** Handles {@code setPlaybackSpeed(speed)} from a controller. */
@Override
public void setPlaybackSpeed(@Nullable IMediaController caller, int seq, float speed) {
  if (caller == null) {
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_SET_SPEED_AND_PITCH,
      (impl, controllerInfo) -> {
        impl.getPlayerWrapper().setPlaybackSpeed(speed);
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/**
 * Handles {@code setPlaybackParameters(playbackParameters)} from a controller.
 *
 * <p>Fix: {@code PlaybackParameters.CREATOR.fromBundle} is now wrapped in a try/catch like every
 * other {@code fromBundle} call in this class — previously a malformed Bundle from a remote
 * controller would have thrown a RuntimeException across the binder instead of being ignored.
 * Also annotates the bundle parameter {@code @Nullable} to match the existing null check.
 */
@Override
public void setPlaybackParameters(
    @Nullable IMediaController caller, int seq, @Nullable Bundle playbackParametersBundle) {
  if (caller == null || playbackParametersBundle == null) {
    return;
  }
  PlaybackParameters playbackParameters;
  try {
    playbackParameters = PlaybackParameters.CREATOR.fromBundle(playbackParametersBundle);
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for PlaybackParameters", e);
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_SET_SPEED_AND_PITCH,
      (sessionImpl, controller) -> {
        sessionImpl.getPlayerWrapper().setPlaybackParameters(playbackParameters);
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/** Handles {@code setMediaItem(mediaItem)} by replacing the playlist with the given item. */
@Override
public void setMediaItem(
    @Nullable IMediaController caller, int seq, @Nullable Bundle mediaItemBundle) {
  if (caller == null || mediaItemBundle == null) {
    return;
  }
  MediaItem unresolvedItem;
  try {
    unresolvedItem = MediaItem.CREATOR.fromBundle(mediaItemBundle);
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for MediaItem", e);
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_CHANGE_MEDIA_ITEMS,
      (impl, controllerInfo) -> {
        // Let the session fill in local playback properties before handing to the player.
        MediaItem resolvedItem = impl.fillInLocalConfiguration(controllerInfo, unresolvedItem);
        impl.getPlayerWrapper().setMediaItem(resolvedItem);
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/** Handles {@code setMediaItem(mediaItem, startPositionMs)} from a controller. */
@Override
public void setMediaItemWithStartPosition(
    @Nullable IMediaController caller,
    int seq,
    @Nullable Bundle mediaItemBundle,
    long startPositionMs) {
  if (caller == null || mediaItemBundle == null) {
    return;
  }
  MediaItem unresolvedItem;
  try {
    unresolvedItem = MediaItem.CREATOR.fromBundle(mediaItemBundle);
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for MediaItem", e);
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_CHANGE_MEDIA_ITEMS,
      (impl, controllerInfo) -> {
        // Let the session fill in local playback properties before handing to the player.
        MediaItem resolvedItem = impl.fillInLocalConfiguration(controllerInfo, unresolvedItem);
        impl.getPlayerWrapper().setMediaItem(resolvedItem, startPositionMs);
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/** Handles {@code setMediaItem(mediaItem, resetPosition)} from a controller. */
@Override
public void setMediaItemWithResetPosition(
    @Nullable IMediaController caller,
    int seq,
    @Nullable Bundle mediaItemBundle,
    boolean resetPosition) {
  if (caller == null || mediaItemBundle == null) {
    return;
  }
  MediaItem unresolvedItem;
  try {
    unresolvedItem = MediaItem.CREATOR.fromBundle(mediaItemBundle);
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for MediaItem", e);
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_CHANGE_MEDIA_ITEMS,
      (impl, controllerInfo) -> {
        // Let the session fill in local playback properties before handing to the player.
        MediaItem resolvedItem = impl.fillInLocalConfiguration(controllerInfo, unresolvedItem);
        impl.getPlayerWrapper().setMediaItem(resolvedItem, resetPosition);
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/** Handles {@code setMediaItems(mediaItems)} by replacing the playlist with the given items. */
@Override
public void setMediaItems(
    @Nullable IMediaController caller, int seq, @Nullable IBinder mediaItemsRetriever) {
  if (caller == null || mediaItemsRetriever == null) {
    return;
  }
  List<MediaItem> unresolvedItems;
  try {
    unresolvedItems =
        BundleableUtil.fromBundleList(
            MediaItem.CREATOR, BundleListRetriever.getList(mediaItemsRetriever));
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for MediaItem", e);
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_CHANGE_MEDIA_ITEMS,
      (impl, controllerInfo) -> {
        // Let the session fill in local playback properties before handing to the player.
        ImmutableList.Builder<MediaItem> resolvedItems = ImmutableList.builder();
        for (MediaItem unresolvedItem : unresolvedItems) {
          resolvedItems.add(impl.fillInLocalConfiguration(controllerInfo, unresolvedItem));
        }
        impl.getPlayerWrapper().setMediaItems(resolvedItems.build());
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/** Handles {@code setMediaItems(mediaItems, resetPosition)} from a controller. */
@Override
public void setMediaItemsWithResetPosition(
    @Nullable IMediaController caller,
    int seq,
    @Nullable IBinder mediaItemsRetriever,
    boolean resetPosition) {
  if (caller == null || mediaItemsRetriever == null) {
    return;
  }
  List<MediaItem> unresolvedItems;
  try {
    unresolvedItems =
        BundleableUtil.fromBundleList(
            MediaItem.CREATOR, BundleListRetriever.getList(mediaItemsRetriever));
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for MediaItem", e);
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_CHANGE_MEDIA_ITEMS,
      (impl, controllerInfo) -> {
        // Let the session fill in local playback properties before handing to the player.
        ImmutableList.Builder<MediaItem> resolvedItems = ImmutableList.builder();
        for (MediaItem unresolvedItem : unresolvedItems) {
          resolvedItems.add(impl.fillInLocalConfiguration(controllerInfo, unresolvedItem));
        }
        impl.getPlayerWrapper().setMediaItems(resolvedItems.build(), resetPosition);
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/** Handles {@code setMediaItems(mediaItems, startIndex, startPositionMs)} from a controller. */
@Override
public void setMediaItemsWithStartIndex(
    @Nullable IMediaController caller,
    int seq,
    @Nullable IBinder mediaItemsRetriever,
    int startIndex,
    long startPositionMs) {
  if (caller == null || mediaItemsRetriever == null) {
    return;
  }
  List<MediaItem> unresolvedItems;
  try {
    unresolvedItems =
        BundleableUtil.fromBundleList(
            MediaItem.CREATOR, BundleListRetriever.getList(mediaItemsRetriever));
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for MediaItem", e);
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_CHANGE_MEDIA_ITEMS,
      (impl, controllerInfo) -> {
        // Let the session fill in local playback properties before handing to the player.
        ImmutableList.Builder<MediaItem> resolvedItems = ImmutableList.builder();
        for (MediaItem unresolvedItem : unresolvedItems) {
          resolvedItems.add(impl.fillInLocalConfiguration(controllerInfo, unresolvedItem));
        }
        impl.getPlayerWrapper()
            .setMediaItems(resolvedItems.build(), startIndex, startPositionMs);
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/** Handles {@code setMediaUri(uri, extras)} from a controller. */
@Override
public void setMediaUri(
    @Nullable IMediaController caller, int seq, @Nullable Uri uri, @Nullable Bundle extras) {
  if (caller == null || uri == null || extras == null) {
    return;
  }
  dispatchSessionTaskWithSessionCommand(
      caller,
      seq,
      COMMAND_CODE_SESSION_SET_MEDIA_URI,
      (impl, controllerInfo) -> {
        @SessionResult.Code int code = impl.onSetMediaUriOnHandler(controllerInfo, uri, extras);
        return new SessionResult(code);
      },
      MediaSessionStub::sendSessionResult);
}
/** Handles {@code setPlaylistMetadata(playlistMetadata)} from a controller. */
@Override
public void setPlaylistMetadata(
    @Nullable IMediaController caller, int seq, @Nullable Bundle playlistMetadataBundle) {
  if (caller == null || playlistMetadataBundle == null) {
    return;
  }
  MediaMetadata unbundledMetadata;
  try {
    unbundledMetadata = MediaMetadata.CREATOR.fromBundle(playlistMetadataBundle);
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for MediaMetadata", e);
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_SET_MEDIA_ITEMS_METADATA,
      (impl, controllerInfo) -> {
        impl.getPlayerWrapper().setPlaylistMetadata(unbundledMetadata);
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/**
 * Handles {@code addMediaItem(mediaItem)} from a controller.
 *
 * <p>Fix: {@code mediaItemBundle} is now annotated {@code @Nullable} to match the existing null
 * check and the sibling overrides.
 */
@Override
public void addMediaItem(
    @Nullable IMediaController caller, int seq, @Nullable Bundle mediaItemBundle) {
  if (caller == null || mediaItemBundle == null) {
    return;
  }
  MediaItem mediaItem;
  try {
    mediaItem = MediaItem.CREATOR.fromBundle(mediaItemBundle);
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for MediaItem", e);
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_CHANGE_MEDIA_ITEMS,
      (sessionImpl, controller) -> {
        // Let the session fill in local playback properties before handing to the player.
        MediaItem mediaItemWithPlaybackProperties =
            sessionImpl.fillInLocalConfiguration(controller, mediaItem);
        sessionImpl.getPlayerWrapper().addMediaItem(mediaItemWithPlaybackProperties);
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/**
 * Handles {@code addMediaItem(index, mediaItem)} from a controller.
 *
 * <p>Fix: {@code mediaItemBundle} is now annotated {@code @Nullable} to match the existing null
 * check and the sibling overrides.
 */
@Override
public void addMediaItemWithIndex(
    @Nullable IMediaController caller, int seq, int index, @Nullable Bundle mediaItemBundle) {
  if (caller == null || mediaItemBundle == null) {
    return;
  }
  MediaItem mediaItem;
  try {
    mediaItem = MediaItem.CREATOR.fromBundle(mediaItemBundle);
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for MediaItem", e);
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_CHANGE_MEDIA_ITEMS,
      (sessionImpl, controller) -> {
        // Let the session fill in local playback properties before handing to the player.
        MediaItem mediaItemWithPlaybackProperties =
            sessionImpl.fillInLocalConfiguration(controller, mediaItem);
        sessionImpl.getPlayerWrapper().addMediaItem(index, mediaItemWithPlaybackProperties);
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/** Handles {@code addMediaItems(mediaItems)} by appending the given items to the playlist. */
@Override
public void addMediaItems(
    @Nullable IMediaController caller, int seq, @Nullable IBinder mediaItemsRetriever) {
  if (caller == null || mediaItemsRetriever == null) {
    return;
  }
  List<MediaItem> unresolvedItems;
  try {
    unresolvedItems =
        BundleableUtil.fromBundleList(
            MediaItem.CREATOR, BundleListRetriever.getList(mediaItemsRetriever));
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for MediaItem", e);
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_CHANGE_MEDIA_ITEMS,
      (impl, controllerInfo) -> {
        // Let the session fill in local playback properties before handing to the player.
        ImmutableList.Builder<MediaItem> resolvedItems = ImmutableList.builder();
        for (MediaItem unresolvedItem : unresolvedItems) {
          resolvedItems.add(impl.fillInLocalConfiguration(controllerInfo, unresolvedItem));
        }
        impl.getPlayerWrapper().addMediaItems(resolvedItems.build());
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/** Handles {@code addMediaItems(index, mediaItems)} from a controller. */
@Override
public void addMediaItemsWithIndex(
    @Nullable IMediaController caller,
    int seq,
    int index,
    @Nullable IBinder mediaItemsRetriever) {
  if (caller == null || mediaItemsRetriever == null) {
    return;
  }
  List<MediaItem> unresolvedItems;
  try {
    unresolvedItems =
        BundleableUtil.fromBundleList(
            MediaItem.CREATOR, BundleListRetriever.getList(mediaItemsRetriever));
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for MediaItem", e);
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_CHANGE_MEDIA_ITEMS,
      (impl, controllerInfo) -> {
        // Let the session fill in local playback properties before handing to the player.
        ImmutableList.Builder<MediaItem> resolvedItems = ImmutableList.builder();
        for (MediaItem unresolvedItem : unresolvedItems) {
          resolvedItems.add(impl.fillInLocalConfiguration(controllerInfo, unresolvedItem));
        }
        impl.getPlayerWrapper().addMediaItems(index, resolvedItems.build());
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
/** Removes the media item at {@code index} from the session player's playlist. */
@Override
public void removeMediaItem(@Nullable IMediaController caller, int seq, int index) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_CHANGE_MEDIA_ITEMS,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().removeMediaItem(index);
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Removes the media items in the range [{@code fromIndex}, {@code toIndex}). */
@Override
public void removeMediaItems(
    @Nullable IMediaController caller, int seq, int fromIndex, int toIndex) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_CHANGE_MEDIA_ITEMS,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().removeMediaItems(fromIndex, toIndex);
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Clears the session player's playlist. */
@Override
public void clearMediaItems(@Nullable IMediaController caller, int seq) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_CHANGE_MEDIA_ITEMS,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().clearMediaItems();
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Moves the media item at {@code currentIndex} to {@code newIndex}. */
@Override
public void moveMediaItem(
    @Nullable IMediaController caller, int seq, int currentIndex, int newIndex) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_CHANGE_MEDIA_ITEMS,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().moveMediaItem(currentIndex, newIndex);
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Moves the media items in [{@code fromIndex}, {@code toIndex}) to {@code newIndex}. */
@Override
public void moveMediaItems(
    @Nullable IMediaController caller, int seq, int fromIndex, int toIndex, int newIndex) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_CHANGE_MEDIA_ITEMS,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().moveMediaItems(fromIndex, toIndex, newIndex);
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Seeks to the previous media item on the session player. */
@Override
public void seekToPreviousMediaItem(@Nullable IMediaController caller, int seq) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_SEEK_TO_PREVIOUS_MEDIA_ITEM,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().seekToPreviousMediaItem();
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Seeks to the next media item on the session player. */
@Override
public void seekToNextMediaItem(@Nullable IMediaController caller, int seq) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_SEEK_TO_NEXT_MEDIA_ITEM,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().seekToNextMediaItem();
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Delegates a seek-to-previous request to the session player. */
@Override
public void seekToPrevious(@Nullable IMediaController caller, int seq) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_SEEK_TO_PREVIOUS,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().seekToPrevious();
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Delegates a seek-to-next request to the session player. */
@Override
public void seekToNext(@Nullable IMediaController caller, int seq) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_SEEK_TO_NEXT,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().seekToNext();
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Sets the session player's repeat mode. */
@Override
public void setRepeatMode(
    @Nullable IMediaController caller, int seq, @Player.RepeatMode int repeatMode) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_SET_REPEAT_MODE,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().setRepeatMode(repeatMode);
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Enables or disables shuffle mode on the session player. */
@Override
public void setShuffleModeEnabled(
    @Nullable IMediaController caller, int seq, boolean shuffleModeEnabled) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_SET_SHUFFLE_MODE,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().setShuffleModeEnabled(shuffleModeEnabled);
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Sets (or clears, when {@code surface} is null) the session player's video surface. */
@Override
public void setVideoSurface(
    @Nullable IMediaController caller, int seq, @Nullable Surface surface) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_SET_VIDEO_SURFACE,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().setVideoSurface(surface);
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Sets the session player's audio volume. */
@Override
public void setVolume(@Nullable IMediaController caller, int seq, float volume) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_SET_VOLUME,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().setVolume(volume);
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Sets the device volume on the session player. */
@Override
public void setDeviceVolume(@Nullable IMediaController caller, int seq, int volume) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_SET_DEVICE_VOLUME,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().setDeviceVolume(volume);
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Increases the device volume by one step on the session player. */
@Override
public void increaseDeviceVolume(@Nullable IMediaController caller, int seq) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_ADJUST_DEVICE_VOLUME,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().increaseDeviceVolume();
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Decreases the device volume by one step on the session player. */
@Override
public void decreaseDeviceVolume(@Nullable IMediaController caller, int seq) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_ADJUST_DEVICE_VOLUME,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().decreaseDeviceVolume();
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Mutes or unmutes the device on the session player. */
@Override
public void setDeviceMuted(@Nullable IMediaController caller, int seq, boolean muted) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_SET_DEVICE_VOLUME,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().setDeviceMuted(muted);
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/** Sets the session player's play-when-ready flag (play/pause). */
@Override
public void setPlayWhenReady(@Nullable IMediaController caller, int seq, boolean playWhenReady) {
  if (caller != null) {
    dispatchSessionTaskWithPlayerCommand(
        caller,
        seq,
        COMMAND_PLAY_PAUSE,
        (session, controllerInfo) -> {
          session.getPlayerWrapper().setPlayWhenReady(playWhenReady);
          return SessionResult.RESULT_SUCCESS;
        },
        MediaSessionStub::sendSessionResult);
  }
}
/**
 * Drains and runs, on the session's application handler, all commands queued for the calling
 * controller. No-op if the session is gone/released or the controller is not connected.
 */
@Override
public void flushCommandQueue(@Nullable IMediaController caller) {
  if (caller == null) {
    return;
  }
  // Clear the Binder calling identity so downstream checks run as this process.
  long token = Binder.clearCallingIdentity();
  try {
    @Nullable MediaSessionImpl session = this.sessionImpl.get();
    if (session == null || session.isReleased()) {
      return;
    }
    ControllerInfo controllerInfo = connectedControllersManager.getController(caller.asBinder());
    if (controllerInfo == null) {
      return;
    }
    Deque<Runnable> commands = connectedControllersManager.getAndClearCommandQueue(controllerInfo);
    postOrRun(
        session.getApplicationHandler(),
        () -> {
          // poll() returns null once the deque is drained.
          Runnable command;
          while ((command = commands.poll()) != null) {
            command.run();
          }
        });
  } finally {
    Binder.restoreCallingIdentity(token);
  }
}
/**
 * Applies track selection parameters unbundled from {@code trackSelectionParametersBundle} to the
 * session player. A malformed bundle is logged and ignored.
 */
@Override
public void setTrackSelectionParameters(
    @Nullable IMediaController caller, int seq, Bundle trackSelectionParametersBundle)
    throws RemoteException {
  if (caller == null) {
    return;
  }
  TrackSelectionParameters parameters;
  try {
    parameters = TrackSelectionParameters.CREATOR.fromBundle(trackSelectionParametersBundle);
  } catch (RuntimeException e) {
    Log.w(TAG, "Ignoring malformed Bundle for TrackSelectionParameters", e);
    return;
  }
  dispatchSessionTaskWithPlayerCommand(
      caller,
      seq,
      COMMAND_SET_TRACK_SELECTION_PARAMETERS,
      (session, controllerInfo) -> {
        session.getPlayerWrapper().setTrackSelectionParameters(parameters);
        return SessionResult.RESULT_SUCCESS;
      },
      MediaSessionStub::sendSessionResult);
}
//////////////////////////////////////////////////////////////////////////////////////////////
// AIDL methods for LibrarySession overrides
//////////////////////////////////////////////////////////////////////////////////////////////
/** Dispatches a library-root request to the library session implementation. */
@Override
public void getLibraryRoot(
    @Nullable IMediaController caller, int seq, @Nullable Bundle libraryParamsBundle)
    throws RuntimeException {
  if (caller == null) {
    return;
  }
  @Nullable
  LibraryParams params =
      BundleableUtil.fromNullableBundle(LibraryParams.CREATOR, libraryParamsBundle);
  dispatchSessionTaskWithLibrarySessionCommand(
      caller,
      seq,
      COMMAND_CODE_LIBRARY_GET_LIBRARY_ROOT,
      (librarySession, controllerInfo) ->
          librarySession.onGetLibraryRootOnHandler(controllerInfo, params),
      MediaSessionStub::sendLibraryResultWhenReady);
}
/** Dispatches a single-item lookup by {@code mediaId}; empty ids are logged and ignored. */
@Override
public void getItem(@Nullable IMediaController caller, int seq, @Nullable String mediaId)
    throws RuntimeException {
  if (caller == null) {
    return;
  }
  if (TextUtils.isEmpty(mediaId)) {
    Log.w(TAG, "getItem(): Ignoring empty mediaId");
    return;
  }
  dispatchSessionTaskWithLibrarySessionCommand(
      caller,
      seq,
      COMMAND_CODE_LIBRARY_GET_ITEM,
      (librarySession, controllerInfo) ->
          librarySession.onGetItemOnHandler(controllerInfo, mediaId),
      MediaSessionStub::sendLibraryResultWhenReady);
}
/**
 * Dispatches a paged children request for {@code parentId}. Invalid input (empty parent id,
 * negative page, page size below 1) is logged and dropped without dispatching.
 */
@Override
public void getChildren(
    @Nullable IMediaController caller,
    int seq,
    String parentId,
    int page,
    int pageSize,
    @Nullable Bundle libraryParamsBundle)
    throws RuntimeException {
  if (caller == null) {
    return;
  }
  if (TextUtils.isEmpty(parentId)) {
    Log.w(TAG, "getChildren(): Ignoring empty parentId");
    return;
  }
  if (page < 0) {
    Log.w(TAG, "getChildren(): Ignoring negative page");
    return;
  }
  if (pageSize < 1) {
    Log.w(TAG, "getChildren(): Ignoring pageSize less than 1");
    return;
  }
  @Nullable
  LibraryParams params =
      BundleableUtil.fromNullableBundle(LibraryParams.CREATOR, libraryParamsBundle);
  dispatchSessionTaskWithLibrarySessionCommand(
      caller,
      seq,
      COMMAND_CODE_LIBRARY_GET_CHILDREN,
      (librarySession, controllerInfo) ->
          librarySession.onGetChildrenOnHandler(controllerInfo, parentId, page, pageSize, params),
      MediaSessionStub::sendLibraryResultWhenReady);
}
/** Dispatches a library search; an empty query is logged and ignored. */
@Override
public void search(
    @Nullable IMediaController caller,
    int seq,
    String query,
    @Nullable Bundle libraryParamsBundle) {
  if (caller == null) {
    return;
  }
  if (TextUtils.isEmpty(query)) {
    Log.w(TAG, "search(): Ignoring empty query");
    return;
  }
  @Nullable
  LibraryParams params =
      BundleableUtil.fromNullableBundle(LibraryParams.CREATOR, libraryParamsBundle);
  dispatchSessionTaskWithLibrarySessionCommand(
      caller,
      seq,
      COMMAND_CODE_LIBRARY_SEARCH,
      (librarySession, controllerInfo) ->
          librarySession.onSearchOnHandler(controllerInfo, query, params),
      MediaSessionStub::sendLibraryResultWhenReady);
}
/**
 * Dispatches a paged search-result request. Invalid input (empty query, negative page, page size
 * below 1) is logged and dropped without dispatching.
 */
@Override
public void getSearchResult(
    @Nullable IMediaController caller,
    int seq,
    String query,
    int page,
    int pageSize,
    @Nullable Bundle libraryParamsBundle) {
  if (caller == null) {
    return;
  }
  if (TextUtils.isEmpty(query)) {
    Log.w(TAG, "getSearchResult(): Ignoring empty query");
    return;
  }
  if (page < 0) {
    Log.w(TAG, "getSearchResult(): Ignoring negative page");
    return;
  }
  if (pageSize < 1) {
    Log.w(TAG, "getSearchResult(): Ignoring pageSize less than 1");
    return;
  }
  @Nullable
  LibraryParams params =
      BundleableUtil.fromNullableBundle(LibraryParams.CREATOR, libraryParamsBundle);
  dispatchSessionTaskWithLibrarySessionCommand(
      caller,
      seq,
      COMMAND_CODE_LIBRARY_GET_SEARCH_RESULT,
      (librarySession, controllerInfo) ->
          librarySession.onGetSearchResultOnHandler(controllerInfo, query, page, pageSize, params),
      MediaSessionStub::sendLibraryResultWhenReady);
}
/** Subscribes the calling controller to changes of {@code parentId}; empty ids are ignored. */
@Override
public void subscribe(
    @Nullable IMediaController caller,
    int seq,
    String parentId,
    @Nullable Bundle libraryParamsBundle) {
  if (caller == null) {
    return;
  }
  if (TextUtils.isEmpty(parentId)) {
    Log.w(TAG, "subscribe(): Ignoring empty parentId");
    return;
  }
  @Nullable
  LibraryParams params =
      BundleableUtil.fromNullableBundle(LibraryParams.CREATOR, libraryParamsBundle);
  dispatchSessionTaskWithLibrarySessionCommand(
      caller,
      seq,
      COMMAND_CODE_LIBRARY_SUBSCRIBE,
      (librarySession, controllerInfo) ->
          librarySession.onSubscribeOnHandler(controllerInfo, parentId, params),
      MediaSessionStub::sendLibraryResultWhenReady);
}
/** Unsubscribes the calling controller from changes of {@code parentId}; empty ids are ignored. */
@Override
public void unsubscribe(@Nullable IMediaController caller, int seq, String parentId) {
  if (caller == null) {
    return;
  }
  if (TextUtils.isEmpty(parentId)) {
    Log.w(TAG, "unsubscribe(): Ignoring empty parentId");
    return;
  }
  dispatchSessionTaskWithLibrarySessionCommand(
      caller,
      seq,
      COMMAND_CODE_LIBRARY_UNSUBSCRIBE,
      (librarySession, controllerInfo) ->
          librarySession.onUnsubscribeOnHandler(controllerInfo, parentId),
      MediaSessionStub::sendLibraryResultWhenReady);
}
/** Common interface for code snippets to handle all incoming commands from the controller. */
private interface SessionTask<T, K extends MediaSessionImpl> {
  /**
   * Runs the task against {@code sessionImpl} on behalf of {@code controller} and returns the
   * outcome as a {@code T} (the concrete result type depends on the dispatch site).
   */
  T run(K sessionImpl, ControllerInfo controller);
}
/** Callback invoked with a {@link SessionTask}'s result to deliver it back to the controller. */
private interface PostSessionTask<T> {
  // seq is the transaction sequence number echoed back to the calling controller.
  void run(ControllerInfo controller, int seq, T result);
}
/**
 * {@link ControllerCb} implementation that forwards every session notification to a remote
 * controller over its {@link IMediaController} Binder. Identity (equals/hashCode) is based on the
 * controller's callback binder, so reconnections with the same binder map to the same callback.
 */
/* package */ static final class Controller2Cb implements ControllerCb {
  // Binder proxy to the remote controller; every method below is a one-way IPC notification.
  private final IMediaController iController;
  public Controller2Cb(IMediaController callback) {
    iController = callback;
  }
  /** Returns the remote controller's binder, used as this callback's identity key. */
  public IBinder getCallbackBinder() {
    return iController.asBinder();
  }
  @Override
  public void onSessionResult(int seq, SessionResult result) throws RemoteException {
    iController.onSessionResult(seq, result.toBundle());
  }
  @Override
  public void onLibraryResult(int seq, LibraryResult<?> result) throws RemoteException {
    iController.onLibraryResult(seq, result.toBundle());
  }
  // The exclude* flags trim the bundled PlayerInfo to reduce IPC payload size.
  @Override
  public void onPlayerInfoChanged(
      int seq,
      PlayerInfo playerInfo,
      boolean excludeMediaItems,
      boolean excludeMediaItemsMetadata,
      boolean excludeCues,
      boolean excludeTimeline)
      throws RemoteException {
    iController.onPlayerInfoChanged(
        seq,
        playerInfo.toBundle(
            excludeMediaItems, excludeMediaItemsMetadata, excludeCues, excludeTimeline),
        /* isTimelineExcluded= */ excludeTimeline);
  }
  @Override
  public void setCustomLayout(int seq, List<CommandButton> layout) throws RemoteException {
    iController.onSetCustomLayout(seq, BundleableUtil.toBundleList(layout));
  }
  @Override
  public void onAvailableCommandsChangedFromSession(
      int seq, SessionCommands sessionCommands, Player.Commands playerCommands)
      throws RemoteException {
    iController.onAvailableCommandsChangedFromSession(
        seq, sessionCommands.toBundle(), playerCommands.toBundle());
  }
  @Override
  public void onAvailableCommandsChangedFromPlayer(int seq, Player.Commands availableCommands)
      throws RemoteException {
    iController.onAvailableCommandsChangedFromPlayer(seq, availableCommands.toBundle());
  }
  @Override
  public void sendCustomCommand(int seq, SessionCommand command, Bundle args)
      throws RemoteException {
    iController.onCustomCommand(seq, command.toBundle(), args);
  }
  @SuppressWarnings("nullness:argument") // params can be null.
  @Override
  public void onChildrenChanged(
      int seq, String parentId, int itemCount, @Nullable LibraryParams params)
      throws RemoteException {
    iController.onChildrenChanged(
        seq, parentId, itemCount, BundleableUtil.toNullableBundle(params));
  }
  @SuppressWarnings("nullness:argument") // params can be null.
  @Override
  public void onSearchResultChanged(
      int seq, String query, int itemCount, @Nullable LibraryParams params)
      throws RemoteException {
    iController.onSearchResultChanged(
        seq, query, itemCount, BundleableUtil.toNullableBundle(params));
  }
  @Override
  public void onDisconnected(int seq) throws RemoteException {
    iController.onDisconnected(seq);
  }
  @Override
  public void onPeriodicSessionPositionInfoChanged(
      int seq, SessionPositionInfo sessionPositionInfo) throws RemoteException {
    iController.onPeriodicSessionPositionInfoChanged(seq, sessionPositionInfo.toBundle());
  }
  @Override
  public void onRenderedFirstFrame(int seq) throws RemoteException {
    iController.onRenderedFirstFrame(seq);
  }
  // equals/hashCode delegate to the callback binder so controller identity survives proxy churn.
  @Override
  public int hashCode() {
    return ObjectsCompat.hash(getCallbackBinder());
  }
  @Override
  public boolean equals(@Nullable Object obj) {
    if (this == obj) {
      return true;
    }
    // Exact-class check (not instanceof): the class is final, so subclasses cannot exist anyway.
    if (obj == null || obj.getClass() != Controller2Cb.class) {
      return false;
    }
    Controller2Cb other = (Controller2Cb) obj;
    return Util.areEqual(getCallbackBinder(), other.getCallbackBinder());
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.test.recovery;
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.actor.PoisonPill;
import akka.pattern.Patterns;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.LocalCollectionOutputFormat;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.runtime.messages.TaskManagerMessages;
import org.apache.flink.runtime.minicluster.LocalFlinkMiniCluster;
import org.apache.flink.test.util.TestEnvironment;
import org.apache.flink.util.TestLogger;
import org.junit.Test;
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
import static org.junit.Assert.*;
/**
* This test verifies the behavior of the recovery in the case when a TaskManager
* fails (shut down) in the middle of a job execution.
*
* The test works with multiple in-process task managers. Initially, it starts a JobManager
* and two TaskManagers with 2 slots each. It submits a program with parallelism 4
* and waits until all tasks are brought up (coordination between the test and the tasks
* happens via shared blocking queues). It then starts another TaskManager, which is
* guaranteed to remain empty (all tasks are already deployed) and kills one of
* the original task managers. The recovery should restart the tasks on the new TaskManager.
*/
@SuppressWarnings("serial")
public class TaskManagerFailureRecoveryITCase extends TestLogger {
  /**
   * Starts a 2-TM mini cluster, deploys a parallelism-4 job whose mappers block on a static
   * queue handshake, brings up a third TaskManager, poison-pills the registered TaskManagers,
   * and verifies the job recovers (mappers re-deploy) and finishes without error.
   */
  @Test
  public void testRestartWithFailingTaskManager() {
    final int PARALLELISM = 4;
    LocalFlinkMiniCluster cluster = null;
    ActorSystem additionalSystem = null;
    try {
      Configuration config = new Configuration();
      config.setInteger(ConfigConstants.LOCAL_NUMBER_TASK_MANAGER, 2);
      config.setInteger(ConfigConstants.TASK_MANAGER_NUM_TASK_SLOTS, PARALLELISM);
      config.setLong(TaskManagerOptions.MANAGED_MEMORY_SIZE, 16L);
      // Aggressive heartbeat settings so the killed TM's failure is detected quickly.
      config.setString(ConfigConstants.AKKA_WATCH_HEARTBEAT_INTERVAL, "500 ms");
      config.setString(ConfigConstants.AKKA_WATCH_HEARTBEAT_PAUSE, "20 s");
      config.setInteger(ConfigConstants.AKKA_WATCH_THRESHOLD, 20);
      cluster = new LocalFlinkMiniCluster(config, false);
      cluster.start();
      // for the result
      List<Long> resultCollection = new ArrayList<Long>();
      final ExecutionEnvironment env = new TestEnvironment(cluster, PARALLELISM, false);
      env.setParallelism(PARALLELISM);
      // One restart attempt is enough: exactly one TM failure is injected below.
      env.setRestartStrategy(RestartStrategies.fixedDelayRestart(1, 1000));
      env.getConfig().disableSysoutLogging();
      env.generateSequence(1, 10)
          .map(new FailingMapper<Long>())
          .reduce(new ReduceFunction<Long>() {
            @Override
            public Long reduce(Long value1, Long value2) {
              return value1 + value2;
            }
          })
          .output(new LocalCollectionOutputFormat<Long>(resultCollection));
      // simple reference (atomic does not matter) to pass back an exception from the trigger thread
      final AtomicReference<Throwable> ref = new AtomicReference<Throwable>();
      // trigger the execution from a separate thread, so we are available to temper with the
      // cluster during the execution
      Thread trigger = new Thread("program trigger") {
        @Override
        public void run() {
          try {
            env.execute();
          }
          catch (Throwable t) {
            ref.set(t);
          }
        }
      };
      trigger.setDaemon(true);
      trigger.start();
      // block until all the mappers are actually deployed
      // the mappers in turn are waiting
      for (int i = 0; i < PARALLELISM; i++) {
        FailingMapper.TASK_TO_COORD_QUEUE.take();
      }
      // bring up one more task manager and wait for it to appear
      {
        additionalSystem = cluster.startTaskManagerActorSystem(2);
        ActorRef additionalTaskManager = cluster.startTaskManager(2, additionalSystem);
        Object message = TaskManagerMessages.getNotifyWhenRegisteredAtJobManagerMessage();
        Future<Object> future = Patterns.ask(additionalTaskManager, message, 30000);
        try {
          Await.result(future, new FiniteDuration(30000, TimeUnit.MILLISECONDS));
        }
        catch (TimeoutException e) {
          fail ("The additional TaskManager did not come up within 30 seconds");
        }
      }
      // kill the two other TaskManagers
      // NOTE(review): this poison-pills every TM in the cluster's list — confirm the newly
      // started TM (index 2) is not included in getTaskManagersAsJava() at this point.
      for (ActorRef tm : cluster.getTaskManagersAsJava()) {
        tm.tell(PoisonPill.getInstance(), null);
      }
      // wait for the next set of mappers (the recovery ones) to come online
      for (int i = 0; i < PARALLELISM; i++) {
        FailingMapper.TASK_TO_COORD_QUEUE.take();
      }
      // tell the mappers that they may continue this time
      for (int i = 0; i < PARALLELISM; i++) {
        FailingMapper.COORD_TO_TASK_QUEUE.add(new Object());
      }
      // wait for the program to finish
      trigger.join();
      if (ref.get() != null) {
        Throwable t = ref.get();
        t.printStackTrace();
        fail("Program execution caused an exception: " + t.getMessage());
      }
    }
    catch (Exception e) {
      e.printStackTrace();
      fail(e.getMessage());
    }
    finally {
      if (additionalSystem != null) {
        additionalSystem.shutdown();
      }
      if (cluster != null) {
        cluster.stop();
      }
    }
  }
  /**
   * Identity mapper used as a rendezvous point: open() announces the task's deployment via
   * TASK_TO_COORD_QUEUE and then blocks until the test releases it via COORD_TO_TASK_QUEUE.
   * The static queues work because all TaskManagers run in-process with the test.
   */
  private static class FailingMapper<T> extends RichMapFunction<T, T> {
    private static final long serialVersionUID = 4435412404173331157L;
    private static final BlockingQueue<Object> TASK_TO_COORD_QUEUE = new LinkedBlockingQueue<Object>();
    private static final BlockingQueue<Object> COORD_TO_TASK_QUEUE = new LinkedBlockingQueue<Object>();
    @Override
    public void open(Configuration parameters) throws Exception {
      TASK_TO_COORD_QUEUE.add(new Object());
      COORD_TO_TASK_QUEUE.take();
    }
    @Override
    public T map(T value) throws Exception {
      return value;
    }
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.nested;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.support.QueryInnerHitBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.hamcrest.Matchers.*;
public class SimpleNestedIT extends ElasticsearchIntegrationTest {
/**
 * End-to-end check of nested-document basics: mapping creation, indexing docs with nested
 * arrays, verifying nested docs are hidden from flat queries but matched by nested queries
 * (including cross-object AND semantics), and that deletes remove the hidden nested docs too.
 */
@Test
public void simpleNested() throws Exception {
  assertAcked(prepareCreate("test").addMapping("type1", "nested1", "type=nested").addMapping("type2", "nested1", "type=nested"));
  ensureGreen();
  // check on no data, see it works
  SearchResponse searchResponse = client().prepareSearch("test").setQuery(termQuery("_all", "n_value1_1")).execute().actionGet();
  assertThat(searchResponse.getHits().totalHits(), equalTo(0l));
  searchResponse = client().prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).execute().actionGet();
  assertThat(searchResponse.getHits().totalHits(), equalTo(0l));
  // One root doc with two nested objects -> 3 Lucene docs total (root + 2 hidden nested).
  client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
      .field("field1", "value1")
      .startArray("nested1")
      .startObject()
      .field("n_field1", "n_value1_1")
      .field("n_field2", "n_value2_1")
      .endObject()
      .startObject()
      .field("n_field1", "n_value1_2")
      .field("n_field2", "n_value2_2")
      .endObject()
      .endArray()
      .endObject()).execute().actionGet();
  waitForRelocation(ClusterHealthStatus.GREEN);
  // flush, so we fetch it from the index (as see that we filter nested docs)
  flush();
  GetResponse getResponse = client().prepareGet("test", "type1", "1").get();
  assertThat(getResponse.isExists(), equalTo(true));
  assertThat(getResponse.getSourceAsBytes(), notNullValue());
  // check the numDocs
  assertDocumentCount("test", 3);
  // check that _all is working on nested docs
  searchResponse = client().prepareSearch("test").setQuery(termQuery("_all", "n_value1_1")).execute().actionGet();
  assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
  // Flat (non-nested) query on a nested field must not see the hidden nested docs.
  searchResponse = client().prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).execute().actionGet();
  assertThat(searchResponse.getHits().totalHits(), equalTo(0l));
  // search for something that matches the nested doc, and see that we don't find the nested doc
  searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get();
  assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
  searchResponse = client().prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).get();
  assertThat(searchResponse.getHits().totalHits(), equalTo(0l));
  // now, do a nested query
  searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"))).get();
  assertNoFailures(searchResponse);
  assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
  searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"))).setSearchType(SearchType.DFS_QUERY_THEN_FETCH).get();
  assertNoFailures(searchResponse);
  assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
  // add another doc, one that would match if it was not nested...
  // (its field values pair up only across different nested objects)
  client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject()
      .field("field1", "value1")
      .startArray("nested1")
      .startObject()
      .field("n_field1", "n_value1_1")
      .field("n_field2", "n_value2_2")
      .endObject()
      .startObject()
      .field("n_field1", "n_value1_2")
      .field("n_field2", "n_value2_1")
      .endObject()
      .endArray()
      .endObject()).execute().actionGet();
  waitForRelocation(ClusterHealthStatus.GREEN);
  // flush, so we fetch it from the index (as see that we filter nested docs)
  flush();
  assertDocumentCount("test", 6);
  // The must-clause pair only co-occurs within a single nested object of doc 1.
  searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1",
      boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1")))).execute().actionGet();
  assertNoFailures(searchResponse);
  assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
  // filter
  searchResponse = client().prepareSearch("test").setQuery(filteredQuery(matchAllQuery(), nestedQuery("nested1",
      boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1"))))).execute().actionGet();
  assertNoFailures(searchResponse);
  assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
  // check with type prefix
  searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1",
      boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1")))).execute().actionGet();
  assertNoFailures(searchResponse);
  assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
  // check delete, so all is gone...
  DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "2").execute().actionGet();
  assertThat(deleteResponse.isFound(), equalTo(true));
  // flush, so we fetch it from the index (as see that we filter nested docs)
  flush();
  // Deleting the root also removes its 3 hidden nested Lucene docs (6 -> 3).
  assertDocumentCount("test", 3);
  searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"))).execute().actionGet();
  assertNoFailures(searchResponse);
  assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
  searchResponse = client().prepareSearch("test").setTypes("type1", "type2").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"))).execute().actionGet();
  assertNoFailures(searchResponse);
  assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
}
@Test
public void multiNested() throws Exception {
// Two levels of nesting: nested1 objects each containing nested2 objects.
assertAcked(prepareCreate("test")
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("nested1")
.field("type", "nested").startObject("properties")
.startObject("nested2").field("type", "nested").endObject()
.endObject().endObject()
.endObject().endObject().endObject()));
ensureGreen();
// Single root doc: nested1 {field1=1 -> nested2 {2, 3}} and {field1=4 -> nested2 {5, 6}}.
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder()
.startObject()
.field("field", "value")
.startArray("nested1")
.startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject()
.startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject()
.endArray()
.endObject()).get();
// Flush so the search reads from the index and nested docs are really filtered out.
flush();
GetResponse getResponse = client().prepareGet("test", "type1", "1").get();
assertThat(getResponse.isExists(), equalTo(true));
waitForRelocation(ClusterHealthStatus.GREEN);
// 1 root + 2 nested1 + 4 nested2 = 7 Lucene documents.
assertDocumentCount("test", 7);
// Single-level nested match on the outer object.
SearchResponse response = client().prepareSearch("test").setQuery(nestedQuery("nested1",
termQuery("nested1.field1", "1"))).get();
assertNoFailures(response);
assertThat(response.getHits().totalHits(), equalTo(1l));
// Deep nested match on the inner object.
response = client().prepareSearch("test").setQuery(nestedQuery("nested1.nested2",
termQuery("nested1.nested2.field2", "2"))).get();
assertNoFailures(response);
assertThat(response.getHits().totalHits(), equalTo(1l));
// Combined constraints: a hit requires field1 and field2 to co-occur inside the
// same nested1 entry, so only the pairings that exist in the document match.
assertMultiNestedHitCount("1", "2", 1l);
assertMultiNestedHitCount("1", "3", 1l);
assertMultiNestedHitCount("1", "4", 0l);
assertMultiNestedHitCount("1", "5", 0l);
assertMultiNestedHitCount("4", "5", 1l);
assertMultiNestedHitCount("4", "2", 0l);
}

/**
 * Runs a nested query requiring {@code nested1.field1 == field1Value} together with a deeper
 * nested query {@code nested1.nested2.field2 == field2Value} and asserts the total hit count.
 */
private void assertMultiNestedHitCount(String field1Value, String field2Value, long expectedHits) {
SearchResponse response = client().prepareSearch("test").setQuery(nestedQuery("nested1",
boolQuery().must(termQuery("nested1.field1", field1Value)).must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", field2Value))))).get();
assertNoFailures(response);
assertThat(response.getHits().totalHits(), equalTo(expectedHits));
}
@Test
// When IncludeNestedDocsQuery is wrapped in a FilteredQuery then an infinite loop occurs b/c of a bug in IncludeNestedDocsQuery#advance()
// This IncludeNestedDocsQuery also needs to be aware of the filter from alias
public void testDeleteNestedDocsWithAlias() throws Exception {
// Disable automatic refresh so doc visibility is controlled solely by the explicit
// flush()/refresh() below. (Fixed: the setting key was misspelled
// "index.referesh_interval", which silently made it a no-op.)
assertAcked(prepareCreate("test")
.setSettings(settingsBuilder().put(indexSettings()).put("index.refresh_interval", -1).build())
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1")
.field("type", "string")
.endObject()
.startObject("nested1")
.field("type", "nested")
.endObject()
.endObject().endObject().endObject()));
// Filtered alias that only matches documents with field1 == value1 (i.e. doc "1").
client().admin().indices().prepareAliases()
.addAlias("test", "alias1", QueryBuilders.termQuery("field1", "value1")).execute().actionGet();
ensureGreen();
// Two root documents, each carrying two nested docs.
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
.field("field1", "value1")
.startArray("nested1")
.startObject()
.field("n_field1", "n_value1_1")
.field("n_field2", "n_value2_1")
.endObject()
.startObject()
.field("n_field1", "n_value1_2")
.field("n_field2", "n_value2_2")
.endObject()
.endArray()
.endObject()).execute().actionGet();
client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject()
.field("field1", "value2")
.startArray("nested1")
.startObject()
.field("n_field1", "n_value1_1")
.field("n_field2", "n_value2_1")
.endObject()
.startObject()
.field("n_field1", "n_value1_2")
.field("n_field2", "n_value2_2")
.endObject()
.endArray()
.endObject()).execute().actionGet();
flush();
refresh();
// 2 root docs + 2 x 2 nested docs = 6 Lucene documents in total.
assertDocumentCount("test", 6);
}
@Test
public void testExplain() throws Exception {
// Mapping with a single nested object field.
assertAcked(prepareCreate("test")
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("nested1")
.field("type", "nested")
.endObject()
.endObject().endObject().endObject()));
ensureGreen();
// Both nested docs match the query below, so with score mode "total" the parent
// score is the sum of the two matching child scores (2 x 1.0 = 2.0).
client().prepareIndex("test", "type1", "1")
.setSource(jsonBuilder().startObject()
.field("field1", "value1")
.startArray("nested1")
.startObject()
.field("n_field1", "n_value1")
.endObject()
.startObject()
.field("n_field1", "n_value1")
.endObject()
.endArray()
.endObject())
.setRefresh(true)
.get();
// Request an explanation along with the nested query.
SearchResponse response = client().prepareSearch("test")
.setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1")).scoreMode("total"))
.setExplain(true)
.get();
assertNoFailures(response);
assertThat(response.getHits().totalHits(), equalTo(1l));
Explanation explanation = response.getHits().hits()[0].explanation();
assertThat(explanation.getValue(), equalTo(2f));
assertThat(explanation.toString(), startsWith("2.0 = sum of:\n 2.0 = Score based on child doc range from 0 to 1\n"));
// TODO: Enable when changes from BlockJoinQuery#explain are added to Lucene (Most likely version 4.2)
// assertThat(explanation.getDetails().length, equalTo(2));
// assertThat(explanation.getDetails()[0].getValue(), equalTo(1f));
// assertThat(explanation.getDetails()[0].getDescription(), equalTo("Child[0]"));
// assertThat(explanation.getDetails()[1].getValue(), equalTo(1f));
// assertThat(explanation.getDetails()[1].getDescription(), equalTo("Child[1]"));
}
@Test
public void testSimpleNestedSorting() throws Exception {
// Auto-refresh disabled; the test refreshes explicitly after indexing.
// The nested object "nested1" holds a stored long field "field1" used for sorting.
assertAcked(prepareCreate("test")
.setSettings(settingsBuilder()
.put(indexSettings())
.put("index.refresh_interval", -1))
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("nested1")
.field("type", "nested")
.startObject("properties")
.startObject("field1")
.field("type", "long")
.field("store", "yes")
.endObject()
.endObject()
.endObject()
.endObject().endObject().endObject()));
ensureGreen();
// Nested "field1" values per document: doc 1 -> {5, 4}, doc 2 -> {1, 2}, doc 3 -> {3, 4}.
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
.field("field1", 1)
.startArray("nested1")
.startObject()
.field("field1", 5)
.endObject()
.startObject()
.field("field1", 4)
.endObject()
.endArray()
.endObject()).execute().actionGet();
client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject()
.field("field1", 2)
.startArray("nested1")
.startObject()
.field("field1", 1)
.endObject()
.startObject()
.field("field1", 2)
.endObject()
.endArray()
.endObject()).execute().actionGet();
client().prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject()
.field("field1", 3)
.startArray("nested1")
.startObject()
.field("field1", 3)
.endObject()
.startObject()
.field("field1", 4)
.endObject()
.endArray()
.endObject()).execute().actionGet();
refresh();
// ASC field sort uses the minimum nested value per root doc: 1 (doc 2) < 3 (doc 3) < 4 (doc 1).
SearchResponse searchResponse = client().prepareSearch("test")
.setTypes("type1")
.setQuery(QueryBuilders.matchAllQuery())
.addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.ASC))
.execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().hits()[0].id(), equalTo("2"));
assertThat(searchResponse.getHits().hits()[0].sortValues()[0].toString(), equalTo("1"));
assertThat(searchResponse.getHits().hits()[1].id(), equalTo("3"));
assertThat(searchResponse.getHits().hits()[1].sortValues()[0].toString(), equalTo("3"));
assertThat(searchResponse.getHits().hits()[2].id(), equalTo("1"));
assertThat(searchResponse.getHits().hits()[2].sortValues()[0].toString(), equalTo("4"));
// DESC field sort uses the maximum nested value per root doc: 5 (doc 1) > 4 (doc 3) > 2 (doc 2).
searchResponse = client().prepareSearch("test")
.setTypes("type1")
.setQuery(QueryBuilders.matchAllQuery())
.addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.DESC))
.execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().hits()[0].id(), equalTo("1"));
assertThat(searchResponse.getHits().hits()[0].sortValues()[0].toString(), equalTo("5"));
assertThat(searchResponse.getHits().hits()[1].id(), equalTo("3"));
assertThat(searchResponse.getHits().hits()[1].sortValues()[0].toString(), equalTo("4"));
assertThat(searchResponse.getHits().hits()[2].id(), equalTo("2"));
assertThat(searchResponse.getHits().hits()[2].sortValues()[0].toString(), equalTo("2"));
// Numeric script sort (value + 1), DESC: expected value is max nested value per doc, plus one.
searchResponse = client().prepareSearch("test")
.setTypes("type1")
.setQuery(QueryBuilders.matchAllQuery())
.addSort(
SortBuilders.scriptSort(new Script("_fields['nested1.field1'].value + 1"), "number").setNestedPath("nested1")
.order(SortOrder.DESC)).execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().hits()[0].id(), equalTo("1"));
assertThat(searchResponse.getHits().hits()[0].sortValues()[0].toString(), equalTo("6.0"));
assertThat(searchResponse.getHits().hits()[1].id(), equalTo("3"));
assertThat(searchResponse.getHits().hits()[1].sortValues()[0].toString(), equalTo("5.0"));
assertThat(searchResponse.getHits().hits()[2].id(), equalTo("2"));
assertThat(searchResponse.getHits().hits()[2].sortValues()[0].toString(), equalTo("3.0"));
// Same script with mode "sum": both nested values get incremented, i.e. (a + 1) + (b + 1).
searchResponse = client()
.prepareSearch("test")
.setTypes("type1")
.setQuery(QueryBuilders.matchAllQuery())
.addSort(
SortBuilders.scriptSort(new Script("_fields['nested1.field1'].value + 1"), "number").setNestedPath("nested1")
.sortMode("sum").order(SortOrder.DESC)).execute().actionGet();
// B/c of sum it is actually +2
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().hits()[0].id(), equalTo("1"));
assertThat(searchResponse.getHits().hits()[0].sortValues()[0].toString(), equalTo("11.0"));
assertThat(searchResponse.getHits().hits()[1].id(), equalTo("3"));
assertThat(searchResponse.getHits().hits()[1].sortValues()[0].toString(), equalTo("9.0"));
assertThat(searchResponse.getHits().hits()[2].id(), equalTo("2"));
assertThat(searchResponse.getHits().hits()[2].sortValues()[0].toString(), equalTo("5.0"));
// Script sort restricted by a nested filter to values in [1, 3]; doc 1 has no nested doc
// in that range, so under DESC it sorts first with Double.MAX_VALUE as its sort value.
searchResponse = client()
.prepareSearch("test")
.setTypes("type1")
.setQuery(QueryBuilders.matchAllQuery())
.addSort(
SortBuilders.scriptSort(new Script("_fields['nested1.field1'].value"), "number")
.setNestedFilter(rangeQuery("nested1.field1").from(1).to(3)).setNestedPath("nested1").sortMode("avg")
.order(SortOrder.DESC)).execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().hits()[0].id(), equalTo("1"));
assertThat(searchResponse.getHits().hits()[0].sortValues()[0].toString(), equalTo(Double.toString(Double.MAX_VALUE)));
assertThat(searchResponse.getHits().hits()[1].id(), equalTo("3"));
assertThat(searchResponse.getHits().hits()[1].sortValues()[0].toString(), equalTo("3.0"));
assertThat(searchResponse.getHits().hits()[2].id(), equalTo("2"));
assertThat(searchResponse.getHits().hits()[2].sortValues()[0].toString(), equalTo("1.5"));
// String script sort, DESC: lexicographically greatest nested value per doc.
searchResponse = client()
.prepareSearch("test")
.setTypes("type1")
.setQuery(QueryBuilders.matchAllQuery())
.addSort(
SortBuilders.scriptSort(new Script("_fields['nested1.field1'].value"), "string").setNestedPath("nested1")
.order(SortOrder.DESC)).execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().hits()[0].id(), equalTo("1"));
assertThat(searchResponse.getHits().hits()[0].sortValues()[0].toString(), equalTo("5"));
assertThat(searchResponse.getHits().hits()[1].id(), equalTo("3"));
assertThat(searchResponse.getHits().hits()[1].sortValues()[0].toString(), equalTo("4"));
assertThat(searchResponse.getHits().hits()[2].id(), equalTo("2"));
assertThat(searchResponse.getHits().hits()[2].sortValues()[0].toString(), equalTo("2"));
// String script sort, ASC: lexicographically smallest nested value per doc.
searchResponse = client()
.prepareSearch("test")
.setTypes("type1")
.setQuery(QueryBuilders.matchAllQuery())
.addSort(
SortBuilders.scriptSort(new Script("_fields['nested1.field1'].value"), "string").setNestedPath("nested1")
.order(SortOrder.ASC)).execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().hits()[0].id(), equalTo("2"));
assertThat(searchResponse.getHits().hits()[0].sortValues()[0].toString(), equalTo("1"));
assertThat(searchResponse.getHits().hits()[1].id(), equalTo("3"));
assertThat(searchResponse.getHits().hits()[1].sortValues()[0].toString(), equalTo("3"));
assertThat(searchResponse.getHits().hits()[2].id(), equalTo("1"));
assertThat(searchResponse.getHits().hits()[2].sortValues()[0].toString(), equalTo("4"));
// Sort mode "sum" is not defined for string values and must be rejected.
try {
client().prepareSearch("test")
.setTypes("type1")
.setQuery(QueryBuilders.matchAllQuery())
.addSort(
SortBuilders.scriptSort(new Script("_fields['nested1.field1'].value"), "string").setNestedPath("nested1")
.sortMode("sum").order(SortOrder.ASC)).execute().actionGet();
Assert.fail("SearchPhaseExecutionException should have been thrown");
} catch (SearchPhaseExecutionException e) {
assertThat(e.toString(), containsString("type [string] doesn't support mode [SUM]"));
}
}
@Test
public void testSimpleNestedSorting_withNestedFilterMissing() throws Exception {
// Disable automatic refresh; the test refreshes explicitly. (Fixed: the setting key was
// misspelled "index.referesh_interval", which silently made it a no-op.)
assertAcked(prepareCreate("test")
.setSettings(settingsBuilder()
.put(indexSettings())
.put("index.refresh_interval", -1))
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("nested1")
.field("type", "nested")
.startObject("properties")
.startObject("field1")
.field("type", "long")
.endObject()
.startObject("field2")
.field("type", "boolean")
.endObject()
.endObject()
.endObject()
.endObject().endObject().endObject()));
ensureGreen();
// Docs 1 and 2 have nested docs with field2=true, so they match the nested filter below.
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
.field("field1", 1)
.startArray("nested1")
.startObject()
.field("field1", 5)
.field("field2", true)
.endObject()
.startObject()
.field("field1", 4)
.field("field2", true)
.endObject()
.endArray()
.endObject()).execute().actionGet();
client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject()
.field("field1", 2)
.startArray("nested1")
.startObject()
.field("field1", 1)
.field("field2", true)
.endObject()
.startObject()
.field("field1", 2)
.field("field2", true)
.endObject()
.endArray()
.endObject()).execute().actionGet();
// Doc with missing nested docs if nested filter is used
// (all of doc 3's nested docs have field2=false, so the filter matches none of them).
refresh();
client().prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject()
.field("field1", 3)
.startArray("nested1")
.startObject()
.field("field1", 3)
.field("field2", false)
.endObject()
.startObject()
.field("field1", 4)
.field("field2", false)
.endObject()
.endArray()
.endObject()).execute().actionGet();
refresh();
// ASC: doc 3 has no nested doc matching the filter, so it uses the missing value 10 and sorts last.
SearchRequestBuilder searchRequestBuilder = client().prepareSearch("test").setTypes("type1")
.setQuery(QueryBuilders.matchAllQuery())
.addSort(SortBuilders.fieldSort("nested1.field1").setNestedFilter(termQuery("nested1.field2", true)).missing(10).order(SortOrder.ASC));
if (randomBoolean()) {
searchRequestBuilder.setScroll("10m");
}
SearchResponse searchResponse = searchRequestBuilder.get();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().hits()[0].id(), equalTo("2"));
assertThat(searchResponse.getHits().hits()[0].sortValues()[0].toString(), equalTo("1"));
assertThat(searchResponse.getHits().hits()[1].id(), equalTo("1"));
assertThat(searchResponse.getHits().hits()[1].sortValues()[0].toString(), equalTo("4"));
assertThat(searchResponse.getHits().hits()[2].id(), equalTo("3"));
assertThat(searchResponse.getHits().hits()[2].sortValues()[0].toString(), equalTo("10"));
// DESC: the missing value 10 is now the greatest sort key, so doc 3 sorts first.
searchRequestBuilder = client().prepareSearch("test").setTypes("type1").setQuery(QueryBuilders.matchAllQuery())
.addSort(SortBuilders.fieldSort("nested1.field1").setNestedFilter(termQuery("nested1.field2", true)).missing(10).order(SortOrder.DESC));
if (randomBoolean()) {
searchRequestBuilder.setScroll("10m");
}
searchResponse = searchRequestBuilder.get();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().hits()[0].id(), equalTo("3"));
assertThat(searchResponse.getHits().hits()[0].sortValues()[0].toString(), equalTo("10"));
assertThat(searchResponse.getHits().hits()[1].id(), equalTo("1"));
assertThat(searchResponse.getHits().hits()[1].sortValues()[0].toString(), equalTo("5"));
assertThat(searchResponse.getHits().hits()[2].id(), equalTo("2"));
assertThat(searchResponse.getHits().hits()[2].sortValues()[0].toString(), equalTo("2"));
// Clean up any scroll contexts the randomized scrolling may have opened.
client().prepareClearScroll().addScrollId("_all").get();
}
@Test
public void testSortNestedWithNestedFilter() throws Exception {
// Two levels of nested objects: "parent" (parent_values) containing "child" (child_values).
// The documents also carry boolean "filter" marker fields (mapped dynamically) used by the
// nested-filter variants of the sorts below.
assertAcked(prepareCreate("test")
.addMapping("type1", XContentFactory.jsonBuilder().startObject()
.startObject("type1")
.startObject("properties")
.startObject("grand_parent_values").field("type", "long").endObject()
.startObject("parent").field("type", "nested")
.startObject("properties")
.startObject("parent_values").field("type", "long").endObject()
.startObject("child").field("type", "nested")
.startObject("properties")
.startObject("child_values").field("type", "long").endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()));
ensureGreen();
// Doc 1 child_values: {1, 6, -1, 5} -> sum: 11
client().prepareIndex("test", "type1", Integer.toString(1)).setSource(jsonBuilder().startObject()
.field("grand_parent_values", 1l)
.startObject("parent")
.field("filter", false)
.field("parent_values", 1l)
.startObject("child")
.field("filter", true)
.field("child_values", 1l)
.startObject("child_obj")
.field("value", 1l)
.endObject()
.endObject()
.startObject("child")
.field("filter", false)
.field("child_values", 6l)
.endObject()
.endObject()
.startObject("parent")
.field("filter", true)
.field("parent_values", 2l)
.startObject("child")
.field("filter", false)
.field("child_values", -1l)
.endObject()
.startObject("child")
.field("filter", false)
.field("child_values", 5l)
.endObject()
.endObject()
.endObject()).execute().actionGet();
// Doc 2 child_values: {2, 4, -2, 3} -> sum: 7
client().prepareIndex("test", "type1", Integer.toString(2)).setSource(jsonBuilder().startObject()
.field("grand_parent_values", 2l)
.startObject("parent")
.field("filter", false)
.field("parent_values", 2l)
.startObject("child")
.field("filter", true)
.field("child_values", 2l)
.startObject("child_obj")
.field("value", 2l)
.endObject()
.endObject()
.startObject("child")
.field("filter", false)
.field("child_values", 4l)
.endObject()
.endObject()
.startObject("parent")
.field("parent_values", 3l)
.field("filter", true)
.startObject("child")
.field("child_values", -2l)
.field("filter", false)
.endObject()
.startObject("child")
.field("filter", false)
.field("child_values", 3l)
.endObject()
.endObject()
.endObject()).execute().actionGet();
// Doc 3 child_values: {3, 1, -3, 1} -> sum: 2
client().prepareIndex("test", "type1", Integer.toString(3)).setSource(jsonBuilder().startObject()
.field("grand_parent_values", 3l)
.startObject("parent")
.field("parent_values", 3l)
.field("filter", false)
.startObject("child")
.field("filter", true)
.field("child_values", 3l)
.startObject("child_obj")
.field("value", 3l)
.endObject()
.endObject()
.startObject("child")
.field("filter", false)
.field("child_values", 1l)
.endObject()
.endObject()
.startObject("parent")
.field("parent_values", 4l)
.field("filter", true)
.startObject("child")
.field("filter", false)
.field("child_values", -3l)
.endObject()
.startObject("child")
.field("filter", false)
.field("child_values", 1l)
.endObject()
.endObject()
.endObject()).execute().actionGet();
refresh();
// Without nested filter
// ASC picks each doc's minimum child value: -3 (doc 3) < -2 (doc 2) < -1 (doc 1).
SearchResponse searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort(
SortBuilders.fieldSort("parent.child.child_values")
.setNestedPath("parent.child")
.order(SortOrder.ASC)
)
.execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().getHits().length, equalTo(3));
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("3"));
assertThat(searchResponse.getHits().getHits()[0].sortValues()[0].toString(), equalTo("-3"));
assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[1].sortValues()[0].toString(), equalTo("-2"));
assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[2].sortValues()[0].toString(), equalTo("-1"));
// With nested filter
// Only one child per doc has filter=true, so each doc sorts by that single child's value.
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort(
SortBuilders.fieldSort("parent.child.child_values")
.setNestedPath("parent.child")
.setNestedFilter(QueryBuilders.termQuery("parent.child.filter", true))
.order(SortOrder.ASC)
)
.execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().getHits().length, equalTo(3));
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[0].sortValues()[0].toString(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[1].sortValues()[0].toString(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3"));
assertThat(searchResponse.getHits().getHits()[2].sortValues()[0].toString(), equalTo("3"));
// Nested path should be automatically detected, expect same results as above search request
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort(
SortBuilders.fieldSort("parent.child.child_values")
.setNestedFilter(QueryBuilders.termQuery("parent.child.filter", true))
.order(SortOrder.ASC)
)
.execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().getHits().length, equalTo(3));
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[0].sortValues()[0].toString(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[1].sortValues()[0].toString(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3"));
assertThat(searchResponse.getHits().getHits()[2].sortValues()[0].toString(), equalTo("3"));
// Sort on a parent-level field while the nested path points at the child level,
// filtered on the parent-level "filter" flag.
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort(
SortBuilders.fieldSort("parent.parent_values")
.setNestedPath("parent.child")
.setNestedFilter(QueryBuilders.termQuery("parent.filter", false))
.order(SortOrder.ASC)
)
.execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().getHits().length, equalTo(3));
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[0].sortValues()[0].toString(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[1].sortValues()[0].toString(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3"));
assertThat(searchResponse.getHits().getHits()[2].sortValues()[0].toString(), equalTo("3"));
// Child-level sort with a parent-level filter; only the hit counts are asserted here
// (see the TODO below for the value assertions).
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort(
SortBuilders.fieldSort("parent.child.child_values")
.setNestedPath("parent.child")
.setNestedFilter(QueryBuilders.termQuery("parent.filter", false))
.order(SortOrder.ASC)
)
.execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().getHits().length, equalTo(3));
// TODO: If we expose ToChildBlockJoinQuery we can filter sort values based on a higher level nested objects
// assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("3"));
// assertThat(searchResponse.getHits().getHits()[0].sortValues()[0].toString(), equalTo("-3"));
// assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2"));
// assertThat(searchResponse.getHits().getHits()[1].sortValues()[0].toString(), equalTo("-2"));
// assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("1"));
// assertThat(searchResponse.getHits().getHits()[2].sortValues()[0].toString(), equalTo("-1"));
// Check if closest nested type is resolved
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort(
SortBuilders.fieldSort("parent.child.child_obj.value")
.setNestedFilter(QueryBuilders.termQuery("parent.child.filter", true))
.order(SortOrder.ASC)
)
.execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().getHits().length, equalTo(3));
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[0].sortValues()[0].toString(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[1].sortValues()[0].toString(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3"));
assertThat(searchResponse.getHits().getHits()[2].sortValues()[0].toString(), equalTo("3"));
// Sort mode: sum
// Expected per-doc sums (see the "sum:" comments at indexing): doc 3 -> 2, doc 2 -> 7, doc 1 -> 11.
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort(
SortBuilders.fieldSort("parent.child.child_values")
.setNestedPath("parent.child")
.sortMode("sum")
.order(SortOrder.ASC)
)
.execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().getHits().length, equalTo(3));
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("3"));
assertThat(searchResponse.getHits().getHits()[0].sortValues()[0].toString(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[1].sortValues()[0].toString(), equalTo("7"));
assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[2].sortValues()[0].toString(), equalTo("11"));
// Same sums in descending order.
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort(
SortBuilders.fieldSort("parent.child.child_values")
.setNestedPath("parent.child")
.sortMode("sum")
.order(SortOrder.DESC)
)
.execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().getHits().length, equalTo(3));
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[0].sortValues()[0].toString(), equalTo("11"));
assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[1].sortValues()[0].toString(), equalTo("7"));
assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3"));
assertThat(searchResponse.getHits().getHits()[2].sortValues()[0].toString(), equalTo("2"));
// Sort mode: sum with filter
// Only the single filter=true child per doc contributes to the sum.
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort(
SortBuilders.fieldSort("parent.child.child_values")
.setNestedPath("parent.child")
.setNestedFilter(QueryBuilders.termQuery("parent.child.filter", true))
.sortMode("sum")
.order(SortOrder.ASC)
)
.execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().getHits().length, equalTo(3));
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[0].sortValues()[0].toString(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[1].sortValues()[0].toString(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3"));
assertThat(searchResponse.getHits().getHits()[2].sortValues()[0].toString(), equalTo("3"));
// Sort mode: avg
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort(
SortBuilders.fieldSort("parent.child.child_values")
.setNestedPath("parent.child")
.sortMode("avg")
.order(SortOrder.ASC)
)
.execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().getHits().length, equalTo(3));
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("3"));
assertThat(searchResponse.getHits().getHits()[0].sortValues()[0].toString(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[1].sortValues()[0].toString(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[2].sortValues()[0].toString(), equalTo("3"));
// Average in descending order.
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort(
SortBuilders.fieldSort("parent.child.child_values")
.setNestedPath("parent.child")
.sortMode("avg")
.order(SortOrder.DESC)
)
.execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().getHits().length, equalTo(3));
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[0].sortValues()[0].toString(), equalTo("3"));
assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[1].sortValues()[0].toString(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3"));
assertThat(searchResponse.getHits().getHits()[2].sortValues()[0].toString(), equalTo("1"));
// Sort mode: avg with filter
// Averaging over the single filter=true child reduces to that child's value.
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort(
SortBuilders.fieldSort("parent.child.child_values")
.setNestedPath("parent.child")
.setNestedFilter(QueryBuilders.termQuery("parent.child.filter", true))
.sortMode("avg")
.order(SortOrder.ASC)
)
.execute().actionGet();
assertHitCount(searchResponse, 3);
assertThat(searchResponse.getHits().getHits().length, equalTo(3));
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[0].sortValues()[0].toString(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[1].sortValues()[0].toString(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3"));
assertThat(searchResponse.getHits().getHits()[2].sortValues()[0].toString(), equalTo("3"));
}
@Test
// https://github.com/elasticsearch/elasticsearch/issues/9305
public void testNestedSortingWithNestedFilterAsFilter() throws Exception {
// Two-level nested mapping: users (first/last) each holding nested workstations (stationid/phoneid).
assertAcked(prepareCreate("test").addMapping("type", jsonBuilder().startObject().startObject("properties")
.startObject("officelocation").field("type", "string").endObject()
.startObject("users")
.field("type", "nested")
.startObject("properties")
.startObject("first").field("type", "string").endObject()
.startObject("last").field("type", "string").endObject()
.startObject("workstations")
.field("type", "nested")
.startObject("properties")
.startObject("stationid").field("type", "string").endObject()
.startObject("phoneid").field("type", "string").endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject().endObject()));
// Doc 1: users fname1 (s1,s2), fname2 (s3,s4), fname3 (s5,s6).
client().prepareIndex("test", "type", "1").setSource(jsonBuilder().startObject()
.field("officelocation", "gendale")
.startArray("users")
.startObject()
.field("first", "fname1")
.field("last", "lname1")
.startArray("workstations")
.startObject()
.field("stationid", "s1")
.field("phoneid", "p1")
.endObject()
.startObject()
.field("stationid", "s2")
.field("phoneid", "p2")
.endObject()
.endArray()
.endObject()
.startObject()
.field("first", "fname2")
.field("last", "lname2")
.startArray("workstations")
.startObject()
.field("stationid", "s3")
.field("phoneid", "p3")
.endObject()
.startObject()
.field("stationid", "s4")
.field("phoneid", "p4")
.endObject()
.endArray()
.endObject()
.startObject()
.field("first", "fname3")
.field("last", "lname3")
.startArray("workstations")
.startObject()
.field("stationid", "s5")
.field("phoneid", "p5")
.endObject()
.startObject()
.field("stationid", "s6")
.field("phoneid", "p6")
.endObject()
.endArray()
.endObject()
.endArray()
.endObject()).get();
// Doc 2: users fname4 (s1,s2), fname5 (s3,s4), fname1 (s5,s6).
client().prepareIndex("test", "type", "2").setSource(jsonBuilder().startObject()
.field("officelocation", "gendale")
.startArray("users")
.startObject()
.field("first", "fname4")
.field("last", "lname4")
.startArray("workstations")
.startObject()
.field("stationid", "s1")
.field("phoneid", "p1")
.endObject()
.startObject()
.field("stationid", "s2")
.field("phoneid", "p2")
.endObject()
.endArray()
.endObject()
.startObject()
.field("first", "fname5")
.field("last", "lname5")
.startArray("workstations")
.startObject()
.field("stationid", "s3")
.field("phoneid", "p3")
.endObject()
.startObject()
.field("stationid", "s4")
.field("phoneid", "p4")
.endObject()
.endArray()
.endObject()
.startObject()
.field("first", "fname1")
.field("last", "lname1")
.startArray("workstations")
.startObject()
.field("stationid", "s5")
.field("phoneid", "p5")
.endObject()
.startObject()
.field("stationid", "s6")
.field("phoneid", "p6")
.endObject()
.endArray()
.endObject()
.endArray()
.endObject()).get();
refresh();
// First sort key: users.first with no nested filter — both docs contain a user "fname1",
// so this key ties. Second sort key: users.first restricted (via a nested query used as
// nested filter) to users owning workstation s5 — fname3 for doc 1, fname1 for doc 2 —
// which breaks the tie and puts doc 2 first.
SearchResponse searchResponse = client().prepareSearch("test")
.addSort(SortBuilders.fieldSort("users.first")
.order(SortOrder.ASC))
.addSort(SortBuilders.fieldSort("users.first")
.order(SortOrder.ASC)
.setNestedPath("users")
.setNestedFilter(nestedQuery("users.workstations", termQuery("users.workstations.stationid", "s5"))))
.get();
assertNoFailures(searchResponse);
assertHitCount(searchResponse, 2);
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("2"));
assertThat(searchResponse.getHits().getAt(0).sortValues()[0].toString(), equalTo("fname1"));
assertThat(searchResponse.getHits().getAt(0).sortValues()[1].toString(), equalTo("fname1"));
assertThat(searchResponse.getHits().getAt(1).id(), equalTo("1"));
assertThat(searchResponse.getHits().getAt(1).sortValues()[0].toString(), equalTo("fname1"));
assertThat(searchResponse.getHits().getAt(1).sortValues()[1].toString(), equalTo("fname3"));
}
@Test
public void testCheckFixedBitSetCache() throws Exception {
boolean loadFixedBitSeLazily = randomBoolean();
Settings.Builder settingsBuilder = Settings.builder().put(indexSettings())
.put("index.refresh_interval", -1);
if (loadFixedBitSeLazily) {
settingsBuilder.put("index.load_fixed_bitset_filters_eagerly", false);
}
assertAcked(prepareCreate("test")
.setSettings(settingsBuilder)
.addMapping("type")
);
client().prepareIndex("test", "type", "0").setSource("field", "value").get();
client().prepareIndex("test", "type", "1").setSource("field", "value").get();
refresh();
ensureSearchable("test");
// No nested mapping yet, there shouldn't be anything in the fixed bit set cache
ClusterStatsResponse clusterStatsResponse = client().admin().cluster().prepareClusterStats().get();
assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), equalTo(0l));
// Now add nested mapping
assertAcked(
client().admin().indices().preparePutMapping("test").setType("type").setSource("array1", "type=nested")
);
XContentBuilder builder = jsonBuilder().startObject()
.startArray("array1").startObject().field("field1", "value1").endObject().endArray()
.endObject();
// index simple data
client().prepareIndex("test", "type", "2").setSource(builder).get();
client().prepareIndex("test", "type", "3").setSource(builder).get();
client().prepareIndex("test", "type", "4").setSource(builder).get();
client().prepareIndex("test", "type", "5").setSource(builder).get();
client().prepareIndex("test", "type", "6").setSource(builder).get();
refresh();
ensureSearchable("test");
if (loadFixedBitSeLazily) {
clusterStatsResponse = client().admin().cluster().prepareClusterStats().get();
assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), equalTo(0l));
// only when querying with nested the fixed bitsets are loaded
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(nestedQuery("array1", termQuery("array1.field1", "value1")))
.get();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().totalHits(), equalTo(5l));
}
clusterStatsResponse = client().admin().cluster().prepareClusterStats().get();
assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), greaterThan(0l));
assertAcked(client().admin().indices().prepareDelete("test"));
clusterStatsResponse = client().admin().cluster().prepareClusterStats().get();
assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), equalTo(0l));
}
/**
*/
private void assertDocumentCount(String index, long numdocs) {
IndicesStatsResponse stats = admin().indices().prepareStats(index).clear().setDocs(true).get();
assertNoFailures(stats);
assertThat(stats.getIndex(index).getPrimaries().docs.getCount(), is(numdocs));
}
}
| |
package com.cloudrain.derecho.sandbox.chart;
/* ===========================================================
* JFreeChart : a free chart library for the Java(tm) platform
* ===========================================================
*
* (C) Copyright 2000-2004, by Object Refinery Limited and Contributors.
*
* Project Info: http://www.jfree.org/jfreechart/index.html
*
* This library is free software; you can redistribute it and/or modify it under the terms
* of the GNU Lesser General Public License as published by the Free Software Foundation;
* either version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License along with this
* library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307, USA.
*
* [Java is a trademark or registered trademark of Sun Microsystems, Inc.
* in the United States and other countries.]
*
* -------------------------
* StackedBarChartDemo4.java
* -------------------------
* (C) Copyright 2004, by Object Refinery Limited and Contributors.
*
* Original Author: David Gilbert (for Object Refinery Limited);
* Contributor(s): -;
*
* $Id: StackedBarChartDemo4.java,v 1.6 2004/05/12 16:01:58 mungady Exp $
*
* Changes
* -------
* 29-Apr-2004 : Version 1 (DG);
*
*/
import org.jfree.data.category.CategoryDataset;
import org.jfree.ui.ApplicationFrame;
import java.awt.Color;
import java.awt.GradientPaint;
import java.awt.Paint;
import org.jfree.chart.ChartFactory;
import org.jfree.chart.ChartPanel;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.LegendItemCollection;
import org.jfree.chart.axis.SubCategoryAxis;
import org.jfree.chart.plot.CategoryPlot;
import org.jfree.chart.plot.PlotOrientation;
import org.jfree.chart.renderer.category.GroupedStackedBarRenderer;
import org.jfree.data.KeyToGroupMap;
import org.jfree.data.category.DefaultCategoryDataset;
import org.jfree.ui.GradientPaintTransformType;
import org.jfree.ui.RefineryUtilities;
import org.jfree.ui.StandardGradientPaintTransformer;
/**
* A simple demonstration application showing how to create a stacked bar chart
* using data from a {@link CategoryDataset}.
*/
public class MultiStackedBarChartTest extends ApplicationFrame {

    private static final long serialVersionUID = 1L;

    /** Month (column) keys shared by every series. */
    private static final String[] MONTHS = {"Jan 04", "Feb 04", "Mar 04"};

    /** Region suffix of each series inside a product group; also fixes the per-group series order. */
    private static final String[] REGIONS = {"US", "Europe", "Asia", "Middle East"};

    /** Product names; each product becomes one stack group (G1, G2, G3). */
    private static final String[] PRODUCTS = {"Product 1", "Product 2", "Product 3"};

    /** Sample data, indexed as VALUES[product][region][month]. */
    private static final double[][][] VALUES = {
        {{20.3, 27.2, 19.7}, {19.4, 10.9, 18.4}, {16.5, 15.9, 16.1}, {13.2, 14.4, 13.7}},
        {{23.3, 16.2, 28.7}, {12.7, 17.9, 12.6}, {15.4, 21.0, 11.1}, {23.8, 23.4, 19.3}},
        {{11.9, 31.0, 22.7}, {15.3, 14.4, 25.3}, {23.9, 19.0, 10.1}, {13.2, 15.5, 10.1}}
    };

    /**
     * Creates a new demo frame containing a grouped stacked bar chart.
     *
     * @param title  the frame title.
     */
    public MultiStackedBarChartTest(final String title) {
        super(title);
        final CategoryDataset dataset = createDataset();
        final JFreeChart chart = createChart(dataset);
        final ChartPanel chartPanel = new ChartPanel(chart);
        chartPanel.setPreferredSize(new java.awt.Dimension(590, 350));
        setContentPane(chartPanel);
    }

    /**
     * Creates a sample dataset with one series per (product, region) pair and one
     * column per month, in the same order as the {@link #VALUES} table.
     *
     * @return A sample dataset.
     */
    private CategoryDataset createDataset() {
        DefaultCategoryDataset result = new DefaultCategoryDataset();
        for (int p = 0; p < PRODUCTS.length; p++) {
            for (int r = 0; r < REGIONS.length; r++) {
                String seriesKey = seriesKey(p, r);
                for (int m = 0; m < MONTHS.length; m++) {
                    result.addValue(VALUES[p][r][m], seriesKey, MONTHS[m]);
                }
            }
        }
        return result;
    }

    /** Builds the series key for a product/region pair, e.g. "Product 1 (US)". */
    private static String seriesKey(int productIndex, int regionIndex) {
        return PRODUCTS[productIndex] + " (" + REGIONS[regionIndex] + ")";
    }

    /**
     * Creates a stacked bar chart in which each product forms its own stack group
     * and all series of the same region share one gradient paint.
     *
     * @param dataset  the dataset for the chart.
     *
     * @return A sample chart.
     */
    private JFreeChart createChart(final CategoryDataset dataset) {
        final JFreeChart chart = ChartFactory.createStackedBarChart(
            "Stacked Bar Chart Demo 4",  // chart title
            "Category",                  // domain axis label
            "Value",                     // range axis label
            dataset,                     // data
            PlotOrientation.VERTICAL,    // the plot orientation
            true,                        // legend
            true,                        // tooltips
            false                        // urls
        );
        GroupedStackedBarRenderer renderer = new GroupedStackedBarRenderer();

        // Map every series of a product into that product's group (G1, G2, G3).
        KeyToGroupMap map = new KeyToGroupMap("G1");
        for (int p = 0; p < PRODUCTS.length; p++) {
            String group = "G" + (p + 1);
            for (int r = 0; r < REGIONS.length; r++) {
                map.mapKeyToGroup(seriesKey(p, r), group);
            }
        }
        renderer.setSeriesToGroupMap(map);
        renderer.setItemMargin(0.0);

        // One gradient paint per region, reused across all product groups.
        Paint[] regionPaints = {
            new GradientPaint(
                0.0f, 0.0f, new Color(0x22, 0x22, 0xFF), 0.0f, 0.0f, new Color(0x88, 0x88, 0xFF)
            ),
            new GradientPaint(
                0.0f, 0.0f, new Color(0x22, 0xFF, 0x22), 0.0f, 0.0f, new Color(0x88, 0xFF, 0x88)
            ),
            new GradientPaint(
                0.0f, 0.0f, new Color(0xFF, 0x22, 0x22), 0.0f, 0.0f, new Color(0xFF, 0x88, 0x88)
            ),
            new GradientPaint(
                0.0f, 0.0f, new Color(0xFF, 0xFF, 0x22), 0.0f, 0.0f, new Color(0xFF, 0xFF, 0x88)
            )
        };
        // Series index = product * REGIONS.length + region, matching the dataset order.
        for (int p = 0; p < PRODUCTS.length; p++) {
            for (int r = 0; r < regionPaints.length; r++) {
                renderer.setSeriesPaint(p * REGIONS.length + r, regionPaints[r]);
            }
        }
        renderer.setGradientPaintTransformer(
            new StandardGradientPaintTransformer(GradientPaintTransformType.HORIZONTAL)
        );

        SubCategoryAxis domainAxis = new SubCategoryAxis("Product / Month");
        domainAxis.setCategoryMargin(0.05);
        for (String product : PRODUCTS) {
            domainAxis.addSubCategory(product);
        }

        CategoryPlot plot = (CategoryPlot) chart.getPlot();
        plot.setDomainAxis(domainAxis);
        plot.setRenderer(renderer);
        plot.setFixedLegendItems(createLegendItems());
        return chart;
    }

    /**
     * Creates the legend items for the chart. In this case, we set them manually because we
     * only want legend items for a subset of the data series. The collection is currently
     * left empty, which suppresses the per-series legend entirely; add
     * {@code LegendItem}s here to show a reduced legend (e.g. one entry per region).
     *
     * @return The legend items.
     */
    private LegendItemCollection createLegendItems() {
        return new LegendItemCollection();
    }

    /**
     * Starting point for the demonstration application.
     *
     * @param args  ignored.
     */
    public static void main(final String[] args) {
        final MultiStackedBarChartTest demo = new MultiStackedBarChartTest("Stacked Bar Chart Demo 4");
        demo.pack();
        RefineryUtilities.centerFrameOnScreen(demo);
        demo.setVisible(true);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.carbondata.processing.sortandgroupby.sortdata;
import java.io.File;
import java.util.Arrays;
import java.util.List;
import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.carbon.CarbonTableIdentifier;
import org.apache.carbondata.core.carbon.metadata.CarbonMetadata;
import org.apache.carbondata.core.carbon.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.carbon.metadata.schema.table.column.CarbonMeasure;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.carbondata.core.util.DataTypeUtil;
import org.apache.carbondata.processing.newflow.CarbonDataLoadConfiguration;
import org.apache.carbondata.processing.schema.metadata.SortObserver;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
/**
 * Holds all configuration needed by the sort step of a Carbon data load: buffer sizes,
 * intermediate-file merge limits, compression settings, temp-file location and per-column
 * metadata. Instances are built through one of the {@code createSortParameters} factories.
 */
public class SortParameters {

  private static final LogService LOGGER =
      LogServiceFactory.getLogService(SortParameters.class.getName());

  /**
   * directory where intermediate sort temp files are written
   */
  private String tempFileLocation;

  /**
   * number of records buffered in memory before sorting/spilling
   */
  private int sortBufferSize;

  /**
   * measure column count
   */
  private int measureColCount;

  /**
   * dimension column count (excluding complex dimensions)
   */
  private int dimColCount;

  /**
   * complex dimension column count
   */
  private int complexDimColCount;

  /**
   * read buffer size used while merging intermediate files
   */
  private int fileBufferSize;

  /**
   * number of intermediate files after which a merge is triggered
   */
  private int numberOfIntermediateFileToBeMerged;

  /**
   * write buffer size for sort temp files
   */
  private int fileWriteBufferSize;

  /**
   * observer notified of sort progress
   */
  private SortObserver observer;

  /**
   * number of records per compressed chunk in a sort temp file
   */
  private int sortTempFileNoOFRecordsInCompression;

  /**
   * whether sort temp files are written compressed
   */
  private boolean isSortFileCompressionEnabled;

  /**
   * whether data is prefetched during merge
   */
  private boolean prefetch;

  /**
   * prefetch buffer size
   */
  private int bufferSize;

  private String databaseName;

  private String tableName;

  /**
   * per-measure aggregation type characters ('n' by default)
   */
  private char[] aggType;

  /**
   * To know how many columns are of high cardinality.
   */
  private int noDictionaryCount;

  /**
   * partitionID
   */
  private String partitionID;

  /**
   * Id of the load folder
   */
  private String segmentId;

  /**
   * task id, each spark task has a unique id
   */
  private String taskNo;

  /**
   * This will tell whether dimension is dictionary or not.
   */
  private boolean[] noDictionaryDimnesionColumn;

  /**
   * number of cores to use while sorting
   */
  private int numberOfCores;

  public String getTempFileLocation() {
    return tempFileLocation;
  }

  public void setTempFileLocation(String tempFileLocation) {
    this.tempFileLocation = tempFileLocation;
  }

  public int getSortBufferSize() {
    return sortBufferSize;
  }

  public void setSortBufferSize(int sortBufferSize) {
    this.sortBufferSize = sortBufferSize;
  }

  public int getMeasureColCount() {
    return measureColCount;
  }

  public void setMeasureColCount(int measureColCount) {
    this.measureColCount = measureColCount;
  }

  public int getDimColCount() {
    return dimColCount;
  }

  public void setDimColCount(int dimColCount) {
    this.dimColCount = dimColCount;
  }

  public int getComplexDimColCount() {
    return complexDimColCount;
  }

  public void setComplexDimColCount(int complexDimColCount) {
    this.complexDimColCount = complexDimColCount;
  }

  public int getFileBufferSize() {
    return fileBufferSize;
  }

  public void setFileBufferSize(int fileBufferSize) {
    this.fileBufferSize = fileBufferSize;
  }

  public int getNumberOfIntermediateFileToBeMerged() {
    return numberOfIntermediateFileToBeMerged;
  }

  public void setNumberOfIntermediateFileToBeMerged(int numberOfIntermediateFileToBeMerged) {
    this.numberOfIntermediateFileToBeMerged = numberOfIntermediateFileToBeMerged;
  }

  public int getFileWriteBufferSize() {
    return fileWriteBufferSize;
  }

  public void setFileWriteBufferSize(int fileWriteBufferSize) {
    this.fileWriteBufferSize = fileWriteBufferSize;
  }

  public SortObserver getObserver() {
    return observer;
  }

  public void setObserver(SortObserver observer) {
    this.observer = observer;
  }

  public int getSortTempFileNoOFRecordsInCompression() {
    return sortTempFileNoOFRecordsInCompression;
  }

  public void setSortTempFileNoOFRecordsInCompression(int sortTempFileNoOFRecordsInCompression) {
    this.sortTempFileNoOFRecordsInCompression = sortTempFileNoOFRecordsInCompression;
  }

  public boolean isSortFileCompressionEnabled() {
    return isSortFileCompressionEnabled;
  }

  public void setSortFileCompressionEnabled(boolean sortFileCompressionEnabled) {
    isSortFileCompressionEnabled = sortFileCompressionEnabled;
  }

  public boolean isPrefetch() {
    return prefetch;
  }

  public void setPrefetch(boolean prefetch) {
    this.prefetch = prefetch;
  }

  public int getBufferSize() {
    return bufferSize;
  }

  public void setBufferSize(int bufferSize) {
    this.bufferSize = bufferSize;
  }

  public String getDatabaseName() {
    return databaseName;
  }

  public void setDatabaseName(String databaseName) {
    this.databaseName = databaseName;
  }

  public String getTableName() {
    return tableName;
  }

  public void setTableName(String tableName) {
    this.tableName = tableName;
  }

  public char[] getAggType() {
    return aggType;
  }

  public void setAggType(char[] aggType) {
    this.aggType = aggType;
  }

  public int getNoDictionaryCount() {
    return noDictionaryCount;
  }

  public void setNoDictionaryCount(int noDictionaryCount) {
    this.noDictionaryCount = noDictionaryCount;
  }

  public String getPartitionID() {
    return partitionID;
  }

  public void setPartitionID(String partitionID) {
    this.partitionID = partitionID;
  }

  public String getSegmentId() {
    return segmentId;
  }

  public void setSegmentId(String segmentId) {
    this.segmentId = segmentId;
  }

  public String getTaskNo() {
    return taskNo;
  }

  public void setTaskNo(String taskNo) {
    this.taskNo = taskNo;
  }

  public boolean[] getNoDictionaryDimnesionColumn() {
    return noDictionaryDimnesionColumn;
  }

  public void setNoDictionaryDimnesionColumn(boolean[] noDictionaryDimnesionColumn) {
    this.noDictionaryDimnesionColumn = noDictionaryDimnesionColumn;
  }

  public int getNumberOfCores() {
    return numberOfCores;
  }

  public void setNumberOfCores(int numberOfCores) {
    this.numberOfCores = numberOfCores;
  }

  /**
   * Builds sort parameters from a data-load configuration. Table identity and column
   * counts come from {@code configuration}; everything else is read from
   * {@link CarbonProperties}.
   *
   * @param configuration the data load configuration
   * @return fully populated sort parameters
   */
  public static SortParameters createSortParameters(CarbonDataLoadConfiguration configuration) {
    SortParameters parameters = new SortParameters();
    CarbonTableIdentifier tableIdentifier =
        configuration.getTableIdentifier().getCarbonTableIdentifier();
    parameters.setDatabaseName(tableIdentifier.getDatabaseName());
    parameters.setTableName(tableIdentifier.getTableName());
    parameters.setPartitionID(configuration.getPartitionId());
    parameters.setSegmentId(configuration.getSegmentId());
    parameters.setTaskNo(configuration.getTaskNo());
    parameters.setMeasureColCount(configuration.getMeasureCount());
    parameters.setDimColCount(
        configuration.getDimensionCount() - configuration.getComplexDimensionCount());
    parameters.setNoDictionaryCount(configuration.getNoDictionaryCount());
    parameters.setComplexDimColCount(configuration.getComplexDimensionCount());
    parameters.setNoDictionaryDimnesionColumn(
        CarbonDataProcessorUtil.getNoDictionaryMapping(configuration.getDataFields()));
    parameters.setObserver(new SortObserver());
    configureCommonParameters(parameters);
    return parameters;
  }

  /**
   * Builds sort parameters from explicitly supplied table metadata; everything else is
   * read from {@link CarbonProperties}.
   *
   * @param observer unused; a fresh {@link SortObserver} is installed instead
   *     (kept for signature compatibility — NOTE(review): confirm callers don't
   *     expect their observer to be used)
   * @return fully populated sort parameters
   */
  public static SortParameters createSortParameters(String databaseName, String tableName,
      int dimColCount, int complexDimColCount, int measureColCount, SortObserver observer,
      int noDictionaryCount, String partitionID, String segmentId, String taskNo,
      boolean[] noDictionaryColMaping) {
    SortParameters parameters = new SortParameters();
    parameters.setDatabaseName(databaseName);
    parameters.setTableName(tableName);
    parameters.setPartitionID(partitionID);
    parameters.setSegmentId(segmentId);
    parameters.setTaskNo(taskNo);
    parameters.setMeasureColCount(measureColCount);
    parameters.setDimColCount(dimColCount - complexDimColCount);
    parameters.setNoDictionaryCount(noDictionaryCount);
    parameters.setComplexDimColCount(complexDimColCount);
    parameters.setNoDictionaryDimnesionColumn(noDictionaryColMaping);
    parameters.setObserver(new SortObserver());
    configureCommonParameters(parameters);
    return parameters;
  }

  /**
   * Populates the property-driven settings shared by both factories: buffer sizes,
   * intermediate-file merge limit, temp-file location, core count, compression and
   * prefetch settings, and the per-measure aggregation types. Requires database name,
   * table name, task number, partition id, segment id and measure count to be set
   * already on {@code parameters}.
   */
  private static void configureCommonParameters(SortParameters parameters) {
    CarbonProperties carbonProperties = CarbonProperties.getInstance();
    // get sort buffer size
    parameters.setSortBufferSize(Integer.parseInt(carbonProperties
        .getProperty(CarbonCommonConstants.SORT_SIZE,
            CarbonCommonConstants.SORT_SIZE_DEFAULT_VAL)));
    LOGGER.info("Sort size for table: " + parameters.getSortBufferSize());
    // set number of intermedaite file to merge
    parameters.setNumberOfIntermediateFileToBeMerged(Integer.parseInt(carbonProperties
        .getProperty(CarbonCommonConstants.SORT_INTERMEDIATE_FILES_LIMIT,
            CarbonCommonConstants.SORT_INTERMEDIATE_FILES_LIMIT_DEFAULT_VALUE)));
    LOGGER.info("Number of intermediate file to be merged: " + parameters
        .getNumberOfIntermediateFileToBeMerged());
    // get file buffer size
    parameters.setFileBufferSize(CarbonDataProcessorUtil
        .getFileBufferSize(parameters.getNumberOfIntermediateFileToBeMerged(), carbonProperties,
            CarbonCommonConstants.CONSTANT_SIZE_TEN));
    LOGGER.info("File Buffer Size: " + parameters.getFileBufferSize());
    String carbonDataDirectoryPath = CarbonDataProcessorUtil
        .getLocalDataFolderLocation(parameters.getDatabaseName(), parameters.getTableName(),
            parameters.getTaskNo(), parameters.getPartitionID(), parameters.getSegmentId(), false);
    parameters.setTempFileLocation(
        carbonDataDirectoryPath + File.separator + CarbonCommonConstants.SORT_TEMP_FILE_LOCATION);
    LOGGER.info("temp file location" + parameters.getTempFileLocation());
    parameters.setNumberOfCores(readNumberOfCores(carbonProperties));
    parameters.setFileWriteBufferSize(Integer.parseInt(carbonProperties
        .getProperty(CarbonCommonConstants.CARBON_SORT_FILE_WRITE_BUFFER_SIZE,
            CarbonCommonConstants.CARBON_SORT_FILE_WRITE_BUFFER_SIZE_DEFAULT_VALUE)));
    parameters.setSortFileCompressionEnabled(Boolean.parseBoolean(carbonProperties
        .getProperty(CarbonCommonConstants.IS_SORT_TEMP_FILE_COMPRESSION_ENABLED,
            CarbonCommonConstants.IS_SORT_TEMP_FILE_COMPRESSION_ENABLED_DEFAULTVALUE)));
    parameters.setSortTempFileNoOFRecordsInCompression(
        readRecordsPerCompressedChunk(carbonProperties));
    if (parameters.isSortFileCompressionEnabled()) {
      LOGGER.info("Compression will be used for writing the sort temp File");
    }
    parameters.setPrefetch(CarbonCommonConstants.CARBON_PREFETCH_IN_MERGE_VALUE);
    parameters.setBufferSize(CarbonCommonConstants.CARBON_PREFETCH_BUFFERSIZE);
    parameters.setAggType(computeAggTypes(parameters));
  }

  /**
   * Reads the configured loading core count and halves it; falls back to half of... no —
   * falls back to the default core count when the property is not a valid integer.
   */
  private static int readNumberOfCores(CarbonProperties carbonProperties) {
    int numberOfCores;
    try {
      numberOfCores = Integer.parseInt(carbonProperties
          .getProperty(CarbonCommonConstants.NUM_CORES_LOADING,
              CarbonCommonConstants.NUM_CORES_DEFAULT_VAL));
      // only half of the loading cores are given to the sort step
      numberOfCores = numberOfCores / 2;
    } catch (NumberFormatException exc) {
      numberOfCores = Integer.parseInt(CarbonCommonConstants.NUM_CORES_DEFAULT_VAL);
    }
    return numberOfCores;
  }

  /**
   * Reads the number of records per compressed chunk, falling back to the default for
   * non-numeric or non-positive values (both cases are logged).
   */
  private static int readRecordsPerCompressedChunk(CarbonProperties carbonProperties) {
    int sortTempFileNoOFRecordsInCompression;
    try {
      sortTempFileNoOFRecordsInCompression = Integer.parseInt(carbonProperties
          .getProperty(CarbonCommonConstants.SORT_TEMP_FILE_NO_OF_RECORDS_FOR_COMPRESSION,
              CarbonCommonConstants.SORT_TEMP_FILE_NO_OF_RECORD_FOR_COMPRESSION_DEFAULTVALUE));
      if (sortTempFileNoOFRecordsInCompression < 1) {
        LOGGER.error("Invalid value for: "
            + CarbonCommonConstants.SORT_TEMP_FILE_NO_OF_RECORDS_FOR_COMPRESSION
            + ":Only Positive Integer value(greater than zero) is allowed.Default value will "
            + "be used");
        sortTempFileNoOFRecordsInCompression = Integer.parseInt(
            CarbonCommonConstants.SORT_TEMP_FILE_NO_OF_RECORD_FOR_COMPRESSION_DEFAULTVALUE);
      }
    } catch (NumberFormatException e) {
      LOGGER.error(
          "Invalid value for: " + CarbonCommonConstants.SORT_TEMP_FILE_NO_OF_RECORDS_FOR_COMPRESSION
              + ", only Positive Integer value is allowed. Default value will be used");
      sortTempFileNoOFRecordsInCompression = Integer
          .parseInt(CarbonCommonConstants.SORT_TEMP_FILE_NO_OF_RECORD_FOR_COMPRESSION_DEFAULTVALUE);
    }
    return sortTempFileNoOFRecordsInCompression;
  }

  /**
   * Computes the aggregation-type character for every measure of the table, defaulting
   * each entry to 'n' before looking up the measure's data type.
   */
  private static char[] computeAggTypes(SortParameters parameters) {
    char[] aggType = new char[parameters.getMeasureColCount()];
    Arrays.fill(aggType, 'n');
    CarbonTable carbonTable = CarbonMetadata.getInstance().getCarbonTable(
        parameters.getDatabaseName() + CarbonCommonConstants.UNDERSCORE + parameters
            .getTableName());
    List<CarbonMeasure> measures = carbonTable.getMeasureByTableName(parameters.getTableName());
    for (int i = 0; i < aggType.length; i++) {
      aggType[i] = DataTypeUtil.getAggType(measures.get(i).getDataType());
    }
    return aggType;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.dag.history.recovery;
import java.io.IOException;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.service.AbstractService;
import org.apache.tez.common.TezCommonUtils;
import org.apache.tez.dag.api.TezConfiguration;
import org.apache.tez.dag.api.TezConstants;
import org.apache.tez.dag.app.AppContext;
import org.apache.tez.dag.history.DAGHistoryEvent;
import org.apache.tez.dag.history.HistoryEventType;
import org.apache.tez.dag.history.SummaryEvent;
import org.apache.tez.dag.history.events.DAGSubmittedEvent;
import org.apache.tez.dag.records.TezDAGID;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
public class RecoveryService extends AbstractService {
  private static final Logger LOG = LoggerFactory.getLogger(RecoveryService.class);
  // Application context providing the current recovery dir and the clock.
  private final AppContext appContext;

  // Marker directory name created when a fatal recovery-write error occurs.
  public static final String RECOVERY_FATAL_OCCURRED_DIR =
      "RecoveryFatalErrorOccurred";

  /**
   * whether to handle remaining event in the eventqueue when AM is stopped
   */
  @VisibleForTesting
  public static final String TEZ_TEST_RECOVERY_DRAIN_EVENTS_WHEN_STOPPED =
      TezConfiguration.TEZ_PREFIX + "test.recovery.drain_event";

  /**
   * by default handle remaining event when AM is stopped.
   * This should be helpful for recovery
   */
  @VisibleForTesting
  public static final boolean TEZ_TEST_RECOVERY_DRAIN_EVENTS_WHEN_STOPPED_DEFAULT = true;

  // Queue of history events awaiting persistence; consumed by eventHandlingThread.
  private LinkedBlockingQueue<DAGHistoryEvent> eventQueue =
      new LinkedBlockingQueue<DAGHistoryEvent>();
  // DAGs whose recovery data is complete.
  private Set<TezDAGID> completedDAGs = new HashSet<TezDAGID>();
  // DAGs for which recovery logging was skipped.
  private Set<TezDAGID> skippedDAGs = new HashSet<TezDAGID>();
  private Thread eventHandlingThread;
  // Lifecycle flags; stopped is polled by the event-handling loop.
  private AtomicBoolean stopped = new AtomicBoolean(false);
  private AtomicBoolean started = new AtomicBoolean(false);
  // Counters for enqueued vs. processed events (used for progress/diagnostics —
  // NOTE(review): exact usage is outside this view).
  private int eventCounter = 0;
  private int eventsProcessed = 0;
  private final Object lock = new Object();
  private FileSystem recoveryDirFS; // FS where staging dir exists
  Path recoveryPath;
  // One open output stream per DAG being recorded.
  @VisibleForTesting
  public Map<TezDAGID, FSDataOutputStream> outputStreamMap = new
      HashMap<TezDAGID, FSDataOutputStream>();
  // I/O buffer size for recovery files (from configuration, see serviceInit).
  private int bufferSize;
  @VisibleForTesting
  public FSDataOutputStream summaryStream;
  // Flush bookkeeping: events written since last flush, time of last flush,
  // and the configured thresholds that trigger a flush.
  private int unflushedEventsCount = 0;
  private long lastFlushTime = -1;
  private int maxUnflushedEvents;
  private int flushInterval;
  // Set when a fatal error occurs while writing recovery data.
  private AtomicBoolean recoveryFatalErrorOccurred = new AtomicBoolean(false);
  // Whether remaining queued events are processed on stop (test override above).
  private boolean drainEventsFlag;
  // Indicates all the remaining events on stop have been drained
  // and processed.
  private volatile boolean drained = true;
  // Monitor used to wait until the queue is drained.
  private Object waitForDrained = new Object();
public RecoveryService(AppContext appContext) {
super(RecoveryService.class.getName());
this.appContext = appContext;
}
@Override
public void serviceInit(Configuration conf) throws Exception {
LOG.info("Initializing RecoveryService");
recoveryPath = appContext.getCurrentRecoveryDir();
recoveryDirFS = FileSystem.get(recoveryPath.toUri(), conf);
bufferSize = conf.getInt(TezConfiguration.DAG_RECOVERY_FILE_IO_BUFFER_SIZE,
TezConfiguration.DAG_RECOVERY_FILE_IO_BUFFER_SIZE_DEFAULT);
flushInterval = conf.getInt(TezConfiguration.DAG_RECOVERY_FLUSH_INTERVAL_SECS,
TezConfiguration.DAG_RECOVERY_FLUSH_INTERVAL_SECS_DEFAULT);
maxUnflushedEvents = conf.getInt(TezConfiguration.DAG_RECOVERY_MAX_UNFLUSHED_EVENTS,
TezConfiguration.DAG_RECOVERY_MAX_UNFLUSHED_EVENTS_DEFAULT);
drainEventsFlag = conf.getBoolean(
TEZ_TEST_RECOVERY_DRAIN_EVENTS_WHEN_STOPPED,
TEZ_TEST_RECOVERY_DRAIN_EVENTS_WHEN_STOPPED_DEFAULT);
}
@Override
public void serviceStart() {
LOG.info("Starting RecoveryService");
lastFlushTime = appContext.getClock().getTime();
eventHandlingThread = new Thread(new Runnable() {
@Override
public void run() {
DAGHistoryEvent event;
while (!stopped.get() && !Thread.currentThread().isInterrupted()) {
drained = eventQueue.isEmpty();
// adding this service state check is to avoid the overhead of acquiring the lock
// and calling notify every time in the normal run of the loop.
if (getServiceState() == STATE.STOPPED) {
synchronized (waitForDrained) {
if (drained) {
waitForDrained.notify();
}
}
}
if (recoveryFatalErrorOccurred.get()) {
LOG.error("Recovery failure occurred. Stopping recovery thread."
+ " Current eventQueueSize=" + eventQueue.size());
eventQueue.clear();
return;
}
// Log the size of the event-queue every so often.
if (eventCounter != 0 && eventCounter % 1000 == 0) {
LOG.info("Event queue stats"
+ ", eventsProcessedSinceLastUpdate=" + eventsProcessed
+ ", eventQueueSize=" + eventQueue.size());
eventCounter = 0;
eventsProcessed = 0;
} else {
++eventCounter;
}
try {
event = eventQueue.take();
} catch (InterruptedException e) {
LOG.info("EventQueue take interrupted. Returning");
return;
}
synchronized (lock) {
try {
++eventsProcessed;
handleRecoveryEvent(event);
} catch (Exception e) {
// For now, ignore any such errors as these are non-critical
// All summary event related errors are handled as critical
LOG.warn("Error handling recovery event", e);
}
}
}
}
}, "RecoveryEventHandlingThread");
eventHandlingThread.start();
started.set(true);
}
@Override
public void serviceStop() throws Exception {
LOG.info("Stopping RecoveryService");
if (drainEventsFlag) {
LOG.info("Handle the remaining events in queue, queue size=" + eventQueue.size());
synchronized (waitForDrained) {
while (!drained && eventHandlingThread.isAlive()) {
waitForDrained.wait(1000);
LOG.info("Waiting for RecoveryEventHandlingThread to drain.");
}
}
}
stopped.set(true);
if (eventHandlingThread != null) {
eventHandlingThread.interrupt();
try {
eventHandlingThread.join();
} catch (InterruptedException ie) {
LOG.warn("Interrupted Exception while stopping", ie);
}
}
synchronized (lock) {
if (summaryStream != null) {
try {
LOG.info("Closing Summary Stream");
summaryStream.hflush();
summaryStream.close();
} catch (IOException ioe) {
LOG.warn("Error when closing summary stream", ioe);
}
}
for (Entry<TezDAGID, FSDataOutputStream> entry : outputStreamMap.entrySet()) {
try {
LOG.info("Closing Output Stream for DAG " + entry.getKey());
entry.getValue().hflush();
entry.getValue().close();
} catch (IOException ioe) {
LOG.warn("Error when closing output stream", ioe);
}
}
}
}
// ---------- IMPORTANT ----------------------
// ALWAYS USE THIS METHOD TO ADD EVENT TO QUEUE
private void addToEventQueue(DAGHistoryEvent event) {
drained = false;
eventQueue.add(event);
}
public void handle(DAGHistoryEvent event) throws IOException {
if (stopped.get()) {
LOG.warn("Igoring event as service stopped, eventType"
+ event.getHistoryEvent().getEventType());
return;
}
HistoryEventType eventType = event.getHistoryEvent().getEventType();
if (recoveryFatalErrorOccurred.get()) {
return;
}
if (!started.get()) {
LOG.warn("Adding event of type " + eventType
+ " to queue as service not started");
addToEventQueue(event);
return;
}
TezDAGID dagId = event.getDagID();
if (eventType.equals(HistoryEventType.DAG_SUBMITTED)) {
DAGSubmittedEvent dagSubmittedEvent =
(DAGSubmittedEvent) event.getHistoryEvent();
String dagName = dagSubmittedEvent.getDAGName();
if (dagName != null
&& dagName.startsWith(
TezConstants.TEZ_PREWARM_DAG_NAME_PREFIX)) {
// Skip recording pre-warm DAG events
skippedDAGs.add(dagId);
return;
}
}
if (dagId == null || skippedDAGs.contains(dagId)) {
if (LOG.isDebugEnabled()) {
LOG.debug("Skipping event for DAG"
+ ", eventType=" + eventType
+ ", dagId=" + (dagId == null ? "null" : dagId.toString())
+ ", isSkippedDAG=" + (dagId == null ? "null"
: skippedDAGs.contains(dagId)));
}
return;
}
if (event.getHistoryEvent() instanceof SummaryEvent) {
synchronized (lock) {
if (stopped.get()) {
LOG.warn("Igoring event as service stopped, eventType"
+ event.getHistoryEvent().getEventType());
return;
}
try {
SummaryEvent summaryEvent = (SummaryEvent) event.getHistoryEvent();
handleSummaryEvent(dagId, eventType, summaryEvent);
summaryStream.hflush();
if (summaryEvent.writeToRecoveryImmediately()) {
handleRecoveryEvent(event);
doFlush(outputStreamMap.get(event.getDagID()),
appContext.getClock().getTime());
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Queueing Non-immediate Summary/Recovery event of type"
+ eventType.name());
}
addToEventQueue(event);
}
if (eventType.equals(HistoryEventType.DAG_FINISHED)) {
LOG.info("DAG completed"
+ ", dagId=" + event.getDagID()
+ ", queueSize=" + eventQueue.size());
completedDAGs.add(dagId);
if (outputStreamMap.containsKey(dagId)) {
try {
outputStreamMap.get(dagId).close();
outputStreamMap.remove(dagId);
} catch (IOException ioe) {
LOG.warn("Error when trying to flush/close recovery file for"
+ " dag, dagId=" + event.getDagID());
}
}
}
} catch (IOException ioe) {
LOG.error("Error handling summary event"
+ ", eventType=" + event.getHistoryEvent().getEventType(), ioe);
Path fatalErrorDir = new Path(recoveryPath, RECOVERY_FATAL_OCCURRED_DIR);
try {
LOG.error("Adding a flag to ensure next AM attempt does not start up"
+ ", flagFile=" + fatalErrorDir.toString());
recoveryFatalErrorOccurred.set(true);
recoveryDirFS.mkdirs(fatalErrorDir);
if (recoveryDirFS.exists(fatalErrorDir)) {
LOG.error("Recovery failure occurred. Skipping all events");
} else {
// throw error if fatal error flag could not be set
throw ioe;
}
} catch (IOException e) {
LOG.error("Failed to create fatal error flag dir "
+ fatalErrorDir.toString(), e);
throw ioe;
}
if (eventType.equals(HistoryEventType.DAG_SUBMITTED)) {
// Throw error to tell client that dag submission failed
throw ioe;
}
}
}
} else {
// All other events just get queued
if (LOG.isDebugEnabled()) {
LOG.debug("Queueing Non-Summary Recovery event of type " + eventType.name());
}
addToEventQueue(event);
}
}
private void handleSummaryEvent(TezDAGID dagID,
HistoryEventType eventType,
SummaryEvent summaryEvent) throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("Handling summary event"
+ ", dagID=" + dagID
+ ", eventType=" + eventType);
}
if (summaryStream == null) {
Path summaryPath = TezCommonUtils.getSummaryRecoveryPath(recoveryPath);
if (LOG.isDebugEnabled()) {
LOG.debug("AppId :" + appContext.getApplicationID() + " summaryPath " + summaryPath);
}
if (!recoveryDirFS.exists(summaryPath)) {
summaryStream = recoveryDirFS.create(summaryPath, false,
bufferSize);
} else {
summaryStream = recoveryDirFS.append(summaryPath, bufferSize);
}
}
if (LOG.isDebugEnabled()) {
LOG.debug("Writing recovery event to summary stream"
+ ", dagId=" + dagID
+ ", eventType=" + eventType);
}
summaryEvent.toSummaryProtoStream(summaryStream);
}
@VisibleForTesting
protected void handleRecoveryEvent(DAGHistoryEvent event) throws IOException {
HistoryEventType eventType = event.getHistoryEvent().getEventType();
if (LOG.isDebugEnabled()) {
LOG.debug("Handling recovery event of type "
+ event.getHistoryEvent().getEventType());
}
TezDAGID dagID = event.getDagID();
if (completedDAGs.contains(dagID)) {
// no need to recover completed DAGs
if (LOG.isDebugEnabled()) {
LOG.debug("Skipping Recovery Event as DAG completed"
+ ", dagId=" + dagID
+ ", completed=" + completedDAGs.contains(dagID)
+ ", skipped=" + skippedDAGs.contains(dagID)
+ ", eventType=" + eventType);
}
return;
}
if (!outputStreamMap.containsKey(dagID)) {
Path dagFilePath = TezCommonUtils.getDAGRecoveryPath(recoveryPath, dagID.toString());
FSDataOutputStream outputStream;
if (recoveryDirFS.exists(dagFilePath)) {
if (LOG.isDebugEnabled()) {
LOG.debug("Opening DAG recovery file in append mode"
+ ", filePath=" + dagFilePath);
}
outputStream = recoveryDirFS.append(dagFilePath, bufferSize);
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Opening DAG recovery file in create mode"
+ ", filePath=" + dagFilePath);
}
outputStream = recoveryDirFS.create(dagFilePath, false, bufferSize);
}
outputStreamMap.put(dagID, outputStream);
}
FSDataOutputStream outputStream = outputStreamMap.get(dagID);
if (LOG.isDebugEnabled()) {
LOG.debug("Writing recovery event to output stream"
+ ", dagId=" + dagID
+ ", eventType=" + eventType);
}
++unflushedEventsCount;
outputStream.writeInt(event.getHistoryEvent().getEventType().ordinal());
event.getHistoryEvent().toProtoStream(outputStream);
if (!EnumSet.of(HistoryEventType.DAG_SUBMITTED,
HistoryEventType.DAG_FINISHED).contains(eventType)) {
maybeFlush(outputStream);
}
}
private void maybeFlush(FSDataOutputStream outputStream) throws IOException {
long currentTime = appContext.getClock().getTime();
boolean doFlush = false;
if (maxUnflushedEvents >=0
&& unflushedEventsCount >= maxUnflushedEvents) {
if (LOG.isDebugEnabled()) {
LOG.debug("Max unflushed events count reached. Flushing recovery data"
+ ", unflushedEventsCount=" + unflushedEventsCount
+ ", maxUnflushedEvents=" + maxUnflushedEvents);
}
doFlush = true;
} else if (flushInterval >= 0
&& ((currentTime - lastFlushTime) >= (flushInterval*1000))) {
LOG.debug("Flush interval time period elapsed. Flushing recovery data"
+ ", lastTimeSinceFLush=" + lastFlushTime
+ ", timeSinceLastFlush=" + (currentTime - lastFlushTime));
doFlush = true;
}
if (!doFlush) {
return;
}
doFlush(outputStream, currentTime);
}
private void doFlush(FSDataOutputStream outputStream,
long currentTime) throws IOException {
outputStream.hflush();
if (LOG.isDebugEnabled()) {
LOG.debug("Flushing output stream"
+ ", lastTimeSinceFLush=" + lastFlushTime
+ ", timeSinceLastFlush=" + (currentTime - lastFlushTime)
+ ", unflushedEventsCount=" + unflushedEventsCount
+ ", maxUnflushedEvents=" + maxUnflushedEvents);
}
unflushedEventsCount = 0;
lastFlushTime = currentTime;
}
public boolean hasRecoveryFailed() {
return recoveryFatalErrorOccurred.get();
}
public void await() {
while (!this.drained) {
Thread.yield();
}
}
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.produccion.entidades;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import javax.persistence.Basic;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OrderBy;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.validation.constraints.Size;
/**
*
* @author mpluas
*/
/**
 * JPA entity mapped to the {@code menu} table. Models a self-referential menu
 * tree: each row may point at a parent menu ({@code menu_padre}) and eagerly
 * loads its children ordered by {@code orden}, plus its role associations.
 */
@Entity
@Table(name = "menu")
public class Menu implements Serializable{

    private static final long serialVersionUID = 1L;

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Basic(optional = false)
    @Column(name = "idmenu")
    private Integer idmenu;

    @Size(max = 2)
    @Column(name = "estado")
    private String estado;

    @Size(max = 250)
    @Column(name = "accion")
    private String accion;

    @Size(max = 250)
    @Column(name = "descripcion")
    private String descripcion;

    @Size(max = 250)
    @Column(name = "opcion")
    private String opcion;

    @Column(name = "orden")
    private Integer orden;

    @Size(max = 250)
    @Column(name = "ruta_imagen")
    private String rutaImagen;

    @Size(max = 2)
    @Column(name = "tipo")
    private String tipo;

    @Column(name = "fecha_registro")
    @Temporal(TemporalType.DATE)
    private Date fecha_registro;

    // Children of this menu entry, eagerly fetched and sorted for display.
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "menuPadre", fetch = FetchType.EAGER)
    @OrderBy("orden ASC")
    private List<Menu> menuPadreeList;

    // Parent menu entry; lazily fetched to avoid loading the whole tree upward.
    @ManyToOne(optional = false, fetch = FetchType.LAZY)
    @JoinColumn(name = "menu_padre", referencedColumnName = "idmenu")
    private Menu menuPadre;

    @OneToMany(mappedBy="menu" , fetch = FetchType.EAGER)
    private List<MenuRol> menuRoles;

    /** Initializes the collection fields so they are never null. */
    private void initCollections() {
        this.menuPadreeList = new ArrayList<>();
        this.menuRoles = new ArrayList<>();
    }

    public Menu() {
        initCollections();
    }

    public Menu(Integer idmenu) {
        this.idmenu = idmenu;
        initCollections();
    }

    public Menu(String estado, String accion, String descripcion, String opcion, Integer orden, String rutaImagen, String tipo, Date fecha_registro) {
        this.estado = estado;
        this.accion = accion;
        this.descripcion = descripcion;
        this.opcion = opcion;
        this.orden = orden;
        this.rutaImagen = rutaImagen;
        this.tipo = tipo;
        this.fecha_registro=fecha_registro;
        initCollections();
    }

    public Integer getIdmenu() {
        return idmenu;
    }

    public void setIdmenu(Integer idmenu) {
        this.idmenu = idmenu;
    }

    public String getEstado() {
        return estado;
    }

    public void setEstado(String estado) {
        this.estado = estado;
    }

    public String getAccion() {
        return accion;
    }

    public void setAccion(String accion) {
        this.accion = accion;
    }

    public String getDescripcion() {
        return descripcion;
    }

    public void setDescripcion(String descripcion) {
        this.descripcion = descripcion;
    }

    public String getOpcion() {
        return opcion;
    }

    public void setOpcion(String opcion) {
        this.opcion = opcion;
    }

    public Integer getOrden() {
        return orden;
    }

    public void setOrden(Integer orden) {
        this.orden = orden;
    }

    public String getRutaImagen() {
        return rutaImagen;
    }

    public void setRutaImagen(String rutaImagen) {
        this.rutaImagen = rutaImagen;
    }

    public String getTipo() {
        return tipo;
    }

    public void setTipo(String tipo) {
        this.tipo = tipo;
    }

    public Date getFecha_registro() {
        return fecha_registro;
    }

    public void setFecha_registro(Date fecha_registro) {
        this.fecha_registro = fecha_registro;
    }

    public Menu getMenuPadre() {
        return menuPadre;
    }

    public void setMenuPadre(Menu menuPadre) {
        this.menuPadre = menuPadre;
    }

    public List<Menu> getMenuPadreeList() {
        return menuPadreeList;
    }

    public void setMenuPadreeList(List<Menu> menuPadreeList) {
        this.menuPadreeList = menuPadreeList;
    }

    public List<MenuRol> getMenuRoles() {
        return menuRoles;
    }

    public void setMenuRoles(List<MenuRol> menuRoles) {
        this.menuRoles = menuRoles;
    }
}
| |
/**
* MenuFragment.java
*
* Copyright (C) 2017, Nariaki Iwatani(Anno Lab Inc.) and Shunichi Yamamoto(Yamamoto Works Ltd.)
*
* This software is released under the MIT License.
* http://opensource.org/licenses/mit-license.php
**/
package com.jins_meme.bridge;
import android.content.Context;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.Fragment;
import android.support.v4.content.ContextCompat;
import android.support.v7.widget.CardView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.Switch;
import android.widget.TextView;
import com.jins_meme.bridge.BridgeUIView.Adapter;
import com.jins_meme.bridge.BridgeUIView.CardHolder;
import com.jins_meme.bridge.BridgeUIView.IResultListener;
/**
* A simple {@link Fragment} subclass.
* Activities that contain this fragment must implement the
* {@link RootMenuFragment.OnFragmentInteractionListener} interface
* to handle interaction events.
*/
/**
 * Root menu screen: shows the top-level feature cards (camera, Spotify, Remo,
 * Hue, VDJ) and forwards a selected card id to the hosting activity via
 * {@link OnFragmentInteractionListener#openNextMenu(int)}.
 */
public class RootMenuFragment extends MenuFragmentBase {

  private OnFragmentInteractionListener mListener;

  public RootMenuFragment() {
    // Required empty public constructor
  }

  @Override
  public void onAttach(Context context) {
    super.onAttach(context);
    Log.d("DEBUG", "ROOT:: onAttach");
    // The hosting activity must receive card selections.
    if (context instanceof OnFragmentInteractionListener) {
      mListener = (OnFragmentInteractionListener) context;
    } else {
      throw new RuntimeException(context.toString()
          + " must implement OnFragmentInteractionListener");
    }
  }

  @Override
  public void onDetach() {
    super.onDetach();
    Log.d("DEBUG", "ROOT:: onDetach");
    mListener = null;
  }

  @Override
  protected Adapter createAdapter() {
    return new CardAdapter(getContext(), this);
  }

  @Override
  protected SharedPreferences getPreferences() {
    return getContext().getSharedPreferences("root_menu", Context.MODE_PRIVATE);
  }

  @Override
  public void onViewCreated(View view, Bundle savedInstanceState) {
    super.onViewCreated(view, savedInstanceState);
    Log.d("DEBUG", "ROOT:: onViewCreated");
    // Reset chrome for the root screen: default background/title, no camera
    // mode, no settings button.
    ((MainActivity) getActivity()).changeMainBackgroud(R.color.no4);
    ((MainActivity) getActivity()).updateActionBar(getString(R.string.actionbar_title), false);
    ((MainActivity) getActivity()).setIsCameraMenuFragment(false);
    ((MainActivity) getActivity()).changeSettingButton(false);
  }

  /** Contract for the hosting activity to open the submenu for a card. */
  public interface OnFragmentInteractionListener {

    void openNextMenu(int card_id);
  }

  @Override
  public void onEndCardSelected(int id) {
    super.onEndCardSelected(id);
    mListener.openNextMenu(id);
  }

  /** Supplies and binds the fixed set of root cards. */
  private class CardAdapter extends BridgeUIView.Adapter<BridgeUIView.CardHolder> {

    Context mContext;
    LayoutInflater mInflater;

    CardAdapter(Context context, IResultListener listener) {
      super(listener);
      mContext = context;
      mInflater = LayoutInflater.from(context);
    }

    @Override
    public CardHolder onCreateCardHolder(ViewGroup parent, int card_type) {
      return new MyCardHolder(mInflater.inflate(R.layout.card_default, parent, false));
    }

    @Override
    public void onBindCardHolder(CardHolder cardHolder, int id) {
      MyCardHolder holder = (MyCardHolder) cardHolder;
      // Card background follows the dark-mode preference.
      int backgroundRes = ((MainActivity) getActivity()).getSavedValue("ENABLE_DARK", true)
          ? R.color.no5 : R.color.no4;
      holder.mCardView.setCardBackgroundColor(
          ContextCompat.getColor(getContext(), backgroundRes));
      // Each known card differs only in accent colour and icon; resolve those
      // here, then apply the shared title/subtitle wiring once.
      int titleColorRes;
      int iconRes;
      switch (id) {
        case R.string.camera:
          titleColorRes = R.color.no3;
          iconRes = R.drawable.card_camera;
          break;
        case R.string.spotify:
          titleColorRes = R.color.spotify;
          iconRes = R.drawable.card_spotify;
          break;
        case R.string.remo:
          titleColorRes = R.color.remo;
          iconRes = R.drawable.card_remo;
          break;
        case R.string.hue:
          titleColorRes = R.color.hue;
          iconRes = R.drawable.card_hue;
          break;
        case R.string.vdj:
          titleColorRes = R.color.eyevdj;
          iconRes = R.drawable.card_eyevdj;
          break;
        default:
          // Unknown id: only the background is styled (matches the original
          // switch, which had no default case).
          return;
      }
      holder.mTitle.setTextColor(ContextCompat.getColor(getContext(), titleColorRes));
      holder.mImageView.setImageResource(iconRes);
      holder.mTitle.setText(getResources().getString(id));
      holder.mSubtitle.setText("");
    }

    @Override
    public CardFunction getCardFunction(int id) {
      // Every root card is a terminal selection (no nested menu here).
      return CardFunction.END;
    }

    @Override
    public int getCardId(int parent_id, int position) {
      // Root level only; the activity maps positions 0..4 to enabled card ids.
      if (parent_id == NO_ID && position >= 0 && position <= 4) {
        return ((MainActivity) getActivity()).getRootCardId(position);
      }
      return NO_ID;
    }

    @Override
    public int getChildCardCount(int parent_id) {
      switch (parent_id) {
        case NO_ID:
          return ((MainActivity) getActivity()).getEnabledCardNum();
      }
      return 0;
    }

    @Override
    public int getCardType(int id) {
      return getResources().getInteger(R.integer.CARD_TYPE_ONLY_TITLE);
    }

    /** Holds the card view plus its icon/title/subtitle/value widgets. */
    private class MyCardHolder extends CardHolder {

      CardView mCardView;
      ImageView mImageView;
      TextView mTitle;
      TextView mSubtitle;
      TextView mValue;
      Handler mHandler = new Handler();

      MyCardHolder(View itemView) {
        super(itemView);
        mCardView = (CardView) itemView.findViewById(R.id.card_view);
        mImageView = (ImageView) itemView.findViewById(R.id.funcicon);
        mTitle = (TextView) itemView.findViewById(R.id.card_text);
        mSubtitle = (TextView) itemView.findViewById(R.id.card_subtext);
        mValue = (TextView) itemView.findViewById(R.id.card_select);
      }

      /** Shows the given text in the value slot. */
      void setText(String text) {
        mValue.setText(text);
      }

      /** Shows the given text, then clears it after {@code msec} milliseconds. */
      void setText(String text, int msec) {
        mValue.setText(text);
        mHandler.postDelayed(new Runnable() {
          @Override
          public void run() {
            mValue.setText(" ");
          }
        }, msec);
      }

      /** Clears the value slot. */
      void clearText() {
        mValue.setText(" ");
      }
    }
  }
}
| |
package org.apache.hadoop.fs.s3;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.Set;
import java.util.TreeSet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3.INode.FileType;
import org.jets3t.service.S3Service;
import org.jets3t.service.S3ServiceException;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.model.S3Bucket;
import org.jets3t.service.model.S3Object;
import org.jets3t.service.security.AWSCredentials;
/**
 * {@link FileSystemStore} backed by S3 via the JetS3t client. INodes and
 * blocks are stored as individual S3 objects; paths are URL-encoded into keys
 * and blocks are keyed as {@code block_<id>}.
 */
class Jets3tFileSystemStore implements FileSystemStore {

  private static final String PATH_DELIMITER = urlEncode(Path.SEPARATOR);
  private static final String BLOCK_PREFIX = "block_";

  private S3Service s3Service;
  private S3Bucket bucket;

  /**
   * Resolves AWS credentials (URI userinfo first, then configuration),
   * connects to S3 and ensures the bucket named by the URI host exists.
   *
   * @throws IllegalArgumentException if credentials cannot be resolved
   */
  public void initialize(URI uri, Configuration conf) throws IOException {
    try {
      String accessKey = null;
      String secretAccessKey = null;
      String userInfo = uri.getUserInfo();
      if (userInfo != null) {
        // userinfo is "accessKey[:secretKey]"
        int index = userInfo.indexOf(':');
        if (index != -1) {
          accessKey = userInfo.substring(0, index);
          secretAccessKey = userInfo.substring(index + 1);
        } else {
          accessKey = userInfo;
        }
      }
      if (accessKey == null) {
        accessKey = conf.get("fs.s3.awsAccessKeyId");
      }
      if (secretAccessKey == null) {
        secretAccessKey = conf.get("fs.s3.awsSecretAccessKey");
      }
      if (accessKey == null && secretAccessKey == null) {
        throw new IllegalArgumentException("AWS " +
                                           "Access Key ID and Secret Access Key " +
                                           "must be specified as the username " +
                                           "or password (respectively) of a s3 URL, " +
                                           "or by setting the " +
                                           "fs.s3.awsAccessKeyId or " +
                                           "fs.s3.awsSecretAccessKey properties (respectively).");
      } else if (accessKey == null) {
        throw new IllegalArgumentException("AWS " +
                                           "Access Key ID must be specified " +
                                           "as the username of a s3 URL, or by setting the " +
                                           "fs.s3.awsAccessKeyId property.");
      } else if (secretAccessKey == null) {
        throw new IllegalArgumentException("AWS " +
                                           "Secret Access Key must be specified " +
                                           "as the password of a s3 URL, or by setting the " +
                                           "fs.s3.awsSecretAccessKey property.");
      }
      AWSCredentials awsCredentials = new AWSCredentials(accessKey, secretAccessKey);
      this.s3Service = new RestS3Service(awsCredentials);
    } catch (S3ServiceException e) {
      throw asIOException(e);
    }
    bucket = new S3Bucket(uri.getHost());
    createBucket(bucket.getName());
  }

  /**
   * Unwraps a JetS3t exception: rethrows a wrapped IOException directly,
   * otherwise wraps the service exception in an {@link S3Exception}.
   */
  private IOException asIOException(S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      return (IOException) e.getCause();
    }
    return new S3Exception(e);
  }

  private void createBucket(String bucketName) throws IOException {
    try {
      s3Service.createBucket(bucketName);
    } catch (S3ServiceException e) {
      throw asIOException(e);
    }
  }

  private void delete(String key) throws IOException {
    try {
      s3Service.deleteObject(bucket, key);
    } catch (S3ServiceException e) {
      throw asIOException(e);
    }
  }

  public void deleteINode(Path path) throws IOException {
    delete(pathToKey(path));
  }

  public void deleteBlock(Block block) throws IOException {
    delete(blockToKey(block));
  }

  /** Existence check: fetches the object and discards the stream. */
  public boolean inodeExists(Path path) throws IOException {
    InputStream in = get(pathToKey(path));
    if (in == null) {
      return false;
    }
    in.close();
    return true;
  }

  /** Existence check: fetches the object and discards the stream. */
  public boolean blockExists(long blockId) throws IOException {
    InputStream in = get(blockToKey(blockId));
    if (in == null) {
      return false;
    }
    in.close();
    return true;
  }

  /**
   * Fetches the object's data stream, or null if the key does not exist.
   */
  private InputStream get(String key) throws IOException {
    try {
      S3Object object = s3Service.getObject(bucket, key);
      return object.getDataInputStream();
    } catch (S3ServiceException e) {
      // getErrorCode() can be null (e.g. client-side failures), so use a
      // null-safe comparison to avoid an NPE masking the real error.
      if ("NoSuchKey".equals(e.getErrorCode())) {
        return null;
      }
      throw asIOException(e);
    }
  }

  /**
   * Fetches the object's data stream starting at the given byte offset,
   * or null if the key does not exist.
   */
  private InputStream get(String key, long byteRangeStart) throws IOException {
    try {
      S3Object object = s3Service.getObject(bucket, key, null, null, null,
                                            null, byteRangeStart, null);
      return object.getDataInputStream();
    } catch (S3ServiceException e) {
      // Null-safe: getErrorCode() may be null for client-side failures.
      if ("NoSuchKey".equals(e.getErrorCode())) {
        return null;
      }
      throw asIOException(e);
    }
  }

  public INode getINode(Path path) throws IOException {
    return INode.deserialize(get(pathToKey(path)));
  }

  public InputStream getBlockStream(Block block, long byteRangeStart)
      throws IOException {
    return get(blockToKey(block), byteRangeStart);
  }

  /**
   * Lists the immediate children of the given path (the path itself is
   * excluded from the result).
   */
  public Set<Path> listSubPaths(Path path) throws IOException {
    try {
      String prefix = pathToKey(path);
      if (!prefix.endsWith(PATH_DELIMITER)) {
        prefix += PATH_DELIMITER;
      }
      S3Object[] objects = s3Service.listObjects(bucket, prefix, PATH_DELIMITER, 0);
      Set<Path> prefixes = new TreeSet<Path>();
      for (int i = 0; i < objects.length; i++) {
        prefixes.add(keyToPath(objects[i].getKey()));
      }
      prefixes.remove(path);
      return prefixes;
    } catch (S3ServiceException e) {
      throw asIOException(e);
    }
  }

  /** Uploads {@code length} bytes from {@code in} under the given key. */
  private void put(String key, InputStream in, long length) throws IOException {
    try {
      S3Object object = new S3Object(key);
      object.setDataInputStream(in);
      object.setContentType("binary/octet-stream");
      object.setContentLength(length);
      s3Service.putObject(bucket, object);
    } catch (S3ServiceException e) {
      throw asIOException(e);
    }
  }

  public void storeINode(Path path, INode inode) throws IOException {
    put(pathToKey(path), inode.serialize(), inode.getSerializedLength());
  }

  public void storeBlock(Block block, InputStream in) throws IOException {
    put(blockToKey(block), in, block.getLength());
  }

  /** Converts an absolute path to its URL-encoded S3 key. */
  private String pathToKey(Path path) {
    if (!path.isAbsolute()) {
      throw new IllegalArgumentException("Path must be absolute: " + path);
    }
    return urlEncode(path.toString());
  }

  private Path keyToPath(String key) {
    return new Path(urlDecode(key));
  }

  private static String urlEncode(String s) {
    try {
      return URLEncoder.encode(s, "UTF-8");
    } catch (UnsupportedEncodingException e) {
      // Should never happen since every implementation of the Java Platform
      // is required to support UTF-8.
      // See http://java.sun.com/j2se/1.5.0/docs/api/java/nio/charset/Charset.html
      throw new IllegalStateException(e);
    }
  }

  private static String urlDecode(String s) {
    try {
      return URLDecoder.decode(s, "UTF-8");
    } catch (UnsupportedEncodingException e) {
      // Should never happen since every implementation of the Java Platform
      // is required to support UTF-8.
      // See http://java.sun.com/j2se/1.5.0/docs/api/java/nio/charset/Charset.html
      throw new IllegalStateException(e);
    }
  }

  private String blockToKey(long blockId) {
    return BLOCK_PREFIX + blockId;
  }

  private String blockToKey(Block block) {
    return blockToKey(block.getId());
  }

  /** Deletes every object in the bucket. */
  public void purge() throws IOException {
    try {
      S3Object[] objects = s3Service.listObjects(bucket);
      for (int i = 0; i < objects.length; i++) {
        s3Service.deleteObject(bucket, objects[i].getKey());
      }
    } catch (S3ServiceException e) {
      throw asIOException(e);
    }
  }

  /** Dumps the filesystem's inodes and block lists to stdout (debug aid). */
  public void dump() throws IOException {
    StringBuilder sb = new StringBuilder("S3 Filesystem, ");
    sb.append(bucket.getName()).append("\n");
    try {
      S3Object[] objects = s3Service.listObjects(bucket, PATH_DELIMITER, null);
      for (int i = 0; i < objects.length; i++) {
        Path path = keyToPath(objects[i].getKey());
        sb.append(path).append("\n");
        INode m = getINode(path);
        sb.append("\t").append(m.getFileType()).append("\n");
        if (m.getFileType() == FileType.DIRECTORY) {
          continue;
        }
        for (int j = 0; j < m.getBlocks().length; j++) {
          sb.append("\t").append(m.getBlocks()[j]).append("\n");
        }
      }
    } catch (S3ServiceException e) {
      throw asIOException(e);
    }
    System.out.println(sb);
  }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/videointelligence/v1p1beta1/video_intelligence.proto
package com.google.cloud.videointelligence.v1p1beta1;
/**
*
*
* <pre>
* Detected entity from video analysis.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1p1beta1.Entity}
*/
public final class Entity extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1p1beta1.Entity)
EntityOrBuilder {
private static final long serialVersionUID = 0L;
// Use Entity.newBuilder() to construct.
private Entity(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Entity() {
entityId_ = "";
description_ = "";
languageCode_ = "";
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private Entity(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
entityId_ = s;
break;
}
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
description_ = s;
break;
}
case 26:
{
java.lang.String s = input.readStringRequireUtf8();
languageCode_ = s;
break;
}
default:
{
if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.videointelligence.v1p1beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p1beta1_Entity_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.videointelligence.v1p1beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p1beta1_Entity_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.videointelligence.v1p1beta1.Entity.class,
com.google.cloud.videointelligence.v1p1beta1.Entity.Builder.class);
}
public static final int ENTITY_ID_FIELD_NUMBER = 1;
private volatile java.lang.Object entityId_;
/**
*
*
* <pre>
* Opaque entity ID. Some IDs may be available in
* [Google Knowledge Graph Search
* API](https://developers.google.com/knowledge-graph/).
* </pre>
*
* <code>string entity_id = 1;</code>
*/
public java.lang.String getEntityId() {
java.lang.Object ref = entityId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
entityId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Opaque entity ID. Some IDs may be available in
* [Google Knowledge Graph Search
* API](https://developers.google.com/knowledge-graph/).
* </pre>
*
* <code>string entity_id = 1;</code>
*/
public com.google.protobuf.ByteString getEntityIdBytes() {
java.lang.Object ref = entityId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
entityId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int DESCRIPTION_FIELD_NUMBER = 2;
private volatile java.lang.Object description_;
/**
*
*
* <pre>
* Textual description, e.g. `Fixed-gear bicycle`.
* </pre>
*
* <code>string description = 2;</code>
*/
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
description_ = s;
return s;
}
}
/**
*
*
* <pre>
* Textual description, e.g. `Fixed-gear bicycle`.
* </pre>
*
* <code>string description = 2;</code>
*/
public com.google.protobuf.ByteString getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
description_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int LANGUAGE_CODE_FIELD_NUMBER = 3;
private volatile java.lang.Object languageCode_;
/**
*
*
* <pre>
* Language code for `description` in BCP-47 format.
* </pre>
*
* <code>string language_code = 3;</code>
*/
public java.lang.String getLanguageCode() {
java.lang.Object ref = languageCode_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
languageCode_ = s;
return s;
}
}
/**
*
*
* <pre>
* Language code for `description` in BCP-47 format.
* </pre>
*
* <code>string language_code = 3;</code>
*/
public com.google.protobuf.ByteString getLanguageCodeBytes() {
java.lang.Object ref = languageCode_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
languageCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!getEntityIdBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, entityId_);
}
if (!getDescriptionBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, description_);
}
if (!getLanguageCodeBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, languageCode_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getEntityIdBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, entityId_);
}
if (!getDescriptionBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, description_);
}
if (!getLanguageCodeBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, languageCode_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.videointelligence.v1p1beta1.Entity)) {
return super.equals(obj);
}
com.google.cloud.videointelligence.v1p1beta1.Entity other =
(com.google.cloud.videointelligence.v1p1beta1.Entity) obj;
boolean result = true;
result = result && getEntityId().equals(other.getEntityId());
result = result && getDescription().equals(other.getDescription());
result = result && getLanguageCode().equals(other.getLanguageCode());
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + ENTITY_ID_FIELD_NUMBER;
hash = (53 * hash) + getEntityId().hashCode();
hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
hash = (53 * hash) + getDescription().hashCode();
hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER;
hash = (53 * hash) + getLanguageCode().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.videointelligence.v1p1beta1.Entity parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1p1beta1.Entity parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p1beta1.Entity parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1p1beta1.Entity parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p1beta1.Entity parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1p1beta1.Entity parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p1beta1.Entity parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1p1beta1.Entity parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p1beta1.Entity parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1p1beta1.Entity parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p1beta1.Entity parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1p1beta1.Entity parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.videointelligence.v1p1beta1.Entity prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Detected entity from video analysis.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1p1beta1.Entity}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1p1beta1.Entity)
com.google.cloud.videointelligence.v1p1beta1.EntityOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.videointelligence.v1p1beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p1beta1_Entity_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.videointelligence.v1p1beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p1beta1_Entity_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.videointelligence.v1p1beta1.Entity.class,
com.google.cloud.videointelligence.v1p1beta1.Entity.Builder.class);
}
// Construct using com.google.cloud.videointelligence.v1p1beta1.Entity.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
entityId_ = "";
description_ = "";
languageCode_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.videointelligence.v1p1beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p1beta1_Entity_descriptor;
}
@java.lang.Override
public com.google.cloud.videointelligence.v1p1beta1.Entity getDefaultInstanceForType() {
return com.google.cloud.videointelligence.v1p1beta1.Entity.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.videointelligence.v1p1beta1.Entity build() {
com.google.cloud.videointelligence.v1p1beta1.Entity result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.videointelligence.v1p1beta1.Entity buildPartial() {
com.google.cloud.videointelligence.v1p1beta1.Entity result =
new com.google.cloud.videointelligence.v1p1beta1.Entity(this);
result.entityId_ = entityId_;
result.description_ = description_;
result.languageCode_ = languageCode_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return (Builder) super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.videointelligence.v1p1beta1.Entity) {
return mergeFrom((com.google.cloud.videointelligence.v1p1beta1.Entity) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.videointelligence.v1p1beta1.Entity other) {
if (other == com.google.cloud.videointelligence.v1p1beta1.Entity.getDefaultInstance())
return this;
if (!other.getEntityId().isEmpty()) {
entityId_ = other.entityId_;
onChanged();
}
if (!other.getDescription().isEmpty()) {
description_ = other.description_;
onChanged();
}
if (!other.getLanguageCode().isEmpty()) {
languageCode_ = other.languageCode_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.videointelligence.v1p1beta1.Entity parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.videointelligence.v1p1beta1.Entity) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object entityId_ = "";
/**
*
*
* <pre>
* Opaque entity ID. Some IDs may be available in
* [Google Knowledge Graph Search
* API](https://developers.google.com/knowledge-graph/).
* </pre>
*
* <code>string entity_id = 1;</code>
*/
public java.lang.String getEntityId() {
java.lang.Object ref = entityId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
entityId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Opaque entity ID. Some IDs may be available in
* [Google Knowledge Graph Search
* API](https://developers.google.com/knowledge-graph/).
* </pre>
*
* <code>string entity_id = 1;</code>
*/
public com.google.protobuf.ByteString getEntityIdBytes() {
java.lang.Object ref = entityId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
entityId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Opaque entity ID. Some IDs may be available in
* [Google Knowledge Graph Search
* API](https://developers.google.com/knowledge-graph/).
* </pre>
*
* <code>string entity_id = 1;</code>
*/
public Builder setEntityId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
entityId_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Opaque entity ID. Some IDs may be available in
* [Google Knowledge Graph Search
* API](https://developers.google.com/knowledge-graph/).
* </pre>
*
* <code>string entity_id = 1;</code>
*/
public Builder clearEntityId() {
entityId_ = getDefaultInstance().getEntityId();
onChanged();
return this;
}
/**
*
*
* <pre>
* Opaque entity ID. Some IDs may be available in
* [Google Knowledge Graph Search
* API](https://developers.google.com/knowledge-graph/).
* </pre>
*
* <code>string entity_id = 1;</code>
*/
public Builder setEntityIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
entityId_ = value;
onChanged();
return this;
}
private java.lang.Object description_ = "";
/**
*
*
* <pre>
* Textual description, e.g. `Fixed-gear bicycle`.
* </pre>
*
* <code>string description = 2;</code>
*/
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
description_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Textual description, e.g. `Fixed-gear bicycle`.
* </pre>
*
* <code>string description = 2;</code>
*/
public com.google.protobuf.ByteString getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
description_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Textual description, e.g. `Fixed-gear bicycle`.
* </pre>
*
* <code>string description = 2;</code>
*/
public Builder setDescription(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
description_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Textual description, e.g. `Fixed-gear bicycle`.
* </pre>
*
* <code>string description = 2;</code>
*/
public Builder clearDescription() {
description_ = getDefaultInstance().getDescription();
onChanged();
return this;
}
/**
*
*
* <pre>
* Textual description, e.g. `Fixed-gear bicycle`.
* </pre>
*
* <code>string description = 2;</code>
*/
public Builder setDescriptionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
description_ = value;
onChanged();
return this;
}
private java.lang.Object languageCode_ = "";
/**
*
*
* <pre>
* Language code for `description` in BCP-47 format.
* </pre>
*
* <code>string language_code = 3;</code>
*/
public java.lang.String getLanguageCode() {
java.lang.Object ref = languageCode_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
languageCode_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Language code for `description` in BCP-47 format.
* </pre>
*
* <code>string language_code = 3;</code>
*/
public com.google.protobuf.ByteString getLanguageCodeBytes() {
java.lang.Object ref = languageCode_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
languageCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Language code for `description` in BCP-47 format.
* </pre>
*
* <code>string language_code = 3;</code>
*/
public Builder setLanguageCode(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
languageCode_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Language code for `description` in BCP-47 format.
* </pre>
*
* <code>string language_code = 3;</code>
*/
public Builder clearLanguageCode() {
languageCode_ = getDefaultInstance().getLanguageCode();
onChanged();
return this;
}
/**
*
*
* <pre>
* Language code for `description` in BCP-47 format.
* </pre>
*
* <code>string language_code = 3;</code>
*/
public Builder setLanguageCodeBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
languageCode_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFieldsProto3(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1p1beta1.Entity)
}
// @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p1beta1.Entity)
private static final com.google.cloud.videointelligence.v1p1beta1.Entity DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1p1beta1.Entity();
}
public static com.google.cloud.videointelligence.v1p1beta1.Entity getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<Entity> PARSER =
new com.google.protobuf.AbstractParser<Entity>() {
@java.lang.Override
public Entity parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Entity(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<Entity> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Entity> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.videointelligence.v1p1beta1.Entity getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.impl.bpmn.helper;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.flowable.bpmn.model.BoundaryEvent;
import org.flowable.bpmn.model.BpmnModel;
import org.flowable.bpmn.model.CallActivity;
import org.flowable.bpmn.model.ErrorEventDefinition;
import org.flowable.bpmn.model.Event;
import org.flowable.bpmn.model.EventDefinition;
import org.flowable.bpmn.model.EventSubProcess;
import org.flowable.bpmn.model.FlowElement;
import org.flowable.bpmn.model.FlowElementsContainer;
import org.flowable.bpmn.model.MapExceptionEntry;
import org.flowable.bpmn.model.Process;
import org.flowable.bpmn.model.StartEvent;
import org.flowable.engine.common.api.FlowableException;
import org.flowable.engine.common.api.delegate.event.FlowableEngineEventType;
import org.flowable.engine.common.impl.util.CollectionUtil;
import org.flowable.engine.common.impl.util.ReflectUtil;
import org.flowable.engine.delegate.BpmnError;
import org.flowable.engine.delegate.DelegateExecution;
import org.flowable.engine.delegate.event.impl.FlowableEventBuilder;
import org.flowable.engine.impl.persistence.entity.ExecutionEntity;
import org.flowable.engine.impl.persistence.entity.ExecutionEntityManager;
import org.flowable.engine.impl.util.CommandContextUtil;
import org.flowable.engine.impl.util.ProcessDefinitionUtil;
/**
* This class is responsible for finding and executing error handlers for BPMN Errors.
*
* Possible error handlers include Error Intermediate Events and Error Event Sub-Processes.
*
* @author Tijs Rademakers
* @author Saeid Mirzaei
*/
public class ErrorPropagation {
    /**
     * Propagates a thrown {@link BpmnError} by delegating to the error-code based overload.
     *
     * @param error the BPMN error whose error code is propagated
     * @param execution the execution in which the error occurred
     */
    public static void propagateError(BpmnError error, DelegateExecution execution) {
        propagateError(error.getErrorCode(), execution);
    }
public static void propagateError(String errorCode, DelegateExecution execution) {
Map<String, List<Event>> eventMap = new HashMap<>();
Set<String> rootProcessDefinitionIds = new HashSet<>();
if (!execution.getProcessInstanceId().equals(execution.getRootProcessInstanceId())) {
ExecutionEntity parentExecution = (ExecutionEntity) execution;
while (parentExecution.getParentId() != null || parentExecution.getSuperExecutionId() != null) {
if (parentExecution.getParentId() != null) {
parentExecution = parentExecution.getParent();
} else {
parentExecution = parentExecution.getSuperExecution();
rootProcessDefinitionIds.add(parentExecution.getProcessDefinitionId());
}
}
}
if (rootProcessDefinitionIds.size() > 0) {
for (String processDefinitionId : rootProcessDefinitionIds) {
eventMap.putAll(findCatchingEventsForProcess(processDefinitionId, errorCode));
}
}
eventMap.putAll(findCatchingEventsForProcess(execution.getProcessDefinitionId(), errorCode));
if (eventMap.size() > 0) {
executeCatch(eventMap, execution, errorCode);
}
if (eventMap.size() == 0) {
throw new BpmnError(errorCode, "No catching boundary event found for error with errorCode '" + errorCode + "', neither in same process nor in parent process");
}
}
    /**
     * Locates the innermost execution scope with a catching event (error boundary event or
     * error event sub-process start event) registered in {@code eventMap}, deleting any
     * called (sub) process instances crossed on the way up, and then executes the handler.
     *
     * @param eventMap map of "activityId#processDefinitionId" keys to candidate catching
     *            events for the propagated error
     * @param delegateExecution the execution where the error originated
     * @param errorId the id/code of the propagated error
     * @throws FlowableException if no matching parent execution can be found
     */
    protected static void executeCatch(Map<String, List<Event>> eventMap, DelegateExecution delegateExecution, String errorId) {
        Set<String> toDeleteProcessInstanceIds = new HashSet<>();
        Event matchingEvent = null;
        ExecutionEntity currentExecution = (ExecutionEntity) delegateExecution;
        ExecutionEntity parentExecution = null;
        if (eventMap.containsKey(currentExecution.getActivityId() + "#" + currentExecution.getProcessDefinitionId())) {
            // Fast path: the current activity itself carries a catching event.
            // Check for multi instance
            if (currentExecution.getParentId() != null && currentExecution.getParent().isMultiInstanceRoot()) {
                parentExecution = currentExecution.getParent();
            } else {
                parentExecution = currentExecution;
            }
            matchingEvent = getCatchEventFromList(eventMap.get(currentExecution.getActivityId() +
                "#" + currentExecution.getProcessDefinitionId()), parentExecution);
        } else {
            parentExecution = currentExecution.getParent();
            // Traverse parents until one is found that is a scope and matches the activity the boundary event is defined on
            while (matchingEvent == null && parentExecution != null) {
                FlowElementsContainer currentContainer = null;
                if (parentExecution.getCurrentFlowElement() instanceof FlowElementsContainer) {
                    currentContainer = (FlowElementsContainer) parentExecution.getCurrentFlowElement();
                } else if (parentExecution.getId().equals(parentExecution.getProcessInstanceId())) {
                    // At process-instance level the container is the process itself.
                    currentContainer = ProcessDefinitionUtil.getProcess(parentExecution.getProcessDefinitionId());
                }
                if (currentContainer != null) {
                    // First look for an event sub-process start event defined in this container.
                    // NOTE(review): keySet()+get() does a second lookup per key; entrySet()
                    // iteration would avoid it. Left unchanged here.
                    for (String refId : eventMap.keySet()) {
                        List<Event> events = eventMap.get(refId);
                        if (CollectionUtil.isNotEmpty(events) && events.get(0) instanceof StartEvent) {
                            // Map keys have the form "activityId#processDefinitionId".
                            String refActivityId = refId.substring(0, refId.indexOf('#'));
                            String refProcessDefinitionId = refId.substring(refId.indexOf('#') + 1);
                            if (parentExecution.getProcessDefinitionId().equals(refProcessDefinitionId) &&
                                currentContainer.getFlowElement(refActivityId) != null) {
                                matchingEvent = getCatchEventFromList(events, parentExecution);
                                String errorCode = getErrorCodeFromErrorEventDefinition(matchingEvent);
                                // Only settle on this match when it declares an explicit error
                                // code; otherwise keep scanning the remaining candidates.
                                if (StringUtils.isNotEmpty(errorCode)) {
                                    break;
                                }
                            }
                        }
                    }
                }
                if (matchingEvent == null) {
                    if (eventMap.containsKey(parentExecution.getActivityId() + "#" + parentExecution.getProcessDefinitionId())) {
                        // Check for multi instance
                        if (parentExecution.getParentId() != null && parentExecution.getParent().isMultiInstanceRoot()) {
                            parentExecution = parentExecution.getParent();
                        }
                        matchingEvent = getCatchEventFromList(eventMap.get(parentExecution.getActivityId() +
                            "#" + parentExecution.getProcessDefinitionId()), parentExecution);
                    } else if (StringUtils.isNotEmpty(parentExecution.getParentId())) {
                        // Keep climbing within the same process instance.
                        parentExecution = parentExecution.getParent();
                    } else {
                        // NOTE(review): '== false' is non-idiomatic; '!' is conventional.
                        if (parentExecution.getProcessInstanceId().equals(parentExecution.getRootProcessInstanceId()) == false) {
                            // Crossing a call-activity boundary: remember this child process
                            // instance for deletion and continue in the calling process.
                            toDeleteProcessInstanceIds.add(parentExecution.getProcessInstanceId());
                            parentExecution = parentExecution.getSuperExecution();
                        } else {
                            // Reached the root process instance without a match; stop.
                            parentExecution = null;
                        }
                    }
                }
            }
        }
        if (matchingEvent != null && parentExecution != null) {
            // Delete every called process instance crossed while climbing, firing a
            // PROCESS_COMPLETED_WITH_ERROR_END_EVENT for each, then run the handler.
            for (String processInstanceId : toDeleteProcessInstanceIds) {
                ExecutionEntityManager executionEntityManager = CommandContextUtil.getExecutionEntityManager();
                ExecutionEntity processInstanceEntity = executionEntityManager.findById(processInstanceId);
                // Delete
                executionEntityManager.deleteProcessInstanceExecutionEntity(processInstanceEntity.getId(),
                    currentExecution.getCurrentFlowElement() != null ? currentExecution.getCurrentFlowElement().getId() : null,
                    "ERROR_EVENT " + errorId, false, false, false);
                // Event
                if (CommandContextUtil.getProcessEngineConfiguration() != null && CommandContextUtil.getProcessEngineConfiguration().getEventDispatcher().isEnabled()) {
                    CommandContextUtil.getProcessEngineConfiguration().getEventDispatcher()
                        .dispatchEvent(FlowableEventBuilder.createEntityEvent(FlowableEngineEventType.PROCESS_COMPLETED_WITH_ERROR_END_EVENT, processInstanceEntity));
                }
            }
            executeEventHandler(matchingEvent, parentExecution, currentExecution, errorId);
        } else {
            throw new FlowableException("No matching parent execution for error code " + errorId + " found");
        }
    }
    /**
     * Routes a caught error to its handler: either an error start event of an event
     * sub process, or a boundary error event attached to an activity.
     *
     * Fires an ACTIVITY_ERROR_RECEIVED engine event first (when dispatching is enabled),
     * then plans the appropriate agenda operation for the matching event.
     *
     * @param event            the catching event (StartEvent of an event sub process, or a boundary event)
     * @param parentExecution  the execution scope that owns the catching event
     * @param currentExecution the execution in which the error was thrown
     * @param errorId          the raw error id/reference thrown
     */
    protected static void executeEventHandler(Event event, ExecutionEntity parentExecution, ExecutionEntity currentExecution, String errorId) {
        if (CommandContextUtil.getProcessEngineConfiguration() != null && CommandContextUtil.getProcessEngineConfiguration().getEventDispatcher().isEnabled()) {
            BpmnModel bpmnModel = ProcessDefinitionUtil.getBpmnModel(parentExecution.getProcessDefinitionId());
            if (bpmnModel != null) {
                // Resolve the error reference to its code; fall back to the raw id when unmapped.
                String errorCode = bpmnModel.getErrors().get(errorId);
                if (errorCode == null) {
                    errorCode = errorId;
                }
                CommandContextUtil.getProcessEngineConfiguration().getEventDispatcher().dispatchEvent(
                        FlowableEventBuilder.createErrorEvent(FlowableEngineEventType.ACTIVITY_ERROR_RECEIVED, event.getId(), errorId, errorCode, parentExecution.getId(),
                                parentExecution.getProcessInstanceId(), parentExecution.getProcessDefinitionId()));
            }
        }
        if (event instanceof StartEvent) {
            // Error start event of an event sub process: tear down the scope that threw,
            // then start the event sub process as a fresh child of the parent scope.
            ExecutionEntityManager executionEntityManager = CommandContextUtil.getExecutionEntityManager();
            if (parentExecution.isProcessInstanceType()) {
                executionEntityManager.deleteChildExecutions(parentExecution, null, true);
            } else if (!currentExecution.getParentId().equals(parentExecution.getId())) {
                // The throwing execution lives in a deeper scope; destroy that scope via the agenda.
                CommandContextUtil.getAgenda().planDestroyScopeOperation(currentExecution);
            } else {
                executionEntityManager.deleteExecutionAndRelatedData(currentExecution, null);
            }
            ExecutionEntity eventSubProcessExecution = executionEntityManager.createChildExecution(parentExecution);
            eventSubProcessExecution.setCurrentFlowElement(event.getSubProcess() != null ? event.getSubProcess() : event);
            CommandContextUtil.getAgenda().planContinueProcessOperation(eventSubProcessExecution);
        } else {
            // Boundary error event: find the child execution positioned at the boundary
            // event's activity and trigger it. NOTE(review): if no child matches,
            // boundaryExecution stays null and is passed on as-is — verify downstream handling.
            ExecutionEntity boundaryExecution = null;
            List<? extends ExecutionEntity> childExecutions = parentExecution.getExecutions();
            for (ExecutionEntity childExecution : childExecutions) {
                if (childExecution != null
                        && childExecution.getActivityId() != null
                        && childExecution.getActivityId().equals(event.getId())) {
                    boundaryExecution = childExecution;
                }
            }
            CommandContextUtil.getAgenda().planTriggerExecutionOperation(boundaryExecution);
        }
    }
protected static Map<String, List<Event>> findCatchingEventsForProcess(String processDefinitionId, String errorCode) {
Map<String, List<Event>> eventMap = new HashMap<>();
Process process = ProcessDefinitionUtil.getProcess(processDefinitionId);
BpmnModel bpmnModel = ProcessDefinitionUtil.getBpmnModel(processDefinitionId);
String compareErrorCode = retrieveErrorCode(bpmnModel, errorCode);
List<EventSubProcess> subProcesses = process.findFlowElementsOfType(EventSubProcess.class, true);
for (EventSubProcess eventSubProcess : subProcesses) {
for (FlowElement flowElement : eventSubProcess.getFlowElements()) {
if (flowElement instanceof StartEvent) {
StartEvent startEvent = (StartEvent) flowElement;
if (CollectionUtil.isNotEmpty(startEvent.getEventDefinitions()) && startEvent.getEventDefinitions().get(0) instanceof ErrorEventDefinition) {
ErrorEventDefinition errorEventDef = (ErrorEventDefinition) startEvent.getEventDefinitions().get(0);
String eventErrorCode = retrieveErrorCode(bpmnModel, errorEventDef.getErrorCode());
if (eventErrorCode == null || compareErrorCode == null || eventErrorCode.equals(compareErrorCode)) {
List<Event> startEvents = new ArrayList<>();
startEvents.add(startEvent);
eventMap.put(eventSubProcess.getId() + "#" + processDefinitionId, startEvents);
}
}
}
}
}
List<BoundaryEvent> boundaryEvents = process.findFlowElementsOfType(BoundaryEvent.class, true);
for (BoundaryEvent boundaryEvent : boundaryEvents) {
if (boundaryEvent.getAttachedToRefId() != null && CollectionUtil.isNotEmpty(boundaryEvent.getEventDefinitions()) && boundaryEvent.getEventDefinitions().get(0) instanceof ErrorEventDefinition) {
ErrorEventDefinition errorEventDef = (ErrorEventDefinition) boundaryEvent.getEventDefinitions().get(0);
String eventErrorCode = retrieveErrorCode(bpmnModel, errorEventDef.getErrorCode());
if (eventErrorCode == null || compareErrorCode == null || eventErrorCode.equals(compareErrorCode)) {
List<Event> elementBoundaryEvents = null;
if (!eventMap.containsKey(boundaryEvent.getAttachedToRefId() + "#" + processDefinitionId)) {
elementBoundaryEvents = new ArrayList<>();
eventMap.put(boundaryEvent.getAttachedToRefId() + "#" + processDefinitionId, elementBoundaryEvents);
} else {
elementBoundaryEvents = eventMap.get(boundaryEvent.getAttachedToRefId() + "#" + processDefinitionId);
}
elementBoundaryEvents.add(boundaryEvent);
}
}
}
return eventMap;
}
public static boolean mapException(Exception e, ExecutionEntity execution, List<MapExceptionEntry> exceptionMap) {
String errorCode = findMatchingExceptionMapping(e, exceptionMap);
if (errorCode != null) {
propagateError(errorCode, execution);
return true;
} else {
ExecutionEntity callActivityExecution = null;
ExecutionEntity parentExecution = execution.getParent();
while (parentExecution != null && callActivityExecution == null) {
if (parentExecution.getId().equals(parentExecution.getProcessInstanceId())) {
if (parentExecution.getSuperExecution() != null) {
callActivityExecution = parentExecution.getSuperExecution();
} else {
parentExecution = null;
}
} else {
parentExecution = parentExecution.getParent();
}
}
if (callActivityExecution != null) {
CallActivity callActivity = (CallActivity) callActivityExecution.getCurrentFlowElement();
if (CollectionUtil.isNotEmpty(callActivity.getMapExceptions())) {
errorCode = findMatchingExceptionMapping(e, callActivity.getMapExceptions());
if (errorCode != null) {
propagateError(errorCode, callActivityExecution);
return true;
}
}
}
return false;
}
}
public static String findMatchingExceptionMapping(Exception e, List<MapExceptionEntry> exceptionMap) {
String defaultExceptionMapping = null;
for (MapExceptionEntry me : exceptionMap) {
String exceptionClass = me.getClassName();
String errorCode = me.getErrorCode();
// save the first mapping with no exception class as default map
if (StringUtils.isNotEmpty(errorCode) && StringUtils.isEmpty(exceptionClass) && defaultExceptionMapping == null) {
defaultExceptionMapping = errorCode;
continue;
}
// ignore if error code or class are not defined
if (StringUtils.isEmpty(errorCode) || StringUtils.isEmpty(exceptionClass)) {
continue;
}
if (e.getClass().getName().equals(exceptionClass)) {
return errorCode;
}
if (me.isAndChildren()) {
Class<?> exceptionClassClass = ReflectUtil.loadClass(exceptionClass);
if (exceptionClassClass.isAssignableFrom(e.getClass())) {
return errorCode;
}
}
}
return defaultExceptionMapping;
}
protected static Event getCatchEventFromList(List<Event> events, ExecutionEntity parentExecution) {
Event selectedEvent = null;
String selectedEventErrorCode = null;
BpmnModel bpmnModel = ProcessDefinitionUtil.getBpmnModel(parentExecution.getProcessDefinitionId());
for (Event event : events) {
String errorCode = getErrorCodeFromErrorEventDefinition(event);
if (bpmnModel != null) {
errorCode = retrieveErrorCode(bpmnModel, errorCode);
}
if (selectedEvent == null || (StringUtils.isEmpty(selectedEventErrorCode) && StringUtils.isNotEmpty(errorCode))) {
selectedEvent = event;
selectedEventErrorCode = errorCode;
}
}
return selectedEvent;
}
protected static String getErrorCodeFromErrorEventDefinition(Event event) {
for (EventDefinition eventDefinition : event.getEventDefinitions()) {
if (eventDefinition instanceof ErrorEventDefinition) {
return ((ErrorEventDefinition) eventDefinition).getErrorCode();
}
}
return null;
}
protected static String retrieveErrorCode(BpmnModel bpmnModel, String errorCode) {
String finalErrorCode = null;
if (errorCode != null && bpmnModel.containsErrorRef(errorCode)) {
finalErrorCode = bpmnModel.getErrors().get(errorCode);
} else {
finalErrorCode = errorCode;
}
return finalErrorCode;
}
}
| |
package com.github.mikephil.charting.renderer;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.drawable.Drawable;
import com.github.mikephil.charting.animation.ChartAnimator;
import com.github.mikephil.charting.charts.LineChart;
import com.github.mikephil.charting.data.DataSet;
import com.github.mikephil.charting.data.Entry;
import com.github.mikephil.charting.data.LineData;
import com.github.mikephil.charting.highlight.Highlight;
import com.github.mikephil.charting.interfaces.dataprovider.LineDataProvider;
import com.github.mikephil.charting.interfaces.datasets.ILineDataSet;
import com.github.mikephil.charting.utils.Transformer;
import com.github.mikephil.charting.utils.ViewPortHandler;
import java.lang.ref.WeakReference;
import java.util.List;
public class LineChartRenderer extends LineRadarRenderer {
protected LineDataProvider mChart;
/**
* paint for the inner circle of the value indicators
*/
protected Paint mCirclePaintInner;
/**
* Bitmap object used for drawing the paths (otherwise they are too long if
* rendered directly on the canvas)
*/
protected WeakReference<Bitmap> mDrawBitmap;
/**
* on this canvas, the paths are rendered, it is initialized with the
* pathBitmap
*/
protected Canvas mBitmapCanvas;
/**
* the bitmap configuration to be used
*/
protected Bitmap.Config mBitmapConfig = Bitmap.Config.ARGB_8888;
protected Path cubicPath = new Path();
protected Path cubicFillPath = new Path();
    /**
     * Creates a renderer for line charts.
     *
     * @param chart           provider of the line data and transformers
     * @param animator        drives the phaseX/phaseY animation values
     * @param viewPortHandler current viewport/translation state
     */
    public LineChartRenderer(LineDataProvider chart, ChartAnimator animator,
            ViewPortHandler viewPortHandler) {
        super(animator, viewPortHandler);
        mChart = chart;
        // Inner-circle paint defaults to filled white; the actual hole color is
        // taken from each data set in drawCircles().
        mCirclePaintInner = new Paint(Paint.ANTI_ALIAS_FLAG);
        mCirclePaintInner.setStyle(Paint.Style.FILL);
        mCirclePaintInner.setColor(Color.WHITE);
    }
    // No precomputed buffers are used by this renderer; intentionally empty.
    @Override
    public void initBuffers() {
    }
@Override
public void drawData(Canvas c) {
int width = (int) mViewPortHandler.getChartWidth();
int height = (int) mViewPortHandler.getChartHeight();
if (mDrawBitmap == null
|| (mDrawBitmap.get().getWidth() != width)
|| (mDrawBitmap.get().getHeight() != height)) {
if (width > 0 && height > 0) {
mDrawBitmap = new WeakReference<Bitmap>(Bitmap.createBitmap(width, height, mBitmapConfig));
mBitmapCanvas = new Canvas(mDrawBitmap.get());
} else
return;
}
mDrawBitmap.get().eraseColor(Color.TRANSPARENT);
LineData lineData = mChart.getLineData();
for (ILineDataSet set : lineData.getDataSets()) {
if (set.isVisible() && set.getEntryCount() > 0)
drawDataSet(c, set);
}
c.drawBitmap(mDrawBitmap.get(), 0, 0, mRenderPaint);
}
protected void drawDataSet(Canvas c, ILineDataSet dataSet) {
if (dataSet.getEntryCount() < 1)
return;
mRenderPaint.setStrokeWidth(dataSet.getLineWidth());
mRenderPaint.setPathEffect(dataSet.getDashPathEffect());
// if drawing cubic lines is enabled
if (dataSet.isDrawCubicEnabled()) {
drawCubic(c, dataSet);
// draw normal (straight) lines
} else {
drawLinear(c, dataSet);
}
mRenderPaint.setPathEffect(null);
}
    /**
     * Draws a cubic (bezier-smoothed) line for the given data set.
     *
     * The spline is built in value space over the visible x-index window (clamped by
     * mMinX/mMaxX and the x animation phase) and only transformed to pixels once at
     * the end. The control-point offsets are derived from neighbor entries scaled by
     * the data set's cubic intensity.
     *
     * @param c
     * @param dataSet
     */
    protected void drawCubic(Canvas c, ILineDataSet dataSet) {
        Transformer trans = mChart.getTransformer(dataSet.getAxisDependency());
        int entryCount = dataSet.getEntryCount();
        // Clamp the rendered range to the visible x-index window.
        Entry entryFrom = dataSet.getEntryForXIndex((mMinX < 0) ? 0 : mMinX, DataSet.Rounding.DOWN);
        Entry entryTo = dataSet.getEntryForXIndex(mMaxX, DataSet.Rounding.UP);
        // When from == to, widen the range by one so at least one segment is drawn.
        int diff = (entryFrom == entryTo) ? 1 : 0;
        int minx = Math.max(dataSet.getEntryIndex(entryFrom) - diff, 0);
        int maxx = Math.min(Math.max(minx + 2, dataSet.getEntryIndex(entryTo) + 1), entryCount);
        float phaseX = mAnimator.getPhaseX();
        float phaseY = mAnimator.getPhaseY();
        float intensity = dataSet.getCubicIntensity();
        cubicPath.reset();
        // Number of entries to draw this frame, scaled by the x animation phase.
        int size = (int) Math.ceil((maxx - minx) * phaseX + minx);
        if (size - minx >= 2) {
            float prevDx = 0f;
            float prevDy = 0f;
            float curDx = 0f;
            float curDy = 0f;
            Entry prevPrev = dataSet.getEntryForIndex(minx);
            Entry prev = prevPrev;
            Entry cur = prev;
            Entry next = dataSet.getEntryForIndex(minx + 1);
            // let the spline start
            cubicPath.moveTo(cur.getXIndex(), cur.getVal() * phaseY);
            // Each segment's control points are offset along the direction of the
            // surrounding neighbors, scaled by the cubic intensity.
            for (int j = minx + 1, count = Math.min(size, entryCount - 1); j < count; j++) {
                prevPrev = dataSet.getEntryForIndex(j == 1 ? 0 : j - 2);
                prev = dataSet.getEntryForIndex(j - 1);
                cur = dataSet.getEntryForIndex(j);
                next = dataSet.getEntryForIndex(j + 1);
                prevDx = (cur.getXIndex() - prevPrev.getXIndex()) * intensity;
                prevDy = (cur.getVal() - prevPrev.getVal()) * intensity;
                curDx = (next.getXIndex() - prev.getXIndex()) * intensity;
                curDy = (next.getVal() - prev.getVal()) * intensity;
                cubicPath.cubicTo(prev.getXIndex() + prevDx, (prev.getVal() + prevDy) * phaseY,
                        cur.getXIndex() - curDx,
                        (cur.getVal() - curDy) * phaseY, cur.getXIndex(), cur.getVal() * phaseY);
            }
            // Close out the final segment to the last entry, using it as its own "next".
            if (size > entryCount - 1) {
                prevPrev = dataSet.getEntryForIndex((entryCount >= 3) ? entryCount - 3
                        : entryCount - 2);
                prev = dataSet.getEntryForIndex(entryCount - 2);
                cur = dataSet.getEntryForIndex(entryCount - 1);
                next = cur;
                prevDx = (cur.getXIndex() - prevPrev.getXIndex()) * intensity;
                prevDy = (cur.getVal() - prevPrev.getVal()) * intensity;
                curDx = (next.getXIndex() - prev.getXIndex()) * intensity;
                curDy = (next.getVal() - prev.getVal()) * intensity;
                // the last cubic
                cubicPath.cubicTo(prev.getXIndex() + prevDx, (prev.getVal() + prevDy) * phaseY,
                        cur.getXIndex() - curDx,
                        (cur.getVal() - curDy) * phaseY, cur.getXIndex(), cur.getVal() * phaseY);
            }
        }
        // if filled is enabled, close the path
        if (dataSet.isDrawFilledEnabled()) {
            cubicFillPath.reset();
            cubicFillPath.addPath(cubicPath);
            // create a new path, this is bad for performance
            drawCubicFill(mBitmapCanvas, dataSet, cubicFillPath, trans,
                    minx, size);
        }
        mRenderPaint.setColor(dataSet.getColor());
        mRenderPaint.setStyle(Paint.Style.STROKE);
        trans.pathValueToPixel(cubicPath);
        mBitmapCanvas.drawPath(cubicPath, mRenderPaint);
        mRenderPaint.setPathEffect(null);
    }
    /**
     * Closes the cubic spline down to the fill line and paints the enclosed area,
     * using the data set's fill drawable when present, otherwise its fill color/alpha.
     *
     * @param c      target canvas (the offscreen bitmap canvas)
     * @param spline the spline path to close and fill (mutated in place)
     * @param from   first entry index of the drawn range
     * @param to     one past the last entry index of the drawn range
     */
    protected void drawCubicFill(Canvas c, ILineDataSet dataSet, Path spline, Transformer trans,
            int from, int to) {
        // A fill needs at least two points.
        if (to - from <= 1)
            return;
        float fillMin = dataSet.getFillFormatter()
                .getFillLinePosition(dataSet, mChart);
        // Take the from/to xIndex from the entries themselves,
        // so missing entries won't screw up the filling.
        // What we need to draw is line from points of the xIndexes - not arbitrary entry indexes!
        final Entry toEntry = dataSet.getEntryForIndex(to - 1);
        final Entry fromEntry = dataSet.getEntryForIndex(from);
        final float xTo = toEntry == null ? 0 : toEntry.getXIndex();
        final float xFrom = fromEntry == null ? 0 : fromEntry.getXIndex();
        // Drop to the fill line at both ends and close the polygon.
        spline.lineTo(xTo, fillMin);
        spline.lineTo(xFrom, fillMin);
        spline.close();
        trans.pathValueToPixel(spline);
        final Drawable drawable = dataSet.getFillDrawable();
        if (drawable != null) {
            drawFilledPath(c, spline, drawable);
        } else {
            drawFilledPath(c, spline, dataSet.getFillColor(), dataSet.getFillAlpha());
        }
    }
private float[] mLineBuffer = new float[4];
/**
* Draws a normal line.
*
* @param c
* @param dataSet
*/
protected void drawLinear(Canvas c, ILineDataSet dataSet) {
int entryCount = dataSet.getEntryCount();
final boolean isDrawSteppedEnabled = dataSet.isDrawSteppedEnabled();
final int pointsPerEntryPair = isDrawSteppedEnabled ? 4 : 2;
Transformer trans = mChart.getTransformer(dataSet.getAxisDependency());
float phaseX = mAnimator.getPhaseX();
float phaseY = mAnimator.getPhaseY();
mRenderPaint.setStyle(Paint.Style.STROKE);
Canvas canvas = null;
// if the data-set is dashed, draw on bitmap-canvas
if (dataSet.isDashedLineEnabled()) {
canvas = mBitmapCanvas;
} else {
canvas = c;
}
Entry entryFrom = dataSet.getEntryForXIndex((mMinX < 0) ? 0 : mMinX, DataSet.Rounding.DOWN);
Entry entryTo = dataSet.getEntryForXIndex(mMaxX, DataSet.Rounding.UP);
int diff = (entryFrom == entryTo) ? 1 : 0;
int minx = Math.max(dataSet.getEntryIndex(entryFrom) - diff, 0);
int maxx = Math.min(Math.max(minx + 2, dataSet.getEntryIndex(entryTo) + 1), entryCount);
final int count = (int)(Math.ceil((float)(maxx - minx) * phaseX + (float)(minx)));
// more than 1 color
if (dataSet.getColors().size() > 1) {
if (mLineBuffer.length != pointsPerEntryPair * 2)
mLineBuffer = new float[pointsPerEntryPair * 2];
for (int j = minx;
j < count;
j++) {
if (count > 1 && j == count - 1) {
// Last point, we have already drawn a line to this point
break;
}
Entry e = dataSet.getEntryForIndex(j);
if (e == null) continue;
mLineBuffer[0] = e.getXIndex();
mLineBuffer[1] = e.getVal() * phaseY;
if (j + 1 < count) {
e = dataSet.getEntryForIndex(j + 1);
if (e == null) break;
if (isDrawSteppedEnabled) {
mLineBuffer[2] = e.getXIndex();
mLineBuffer[3] = mLineBuffer[1];
mLineBuffer[4] = mLineBuffer[2];
mLineBuffer[5] = mLineBuffer[3];
mLineBuffer[6] = e.getXIndex();
mLineBuffer[7] = e.getVal() * phaseY;
} else {
mLineBuffer[2] = e.getXIndex();
mLineBuffer[3] = e.getVal() * phaseY;
}
} else {
mLineBuffer[2] = mLineBuffer[0];
mLineBuffer[3] = mLineBuffer[1];
}
trans.pointValuesToPixel(mLineBuffer);
if (!mViewPortHandler.isInBoundsRight(mLineBuffer[0]))
break;
// make sure the lines don't do shitty things outside
// bounds
if (!mViewPortHandler.isInBoundsLeft(mLineBuffer[2])
|| (!mViewPortHandler.isInBoundsTop(mLineBuffer[1]) && !mViewPortHandler
.isInBoundsBottom(mLineBuffer[3]))
|| (!mViewPortHandler.isInBoundsTop(mLineBuffer[1]) && !mViewPortHandler
.isInBoundsBottom(mLineBuffer[3])))
continue;
// get the color that is set for this line-segment
mRenderPaint.setColor(dataSet.getColor(j));
canvas.drawLines(mLineBuffer, 0, pointsPerEntryPair * 2, mRenderPaint);
}
} else { // only one color per dataset
if (mLineBuffer.length != Math.max((entryCount - 1) * pointsPerEntryPair, pointsPerEntryPair) * 2)
mLineBuffer = new float[Math.max((entryCount - 1) * pointsPerEntryPair, pointsPerEntryPair) * 2];
Entry e1, e2;
e1 = dataSet.getEntryForIndex(minx);
if (e1 != null) {
for (int x = count > 1 ? minx + 1 : minx, j = 0; x < count; x++) {
e1 = dataSet.getEntryForIndex(x == 0 ? 0 : (x - 1));
e2 = dataSet.getEntryForIndex(x);
if (e1 == null || e2 == null) continue;
mLineBuffer[j++] = e1.getXIndex();
mLineBuffer[j++] = e1.getVal() * phaseY;
if (isDrawSteppedEnabled) {
mLineBuffer[j++] = e2.getXIndex();
mLineBuffer[j++] = e1.getVal() * phaseY;
mLineBuffer[j++] = e2.getXIndex();
mLineBuffer[j++] = e1.getVal() * phaseY;
}
mLineBuffer[j++] = e2.getXIndex();
mLineBuffer[j++] = e2.getVal() * phaseY;
}
trans.pointValuesToPixel(mLineBuffer);
final int size = Math.max((count - minx - 1) * pointsPerEntryPair, pointsPerEntryPair) * 2;
mRenderPaint.setColor(dataSet.getColor());
canvas.drawLines(mLineBuffer, 0, size,
mRenderPaint);
}
}
mRenderPaint.setPathEffect(null);
// if drawing filled is enabled
if (dataSet.isDrawFilledEnabled() && entryCount > 0) {
drawLinearFill(c, dataSet, minx, maxx, trans);
}
}
protected void drawLinearFill(Canvas c, ILineDataSet dataSet, int minx,
int maxx,
Transformer trans) {
Path filled = generateFilledPath(
dataSet, minx, maxx);
trans.pathValueToPixel(filled);
final Drawable drawable = dataSet.getFillDrawable();
if (drawable != null) {
drawFilledPath(c, filled, drawable);
} else {
drawFilledPath(c, filled, dataSet.getFillColor(), dataSet.getFillAlpha());
}
}
    /**
     * Generates the path that is used for filled drawing.
     *
     * Builds a closed polygon in value space: down from the fill line to the first
     * entry, along the (possibly stepped) line to the last drawn entry, then back
     * down to the fill line and closed.
     *
     * @param dataSet
     * @param from first entry index
     * @param to   one past the last entry index
     * @return the closed fill path, still in value space (caller transforms to pixels)
     */
    private Path generateFilledPath(ILineDataSet dataSet, int from, int to) {
        float fillMin = dataSet.getFillFormatter().getFillLinePosition(dataSet, mChart);
        float phaseX = mAnimator.getPhaseX();
        float phaseY = mAnimator.getPhaseY();
        final boolean isDrawSteppedEnabled = dataSet.isDrawSteppedEnabled();
        Path filled = new Path();
        // NOTE(review): unlike the stepped branch below, 'entry' and 'e' are not
        // null-checked here; a missing entry would NPE — confirm whether the data
        // source guarantees non-null entries in [from, to).
        Entry entry = dataSet.getEntryForIndex(from);
        filled.moveTo(entry.getXIndex(), fillMin);
        filled.lineTo(entry.getXIndex(), entry.getVal() * phaseY);
        // create a new path
        for (int x = from + 1, count = (int) Math.ceil((to - from) * phaseX + from); x < count; x++) {
            Entry e = dataSet.getEntryForIndex(x);
            if (isDrawSteppedEnabled) {
                // Horizontal step at the previous entry's value before rising/falling.
                final Entry ePrev = dataSet.getEntryForIndex(x - 1);
                if (ePrev == null) continue;
                filled.lineTo(e.getXIndex(), ePrev.getVal() * phaseY);
            }
            filled.lineTo(e.getXIndex(), e.getVal() * phaseY);
        }
        // close up: drop from the last drawn entry back to the fill line.
        filled.lineTo(
                dataSet.getEntryForIndex(
                        Math.max(
                                Math.min((int) Math.ceil((to - from) * phaseX + from) - 1,
                                        dataSet.getEntryCount() - 1), 0)).getXIndex(), fillMin);
        filled.close();
        return filled;
    }
    /**
     * Draws the value labels above each entry, but only while the number of visible
     * y-values stays under the chart's max-visible-count at the current zoom level.
     */
    @Override
    public void drawValues(Canvas c) {
        if (mChart.getLineData().getYValCount() < mChart.getMaxVisibleCount()
                * mViewPortHandler.getScaleX()) {
            List<ILineDataSet> dataSets = mChart.getLineData().getDataSets();
            for (int i = 0; i < dataSets.size(); i++) {
                ILineDataSet dataSet = dataSets.get(i);
                if (!dataSet.isDrawValuesEnabled() || dataSet.getEntryCount() == 0)
                    continue;
                // apply the text-styling defined by the DataSet
                applyValueTextStyle(dataSet);
                Transformer trans = mChart.getTransformer(dataSet.getAxisDependency());
                // make sure the values do not interfear with the circles
                int valOffset = (int) (dataSet.getCircleRadius() * 1.75f);
                if (!dataSet.isDrawCirclesEnabled())
                    valOffset = valOffset / 2;
                int entryCount = dataSet.getEntryCount();
                // Clamp to the visible x-index window, same as the line renderers.
                Entry entryFrom = dataSet.getEntryForXIndex((mMinX < 0) ? 0 : mMinX,
                        DataSet.Rounding.DOWN);
                Entry entryTo = dataSet.getEntryForXIndex(mMaxX, DataSet.Rounding.UP);
                int diff = (entryFrom == entryTo) ? 1 : 0;
                int minx = Math.max(dataSet.getEntryIndex(entryFrom) - diff, 0);
                int maxx = Math.min(Math.max(minx + 2, dataSet.getEntryIndex(entryTo) + 1), entryCount);
                // Pixel positions come back as interleaved [x0, y0, x1, y1, ...].
                float[] positions = trans.generateTransformedValuesLine(
                        dataSet, mAnimator.getPhaseX(), mAnimator.getPhaseY(), minx, maxx);
                for (int j = 0; j < positions.length; j += 2) {
                    float x = positions[j];
                    float y = positions[j + 1];
                    if (!mViewPortHandler.isInBoundsRight(x))
                        break;
                    if (!mViewPortHandler.isInBoundsLeft(x) || !mViewPortHandler.isInBoundsY(y))
                        continue;
                    Entry entry = dataSet.getEntryForIndex(j / 2 + minx);
                    drawValue(c, dataSet.getValueFormatter(), entry.getVal(), entry, i, x,
                            y - valOffset, dataSet.getValueTextColor(j / 2));
                }
            }
        }
    }
    // Extras for this renderer are the per-entry circles.
    @Override
    public void drawExtras(Canvas c) {
        drawCircles(c);
    }
    /**
     * Draws the circle indicators at each visible entry of every data set that has
     * circles enabled, plus the inner "hole" circle when configured.
     */
    protected void drawCircles(Canvas c) {
        mRenderPaint.setStyle(Paint.Style.FILL);
        float phaseX = mAnimator.getPhaseX();
        float phaseY = mAnimator.getPhaseY();
        // Reused scratch buffer for one (x, y) point per entry.
        float[] circlesBuffer = new float[2];
        List<ILineDataSet> dataSets = mChart.getLineData().getDataSets();
        for (int i = 0; i < dataSets.size(); i++) {
            ILineDataSet dataSet = dataSets.get(i);
            if (!dataSet.isVisible() || !dataSet.isDrawCirclesEnabled() ||
                    dataSet.getEntryCount() == 0)
                continue;
            mCirclePaintInner.setColor(dataSet.getCircleHoleColor());
            Transformer trans = mChart.getTransformer(dataSet.getAxisDependency());
            int entryCount = dataSet.getEntryCount();
            // Clamp to the visible x-index window, same as the line renderers.
            Entry entryFrom = dataSet.getEntryForXIndex((mMinX < 0) ? 0 : mMinX,
                    DataSet.Rounding.DOWN);
            Entry entryTo = dataSet.getEntryForXIndex(mMaxX, DataSet.Rounding.UP);
            int diff = (entryFrom == entryTo) ? 1 : 0;
            int minx = Math.max(dataSet.getEntryIndex(entryFrom) - diff, 0);
            int maxx = Math.min(Math.max(minx + 2, dataSet.getEntryIndex(entryTo) + 1), entryCount);
            float halfsize = dataSet.getCircleRadius() / 2f;
            for (int j = minx,
                 count = (int) Math.ceil((maxx - minx) * phaseX + minx);
                 j < count;
                 j++) {
                Entry e = dataSet.getEntryForIndex(j);
                if (e == null) break;
                circlesBuffer[0] = e.getXIndex();
                circlesBuffer[1] = e.getVal() * phaseY;
                trans.pointValuesToPixel(circlesBuffer);
                if (!mViewPortHandler.isInBoundsRight(circlesBuffer[0]))
                    break;
                // make sure the circles don't do shitty things outside
                // bounds
                if (!mViewPortHandler.isInBoundsLeft(circlesBuffer[0]) ||
                        !mViewPortHandler.isInBoundsY(circlesBuffer[1]))
                    continue;
                int circleColor = dataSet.getCircleColor(j);
                mRenderPaint.setColor(circleColor);
                c.drawCircle(circlesBuffer[0], circlesBuffer[1], dataSet.getCircleRadius(),
                        mRenderPaint);
                // Skip the hole when it would be invisible (same color as the circle).
                if (dataSet.isDrawCircleHoleEnabled()
                        && circleColor != mCirclePaintInner.getColor())
                    c.drawCircle(circlesBuffer[0], circlesBuffer[1],
                            halfsize,
                            mCirclePaintInner);
            }
        }
    }
@Override
public void drawHighlighted(Canvas c, Highlight[] indices) {
for (int i = 0; i < indices.length; i++) {
ILineDataSet set = mChart.getLineData().getDataSetByIndex(indices[i]
.getDataSetIndex());
if (set == null || !set.isHighlightEnabled())
continue;
int xIndex = indices[i].getXIndex(); // get the
// x-position
if (xIndex > mChart.getXChartMax() * mAnimator.getPhaseX())
continue;
final float yVal = set.getYValForXIndex(xIndex);
if (yVal == Float.NaN)
continue;
float y = yVal * mAnimator.getPhaseY(); // get
// the
// y-position
float[] pts = new float[]{
xIndex, y
};
mChart.getTransformer(set.getAxisDependency()).pointValuesToPixel(pts);
// draw the lines
drawHighlightLines(c, pts, set);
}
}
    /**
     * Sets the Bitmap.Config to be used by this renderer.
     * Default: Bitmap.Config.ARGB_8888
     * Use Bitmap.Config.ARGB_4444 to consume less memory.
     *
     * @param config
     */
    public void setBitmapConfig(Bitmap.Config config) {
        mBitmapConfig = config;
        // Drop the cached bitmap so the next drawData() allocates one with the new config.
        releaseBitmap();
    }
    /**
     * Returns the Bitmap.Config that is used by this renderer.
     *
     * @return
     */
    public Bitmap.Config getBitmapConfig() {
        return mBitmapConfig;
    }
/**
* Releases the drawing bitmap. This should be called when {@link LineChart#onDetachedFromWindow()}.
*/
public void releaseBitmap() {
if (mDrawBitmap != null) {
mDrawBitmap.get().recycle();
mDrawBitmap.clear();
mDrawBitmap = null;
}
}
}
| |
/*
* Copyright (C) 2015 Haruki Hasegawa
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.h6ah4i.android.widget.advrecyclerview.expandable;
import android.support.v7.widget.RecyclerView;
import java.util.Arrays;
class ExpandablePositionTranslator {
private final static int ALLOCATE_UNIT = 256;
private final static long FLAG_EXPANDED = 0x0000000080000000L;
private final static long LOWER_31BIT_MASK = 0x000000007fffffffL;
private final static long LOWER_32BIT_MASK = 0x00000000ffffffffL;
private final static long UPPER_32BIT_MASK = 0xffffffff00000000L;
/*
* bit 64-32: offset (use for caching purpose)
* bit 31: expanded or not
* bit 30-0: child count
*/
private long[] mCachedGroupPosInfo;
/*
* bit 31: reserved
* bit 30-0: group id
*/
private int[] mCachedGroupId;
private int mGroupCount;
private int mExpandedGroupCount;
private int mExpandedChildCount;
private int mEndOfCalculatedOffsetGroupPosition = RecyclerView.NO_POSITION;
private ExpandableItemAdapter mAdapter;
    public ExpandablePositionTranslator() {
    }
    /**
     * Rebuilds the cached per-group position info from the adapter.
     *
     * Each slot of mCachedGroupPosInfo is packed per the class-level layout:
     * bits 63-32 = flat-position offset, bit 31 = expanded flag, bits 30-0 = child count.
     *
     * @param adapter     source of group/child counts and group ids
     * @param allExpanded when true, every group starts expanded
     */
    public void build(ExpandableItemAdapter adapter, boolean allExpanded) {
        final int groupCount = adapter.getGroupCount();
        enlargeArraysIfNeeded(groupCount, false);
        final long[] info = mCachedGroupPosInfo;
        final int[] ids = mCachedGroupId;
        int totalChildCount = 0;
        for (int i = 0; i < groupCount; i++) {
            final long groupId = adapter.getGroupId(i);
            final int childCount = adapter.getChildCount(i);
            if (allExpanded) {
                // Offset accounts for all children of preceding (expanded) groups.
                info[i] = (((long) (i + totalChildCount) << 32) | childCount) | FLAG_EXPANDED;
            } else {
                // Collapsed: offset equals the group index itself.
                info[i] = (((long) i << 32) | childCount);
            }
            ids[i] = (int) (groupId & LOWER_32BIT_MASK);
            totalChildCount += childCount;
        }
        mAdapter = adapter;
        mGroupCount = groupCount;
        mExpandedGroupCount = (allExpanded) ? groupCount : 0;
        mExpandedChildCount = (allExpanded) ? totalChildCount : 0;
        // All offsets computed above are valid up to the last group.
        mEndOfCalculatedOffsetGroupPosition = Math.max(0, groupCount - 1);
    }
    /**
     * Re-applies saved expand/collapse state (from {@link #getSavedStateArray()})
     * to the current groups, matching saved entries to current groups by group id
     * via a merge-scan over two id-sorted arrays.
     *
     * @param restoreGroupIds  saved state array (bits 63-32 = id, bit 31 = expanded)
     * @param adapter          consulted via onHookGroupExpand/Collapse before applying; may be null
     * @param expandListener   notified for each group actually expanded; may be null
     * @param collapseListener notified for each group actually collapsed; may be null
     */
    public void restoreExpandedGroupItems(
            long[] restoreGroupIds,
            ExpandableItemAdapter adapter,
            RecyclerViewExpandableItemManager.OnGroupExpandListener expandListener,
            RecyclerViewExpandableItemManager.OnGroupCollapseListener collapseListener) {
        if (restoreGroupIds == null || restoreGroupIds.length == 0) {
            return;
        }
        if (mCachedGroupPosInfo == null) {
            return;
        }
        // make ID + position packed array (bits 63-32 = id, bits 30-0 = current position)
        final long[] idAndPos = new long[mGroupCount];
        for (int i = 0; i < mGroupCount; i++) {
            idAndPos[i] = ((long) mCachedGroupId[i] << 32) | i;
        }
        // sort both arrays
        // (restoreGroupIds is already sorted by getSavedStateArray(); sorting idAndPos
        // by id enables the merge-scan below)
        Arrays.sort(idAndPos);
        final boolean fromUser = false;
        // find matched items & apply
        int index = 0;
        //noinspection ForLoopReplaceableByForEach
        for (int i = 0; i < restoreGroupIds.length; i++) {
            final int id1 = (int) (restoreGroupIds[i] >>> 32);
            final boolean expanded = ((restoreGroupIds[i] & FLAG_EXPANDED) != 0);
            // Resume scanning from where the previous saved id left off — both
            // arrays are id-sorted, so earlier entries can never match again.
            for (int j = index; j < idAndPos.length; j++) {
                final int id2 = (int) (idAndPos[j] >>> 32);
                final int position = (int) (idAndPos[j] & LOWER_31BIT_MASK);
                if (id2 < id1) {
                    index = j;
                } else if (id2 == id1) {
                    // matched
                    index = j + 1;
                    if (expanded) {
                        // Adapter hook may veto; listener only fires when state actually changed.
                        if (adapter == null || adapter.onHookGroupExpand(position, fromUser)) {
                            if (expandGroup(position)) {
                                if (expandListener != null) {
                                    expandListener.onGroupExpand(position, fromUser);
                                }
                            }
                        }
                    } else {
                        if (adapter == null || adapter.onHookGroupCollapse(position, fromUser)) {
                            if (collapseGroup(position)) {
                                if (collapseListener != null) {
                                    collapseListener.onGroupCollapse(position, fromUser);
                                }
                            }
                        }
                    }
                } else { // id2 > id1
                    break;
                }
            }
        }
    }
public long[] getSavedStateArray() {
// bit 64-32: group id
// bit 31: expanded or not
// bit 30-0: reserved
long[] expandedGroups = new long[mGroupCount];
for (int i = 0; i < mGroupCount; i++) {
final long t = mCachedGroupPosInfo[i];
expandedGroups[i] = ((long) mCachedGroupId[i] << 32l) | (t & FLAG_EXPANDED);
}
Arrays.sort(expandedGroups);
return expandedGroups;
}
    /** Total flat item count: all groups plus the children of expanded groups. */
    public int getItemCount() {
        return mGroupCount + mExpandedChildCount;
    }
    /** Whether the group at the given position is currently expanded (bit 31 of the packed info). */
    public boolean isGroupExpanded(int groupPosition) {
        return ((mCachedGroupPosInfo[groupPosition] & FLAG_EXPANDED) != 0);
    }
    /** Child count of the group (lower 31 bits of the packed info). */
    public int getChildCount(int groupPosition) {
        return (int) (mCachedGroupPosInfo[groupPosition] & LOWER_31BIT_MASK);
    }
    /** Child count when the group is expanded, 0 when collapsed. */
    public int getVisibleChildCount(int groupPosition) {
        if (isGroupExpanded(groupPosition)) {
            return getChildCount(groupPosition);
        } else {
            return 0;
        }
    }
    /**
     * Collapses the group. Returns false when it was already collapsed.
     * The caller is responsible for the matching notifyItemRangeRemoved().
     */
    public boolean collapseGroup(int groupPosition) {
        if ((mCachedGroupPosInfo[groupPosition] & FLAG_EXPANDED) == 0) {
            return false;
        }
        final int childCount = (int) (mCachedGroupPosInfo[groupPosition] & LOWER_31BIT_MASK);
        mCachedGroupPosInfo[groupPosition] &= (~FLAG_EXPANDED);
        mExpandedGroupCount -= 1;
        mExpandedChildCount -= childCount;
        // Cached flat-position offsets at and after this group are now stale.
        mEndOfCalculatedOffsetGroupPosition = Math.min(mEndOfCalculatedOffsetGroupPosition, groupPosition);
        // requires notifyItemRangeRemoved()
        return true;
    }
    /**
     * Expands the group. Returns false when it was already expanded.
     * The caller is responsible for the matching notifyItemRangeInserted().
     */
    public boolean expandGroup(int groupPosition) {
        if ((mCachedGroupPosInfo[groupPosition] & FLAG_EXPANDED) != 0) {
            return false;
        }
        final int childCount = (int) (mCachedGroupPosInfo[groupPosition] & LOWER_31BIT_MASK);
        mCachedGroupPosInfo[groupPosition] |= FLAG_EXPANDED;
        mExpandedGroupCount += 1;
        mExpandedChildCount += childCount;
        // Cached flat-position offsets at and after this group are now stale.
        mEndOfCalculatedOffsetGroupPosition = Math.min(mEndOfCalculatedOffsetGroupPosition, groupPosition);
        // requires notifyItemRangeInserted()
        return true;
    }
public void moveGroupItem(int fromGroupPosition, int toGroupPosition) {
if (fromGroupPosition == toGroupPosition) {
return;
}
final long tmp1 = mCachedGroupPosInfo[fromGroupPosition];
final int tmp2 = mCachedGroupId[fromGroupPosition];
if (toGroupPosition < fromGroupPosition) {
// shift to backward
for (int i = fromGroupPosition; i > toGroupPosition; i--) {
mCachedGroupPosInfo[i] = mCachedGroupPosInfo[i - 1];
mCachedGroupId[i] = mCachedGroupId[i - 1];
}
} else {
// shift to forward
for (int i = fromGroupPosition; i < toGroupPosition; i++) {
mCachedGroupPosInfo[i] = mCachedGroupPosInfo[i + 1];
mCachedGroupId[i] = mCachedGroupId[i + 1];
}
}
mCachedGroupPosInfo[toGroupPosition] = tmp1;
mCachedGroupId[toGroupPosition] = tmp2;
final int minPosition = Math.min(fromGroupPosition, toGroupPosition);
if (minPosition > 0) {
mEndOfCalculatedOffsetGroupPosition = Math.min(mEndOfCalculatedOffsetGroupPosition, minPosition - 1);
} else {
mEndOfCalculatedOffsetGroupPosition = RecyclerView.NO_POSITION;
}
}
/**
 * Accounts for a child item moving from one group to another by adjusting
 * each group's packed child count and the expanded-child total.
 * A move within the same group is a no-op here (counts do not change).
 * Note: the child position arguments are used only for the error message;
 * per-child state is not tracked in this cache.
 */
public void moveChildItem(int fromGroupPosition, int fromChildPosition, int toGroupPosition, int toChildPosition) {
    if (fromGroupPosition == toGroupPosition) {
        return;
    }
    // Lower 31 bits of each packed entry hold the group's child count.
    final int fromChildCount = (int) (mCachedGroupPosInfo[fromGroupPosition] & LOWER_31BIT_MASK);
    final int toChildCount = (int) (mCachedGroupPosInfo[toGroupPosition] & LOWER_31BIT_MASK);
    if (fromChildCount == 0) {
        throw new IllegalStateException("moveChildItem(" +
            "fromGroupPosition = " + fromGroupPosition +
            ", fromChildPosition = " + fromChildPosition +
            ", toGroupPosition = " + toGroupPosition +
            ", toChildPosition = " + toChildPosition + ") --- may be a bug.");
    }
    // Rewrite only the child-count bits; keep each group's cached offset and expanded flag.
    mCachedGroupPosInfo[fromGroupPosition] = (mCachedGroupPosInfo[fromGroupPosition] & (UPPER_32BIT_MASK | FLAG_EXPANDED)) | (fromChildCount - 1);
    mCachedGroupPosInfo[toGroupPosition] = (mCachedGroupPosInfo[toGroupPosition] & (UPPER_32BIT_MASK | FLAG_EXPANDED)) | (toChildCount + 1);
    // Adjust the visible-children total per group expansion state.
    if ((mCachedGroupPosInfo[fromGroupPosition] & FLAG_EXPANDED) != 0) {
        mExpandedChildCount -= 1;
    }
    if ((mCachedGroupPosInfo[toGroupPosition] & FLAG_EXPANDED) != 0) {
        mExpandedChildCount += 1;
    }
    // Offsets cached at/after the first touched group are stale.
    final int minPosition = Math.min(fromGroupPosition, toGroupPosition);
    if (minPosition > 0) {
        mEndOfCalculatedOffsetGroupPosition = Math.min(mEndOfCalculatedOffsetGroupPosition, minPosition - 1);
    } else {
        mEndOfCalculatedOffsetGroupPosition = RecyclerView.NO_POSITION;
    }
}
public long getExpandablePosition(int flatPosition) {
if (flatPosition == RecyclerView.NO_POSITION) {
return ExpandableAdapterHelper.NO_EXPANDABLE_POSITION;
}
final int groupCount = mGroupCount;
// final int startIndex = 0;
final int startIndex = binarySearchGroupPositionByFlatPosition(mCachedGroupPosInfo, mEndOfCalculatedOffsetGroupPosition, flatPosition);
long expandablePosition = ExpandableAdapterHelper.NO_EXPANDABLE_POSITION;
int endOfCalculatedOffsetGroupPosition = mEndOfCalculatedOffsetGroupPosition;
int offset = (startIndex == 0) ? 0 : (int) (mCachedGroupPosInfo[startIndex] >>> 32);
for (int i = startIndex; i < groupCount; i++) {
final long t = mCachedGroupPosInfo[i];
// update offset info
mCachedGroupPosInfo[i] = (((long) offset << 32) | (t & LOWER_32BIT_MASK));
endOfCalculatedOffsetGroupPosition = i;
if (offset >= flatPosition) {
// found (group item)
expandablePosition = ExpandableAdapterHelper.getPackedPositionForGroup(i);
break;
} else {
offset += 1;
}
if ((t & FLAG_EXPANDED) != 0) {
final int childCount = (int) (t & LOWER_31BIT_MASK);
if ((childCount > 0) && (offset + childCount - 1) >= flatPosition) {
// found (child item)
expandablePosition = ExpandableAdapterHelper.getPackedPositionForChild(i, (flatPosition - offset));
break;
} else {
offset += childCount;
}
}
}
mEndOfCalculatedOffsetGroupPosition = Math.max(mEndOfCalculatedOffsetGroupPosition, endOfCalculatedOffsetGroupPosition);
return expandablePosition;
}
/**
 * Converts a packed expandable position (group, or group + child) into a
 * flat adapter position. Updates per-group cached flat offsets as a side
 * effect of the scan.
 *
 * @param packedPosition packed position, or ExpandableAdapterHelper.NO_EXPANDABLE_POSITION
 * @return flat position, or RecyclerView.NO_POSITION when the group is out of
 *         range, the child index is invalid, or the target child's group is collapsed
 */
public int getFlatPosition(long packedPosition) {
    if (packedPosition == ExpandableAdapterHelper.NO_EXPANDABLE_POSITION) {
        return RecyclerView.NO_POSITION;
    }
    final int groupPosition = ExpandableAdapterHelper.getPackedPositionGroup(packedPosition);
    final int childPosition = ExpandableAdapterHelper.getPackedPositionChild(packedPosition);
    final int groupCount = mGroupCount;
    if (!(groupPosition >= 0 && groupPosition < groupCount)) {
        return RecyclerView.NO_POSITION;
    }
    // A child only has a flat position while its group is expanded.
    if (childPosition != RecyclerView.NO_POSITION) {
        if (!isGroupExpanded(groupPosition)) {
            return RecyclerView.NO_POSITION;
        }
    }
    // final int startIndex = 0;
    // Resume from the last group whose cached offset is valid (but not past the target).
    final int startIndex = Math.max(0, Math.min(groupPosition, mEndOfCalculatedOffsetGroupPosition));
    int endOfCalculatedOffsetGroupPosition = mEndOfCalculatedOffsetGroupPosition;
    int offset = (int) (mCachedGroupPosInfo[startIndex] >>> 32);
    int flatPosition = RecyclerView.NO_POSITION;
    for (int i = startIndex; i < groupCount; i++) {
        final long t = mCachedGroupPosInfo[i];
        // update offset info
        mCachedGroupPosInfo[i] = (((long) offset << 32) | (t & LOWER_32BIT_MASK));
        endOfCalculatedOffsetGroupPosition = i;
        final int childCount = (int) (t & LOWER_31BIT_MASK);
        if (i == groupPosition) {
            if (childPosition == RecyclerView.NO_POSITION) {
                flatPosition = offset;
            } else if (childPosition < childCount) {
                // Child items sit directly after their group item.
                flatPosition = (offset + 1) + childPosition;
            }
            break;
        } else {
            offset += 1;
            if ((t & FLAG_EXPANDED) != 0) {
                offset += childCount;
            }
        }
    }
    // Remember how far the offsets have now been recalculated.
    mEndOfCalculatedOffsetGroupPosition = Math.max(mEndOfCalculatedOffsetGroupPosition, endOfCalculatedOffsetGroupPosition);
    return flatPosition;
}
/**
 * Binary-searches the packed group-info array (whose upper 32 bits hold each
 * group's cached flat offset) for a conservative starting group index whose
 * cached offset does not exceed {@code flatPosition}.
 *
 * @param array            packed group info entries (offset in upper 32 bits)
 * @param endArrayPosition last index whose cached offset is valid
 * @param flatPosition     flat position being located
 * @return a group index at or before the group containing {@code flatPosition}
 */
private static int binarySearchGroupPositionByFlatPosition(long[] array, int endArrayPosition, int flatPosition) {
    if (endArrayPosition <= 0) {
        return 0;
    }
    final int firstOffset = (int) (array[0] >>> 32);
    final int lastOffset = (int) (array[endArrayPosition] >>> 32);
    // Fast paths: target sits before the first or after the last cached offset.
    if (flatPosition <= firstOffset) {
        return 0;
    }
    if (flatPosition >= lastOffset) {
        return endArrayPosition;
    }
    int result = 0;
    int lo = 0;
    int hi = endArrayPosition;
    while (lo < hi) {
        final int mid = (lo + hi) >>> 1;
        final int midOffset = (int) (array[mid] >>> 32);
        if (midOffset < flatPosition) {
            // Remember the low bound before moving past mid — the caller wants
            // a conservative (never past the target) starting index.
            result = lo;
            lo = mid + 1;
        } else {
            hi = mid;
        }
    }
    return result;
}
/** Convenience overload: removes a single child item. See {@code removeChildItems}. */
public void removeChildItem(int groupPosition, int childPosition) {
    removeChildItems(groupPosition, childPosition, 1);
}
/**
 * Removes {@code count} children starting at {@code childPositionStart} from
 * the given group's cached child count.
 *
 * @throws IllegalStateException when the range does not fit the group's current child count
 */
public void removeChildItems(int groupPosition, int childPositionStart, int count) {
    final long info = mCachedGroupPosInfo[groupPosition];
    final int curCount = (int) (info & LOWER_31BIT_MASK);
    final boolean validRange = (childPositionStart >= 0) && ((childPositionStart + count) <= curCount);
    if (!validRange) {
        throw new IllegalStateException(
            "Invalid child position " +
            "removeChildItems(groupPosition = " + groupPosition + ", childPosition = " + childPositionStart + ", count = " + count + ")");
    }
    // Only expanded groups contribute children to the visible total.
    if ((info & FLAG_EXPANDED) != 0) {
        mExpandedChildCount -= count;
    }
    // Rewrite the child-count bits; keep the cached offset and expanded flag.
    mCachedGroupPosInfo[groupPosition] = (info & (UPPER_32BIT_MASK | FLAG_EXPANDED)) | (curCount - count);
    // Cached flat offsets after this group are stale.
    mEndOfCalculatedOffsetGroupPosition = Math.min(mEndOfCalculatedOffsetGroupPosition, groupPosition - 1);
}
/** Convenience overload: inserts a single child item. See {@code insertChildItems}. */
public void insertChildItem(int groupPosition, int childPosition) {
    insertChildItems(groupPosition, childPosition, 1);
}
/**
 * Inserts {@code count} children at {@code childPositionStart} into the given
 * group's cached child count.
 *
 * @throws IllegalStateException when the start position is outside [0, current child count]
 */
public void insertChildItems(int groupPosition, int childPositionStart, int count) {
    final long info = mCachedGroupPosInfo[groupPosition];
    final int curCount = (int) (info & LOWER_31BIT_MASK);
    final boolean validStart = (childPositionStart >= 0) && (childPositionStart <= curCount);
    if (!validStart) {
        throw new IllegalStateException(
            "Invalid child position " +
            "insertChildItems(groupPosition = " + groupPosition + ", childPositionStart = " + childPositionStart + ", count = " + count + ")");
    }
    // Only expanded groups contribute children to the visible total.
    if ((info & FLAG_EXPANDED) != 0) {
        mExpandedChildCount += count;
    }
    // Rewrite the child-count bits; keep the cached offset and expanded flag.
    mCachedGroupPosInfo[groupPosition] = (info & (UPPER_32BIT_MASK | FLAG_EXPANDED)) | (curCount + count);
    // Cached flat offsets from this group onward are stale.
    mEndOfCalculatedOffsetGroupPosition = Math.min(mEndOfCalculatedOffsetGroupPosition, groupPosition);
}
/**
 * Inserts {@code count} new groups at {@code groupPosition}, pulling each new
 * group's id and child count from the adapter.
 *
 * @param expanded whether the inserted groups start expanded
 * @return the number of flat (visible) items inserted
 */
public int insertGroupItems(int groupPosition, int count, boolean expanded) {
    if (count <= 0) {
        return 0;
    }
    //noinspection UnnecessaryLocalVariable
    final int n = count;
    enlargeArraysIfNeeded(mGroupCount + n, true);
    // shift to backward
    final ExpandableItemAdapter adapter = mAdapter;
    final long[] info = mCachedGroupPosInfo;
    final int[] ids = mCachedGroupId;
    int start = mGroupCount - 1 + n;
    int end = groupPosition - 1 + n;
    for (int i = start; i > end; i--) {
        info[i] = info[i - n];
        ids[i] = ids[i - n];
    }
    // insert items
    final long expandedFlag = (expanded) ? FLAG_EXPANDED : 0;
    int insertedChildCount = 0;
    int end2 = groupPosition + n;
    for (int i = groupPosition; i < end2; i++) {
        final long groupId = adapter.getGroupId(i);
        final int childCount = adapter.getChildCount(i);
        // Upper 32 bits get 'i' as a placeholder offset; real offsets are
        // recomputed lazily because the calculated-offset watermark is reset below.
        info[i] = (((long) i << 32) | childCount) | expandedFlag;
        ids[i] = (int) (groupId & LOWER_32BIT_MASK);
        insertedChildCount += childCount;
    }
    mGroupCount += n;
    if (expanded) {
        mExpandedGroupCount += n;
        mExpandedChildCount += insertedChildCount;
    }
    // NOTE(review): mGroupCount >= n > 0 here, so the NO_POSITION branch looks
    // unreachable — presumably kept for symmetry with removeGroupItems; confirm.
    int calculatedOffset = (mGroupCount == 0) ? RecyclerView.NO_POSITION : (groupPosition - 1);
    mEndOfCalculatedOffsetGroupPosition = Math.min(mEndOfCalculatedOffsetGroupPosition, calculatedOffset);
    return (expanded) ? (n + insertedChildCount) : n;
}
/** Convenience overload: inserts a single group. See {@code insertGroupItems}. */
public int insertGroupItem(int groupPosition, boolean expanded) {
    return insertGroupItems(groupPosition, 1, expanded);
}
/**
 * Removes {@code count} groups starting at {@code groupPosition}, together
 * with their (visible) children.
 *
 * @return the number of flat (visible) items removed — the groups plus the
 *         children of any removed group that was expanded
 */
public int removeGroupItems(int groupPosition, int count) {
    if (count <= 0) {
        return 0;
    }
    //noinspection UnnecessaryLocalVariable
    final int n = count;
    int removedVisibleItemCount = 0;
    // Account for children of expanded groups before overwriting their entries.
    for (int i = 0; i < n; i++) {
        final long t = mCachedGroupPosInfo[groupPosition + i];
        if ((t & FLAG_EXPANDED) != 0) {
            int visibleChildCount = (int) (t & LOWER_31BIT_MASK);
            removedVisibleItemCount += visibleChildCount;
            mExpandedChildCount -= visibleChildCount;
            mExpandedGroupCount -= 1;
        }
    }
    // The group items themselves are visible regardless of expansion state.
    removedVisibleItemCount += n;
    mGroupCount -= n;
    // shift to forward
    for (int i = groupPosition; i < mGroupCount; i++) {
        mCachedGroupPosInfo[i] = mCachedGroupPosInfo[i + n];
        mCachedGroupId[i] = mCachedGroupId[i + n];
    }
    // Cached flat offsets at/after the removal point are stale.
    int calculatedOffset = (mGroupCount == 0) ? RecyclerView.NO_POSITION : (groupPosition - 1);
    mEndOfCalculatedOffsetGroupPosition = Math.min(mEndOfCalculatedOffsetGroupPosition, calculatedOffset);
    return removedVisibleItemCount;
}
/** Convenience overload: removes a single group. See {@code removeGroupItems}. */
public int removeGroupItem(int groupPosition) {
    return removeGroupItems(groupPosition, 1);
}
/**
 * Ensures the cached group-info and group-id arrays can hold at least
 * {@code size} entries, growing in ALLOCATE_UNIT-sized steps.
 *
 * @param preserveData when true, existing contents are copied into any new array
 */
private void enlargeArraysIfNeeded(int size, boolean preserveData) {
    // Round up with one extra unit of headroom, aligned to ALLOCATE_UNIT.
    final int allocSize = (size + (2 * ALLOCATE_UNIT - 1)) & ~(ALLOCATE_UNIT - 1);
    final long[] oldInfo = mCachedGroupPosInfo;
    final int[] oldIds = mCachedGroupId;
    long[] newInfo = (oldInfo == null || oldInfo.length < size) ? new long[allocSize] : oldInfo;
    int[] newIds = (oldIds == null || oldIds.length < size) ? new int[allocSize] : oldIds;
    if (preserveData) {
        // Copy only when a fresh array was actually allocated.
        if (oldInfo != null && oldInfo != newInfo) {
            System.arraycopy(oldInfo, 0, newInfo, 0, oldInfo.length);
        }
        if (oldIds != null && oldIds != newIds) {
            System.arraycopy(oldIds, 0, newIds, 0, oldIds.length);
        }
    }
    mCachedGroupPosInfo = newInfo;
    mCachedGroupId = newIds;
}
/** Returns the number of currently expanded groups. */
public int getExpandedGroupsCount() {
    return mExpandedGroupCount;
}
/** Returns the number of currently collapsed groups. */
public int getCollapsedGroupsCount() {
    return mGroupCount - mExpandedGroupCount;
}
/** Returns true when there is at least one group and every group is expanded. */
public boolean isAllExpanded() {
    if (isEmpty()) {
        return false;
    }
    return mExpandedGroupCount == mGroupCount;
}
/** Returns true when there are no groups, or no group is expanded. */
public boolean isAllCollapsed() {
    if (isEmpty()) {
        return true;
    }
    return mExpandedGroupCount == 0;
}
/** Returns true when no groups are present. */
public boolean isEmpty() {
    return mGroupCount == 0;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.discovery.zen;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.NotMasterException;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.MasterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* A fault detection that pings the master periodically to see if its alive.
*/
public class MasterFaultDetection extends FaultDetection {

    public static final String MASTER_PING_ACTION_NAME = "internal:discovery/zen/fd/master_ping";

    public interface Listener {

        /** called when pinging the master failed, like a timeout, transport disconnects etc */
        void onMasterFailure(DiscoveryNode masterNode, Throwable cause, String reason);
    }

    private final MasterService masterService;
    private final java.util.function.Supplier<ClusterState> clusterStateSupplier;
    private final CopyOnWriteArrayList<Listener> listeners = new CopyOnWriteArrayList<>();
    // Currently active pinger; replaced (old one stopped) on restart and on reconnect.
    private volatile MasterPinger masterPinger;
    // Guards start/stop/restart transitions of masterNode and masterPinger.
    private final Object masterNodeMutex = new Object();
    // Master currently being monitored; null while fault detection is stopped.
    private volatile DiscoveryNode masterNode;
    // Consecutive failed pings; reset to 0 on any successful ping response.
    private volatile int retryCount;
    // Ensures listeners are notified of a master failure at most once per innerStart().
    private final AtomicBoolean notifiedMasterFailure = new AtomicBoolean();

    public MasterFaultDetection(Settings settings, ThreadPool threadPool, TransportService transportService,
                                java.util.function.Supplier<ClusterState> clusterStateSupplier, MasterService masterService,
                                ClusterName clusterName) {
        super(settings, threadPool, transportService, clusterName);
        this.clusterStateSupplier = clusterStateSupplier;
        this.masterService = masterService;
        logger.debug("[master] uses ping_interval [{}], ping_timeout [{}], ping_retries [{}]", pingInterval, pingRetryTimeout,
            pingRetryCount);
        transportService.registerRequestHandler(
            MASTER_PING_ACTION_NAME, MasterPingRequest::new, ThreadPool.Names.SAME, false, false, new MasterPingRequestHandler());
    }

    /** Returns the master node currently being monitored, or null when stopped. */
    public DiscoveryNode masterNode() {
        return this.masterNode;
    }

    public void addListener(Listener listener) {
        listeners.add(listener);
    }

    public void removeListener(Listener listener) {
        listeners.remove(listener);
    }

    /** Stops monitoring the current master (if any) and starts monitoring the given one. */
    public void restart(DiscoveryNode masterNode, String reason) {
        synchronized (masterNodeMutex) {
            if (logger.isDebugEnabled()) {
                logger.debug("[master] restarting fault detection against master [{}], reason [{}]", masterNode, reason);
            }
            innerStop();
            innerStart(masterNode);
        }
    }

    // Caller must hold masterNodeMutex.
    private void innerStart(final DiscoveryNode masterNode) {
        this.masterNode = masterNode;
        this.retryCount = 0;
        this.notifiedMasterFailure.set(false);
        if (masterPinger != null) {
            masterPinger.stop();
        }
        this.masterPinger = new MasterPinger();

        // we start pinging slightly later to allow the chosen master to complete it's own master election
        threadPool.schedule(pingInterval, ThreadPool.Names.SAME, masterPinger);
    }

    public void stop(String reason) {
        synchronized (masterNodeMutex) {
            if (masterNode != null) {
                if (logger.isDebugEnabled()) {
                    logger.debug("[master] stopping fault detection against master [{}], reason [{}]", masterNode, reason);
                }
            }
            innerStop();
        }
    }

    // Caller must hold masterNodeMutex (or be in close()).
    private void innerStop() {
        // also will stop the next ping schedule
        this.retryCount = 0;
        if (masterPinger != null) {
            masterPinger.stop();
            masterPinger = null;
        }
        this.masterNode = null;
    }

    @Override
    public void close() {
        super.close();
        stop("closing");
        this.listeners.clear();
    }

    @Override
    protected void handleTransportDisconnect(DiscoveryNode node) {
        synchronized (masterNodeMutex) {
            // Ignore disconnects from nodes other than the monitored master.
            if (!node.equals(this.masterNode)) {
                return;
            }
            if (connectOnNetworkDisconnect) {
                try {
                    // Try to reconnect once before declaring the master failed.
                    transportService.connectToNode(node);
                    // if all is well, make sure we restart the pinger
                    if (masterPinger != null) {
                        masterPinger.stop();
                    }
                    this.masterPinger = new MasterPinger();
                    // we use schedule with a 0 time value to run the pinger on the pool as it will run on later
                    threadPool.schedule(TimeValue.timeValueMillis(0), ThreadPool.Names.SAME, masterPinger);
                } catch (Exception e) {
                    logger.trace("[master] [{}] transport disconnected (with verified connect)", masterNode);
                    notifyMasterFailure(masterNode, null, "transport disconnected (with verified connect)");
                }
            } else {
                logger.trace("[master] [{}] transport disconnected", node);
                notifyMasterFailure(node, null, "transport disconnected");
            }
        }
    }

    // Notifies listeners on the generic pool at most once, then stops fault detection.
    private void notifyMasterFailure(final DiscoveryNode masterNode, final Throwable cause, final String reason) {
        if (notifiedMasterFailure.compareAndSet(false, true)) {
            try {
                threadPool.generic().execute(() -> {
                    for (Listener listener : listeners) {
                        listener.onMasterFailure(masterNode, cause, reason);
                    }
                });
            } catch (EsRejectedExecutionException e) {
                logger.error("master failure notification was rejected, it's highly likely the node is shutting down", e);
            }
            stop("master failure, " + reason);
        }
    }

    private class MasterPinger implements Runnable {

        private volatile boolean running = true;

        public void stop() {
            this.running = false;
        }

        @Override
        public void run() {
            if (!running) {
                // return and don't spawn...
                return;
            }
            final DiscoveryNode masterToPing = masterNode;
            if (masterToPing == null) {
                // master is null, should not happen, but we are still running, so reschedule
                threadPool.schedule(pingInterval, ThreadPool.Names.SAME, MasterPinger.this);
                return;
            }

            final MasterPingRequest request = new MasterPingRequest(
                clusterStateSupplier.get().nodes().getLocalNode(), masterToPing, clusterName);
            final TransportRequestOptions options = TransportRequestOptions.builder().withType(TransportRequestOptions.Type.PING)
                .withTimeout(pingRetryTimeout).build();
            transportService.sendRequest(masterToPing, MASTER_PING_ACTION_NAME, request, options,
                new TransportResponseHandler<MasterPingResponseResponse>() {
                    @Override
                    public MasterPingResponseResponse newInstance() {
                        return new MasterPingResponseResponse();
                    }

                    @Override
                    public void handleResponse(MasterPingResponseResponse response) {
                        if (!running) {
                            return;
                        }
                        // reset the counter, we got a good result
                        MasterFaultDetection.this.retryCount = 0;
                        // check if the master node did not get switched on us..., if it did, we simply return with no reschedule
                        if (masterToPing.equals(MasterFaultDetection.this.masterNode())) {
                            // we don't stop on disconnection from master, we keep pinging it
                            threadPool.schedule(pingInterval, ThreadPool.Names.SAME, MasterPinger.this);
                        }
                    }

                    @Override
                    public void handleException(TransportException exp) {
                        if (!running) {
                            return;
                        }
                        synchronized (masterNodeMutex) {
                            // check if the master node did not get switched on us...
                            if (masterToPing.equals(MasterFaultDetection.this.masterNode())) {
                                // Connection-level failures go through the reconnect path.
                                if (exp instanceof ConnectTransportException || exp.getCause() instanceof ConnectTransportException) {
                                    handleTransportDisconnect(masterToPing);
                                    return;
                                } else if (exp.getCause() instanceof NotMasterException) {
                                    logger.debug("[master] pinging a master {} that is no longer a master", masterNode);
                                    notifyMasterFailure(masterToPing, exp, "no longer master");
                                    return;
                                } else if (exp.getCause() instanceof ThisIsNotTheMasterYouAreLookingForException) {
                                    logger.debug("[master] pinging a master {} that is not the master", masterNode);
                                    notifyMasterFailure(masterToPing, exp,"not master");
                                    return;
                                } else if (exp.getCause() instanceof NodeDoesNotExistOnMasterException) {
                                    logger.debug("[master] pinging a master {} but we do not exists on it, act as if its master failure"
                                        , masterNode);
                                    notifyMasterFailure(masterToPing, exp,"do not exists on master, act as master failure");
                                    return;
                                }

                                // Any other failure: retry up to pingRetryCount before giving up.
                                int retryCount = ++MasterFaultDetection.this.retryCount;
                                logger.trace(() -> new ParameterizedMessage(
                                    "[master] failed to ping [{}], retry [{}] out of [{}]",
                                    masterNode, retryCount, pingRetryCount), exp);
                                if (retryCount >= pingRetryCount) {
                                    logger.debug("[master] failed to ping [{}], tried [{}] times, each with maximum [{}] timeout",
                                        masterNode, pingRetryCount, pingRetryTimeout);
                                    // not good, failure
                                    notifyMasterFailure(masterToPing, null, "failed to ping, tried [" + pingRetryCount
                                        + "] times, each with  maximum [" + pingRetryTimeout + "] timeout");
                                } else {
                                    // resend the request, not reschedule, rely on send timeout
                                    transportService.sendRequest(masterToPing, MASTER_PING_ACTION_NAME, request, options, this);
                                }
                            }
                        }
                    }

                    @Override
                    public String executor() {
                        return ThreadPool.Names.SAME;
                    }
                }
            );
        }
    }

    /** Thrown when a ping reaches the wrong node */
    static class ThisIsNotTheMasterYouAreLookingForException extends IllegalStateException {

        ThisIsNotTheMasterYouAreLookingForException(String msg) {
            super(msg);
        }

        ThisIsNotTheMasterYouAreLookingForException() {
        }

        @Override
        public Throwable fillInStackTrace() {
            // Marker exception — a stack trace carries no useful information here.
            return null;
        }
    }

    static class NodeDoesNotExistOnMasterException extends IllegalStateException {
        @Override
        public Throwable fillInStackTrace() {
            // Marker exception — a stack trace carries no useful information here.
            return null;
        }
    }

    private class MasterPingRequestHandler implements TransportRequestHandler<MasterPingRequest> {

        @Override
        public void messageReceived(final MasterPingRequest request, final TransportChannel channel, Task task) throws Exception {
            final DiscoveryNodes nodes = clusterStateSupplier.get().nodes();
            // check if we are really the same master as the one we seemed to be think we are
            // this can happen if the master got "kill -9" and then another node started using the same port
            if (!request.masterNode.equals(nodes.getLocalNode())) {
                throw new ThisIsNotTheMasterYouAreLookingForException();
            }

            // ping from nodes of version < 1.4.0 will have the clustername set to null
            if (request.clusterName != null && !request.clusterName.equals(clusterName)) {
                logger.trace("master fault detection ping request is targeted for a different [{}] cluster then us [{}]",
                    request.clusterName, clusterName);
                throw new ThisIsNotTheMasterYouAreLookingForException("master fault detection ping request is targeted for a different ["
                    + request.clusterName + "] cluster then us [" + clusterName + "]");
            }

            // when we are elected as master or when a node joins, we use a cluster state update thread
            // to incorporate that information in the cluster state. That cluster state is published
            // before we make it available locally. This means that a master ping can come from a node
            // that has already processed the new CS but it is not known locally.
            // Therefore, if we fail we have to check again under a cluster state thread to make sure
            // all processing is finished.
            //
            if (!nodes.isLocalNodeElectedMaster() || !nodes.nodeExists(request.sourceNode)) {
                logger.trace("checking ping from {} under a cluster state thread", request.sourceNode);
                masterService.submitStateUpdateTask("master ping (from: " + request.sourceNode + ")", new ClusterStateUpdateTask() {

                    @Override
                    public ClusterState execute(ClusterState currentState) throws Exception {
                        // if we are no longer master, fail...
                        DiscoveryNodes nodes = currentState.nodes();
                        if (!nodes.nodeExists(request.sourceNode)) {
                            throw new NodeDoesNotExistOnMasterException();
                        }
                        return currentState;
                    }

                    @Override
                    public void onNoLongerMaster(String source) {
                        onFailure(source, new NotMasterException("local node is not master"));
                    }

                    @Override
                    public void onFailure(String source, @Nullable Exception e) {
                        if (e == null) {
                            e = new ElasticsearchException("unknown error while processing ping");
                        }
                        try {
                            channel.sendResponse(e);
                        } catch (IOException inner) {
                            inner.addSuppressed(e);
                            logger.warn("error while sending ping response", inner);
                        }
                    }

                    @Override
                    public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                        try {
                            channel.sendResponse(new MasterPingResponseResponse());
                        } catch (IOException e) {
                            logger.warn("error while sending ping response", e);
                        }
                    }
                });
            } else {
                // send a response, and note if we are connected to the master or not
                channel.sendResponse(new MasterPingResponseResponse());
            }
        }
    }

    public static class MasterPingRequest extends TransportRequest {

        private DiscoveryNode sourceNode;
        private DiscoveryNode masterNode;
        private ClusterName clusterName;

        public MasterPingRequest() {
        }

        private MasterPingRequest(DiscoveryNode sourceNode, DiscoveryNode masterNode, ClusterName clusterName) {
            this.sourceNode = sourceNode;
            this.masterNode = masterNode;
            this.clusterName = clusterName;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            sourceNode = new DiscoveryNode(in);
            masterNode = new DiscoveryNode(in);
            clusterName = new ClusterName(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            sourceNode.writeTo(out);
            masterNode.writeTo(out);
            clusterName.writeTo(out);
        }
    }

    private static class MasterPingResponseResponse extends TransportResponse {

        private MasterPingResponseResponse() {
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
        }
    }
}
| |
/*
* Copyright 2010-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gemlite.shell.codegen.tools;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public final class StringUtils
{
/** The empty String {@code ""}. */
public static final String EMPTY = "";
/** Index value returned by {@code String.indexOf} when no match is found. */
public static final int INDEX_NOT_FOUND = -1;
/** Maximum size supported by the char-repeating {@code padding} helper. */
private static final int PAD_LIMIT = 8192;
// Defaults
// -----------------------------------------------------------------------
/**
 * Returns the given String, or the empty String {@code ""} when it is null.
 */
public static String defaultString(String str)
{
    if (str == null)
    {
        return "";
    }
    return str;
}
/**
 * Returns the given String, or {@code defaultStr} when it is null.
 */
public static String defaultString(String str, String defaultStr)
{
    if (str == null)
    {
        return defaultStr;
    }
    return str;
}
/**
 * Returns the given String, or {@code defaultStr} when it is null or empty.
 */
public static String defaultIfEmpty(String str, String defaultStr)
{
    return StringUtils.isEmpty(str) ? defaultStr : str;
}
/**
 * Returns the given String, or {@code defaultStr} when it is null, empty, or whitespace-only.
 */
public static String defaultIfBlank(String str, String defaultStr)
{
    return StringUtils.isBlank(str) ? defaultStr : str;
}
// Empty checks
// -----------------------------------------------------------------------
/**
 * Checks whether a String is null or has zero length.
 * Note: a whitespace-only String is NOT empty; see {@code isBlank}.
 */
public static boolean isEmpty(String str)
{
    if (str == null)
    {
        return true;
    }
    return str.length() == 0;
}
/**
 * Checks whether a String is null, empty, or consists only of whitespace
 * (per {@code Character.isWhitespace}).
 */
public static boolean isBlank(String str)
{
    if (str == null)
    {
        return true;
    }
    final int len = str.length();
    for (int i = 0; i < len; i++)
    {
        if (!Character.isWhitespace(str.charAt(i)))
        {
            return false;
        }
    }
    // Zero-length strings fall through here and are blank.
    return true;
}
// Count matches
// -----------------------------------------------------------------------
/**
 * Counts non-overlapping occurrences of {@code sub} within {@code str}.
 * Returns 0 when either argument is null or empty.
 */
public static int countMatches(String str, String sub)
{
    if (isEmpty(str) || isEmpty(sub))
    {
        return 0;
    }
    int count = 0;
    // Advance past each match so occurrences never overlap.
    for (int idx = str.indexOf(sub); idx != -1; idx = str.indexOf(sub, idx + sub.length()))
    {
        count++;
    }
    return count;
}
// Padding
// -----------------------------------------------------------------------
/**
 * Right-pads a String with spaces to the given size. Null-safe.
 */
public static String rightPad(String str, int size)
{
    return rightPad(str, size, ' ');
}
/**
 * Right-pads a String with the given character to the given size.
 * Returns null for null input, and the original String when it is already
 * at least {@code size} long.
 */
public static String rightPad(String str, int size, char padChar)
{
    if (str == null)
    {
        return null;
    }
    final int pads = size - str.length();
    if (pads <= 0)
    {
        return str; // returns original String when possible
    }
    if (pads > PAD_LIMIT)
    {
        // Very large pads go through the String-based variant.
        return rightPad(str, size, String.valueOf(padChar));
    }
    return str.concat(padding(pads, padChar));
}
/**
 * Right-pads a String with the given pad String (repeated/truncated as
 * needed) to the given size. A null or empty pad String defaults to a single
 * space. Returns null for null input.
 */
public static String rightPad(String str, int size, String padStr)
{
    if (str == null)
    {
        return null;
    }
    final String pad = isEmpty(padStr) ? " " : padStr;
    final int padLen = pad.length();
    final int pads = size - str.length();
    if (pads <= 0)
    {
        return str; // returns original String when possible
    }
    if (padLen == 1 && pads <= PAD_LIMIT)
    {
        // Single-character pad: the char-based variant is cheaper.
        return rightPad(str, size, pad.charAt(0));
    }
    if (pads == padLen)
    {
        return str.concat(pad);
    }
    if (pads < padLen)
    {
        return str.concat(pad.substring(0, pads));
    }
    // Pad String must be repeated (and possibly truncated) to fill the gap.
    final char[] buf = new char[pads];
    final char[] padChars = pad.toCharArray();
    for (int i = 0; i < pads; i++)
    {
        buf[i] = padChars[i % padLen];
    }
    return str.concat(new String(buf));
}
/**
 * Left-pads a String with spaces to the given size. Null-safe.
 */
public static String leftPad(String str, int size)
{
    return leftPad(str, size, ' ');
}
/**
 * Left-pads a String with the given character to the given size.
 * Returns null for null input, and the original String when it is already
 * at least {@code size} long.
 */
public static String leftPad(String str, int size, char padChar)
{
    if (str == null)
    {
        return null;
    }
    final int pads = size - str.length();
    if (pads <= 0)
    {
        return str; // returns original String when possible
    }
    if (pads > PAD_LIMIT)
    {
        // Very large pads go through the String-based variant.
        return leftPad(str, size, String.valueOf(padChar));
    }
    return padding(pads, padChar).concat(str);
}
/**
 * Left-pads a String with the given pad String (repeated/truncated as
 * needed) to the given size. A null or empty pad String defaults to a single
 * space. Returns null for null input.
 */
public static String leftPad(String str, int size, String padStr)
{
    if (str == null)
    {
        return null;
    }
    final String pad = isEmpty(padStr) ? " " : padStr;
    final int padLen = pad.length();
    final int pads = size - str.length();
    if (pads <= 0)
    {
        return str; // returns original String when possible
    }
    if (padLen == 1 && pads <= PAD_LIMIT)
    {
        // Single-character pad: the char-based variant is cheaper.
        return leftPad(str, size, pad.charAt(0));
    }
    if (pads == padLen)
    {
        return pad.concat(str);
    }
    if (pads < padLen)
    {
        return pad.substring(0, pads).concat(str);
    }
    // Pad String must be repeated (and possibly truncated) to fill the gap.
    final char[] buf = new char[pads];
    final char[] padChars = pad.toCharArray();
    for (int i = 0; i < pads; i++)
    {
        buf[i] = padChars[i % padLen];
    }
    return new String(buf).concat(str);
}
/**
 * Builds a String consisting of {@code repeat} copies of {@code padChar}.
 *
 * @throws IndexOutOfBoundsException when {@code repeat} is negative
 */
private static String padding(int repeat, char padChar) throws IndexOutOfBoundsException
{
    if (repeat < 0)
    {
        throw new IndexOutOfBoundsException("Cannot pad a negative amount: " + repeat);
    }
    final char[] buf = new char[repeat];
    java.util.Arrays.fill(buf, padChar);
    return new String(buf);
}
// Abbreviating
// -----------------------------------------------------------------------
/**
 * Abbreviates a String to {@code maxWidth} using an ellipsis ("..."), with no offset.
 */
public static String abbreviate(String str, int maxWidth)
{
    return abbreviate(str, 0, maxWidth);
}
/**
 * Abbreviates a String to at most {@code maxWidth} characters, inserting
 * "..." where text was elided and trying to keep the character at
 * {@code offset} visible.
 *
 * @param str      String to abbreviate; null returns null
 * @param offset   left edge of the region that should remain visible
 * @param maxWidth maximum length of the result (minimum 4; minimum 7 when an
 *                 offset actually takes effect)
 * @throws IllegalArgumentException when maxWidth is too small
 */
public static String abbreviate(String str, int offset, int maxWidth)
{
    if (str == null)
    {
        return null;
    }
    if (maxWidth < 4)
    {
        throw new IllegalArgumentException("Minimum abbreviation width is 4");
    }
    // Short enough already — return unchanged.
    if (str.length() <= maxWidth)
    {
        return str;
    }
    // Clamp the offset into the String, then pull it back so the tail fills the width.
    if (offset > str.length())
    {
        offset = str.length();
    }
    if ((str.length() - offset) < (maxWidth - 3))
    {
        offset = str.length() - (maxWidth - 3);
    }
    // Small offsets: elide only the tail.
    if (offset <= 4)
    {
        return str.substring(0, maxWidth - 3) + "...";
    }
    if (maxWidth < 7)
    {
        throw new IllegalArgumentException("Minimum abbreviation width with offset is 7");
    }
    // Both edges elided: recurse on the tail for the trailing ellipsis.
    if ((offset + (maxWidth - 3)) < str.length())
    {
        return "..." + abbreviate(str.substring(offset), maxWidth - 3);
    }
    // Only the head is elided.
    return "..." + str.substring(str.length() - (maxWidth - 3));
}
// ContainsAny
// -----------------------------------------------------------------------
/**
 * Checks whether the String contains any of the given characters.
 * Returns false for a null/empty String or a null/empty search set.
 */
public static boolean containsAny(String str, char... searchChars)
{
    if (str == null || searchChars == null)
    {
        return false;
    }
    // Empty String or empty search set: the loops never run and we return false,
    // matching the explicit length checks of the original.
    final int strLen = str.length();
    for (int i = 0; i < strLen; i++)
    {
        final char ch = str.charAt(i);
        for (final char candidate : searchChars)
        {
            if (candidate == ch)
            {
                return true;
            }
        }
    }
    return false;
}
/**
 * Replaces all occurrences of {@code searchString} within {@code text} with
 * {@code replacement}. Null-safe.
 */
public static String replace(String text, String searchString, String replacement)
{
    return replace(text, searchString, replacement, -1);
}
/**
 * Replaces up to {@code max} occurrences of {@code searchString} within
 * {@code text} with {@code replacement}; {@code max < 0} means no limit.
 * Returns the input unchanged when any argument is null/empty or max is 0.
 */
public static String replace(String text, String searchString, String replacement, int max)
{
    if (isEmpty(text) || isEmpty(searchString) || replacement == null || max == 0)
    {
        return text;
    }
    int end = text.indexOf(searchString);
    if (end == INDEX_NOT_FOUND)
    {
        return text;
    }
    final int replLength = searchString.length();
    // Pre-size the buffer: assume up to 64 growing replacements.
    int increase = Math.max(replacement.length() - replLength, 0);
    increase *= (max < 0) ? 16 : Math.min(max, 64);
    final StringBuilder buf = new StringBuilder(text.length() + increase);
    int start = 0;
    int remaining = max;
    while (end != INDEX_NOT_FOUND)
    {
        buf.append(text, start, end).append(replacement);
        start = end + replLength;
        if (--remaining == 0)
        {
            break;
        }
        end = text.indexOf(searchString, start);
    }
    return buf.append(text.substring(start)).toString();
}
/**
 * Replaces all occurrences of the search strings with their corresponding
 * replacement strings, in a single pass (replacement output is not
 * re-scanned for further matches).
 *
 * @param text text to search and replace in, no-op if null or empty
 * @param searchList the strings to search for, no-op if null or empty
 * @param replacementList the strings to replace them with, must be the
 *        same length as searchList
 * @return the text with any replacements processed
 */
public static String replaceEach(String text, String[] searchList, String[] replacementList)
{
    return replaceEach(text, searchList, replacementList, false, 0);
}
/**
 * Core worker for {@code replaceEach}: scans {@code text} left to right and
 * always applies the earliest (on ties, the first-listed) match, optionally
 * repeating the whole pass on the result.
 *
 * @param text text to search and replace in, no-op if null or empty
 * @param searchList the strings to search for, no-op if null or empty;
 *        null or empty entries are skipped
 * @param replacementList the strings to replace them with, must be the same
 *        length as searchList; entries paired with a null are skipped
 * @param repeat if true, re-run the replacement over the result until no
 *        search string matches or timeToLive is exhausted
 * @param timeToLive remaining recursion budget when repeat is true; guards
 *        against endless replacement loops
 * @return the text with any replacements processed, or the input text when
 *         nothing matches
 * @throws IllegalStateException if timeToLive went below zero (loop detected)
 * @throws IllegalArgumentException if the array lengths differ
 */
private static String replaceEach(String text, String[] searchList, String[] replacementList, boolean repeat,
    int timeToLive)
{
    // mchyzer Performance note: This creates very few new objects (one major
    // goal)
    if (text == null || text.length() == 0 || searchList == null || searchList.length == 0 || replacementList == null
        || replacementList.length == 0)
    {
        return text;
    }
    // if recursing, this shouldnt be less than 0
    if (timeToLive < 0)
    {
        throw new IllegalStateException("TimeToLive of " + timeToLive + " is less than 0: " + text);
    }
    int searchLength = searchList.length;
    int replacementLength = replacementList.length;
    // make sure lengths are ok, these need to be equal
    if (searchLength != replacementLength)
    {
        throw new IllegalArgumentException("Search and Replace array lengths don't match: " + searchLength + " vs "
            + replacementLength);
    }
    // keep track of which search strings have no further matches
    boolean[] noMoreMatchesForReplIndex = new boolean[searchLength];
    // position of the earliest match found, and which pair produced it
    int textIndex = -1;
    int replaceIndex = -1;
    int tempIndex = -1;
    // find the earliest first match across all usable pairs
    // NOTE: logic duplicated below START
    for (int i = 0; i < searchLength; i++)
    {
        if (noMoreMatchesForReplIndex[i] || searchList[i] == null || searchList[i].length() == 0
            || replacementList[i] == null)
        {
            continue;
        }
        tempIndex = text.indexOf(searchList[i]);
        // see if we need to keep searching for this
        if (tempIndex == -1)
        {
            noMoreMatchesForReplIndex[i] = true;
        }
        else
        {
            if (textIndex == -1 || tempIndex < textIndex)
            {
                textIndex = tempIndex;
                replaceIndex = i;
            }
        }
    }
    // NOTE: logic mostly below END
    // no search strings found, we are done
    if (textIndex == -1)
    {
        return text;
    }
    int start = 0;
    // get a good guess on the size of the result buffer so it doesnt have to
    // double if it goes over a bit
    int increase = 0;
    // count the replacement text elements that are larger than their
    // corresponding text being replaced
    for (int i = 0; i < searchList.length; i++)
    {
        // BUGFIX: skip null pairs here too. The match loops ignore them, but
        // this sizing loop previously dereferenced them and threw a
        // NullPointerException whenever a null entry coexisted with a match.
        if (searchList[i] == null || replacementList[i] == null)
        {
            continue;
        }
        int greater = replacementList[i].length() - searchList[i].length();
        if (greater > 0)
        {
            increase += 3 * greater; // assume 3 matches
        }
    }
    // have upper-bound at 20% increase, then let Java take over
    increase = Math.min(increase, text.length() / 5);
    // StringBuilder: this private worker is single-threaded, no need for
    // StringBuffer's synchronization
    StringBuilder buf = new StringBuilder(text.length() + increase);
    while (textIndex != -1)
    {
        // copy the unmatched prefix, then the replacement
        buf.append(text, start, textIndex);
        buf.append(replacementList[replaceIndex]);
        start = textIndex + searchList[replaceIndex].length();
        textIndex = -1;
        replaceIndex = -1;
        tempIndex = -1;
        // find the next earliest match
        // NOTE: logic mostly duplicated above START
        for (int i = 0; i < searchLength; i++)
        {
            if (noMoreMatchesForReplIndex[i] || searchList[i] == null || searchList[i].length() == 0
                || replacementList[i] == null)
            {
                continue;
            }
            tempIndex = text.indexOf(searchList[i], start);
            // see if we need to keep searching for this
            if (tempIndex == -1)
            {
                noMoreMatchesForReplIndex[i] = true;
            }
            else
            {
                if (textIndex == -1 || tempIndex < textIndex)
                {
                    textIndex = tempIndex;
                    replaceIndex = i;
                }
            }
        }
        // NOTE: logic duplicated above END
    }
    // copy the tail after the last replacement
    buf.append(text, start, text.length());
    String result = buf.toString();
    if (!repeat)
    {
        return result;
    }
    // repeat mode: run the whole pass again on the result
    return replaceEach(result, searchList, replacementList, repeat, timeToLive - 1);
}
// Joining
// -----------------------------------------------------------------------
/**
 * Joins the elements of the provided varargs array into a single string
 * with no separator. Null elements are skipped.
 *
 * @param <T> the element type
 * @param elements the values to join together, may be null
 * @return the joined string, or null if the array is null
 */
@SuppressWarnings("unchecked")
public static <T> String join(T... elements)
{
    // null separator is normalized to "" by join(Object[], String)
    return join(elements, null);
}
/**
 * Joins the elements of the array into a single string separated by the
 * given character. Null elements are skipped (but still separated).
 *
 * @param array the values to join together, may be null
 * @param separator the separator character to use
 * @return the joined string, or null if the array is null
 */
public static String join(Object[] array, char separator)
{
    if (array == null)
    {
        return null;
    }
    return join(array, separator, 0, array.length);
}
/**
 * Joins a slice of the array into a single string separated by the given
 * character. Null elements contribute nothing, but a separator is still
 * emitted between consecutive positions.
 *
 * @param array the values to join together, may be null
 * @param separator the separator character to use
 * @param startIndex the first index (inclusive) to join
 * @param endIndex the last index (exclusive) to join
 * @return the joined string, null if the array is null, or the empty
 *         string for an empty range
 */
public static String join(Object[] array, char separator, int startIndex, int endIndex)
{
    if (array == null)
    {
        return null;
    }
    int itemCount = endIndex - startIndex;
    if (itemCount <= 0)
    {
        return EMPTY;
    }
    // Rough capacity guess: 16 characters per item.
    StringBuilder joined = new StringBuilder(itemCount * 16);
    for (int i = startIndex; i < endIndex; i++)
    {
        if (i > startIndex)
        {
            joined.append(separator);
        }
        Object element = array[i];
        if (element != null)
        {
            joined.append(element);
        }
    }
    return joined.toString();
}
/**
 * Joins the elements of the array into a single string separated by the
 * given separator string. Null elements are skipped (but still separated).
 *
 * @param array the values to join together, may be null
 * @param separator the separator string, null is treated as ""
 * @return the joined string, or null if the array is null
 */
public static String join(Object[] array, String separator)
{
    if (array == null)
    {
        return null;
    }
    return join(array, separator, 0, array.length);
}
/**
 * Joins a slice of the array into a single string separated by the given
 * separator string. Null elements contribute nothing, but a separator is
 * still emitted between consecutive positions.
 *
 * @param array the values to join together, may be null
 * @param separator the separator string, null is treated as ""
 * @param startIndex the first index (inclusive) to join
 * @param endIndex the last index (exclusive) to join
 * @return the joined string, null if the array is null, or the empty
 *         string for an empty range
 */
public static String join(Object[] array, String separator, int startIndex, int endIndex)
{
    if (array == null)
    {
        return null;
    }
    String sep = separator == null ? EMPTY : separator;
    int itemCount = endIndex - startIndex;
    if (itemCount <= 0)
    {
        return EMPTY;
    }
    // Rough capacity guess: 16 characters per item (assumes roughly
    // equally long strings).
    StringBuilder joined = new StringBuilder(itemCount * 16);
    for (int i = startIndex; i < endIndex; i++)
    {
        if (i > startIndex)
        {
            joined.append(sep);
        }
        Object element = array[i];
        if (element != null)
        {
            joined.append(element);
        }
    }
    return joined.toString();
}
/**
 * Private constructor: this is a static utility class and must not be
 * instantiated.
 */
private StringUtils()
{
}
// -------------------------------------------------------------------------
// -------------------------------------------------------------------------
/**
 * Null-safe equality check: two nulls are considered equal, a null and a
 * non-null are not, and otherwise {@code Object#equals} decides.
 *
 * @param object1 first object, may be null
 * @param object2 second object, may be null
 * @return true if both are the same reference (including both null) or
 *         object1.equals(object2) holds
 */
public static boolean equals(Object object1, Object object2)
{
    // Same reference covers the both-null case.
    if (object1 == object2)
    {
        return true;
    }
    // A single null can never equal a non-null value.
    return object1 != null && object2 != null && object1.equals(object2);
}
/**
 * Returns the given object, or the default value when the object is null.
 *
 * @param <T> the value type
 * @param object the value to test, may be null
 * @param defaultValue the fallback, returned only when object is null
 * @return object when non-null, otherwise defaultValue
 */
public static <T> T defaultIfNull(T object, T defaultValue)
{
    if (object == null)
    {
        return defaultValue;
    }
    return object;
}
// -------------------------------------------------------------------------
// -------------------------------------------------------------------------
/**
 * Converts an underscore-separated name to upper camel case, e.g.
 * {@code "my_name"} becomes {@code "MyName"}.
 *
 * @param string the underscore-separated input
 * @return the camel-cased result
 */
public static String toCamelCase(String string)
{
    StringBuilder camel = new StringBuilder();
    // [#2515] - Split with limit -1 so trailing underscores survive as
    // empty trailing words.
    for (String word : string.split("_", -1))
    {
        // Empty word (leading/consecutive/trailing underscore): keep the
        // underscore so no information is lost.
        if (word.length() == 0)
        {
            camel.append("_");
            continue;
        }
        // [#82] - A word starting with a digit keeps its underscore to
        // prevent naming clashes.
        if (Character.isDigit(word.charAt(0)))
        {
            camel.append("_");
        }
        // Capitalise the first letter, lower-case the remainder.
        // NOTE(review): toUpperCase()/toLowerCase() use the default locale —
        // behaviour differs under e.g. the Turkish locale; confirm intended.
        camel.append(word.substring(0, 1).toUpperCase());
        camel.append(word.substring(1).toLowerCase());
    }
    return camel.toString();
}
/**
 * Converts an underscore-separated name to camel case with a lower-case
 * first letter, e.g. {@code "my_name"} becomes {@code "myName"}.
 *
 * @param string the underscore-separated input
 * @return the lower-camel-case result
 */
public static String toCamelCaseLC(String string)
{
    return toLC(toCamelCase(string));
}
/**
 * Lower-cases the first character of a string, leaving the rest untouched.
 *
 * @param string the input, may be null or empty
 * @return the input with its first character lower-cased, or the input
 *         unchanged when null or empty
 */
public static String toLC(String string)
{
    if (string == null || string.isEmpty())
    {
        return string;
    }
    return new StringBuilder(string.length())
        .append(Character.toLowerCase(string.charAt(0)))
        .append(string, 1, string.length())
        .toString();
}
/**
 * Splits the input on the given regex while KEEPING the delimiters: the
 * result alternates between the text before each match and the match
 * itself. Empty segments are removed, except that an input with no match
 * at all is returned as a single-element array unchanged.
 *
 * @param regex the delimiter pattern
 * @param input the sequence to split
 * @return the non-empty segments and delimiters, in order
 */
public static String[] split(String regex, CharSequence input)
{
    Matcher matcher = Pattern.compile(regex).matcher(input);
    ArrayList<String> parts = new ArrayList<String>();
    int consumed = 0;
    // Collect each gap followed by the delimiter that ends it.
    while (matcher.find())
    {
        parts.add(input.subSequence(consumed, matcher.start()).toString());
        parts.add(input.subSequence(matcher.start(), matcher.end()).toString());
        consumed = matcher.end();
    }
    // No match at all: the whole input is the single result.
    if (consumed == 0)
        return new String[] { input.toString() };
    // Remaining tail after the last delimiter.
    parts.add(input.subSequence(consumed, input.length()).toString());
    // Drop empty segments produced by adjacent/leading/trailing matches.
    parts.removeIf(""::equals);
    return parts.toArray(new String[parts.size()]);
}
}
| |
package com.ptoceti.osgi.pi.impl;
/*
* #%L
* **********************************************************************
* ORGANIZATION : ptoceti
* PROJECT : Pi
* FILENAME : PiService.java
*
* This file is part of the Ptoceti project. More information about
* this project can be found here: http://www.ptoceti.com/
* **********************************************************************
* %%
* Copyright (C) 2013 - 2015 ptoceti
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Dictionary;
import java.util.Hashtable;
import java.util.List;
import org.osgi.framework.Constants;
import org.osgi.service.cm.ConfigurationException;
import org.osgi.service.cm.ManagedService;
import org.osgi.service.log.LogService;
import org.xmlpull.v1.XmlPullParserException;
import com.pi4j.io.gpio.GpioController;
import com.pi4j.io.gpio.GpioFactory;
//import com.ptoceti.osgi.control.Command;
import com.ptoceti.osgi.pi.impl.Activator;
/**
 * Simple service that exposes the Pi's pins and system infos to the wire handler.
 * Configuration for the pins to read or write to is taken from an external file.
 *
 * The service is exposed as a managed service so that it can take configuration from the configuration admin.
 *
 * @author lor
 *
 */
public class PiService implements ManagedService {
    /**
     * Configuration key: refresh rate (in ms) at which the system infos are pushed to the wire handler.
     */
    public static final String CONFIGURATION_KEY_REFRESHRATE= "com.ptoceti.osgi.pi.refreshRate";
    /**
     * Configuration key: location of the pins configuration file.
     */
    public static final String CONFIGURATION_KEY_PINSCONFIGFILE = "com.ptoceti.osgi.pi.pinsConfigFile";
    /**
     * The list of pins, once built and configured via {@link #updated(Dictionary)}.
     */
    private List<PiPin> pins = new ArrayList<PiPin>();
    /**
     * The instance of the gpio controller for this service, acquired in the constructor.
     */
    private GpioController gpioController;
    /**
     * The wire handler instance that manages pushing and updating the wires.
     * Null until the first configuration update arrives.
     */
    private WireHandler wireHandler = null;
    /**
     * PiService creator. Acquires the GPIO controller and records itself as a
     * managed service under this class' name, so the Configuration Admin can
     * later push configuration through {@link #updated(Dictionary)}.
     */
    public PiService() {
        gpioController = GpioFactory.getInstance();
        String[] clazzes = new String[] { ManagedService.class.getName() };
        // register the class as a managed service.
        Hashtable<String, Object> properties = new Hashtable<String,Object>();
        properties.put(Constants.SERVICE_PID, this.getClass().getName());
        Activator.bc.registerService(clazzes, this, properties);
        Activator.log(LogService.LOG_INFO, "Registered "
            + this.getClass().getName() + ", Pid = "
            + (String) properties.get(Constants.SERVICE_PID));
    }
    /**
     * Stop the service. Stop the wire handler, release the pins and the gpio controller.
     */
    public void stop() {
        if( wireHandler != null) wireHandler.stop();
        releasePins();
        gpioController.shutdown();
    }
    /**
     * From ManagedService interface. Expects service configuration to be passed here.
     * A null properties dictionary is ignored. Otherwise, any running wire handler
     * is stopped first, the refresh rate and pin configuration are (re)read, and a
     * new wire handler is created.
     *
     * @param properties properties for the service
     * @throws ConfigurationException on error configuring the service
     */
    @Override
    public void updated(Dictionary properties) throws ConfigurationException {
        Activator.log(LogService.LOG_DEBUG, "Configuration update.");
        if( properties != null ) {
            // Stop the current handler before applying new configuration.
            if( wireHandler != null){
                wireHandler.stop();
                wireHandler = null;
            }
            Integer newRefreshRate = null;
            if( properties.get(CONFIGURATION_KEY_REFRESHRATE) != null ) {
                // Accept either an Integer or anything whose toString parses as one.
                Object rate = properties.get(CONFIGURATION_KEY_REFRESHRATE);
                newRefreshRate = rate instanceof Integer ? (Integer) rate : Integer.parseInt(rate.toString());
                Activator.log(LogService.LOG_DEBUG, "Refresh Thread configured at intervals of : "+ newRefreshRate.toString() + " ms.");
            }
            if( properties.get(CONFIGURATION_KEY_PINSCONFIGFILE) != null) {
                // Release previously configured pins before rebuilding from the file.
                releasePins();
                ConfigReader configReader = new ConfigReader((String)properties.get(CONFIGURATION_KEY_PINSCONFIGFILE));
                try {
                    List<PinConfig> pinsConfig = configReader.initialiseDataFromConfigFile();
                    pins = buildPins(pinsConfig);
                } catch (XmlPullParserException e) {
                    // Best-effort: log and keep the previous pins list.
                    Activator.log(LogService.LOG_ERROR, "Error parsing xml config file: " + e.toString());
                } catch (IOException e) {
                    Activator.log(LogService.LOG_ERROR, "Error reading xml config file: " + e.toString());
                }
            }
            // NOTE(review): newRefreshRate may still be null here when the property
            // is absent — WireHandler is assumed to cope with a null rate; confirm.
            wireHandler = new WireHandler(newRefreshRate, pins);
        }
    }
    /**
     * Build a list of pins object that matches the configuration. Only digital
     * pins are handled here; in-direction pins get a reference back to this
     * service so they can push value changes (see {@link #pushPinValues(PiPin)}).
     *
     * @param pinsConfig configuration for the pins
     * @return List a list of configured pins
     */
    List<PiPin> buildPins(List<PinConfig> pinsConfig ){
        List<PiPin> result = new ArrayList<PiPin>();
        for( PinConfig config: pinsConfig){
            if( config.isDigital()){
                if( config.isDirectionIn()){
                    PiPin pin = new PinDigitalIn(config,gpioController,this);
                    result.add(pin);
                } else {
                    PiPin pin = new PinDigitalOut(config, gpioController);
                    result.add(pin);
                }
            }
            // NOTE(review): non-digital pin configurations are silently ignored.
        }
        return result;
    }
    /**
     * Release the resources taken by each configured pin.
     */
    protected void releasePins(){
        for(PiPin pin : pins){
            pin.stop(gpioController);
        }
    }
    /**
     * Redirect wire value update from pins to wirehandler. No-op when no wire
     * handler has been configured yet.
     *
     * @param pin the pin to get the value from
     */
    protected void pushPinValues(PiPin pin) {
        if( wireHandler != null){
            wireHandler.pushPinValues(pin);
        }
    }
}
| |
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package software.amazon.awssdk.core.document;
import java.io.Serializable;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.ParseException;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import software.amazon.awssdk.annotations.Immutable;
import software.amazon.awssdk.annotations.SdkPublicApi;
import software.amazon.awssdk.core.SdkNumber;
import software.amazon.awssdk.core.document.internal.BooleanDocument;
import software.amazon.awssdk.core.document.internal.ListDocument;
import software.amazon.awssdk.core.document.internal.MapDocument;
import software.amazon.awssdk.core.document.internal.NullDocument;
import software.amazon.awssdk.core.document.internal.NumberDocument;
import software.amazon.awssdk.core.document.internal.StringDocument;
/**
* Interface for Document Types.
* Document types are used to carry open content that is Data with no fixed schema, data that can't be modeled using rigid types,
* or data that has a schema that evolves outside the purview of a service
* without requiring techniques like embedding JSON inside JSON strings.
* Document type value is serialized using the same format as its surroundings and requires no additional encoding or escaping.
* This interface specifies all the methods to access a Document, also provides constructor methods for
* instantiating Document.
*
*/
@SdkPublicApi
@Immutable
public interface Document extends Serializable {
    /**
     * Create {@link Document} from a string, using the provided String.
     * @param string String value.
     * @return Implementation of Document that stores a String.
     */
    static Document fromString(String string) {
        return new StringDocument(string);
    }
    /**
     * Create {@link Document} from a boolean.
     * @param booleanValue Boolean value.
     * @return Implementation of Document that stores a Boolean.
     */
    static Document fromBoolean(boolean booleanValue) {
        return new BooleanDocument(booleanValue);
    }
    /**
     * Create {@link Document} from a {@link SdkNumber}.
     * @param number {@link SdkNumber} sdkNumber with the given precision type.
     * @return Implementation of Document that stores a {@link SdkNumber}.
     */
    static Document fromNumber(SdkNumber number) {
        return new NumberDocument(number);
    }
    /**
     * Create {@link Document} from an int.
     * @param number int type number.
     * @return Implementation of Document that stores a {@link SdkNumber} constructed with {@link SdkNumber#fromInteger(int)}.
     */
    static Document fromNumber(int number) {
        return new NumberDocument(SdkNumber.fromInteger(number));
    }
    /**
     * Create {@link Document} from a long.
     * @param number long type number.
     * @return Implementation of Document that stores a {@link SdkNumber} constructed with {@link SdkNumber#fromLong(long)}.
     */
    static Document fromNumber(long number) {
        return new NumberDocument(SdkNumber.fromLong(number));
    }
    /**
     * Create {@link Document} from a float.
     * @param number float type number.
     * @return Implementation of Document that stores a {@link SdkNumber} constructed with {@link SdkNumber#fromFloat(float)}.
     */
    static Document fromNumber(float number) {
        return new NumberDocument(SdkNumber.fromFloat(number));
    }
    /**
     * Create {@link Document} from a double.
     * @param number double type number.
     * @return Implementation of Document that stores a {@link SdkNumber} constructed with {@link SdkNumber#fromDouble(double)}.
     */
    static Document fromNumber(double number) {
        return new NumberDocument(SdkNumber.fromDouble(number));
    }
    /**
     * Create {@link Document} from a BigDecimal.
     * @param number BigDecimal type number.
     * @return Implementation of Document that stores a {@link SdkNumber} constructed with
     * {@link SdkNumber#fromBigDecimal(BigDecimal)}.
     */
    static Document fromNumber(BigDecimal number) {
        return new NumberDocument(SdkNumber.fromBigDecimal(number));
    }
    /**
     * Create {@link Document} from a BigInteger.
     * @param number BigInteger type number.
     * @return Implementation of Document that stores a {@link SdkNumber} constructed with
     * {@link SdkNumber#fromBigInteger(BigInteger)}.
     */
    static Document fromNumber(BigInteger number) {
        return new NumberDocument(SdkNumber.fromBigInteger(number));
    }
    /**
     * Create {@link Document} from a String.
     * @param number String representation of a number.
     * @return Implementation of Document that stores a {@link SdkNumber} constructed with {@link SdkNumber#fromString(String)}.
     * @throws ParseException Throws ParseException when the inputString is not of Number format.
     */
    static Document fromNumber(String number) {
        return new NumberDocument(SdkNumber.fromString(number));
    }
    /**
     * Creates a Document from a Map of Documents.
     * @param documentMap Map with Keys of Type String and Value of Document type.
     * @return Implementation of Document that stores a Map with String Keys and Document Values.
     */
    static Document fromMap(Map<String, Document> documentMap) {
        return new MapDocument(documentMap);
    }
    /**
     * Creates a Document from a List of Documents.
     * @param documentList List of Documents.
     * @return Implementation of Document that stores a List of Documents.
     */
    static Document fromList(List<Document> documentList) {
        return new ListDocument(documentList);
    }
    /**
     * Create a {@link MapBuilder} for generating a {@link Document} by directly allowing user to put String Keys
     * and Document Values in the builder methods.
     * @return Builder to Construct Document with Map of Documents.
     */
    static MapBuilder mapBuilder() {
        return MapDocument.mapBuilder();
    }
    /**
     * Provides Builder methods of {@link ListBuilder} to directly create Document with List of Documents.
     * @return Builder methods to Construct Document with List of Documents.
     */
    static ListBuilder listBuilder() {
        return ListDocument.listBuilder();
    }
    /**
     * Creates a document that represents a {@code null} value.
     *
     * @return Implementation of a Null Document.
     */
    static Document fromNull() {
        return new NullDocument();
    }
    /**
     * Gets the value of the document as a Java type that represents the
     * document type data model: {@code boolean}, {@code String} for Strings and Numbers,
     * {@code null}, {@code List<Object>}, or
     * {@code Map<String, Object>}.
     * @return Returns the document as one of a fixed set of Java types.
     */
    Object unwrap();
    /**
     * Checks if the document is a {@code null} value.
     * @return Returns true if the document is a {@code null} value.
     */
    default boolean isNull() {
        return false;
    }
    /**
     * @return Returns true if this document is a boolean value.
     */
    default boolean isBoolean() {
        return false;
    }
    /**
     * Gets the document as a {@code boolean} if it is a boolean.
     * @return Returns the boolean value.
     * @throws UnsupportedOperationException if the document is not a boolean.
     */
    boolean asBoolean();
    /**
     * @return Returns true if this document is a string value.
     */
    default boolean isString() {
        return false;
    }
    /**
     * Gets the document as a {@code String}.
     *
     * @return Returns the string value.
     * @throws UnsupportedOperationException if the document is not a string.
     */
    String asString();
    /**
     * @return Returns true if this document is a number value.
     */
    default boolean isNumber() {
        return false;
    }
    /**
     * Gets the document as a {@link SdkNumber} if it is a {@link SdkNumber}.
     * @return Returns the {@link SdkNumber}.
     * @throws UnsupportedOperationException if the document is not a number.
     */
    SdkNumber asNumber();
    /**
     * @return Returns true if this document is a Map.
     */
    default boolean isMap() {
        return false;
    }
    /**
     * Gets the document as a {@code Map}.
     * <p>Each value contained in the {@code Map} is the same as how the value
     * would be represented by {@link Document}.
     * @return Returns the Document map.
     * @throws UnsupportedOperationException if the document is not a Map.
     */
    Map<String, Document> asMap();
    /**
     * @return Returns true if this document is a document type List.
     */
    default boolean isList() {
        return false;
    }
    /**
     * Gets the document as a {@code List} if it is a document type array.
     * <p>Each value contained in the {@code List} is the same as how the
     * value would be represented by {@link Document}.
     *
     * @return Returns the list of Documents.
     * @throws UnsupportedOperationException if the document is not a List.
     */
    List<Document> asList();
    /**
     * Accepts a visitor to the Document.
     * @param <R> visitor return type.
     * @param visitor Visitor to dispatch to.
     * @return Returns the accepted result.
     */
    <R> R accept(DocumentVisitor<? extends R> visitor);
    /**
     * Accepts a visitor with the Document.
     * @param visitor Visitor to dispatch to.
     */
    void accept(VoidDocumentVisitor visitor);
    interface MapBuilder {
        /**
         * Inserts a Key Value pair to a Document Map with String key and a Document created from the given String.
         * @param key Map Key for the Document.
         * @param stringValue String value which will be used to create a Document to be inserted in a DocumentMap.
         * @return Builder which provides APIs to put Key Value pair to a Document Map.
         */
        MapBuilder putString(String key, String stringValue);
        /**
         * Inserts a Key Value pair to a Document Map with String key and a Document created from the given Number.
         * @param key Map Key for the Document.
         * @param numberValue Number value which will be used to create a Document to be inserted in a DocumentMap.
         * @return Builder which provides APIs to put Key Value pair to a Document Map.
         */
        MapBuilder putNumber(String key, SdkNumber numberValue);
        /**
         * Inserts a Key Value pair to a Document Map with String key and a Document created from the given integer.
         * @param key Map Key for the Document.
         * @param numberValue Integer value which will be used to create a Document to be inserted in a DocumentMap.
         * @return Builder which provides APIs to put Key Value pair to a Document Map.
         */
        MapBuilder putNumber(String key, int numberValue);
        /**
         * Inserts a Key Value pair to a Document Map with String key and a Document created from the given long.
         * @param key Map Key for the Document.
         * @param numberValue long value which will be used to create a Document to be inserted in a DocumentMap.
         * @return Builder which provides APIs to put Key Value pair to a Document Map.
         */
        MapBuilder putNumber(String key, long numberValue);
        /**
         * Inserts a Key Value pair to a Document Map with String key and a Document created from the given double.
         * @param key Map Key for the Document.
         * @param numberValue double value which will be used to create a Document to be inserted in a DocumentMap.
         * @return Builder which provides APIs to put Key Value pair to a Document Map.
         */
        MapBuilder putNumber(String key, double numberValue);
        /**
         * Inserts a Key Value pair to a Document Map with String key and a Document created from the given float.
         * @param key Map Key for the Document.
         * @param numberValue float value which will be used to create a Document to be inserted in a DocumentMap.
         * @return Builder which provides APIs to put Key Value pair to a Document Map.
         */
        MapBuilder putNumber(String key, float numberValue);
        /**
         * Inserts a Key Value pair to a Document Map with String key and a Document created from the given BigDecimal.
         * @param key Map Key for the Document.
         * @param numberValue BigDecimal value which will be used to create a Document to be inserted in a DocumentMap.
         * @return Builder which provides APIs to put Key Value pair to a Document Map.
         */
        MapBuilder putNumber(String key, BigDecimal numberValue);
        /**
         * Inserts a Key Value pair to a Document Map with String key and a Document created from the given BigInteger.
         * @param key Map Key for the Document.
         * @param numberValue BigInteger value which will be used to create a Document to be inserted in a DocumentMap.
         * @return Builder which provides APIs to put Key Value pair to a Document Map.
         */
        MapBuilder putNumber(String key, BigInteger numberValue);
        /**
         * Inserts a Key Value pair to a Document Map with String key and a number Document created from the given
         * String representation of a number.
         * @param key Map Key for the Document.
         * @param numberValue String value which will be used to create a Document to be inserted in a DocumentMap.
         * @return Builder which provides APIs to put Key Value pair to a Document Map.
         */
        MapBuilder putNumber(String key, String numberValue);
        /**
         * Inserts a Key Value pair to a Document Map with String key and a Document created from the given boolean.
         * @param key Map Key for the Document.
         * @param booleanValue Boolean value which will be used to create a Document to be inserted in a DocumentMap.
         * @return Builder which provides APIs to put Key Value pair to a Document Map.
         */
        MapBuilder putBoolean(String key, boolean booleanValue);
        /**
         * Inserts a Key Value pair to a Document Map with String key and the given Document.
         * @param key Map Key for the Document.
         * @param document Document to be inserted in a DocumentMap.
         * @return Builder which provides APIs to put Key Value pair to a Document Map.
         */
        MapBuilder putDocument(String key, Document document);
        /**
         * Inserts a Key Value pair to a Document Map with String key and value with Null Document.
         * @param key Map Key for the Document.
         * @return Builder which provides APIs to put Key Value pair to a Document Map.
         */
        MapBuilder putNull(String key);
        /**
         * Inserts a Key Value pair to a Document Map with String key and value as List of Document.
         * @param key Map Key for the Document.
         * @param documentList List of Documents.
         * @return Builder which provides APIs to put Key Value pair to a Document Map.
         */
        MapBuilder putList(String key, List<Document> documentList);
        /**
         * Inserts a Key Value pair to a Document Map with String key and value constructed from Consumer of
         * {@link ListBuilder}.
         * @param key Map Key for the Document.
         * @param listBuilderConsumer Consumer that accepts {@link ListBuilder}.
         * @return Builder which provides APIs to put Key Value pair to a Document Map.
         */
        MapBuilder putList(String key, Consumer<ListBuilder> listBuilderConsumer);
        /**
         * Inserts a Key Value pair to a Document Map with String key and Document constructed from Document Map.
         * @param key Map Key for the Document.
         * @param documentMap Map of Document.
         * @return Builder which provides APIs to put Key Value pair to a Document Map.
         */
        MapBuilder putMap(String key, Map<String, Document> documentMap);
        /**
         * Inserts a Key Value pair to a Document Map with String key and value constructed from Consumer of
         * {@link MapBuilder}.
         * @param key Map Key for the Document.
         * @param mapBuilderConsumer Consumer that accepts {@link MapBuilder}.
         * @return Builder which provides APIs to put Key Value pair to a Document Map.
         */
        MapBuilder putMap(String key, Consumer<MapBuilder> mapBuilderConsumer);
        /**
         * Creates a new {@link Document} with the key value pairs inserted using the put methods.
         * @return The new {@link Document}.
         */
        Document build();
    }
    interface ListBuilder {
        /**
         * Adds a Document which is constructed from the given stringValue.
         * @param stringValue String Value from which the Document to be added is created.
         * @return Builder which provides APIs to add Document to a Document List.
         */
        ListBuilder addString(String stringValue);
        /**
         * Adds a Document which is constructed from the given boolean.
         * @param booleanValue Boolean value from which the Document to be added is created.
         * @return Builder which provides APIs to add Document to a Document List.
         */
        ListBuilder addBoolean(boolean booleanValue);
        /**
         * Adds a Document which is constructed from the given {@link SdkNumber}.
         * @param numberValue {@link SdkNumber} from which the Document to be added is created.
         * @return Builder which provides APIs to add Document to a Document List.
         */
        ListBuilder addNumber(SdkNumber numberValue);
        /**
         * Adds a Document which is constructed from the given integer.
         * @param numberValue integer from which the Document to be added is created.
         * @return Builder which provides APIs to add Document to a Document List.
         */
        ListBuilder addNumber(int numberValue);
        /**
         * Adds a Document which is constructed from the given long.
         * @param numberValue long from which the Document to be added is created.
         * @return Builder which provides APIs to add Document to a Document List.
         */
        ListBuilder addNumber(long numberValue);
        /**
         * Adds a Document which is constructed from the given float.
         * @param numberValue float from which the Document to be added is created.
         * @return Builder which provides APIs to add Document to a Document List.
         */
        ListBuilder addNumber(float numberValue);
        /**
         * Adds a Document which is constructed from the given double.
         * @param numberValue double from which the Document to be added is created.
         * @return Builder which provides APIs to add Document to a Document List.
         */
        ListBuilder addNumber(double numberValue);
        /**
         * Adds a Document which is constructed from the given BigDecimal.
         * @param numberValue BigDecimal from which the Document to be added is created.
         * @return Builder which provides APIs to add Document to a Document List.
         */
        ListBuilder addNumber(BigDecimal numberValue);
        /**
         * Adds a Document which is constructed from the given BigInteger.
         * @param numberValue BigInteger from which the Document to be added is created.
         * @return Builder which provides APIs to add Document to a Document List.
         */
        ListBuilder addNumber(BigInteger numberValue);
        /**
         * Adds a number Document which is constructed from the given String representation of a number.
         * @param numberValue String from which the Document to be added is created.
         * @return Builder which provides APIs to add Document to a Document List.
         */
        ListBuilder addNumber(String numberValue);
        /**
         * Adds a Document to the constructed Document List.
         * @param document Document that will be added to a Document List.
         * @return Builder which provides APIs to add Document to a Document List.
         */
        ListBuilder addDocument(Document document);
        /**
         * Adds a Map Document constructed from the given Consumer of {@link MapBuilder}.
         * @param mapBuilderConsumer Consumer that accepts {@link MapBuilder}.
         * @return Builder which provides APIs to add Document to a Document List.
         */
        ListBuilder addMap(Consumer<MapBuilder> mapBuilderConsumer);
        /**
         * Inserts a Null Document to the constructed Document List.
         * @return Builder which provides APIs to add Document to a Document List.
         */
        ListBuilder addNull();
        /**
         * Creates a new {@link Document} with the List members as added with the add methods.
         * @return The new {@link Document}.
         */
        Document build();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.managers.deployment;
import org.apache.ignite.*;
import org.apache.ignite.compute.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.events.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.util.*;
import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.lang.*;
import org.apache.ignite.marshaller.optimized.*;
import org.apache.ignite.spi.*;
import org.apache.ignite.spi.deployment.*;
import org.jdk8.backport.*;
import org.jetbrains.annotations.*;
import java.util.*;
import java.util.Map.*;
import java.util.concurrent.*;
import static org.apache.ignite.events.EventType.*;
/**
* Storage for local deployments.
*/
class GridDeploymentLocalStore extends GridDeploymentStoreAdapter {
/** Deployment cache by class name. */
private final ConcurrentMap<String, ConcurrentLinkedDeque8<GridDeployment>> cache =
new ConcurrentHashMap8<>();
/** Mutex. */
private final Object mux = new Object();
/**
* @param spi Deployment SPI.
* @param ctx Grid kernal context.
* @param comm Deployment communication.
*/
GridDeploymentLocalStore(DeploymentSpi spi, GridKernalContext ctx, GridDeploymentCommunication comm) {
super(spi, ctx, comm);
}
/** {@inheritDoc} */
@Override public void start() throws IgniteCheckedException {
spi.setListener(new LocalDeploymentListener());
if (log.isDebugEnabled())
log.debug(startInfo());
}
/** {@inheritDoc} */
@Override public void stop() {
spi.setListener(null);
Map<String, Collection<GridDeployment>> cp;
synchronized (mux) {
cp = new HashMap<String, Collection<GridDeployment>>(cache);
for (Entry<String, Collection<GridDeployment>> entry : cp.entrySet())
entry.setValue(new ArrayList<>(entry.getValue()));
}
for (Collection<GridDeployment> deps : cp.values()) {
for (GridDeployment cls : deps)
undeploy(cls.classLoader());
}
if (log.isDebugEnabled())
log.debug(stopInfo());
}
/** {@inheritDoc} */
@Override public Collection<GridDeployment> getDeployments() {
Collection<GridDeployment> deps = new ArrayList<>();
synchronized (mux) {
for (ConcurrentLinkedDeque8<GridDeployment> depList : cache.values())
for (GridDeployment d : depList)
if (!deps.contains(d))
deps.add(d);
}
return deps;
}
/** {@inheritDoc} */
@Nullable @Override public GridDeployment getDeployment(IgniteUuid ldrId) {
synchronized (mux) {
for (ConcurrentLinkedDeque8<GridDeployment> deps : cache.values())
for (GridDeployment dep : deps)
if (dep.classLoaderId().equals(ldrId))
return dep;
}
for (GridDeployment dep : ctx.task().getUsedDeployments())
if (dep.classLoaderId().equals(ldrId))
return dep;
return null;
}
/** {@inheritDoc} */
@Nullable @Override public GridDeployment getDeployment(GridDeploymentMetadata meta) {
if (log.isDebugEnabled())
log.debug("Deployment meta for local deployment: " + meta);
String alias = meta.alias();
// Validate metadata.
assert alias != null : "Meta is invalid: " + meta;
GridDeployment dep = deployment(alias);
if (dep != null) {
if (log.isDebugEnabled())
log.debug("Acquired deployment class from local cache: " + dep);
return dep;
}
DeploymentResource rsrc = spi.findResource(alias);
if (rsrc != null) {
dep = deploy(ctx.config().getDeploymentMode(), rsrc.getClassLoader(), rsrc.getResourceClass(), alias,
meta.record());
assert dep != null;
if (log.isDebugEnabled())
log.debug("Acquired deployment class from SPI: " + dep);
}
// Auto-deploy.
else {
ClassLoader ldr = meta.classLoader();
if (ldr == null) {
ldr = Thread.currentThread().getContextClassLoader();
// Safety.
if (ldr == null)
ldr = U.gridClassLoader();
}
if (ldr instanceof GridDeploymentClassLoader) {
if (log.isDebugEnabled())
log.debug("Skipping local auto-deploy (nested execution) [ldr=" + ldr + ", meta=" + meta + ']');
return null;
}
try {
// Check that class can be loaded.
String clsName = meta.className();
Class<?> cls = Class.forName(clsName != null ? clsName : alias, true, ldr);
spi.register(ldr, cls);
rsrc = spi.findResource(cls.getName());
if (rsrc != null && rsrc.getResourceClass().equals(cls)) {
if (log.isDebugEnabled())
log.debug("Retrieved auto-loaded resource from spi: " + rsrc);
dep = deploy(ctx.config().getDeploymentMode(), ldr, cls, meta.alias(), meta.record());
assert dep != null;
}
else {
U.warn(log, "Failed to find resource from deployment SPI even after registering: " + meta);
return null;
}
}
catch (ClassNotFoundException ignored) {
if (log.isDebugEnabled())
log.debug("Failed to load class for local auto-deployment [ldr=" + ldr + ", meta=" + meta + ']');
return null;
}
catch (IgniteSpiException e) {
U.error(log, "Failed to deploy local class with meta: " + meta, e);
return null;
}
}
if (log.isDebugEnabled())
log.debug("Acquired deployment class: " + dep);
return dep;
}
/**
* @param alias Class alias.
* @return Deployment.
*/
@Nullable private GridDeployment deployment(String alias) {
ConcurrentLinkedDeque8<GridDeployment> deps = cache.get(alias);
if (deps != null) {
GridDeployment dep = deps.peekFirst();
if (dep != null && !dep.undeployed())
return dep;
}
return null;
}
/**
* @param depMode Deployment mode.
* @param ldr Class loader to deploy.
* @param cls Class.
* @param alias Class alias.
* @param recordEvt {@code True} to record event.
* @return Deployment.
*/
private GridDeployment deploy(DeploymentMode depMode, ClassLoader ldr, Class<?> cls, String alias,
boolean recordEvt) {
GridDeployment dep = null;
synchronized (mux) {
boolean fireEvt = false;
try {
ConcurrentLinkedDeque8<GridDeployment> cachedDeps = null;
// Find existing class loader info.
for (ConcurrentLinkedDeque8<GridDeployment> deps : cache.values()) {
for (GridDeployment d : deps) {
if (d.classLoader() == ldr) {
// Cache class and alias.
fireEvt = d.addDeployedClass(cls, alias);
cachedDeps = deps;
dep = d;
break;
}
}
if (cachedDeps != null)
break;
}
if (cachedDeps != null) {
assert dep != null;
cache.put(alias, cachedDeps);
if (!cls.getName().equals(alias))
// Cache by class name as well.
cache.put(cls.getName(), cachedDeps);
return dep;
}
IgniteUuid ldrId = IgniteUuid.fromUuid(ctx.localNodeId());
String userVer = userVersion(ldr);
dep = new GridDeployment(depMode, ldr, ldrId, userVer, cls.getName(), true);
fireEvt = dep.addDeployedClass(cls, alias);
assert fireEvt : "Class was not added to newly created deployment [cls=" + cls +
", depMode=" + depMode + ", dep=" + dep + ']';
ConcurrentLinkedDeque8<GridDeployment> deps =
F.addIfAbsent(cache, alias, F.<GridDeployment>newDeque());
if (!deps.isEmpty()) {
for (GridDeployment d : deps) {
if (!d.undeployed()) {
U.error(log, "Found more than one active deployment for the same resource " +
"[cls=" + cls + ", depMode=" + depMode + ", dep=" + d + ']');
return null;
}
}
}
// Add at the beginning of the list for future fast access.
deps.addFirst(dep);
if (!cls.getName().equals(alias))
// Cache by class name as well.
cache.put(cls.getName(), deps);
if (log.isDebugEnabled())
log.debug("Created new deployment: " + dep);
}
finally {
if (fireEvt)
recordDeploy(cls, alias, recordEvt);
}
}
return dep;
}
/** {@inheritDoc} */
@Nullable @Override public GridDeployment explicitDeploy(Class<?> cls, ClassLoader clsLdr) throws IgniteCheckedException {
try {
// Make sure not to deploy peer loaded tasks with non-local class loader,
// if local one exists.
if (clsLdr.getClass().equals(GridDeploymentClassLoader.class))
clsLdr = clsLdr.getParent();
spi.register(clsLdr, cls);
GridDeployment dep = deployment(cls.getName());
if (dep == null) {
DeploymentResource rsrc = spi.findResource(cls.getName());
if (rsrc != null && rsrc.getClassLoader() == clsLdr)
dep = deploy(ctx.config().getDeploymentMode(), rsrc.getClassLoader(),
rsrc.getResourceClass(), rsrc.getName(), true);
}
return dep;
}
catch (IgniteSpiException e) {
recordDeployFailed(cls, clsLdr, true);
// Avoid double wrapping.
if (e.getCause() instanceof IgniteCheckedException)
throw (IgniteCheckedException)e.getCause();
throw new IgniteDeploymentCheckedException("Failed to deploy class: " + cls.getName(), e);
}
}
/** {@inheritDoc} */
@Override public void explicitUndeploy(UUID nodeId, String rsrcName) {
assert rsrcName != null;
// Simply delegate to SPI.
// Internal cache will be cleared once undeployment callback is received from SPI.
spi.unregister(rsrcName);
}
/** {@inheritDoc} */
@Override public void addParticipants(Map<UUID, IgniteUuid> allParticipants,
Map<UUID, IgniteUuid> addedParticipants) {
assert false;
}
/**
* Records deploy event.
* <p>
* This needs to be called in synchronized block.
*
* @param cls Deployed class.
* @param alias Class alias.
* @param recordEvt Flag indicating whether to record events.
*/
private void recordDeploy(Class<?> cls, String alias, boolean recordEvt) {
assert cls != null;
boolean isTask = isTask(cls);
String msg = (isTask ? "Task" : "Class") + " locally deployed: " + cls;
if (recordEvt && ctx.event().isRecordable(isTask ? EVT_TASK_DEPLOYED : EVT_CLASS_DEPLOYED)) {
DeploymentEvent evt = new DeploymentEvent();
evt.message(msg);
evt.node(ctx.discovery().localNode());
evt.type(isTask ? EVT_TASK_DEPLOYED : EVT_CLASS_DEPLOYED);
evt.alias(alias);
ctx.event().record(evt);
}
// Don't record JDK or Grid classes.
if (U.isGrid(cls) || U.isJdk(cls))
return;
if (log.isInfoEnabled())
log.info(msg);
}
/**
* Records deploy event.
*
* @param cls Deployed class.
* @param clsLdr Class loader.
* @param recordEvt Flag indicating whether to record events.
*/
@SuppressWarnings({"unchecked"})
private void recordDeployFailed(Class<?> cls, ClassLoader clsLdr, boolean recordEvt) {
assert cls != null;
assert clsLdr != null;
boolean isTask = isTask(cls);
String msg = "Failed to deploy " + (isTask ? "task" : "class") + " [cls=" + cls + ", clsLdr=" + clsLdr + ']';
if (recordEvt && ctx.event().isRecordable(isTask ? EVT_CLASS_DEPLOY_FAILED : EVT_TASK_DEPLOY_FAILED)) {
String taskName = isTask ? U.getTaskName((Class<? extends ComputeTask<?, ?>>)cls) : null;
DeploymentEvent evt = new DeploymentEvent();
evt.message(msg);
evt.node(ctx.discovery().localNode());
evt.type(isTask(cls) ? EVT_CLASS_DEPLOY_FAILED : EVT_TASK_DEPLOY_FAILED);
evt.alias(taskName);
ctx.event().record(evt);
}
if (log.isInfoEnabled())
log.info(msg);
}
/**
* Records undeploy event.
*
* @param dep Undeployed class loader.
*/
private void recordUndeploy(GridDeployment dep) {
assert dep.undeployed();
if (ctx.event().isRecordable(EVT_TASK_UNDEPLOYED) ||
ctx.event().isRecordable(EVT_CLASS_UNDEPLOYED)) {
for (Class<?> cls : dep.deployedClasses()) {
boolean isTask = isTask(cls);
String msg = isTask ? "Task locally undeployed: " + cls : "Class locally undeployed: " + cls;
if (ctx.event().isRecordable(isTask ? EVT_TASK_UNDEPLOYED : EVT_CLASS_UNDEPLOYED)) {
DeploymentEvent evt = new DeploymentEvent();
evt.message(msg);
evt.node(ctx.discovery().localNode());
evt.type(isTask ? EVT_TASK_UNDEPLOYED : EVT_CLASS_UNDEPLOYED);
evt.alias(getAlias(dep, cls));
ctx.event().record(evt);
}
if (log.isInfoEnabled())
log.info(msg);
}
}
}
/**
* Gets alias for a class.
*
* @param dep Deployment.
* @param cls Class.
* @return Alias for a class.
*/
private String getAlias(GridDeployment dep, Class<?> cls) {
String alias = cls.getName();
if (isTask(cls)) {
ComputeTaskName ann = dep.annotation(cls, ComputeTaskName.class);
if (ann != null)
alias = ann.value();
}
return alias;
}
/**
* @param ldr Class loader to undeploy.
*/
private void undeploy(ClassLoader ldr) {
Collection<GridDeployment> doomed = new HashSet<>();
synchronized (mux) {
for (Iterator<ConcurrentLinkedDeque8<GridDeployment>> i1 = cache.values().iterator(); i1.hasNext();) {
ConcurrentLinkedDeque8<GridDeployment> deps = i1.next();
for (Iterator<GridDeployment> i2 = deps.iterator(); i2.hasNext();) {
GridDeployment dep = i2.next();
if (dep.classLoader() == ldr) {
dep.undeploy();
i2.remove();
doomed.add(dep);
if (log.isInfoEnabled())
log.info("Removed undeployed class: " + dep);
}
}
if (deps.isEmpty())
i1.remove();
}
}
for (GridDeployment dep : doomed) {
if (dep.obsolete()) {
// Resource cleanup.
ctx.resource().onUndeployed(dep);
// Clear optimized marshaller's cache. If another marshaller is used, this is no-op.
OptimizedMarshaller.onUndeploy(ldr);
clearSerializationCaches();
// Class loader cache should be cleared in the last order.
GridAnnotationsCache.onUndeployed(ldr);
GridClassLoaderCache.onUndeployed(ldr);
}
recordUndeploy(dep);
}
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(GridDeploymentLocalStore.class, this);
}
/**
*
*/
private class LocalDeploymentListener implements DeploymentListener {
/** {@inheritDoc} */
@Override public void onUnregistered(ClassLoader ldr) {
if (log.isDebugEnabled())
log.debug("Received callback from SPI to unregister class loader: " + ldr);
undeploy(ldr);
}
}
}
| |
// Copyright 2016 Twitter. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.twitter.heron.common.utils.topology;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.twitter.heron.api.Config;
import com.twitter.heron.api.generated.TopologyAPI;
import com.twitter.heron.api.topology.GeneralTopologyContext;
import com.twitter.heron.api.tuple.Fields;
import com.twitter.heron.common.basics.TypeUtils;
/**
 * From Heron. To get the topology info.
 * <p>
 * Provides read-only views over a {@link TopologyAPI.Topology}: per-component inputs and
 * outputs, declared stream fields, and the task-to-component assignment supplied by the
 * caller.
 */
public class GeneralTopologyContextImpl implements GeneralTopologyContext {
  private final TopologyAPI.Topology topology;
  // The topology config as supplied by the cluster, overloaded by any
  // component specific config
  private final Map<String, Object> topologyConfig;
  // Map from taskid to Component Id.
  private final Map<Integer, String> taskToComponentMap;
  // Map from component id to list of its inputs
  private final Map<String, List<TopologyAPI.InputStream>> inputs;
  // Map from component id to list of its outputs
  private final Map<String, List<TopologyAPI.OutputStream>> outputs;
  // Map <componentId -> <streamId -> Fields>>
  private final Map<String, Map<String, Fields>> componentsOutputFields;

  public GeneralTopologyContextImpl(Map<String, Object> clusterConfig,
                                    TopologyAPI.Topology topology,
                                    Map<Integer, String> taskToComponentMap) {
    this.topology = topology;
    this.topologyConfig = new HashMap<>(clusterConfig);
    this.taskToComponentMap = taskToComponentMap;
    this.inputs = new HashMap<>();
    this.outputs = new HashMap<>();
    this.componentsOutputFields = new HashMap<>();
    for (int i = 0; i < this.topology.getSpoutsCount(); ++i) {
      TopologyAPI.Spout spout = this.topology.getSpouts(i);
      // spouts don't have any inputs
      this.inputs.put(spout.getComp().getName(), new LinkedList<TopologyAPI.InputStream>());
      this.outputs.put(spout.getComp().getName(), spout.getOutputsList());
      this.componentsOutputFields.putAll(getOutputToComponentsFields(spout.getOutputsList()));
    }
    for (int i = 0; i < this.topology.getBoltsCount(); ++i) {
      TopologyAPI.Bolt bolt = this.topology.getBolts(i);
      this.inputs.put(bolt.getComp().getName(), bolt.getInputsList());
      this.outputs.put(bolt.getComp().getName(), bolt.getOutputsList());
      this.componentsOutputFields.putAll(getOutputToComponentsFields(bolt.getOutputsList()));
    }
  }

  /**
   * Builds a map from component name to (stream id -> declared Fields) for the
   * given output streams.
   *
   * @param outputs output stream declarations to index
   * @return map keyed by component name, then by stream id
   */
  public static Map<String, Map<String, Fields>> getOutputToComponentsFields(
      List<TopologyAPI.OutputStream> outputs) {
    Map<String, Map<String, Fields>> outputFields = new HashMap<>();
    for (TopologyAPI.OutputStream outputStream : outputs) {
      String componentName = outputStream.getStream().getComponentName();
      String streamId = outputStream.getStream().getId();
      Map<String, Fields> componentFields = outputFields.get(componentName);
      if (componentFields == null) {
        componentFields = new HashMap<>();
      }
      // Get the fields of a particular OutputStream
      List<String> retval = new ArrayList<>();
      for (TopologyAPI.StreamSchema.KeyType kt : outputStream.getSchema().getKeysList()) {
        retval.add(kt.getKey());
      }
      // Put it into the map
      componentFields.put(streamId, new Fields(retval));
      outputFields.put(componentName, componentFields);
    }
    return outputFields;
  }

  // accessors
  public Map<String, Object> getTopologyConfig() {
    return topologyConfig;
  }

  /**
   * Gets the unique id assigned to this topology. The id is the topology name with a
   * unique nonce appended to it.
   *
   * @return the topology id
   */
  public String getTopologyId() {
    return topology.getId();
  }

  @Override
  @SuppressWarnings("deprecation")
  public TopologyAPI.Topology getRawTopology() {
    return topology;
  }

  /**
   * Gets the component id for the specified task id. The component id maps
   * to a component id specified for a Spout or Bolt in the topology definition.
   *
   * @param taskId the task id
   * @return the component id for the input task id, or {@code null} if unknown
   */
  public String getComponentId(int taskId) {
    // Single lookup; absent keys yield null, same as the former containsKey+get pair.
    return taskToComponentMap.get(taskId);
  }

  /**
   * Gets the set of streams declared for the specified component.
   *
   * @return the stream ids, or {@code null} if the component is unknown
   */
  public Set<String> getComponentStreams(String componentId) {
    if (outputs.containsKey(componentId)) {
      Set<String> streams = new HashSet<>();
      List<TopologyAPI.OutputStream> olist = outputs.get(componentId);
      for (TopologyAPI.OutputStream ostream : olist) {
        streams.add(ostream.getStream().getId());
      }
      return streams;
    } else {
      return null;
    }
  }

  /**
   * Gets the task ids allocated for the given component id. The task ids are
   * always returned in ascending order.
   */
  public List<Integer> getComponentTasks(String componentId) {
    List<Integer> retVal = new ArrayList<>();
    for (Map.Entry<Integer, String> entry : taskToComponentMap.entrySet()) {
      if (entry.getValue().equals(componentId)) {
        retVal.add(entry.getKey());
      }
    }
    // The contract above promises ascending order, but the map's iteration order
    // carries no such guarantee -- sort explicitly (null => natural ordering).
    retVal.sort(null);
    return retVal;
  }

  /**
   * Gets the declared output fields for the specified component/stream.
   *
   * @return the Fields, or {@code null} if the component or stream is unknown
   */
  public Fields getComponentOutputFields(String componentId, String streamId) {
    Map<String, Fields> componentFields = componentsOutputFields.get(componentId);
    if (componentFields != null) {
      return componentFields.get(streamId);
    }
    return null;
  }

  /**
   * Gets the declared output fields for the specified global stream id.
   */
  /*
  TODO:- Do we really need this? The above function shd cover it
  public Fields getComponentOutputFields(GlobalStreamId id);
  */

  /**
   * Gets the declared inputs to the specified component.
   *
   * @return A map from subscribed component/stream to the grouping subscribed with,
   *     or {@code null} if the component is unknown.
   */
  public Map<TopologyAPI.StreamId, TopologyAPI.Grouping> getSources(String componentId) {
    if (inputs.containsKey(componentId)) {
      Map<TopologyAPI.StreamId, TopologyAPI.Grouping> retVal =
          new HashMap<>();
      for (TopologyAPI.InputStream istream : inputs.get(componentId)) {
        retVal.put(istream.getStream(), istream.getGtype());
      }
      return retVal;
    } else {
      return null;
    }
  }

  /**
   * Gets information about who is consuming the outputs of the specified component,
   * and how.
   *
   * @return Map from stream id to component id to the Grouping used.
   */
  public Map<String, Map<String, TopologyAPI.Grouping>> getTargets(String componentId) {
    Map<String, Map<String, TopologyAPI.Grouping>> retVal =
        new HashMap<>();
    if (!outputs.containsKey(componentId)) {
      return retVal;
    }
    for (TopologyAPI.OutputStream ostream : outputs.get(componentId)) {
      Map<String, TopologyAPI.Grouping> targetMap =
          new HashMap<>();
      // A component consumes this stream if any of its declared inputs matches it.
      for (Map.Entry<String, List<TopologyAPI.InputStream>> e : inputs.entrySet()) {
        String targetComponentId = e.getKey();
        for (TopologyAPI.InputStream is : e.getValue()) {
          if (areStreamsEqual(ostream.getStream(), is.getStream())) {
            targetMap.put(targetComponentId, is.getGtype());
          }
        }
      }
      retVal.put(ostream.getStream().getId(), targetMap);
    }
    return retVal;
  }

  /**
   * Gets a map from task id to component id.
   */
  public Map<Integer, String> getTaskToComponent() {
    return taskToComponentMap;
  }

  /**
   * Gets a list of all component ids in this topology
   */
  public Set<String> getComponentIds() {
    return inputs.keySet();
  }

  /*
  TODO:- This should not be exposed. Take it out
  public ComponentCommon getComponentCommon(String componentId) {
    return ThriftTopologyUtils.getComponentCommon(getRawTopology(), componentId);
  }
  */

  public int maxTopologyMessageTimeout() {
    // TODO:- get the per component overrides implemented
    return TypeUtils.getInteger(topologyConfig.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS));
  }

  /** Two stream ids are equal when both the stream id and the component name match. */
  private boolean areStreamsEqual(TopologyAPI.StreamId a, TopologyAPI.StreamId b) {
    return a.getId().equals(b.getId()) && a.getComponentName().equals(b.getComponentName());
  }
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kms.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * Result returned by the KMS {@code GenerateDataKeyWithoutPlaintext} operation: the
 * encrypted data key and the id of the CMK that produced it.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kms-2014-11-01/GenerateDataKeyWithoutPlaintext"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GenerateDataKeyWithoutPlaintextResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable,
        Cloneable {

    /** The encrypted data encryption key. */
    private java.nio.ByteBuffer ciphertextBlob;

    /** The identifier of the CMK under which the data encryption key was generated and encrypted. */
    private String keyId;

    /**
     * Sets the encrypted data encryption key.
     * <p>
     * The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service.
     * Users of the SDK should not perform Base64 encoding on this field.
     * <p>
     * Warning: ByteBuffers returned by the SDK are mutable. Changes to the content or position of the byte buffer will
     * be seen by all objects that have a reference to this object. It is recommended to call ByteBuffer.duplicate() or
     * ByteBuffer.asReadOnlyBuffer() before using or reading from the buffer.
     *
     * @param ciphertextBlob
     *        The encrypted data encryption key.
     */
    public void setCiphertextBlob(java.nio.ByteBuffer ciphertextBlob) {
        this.ciphertextBlob = ciphertextBlob;
    }

    /**
     * Returns the encrypted data encryption key.
     * <p>
     * {@code ByteBuffer}s are stateful: calling {@code get} methods changes the {@code position}. Prefer
     * {@link java.nio.ByteBuffer#asReadOnlyBuffer()} to obtain an independent read-only view so other users of this
     * buffer are not affected by position changes.
     *
     * @return The encrypted data encryption key.
     */
    public java.nio.ByteBuffer getCiphertextBlob() {
        return this.ciphertextBlob;
    }

    /**
     * Fluent variant of {@link #setCiphertextBlob(java.nio.ByteBuffer)}.
     *
     * @param ciphertextBlob
     *        The encrypted data encryption key.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GenerateDataKeyWithoutPlaintextResult withCiphertextBlob(java.nio.ByteBuffer ciphertextBlob) {
        setCiphertextBlob(ciphertextBlob);
        return this;
    }

    /**
     * Sets the identifier of the CMK under which the data encryption key was generated and encrypted.
     *
     * @param keyId
     *        The identifier of the CMK under which the data encryption key was generated and encrypted.
     */
    public void setKeyId(String keyId) {
        this.keyId = keyId;
    }

    /**
     * Returns the identifier of the CMK under which the data encryption key was generated and encrypted.
     *
     * @return The identifier of the CMK under which the data encryption key was generated and encrypted.
     */
    public String getKeyId() {
        return this.keyId;
    }

    /**
     * Fluent variant of {@link #setKeyId(String)}.
     *
     * @param keyId
     *        The identifier of the CMK under which the data encryption key was generated and encrypted.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GenerateDataKeyWithoutPlaintextResult withKeyId(String keyId) {
        setKeyId(keyId);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getCiphertextBlob() != null) {
            buf.append("CiphertextBlob: ").append(getCiphertextBlob()).append(",");
        }
        if (getKeyId() != null) {
            buf.append("KeyId: ").append(getKeyId());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // Also rejects null (null instanceof X is false).
        if (!(obj instanceof GenerateDataKeyWithoutPlaintextResult)) {
            return false;
        }
        GenerateDataKeyWithoutPlaintextResult that = (GenerateDataKeyWithoutPlaintextResult) obj;
        return java.util.Objects.equals(getCiphertextBlob(), that.getCiphertextBlob())
            && java.util.Objects.equals(getKeyId(), that.getKeyId());
    }

    @Override
    public int hashCode() {
        // Objects.hash reproduces the generated 31-prime accumulation exactly
        // (null contributes 0).
        return java.util.Objects.hash(getCiphertextBlob(), getKeyId());
    }

    @Override
    public GenerateDataKeyWithoutPlaintextResult clone() {
        try {
            return (GenerateDataKeyWithoutPlaintextResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
| |
/*
Copyright 2022 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package io.kubernetes.client.openapi.models;
import java.util.Iterator;
import java.util.List;
/** Generated */
public class V1beta1EndpointSliceFluentImpl<
A extends io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent<A>>
extends io.kubernetes.client.fluent.BaseFluent<A>
implements io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent<A> {
  /** Creates an empty fluent with no fields populated. */
  public V1beta1EndpointSliceFluentImpl() {}
  /**
   * Creates a fluent pre-populated from an existing slice: each field of {@code instance}
   * is copied through the corresponding {@code with*} method.
   *
   * @param instance slice to copy state from
   */
  public V1beta1EndpointSliceFluentImpl(
      io.kubernetes.client.openapi.models.V1beta1EndpointSlice instance) {
    this.withAddressType(instance.getAddressType());
    this.withApiVersion(instance.getApiVersion());
    this.withEndpoints(instance.getEndpoints());
    this.withKind(instance.getKind());
    this.withMetadata(instance.getMetadata());
    this.withPorts(instance.getPorts());
  }
  // Scalar fields are stored directly; list/object fields are stored as builders.
  private java.lang.String addressType;
  private java.lang.String apiVersion;
  // Kept in sync with the _visitables.get("endpoints") registry by the
  // add/set/remove methods below.
  private java.util.ArrayList<io.kubernetes.client.openapi.models.V1beta1EndpointBuilder> endpoints;
  private java.lang.String kind;
  private io.kubernetes.client.openapi.models.V1ObjectMetaBuilder metadata;
  private java.util.ArrayList<io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder> ports;
public java.lang.String getAddressType() {
return this.addressType;
}
public A withAddressType(java.lang.String addressType) {
this.addressType = addressType;
return (A) this;
}
public java.lang.Boolean hasAddressType() {
return this.addressType != null;
}
  /**
   * Method is deprecated. use withAddressType instead.
   *
   * <p>Note: copies the argument via {@code new String(original)}, so a {@code null}
   * argument throws {@code NullPointerException} (unlike {@link #withAddressType}).
   */
  @java.lang.Deprecated
  public A withNewAddressType(java.lang.String original) {
    return (A) withAddressType(new String(original));
  }
public java.lang.String getApiVersion() {
return this.apiVersion;
}
public A withApiVersion(java.lang.String apiVersion) {
this.apiVersion = apiVersion;
return (A) this;
}
public java.lang.Boolean hasApiVersion() {
return this.apiVersion != null;
}
  /**
   * Method is deprecated. use withApiVersion instead.
   *
   * <p>Note: copies the argument via {@code new String(original)}, so a {@code null}
   * argument throws {@code NullPointerException} (unlike {@link #withApiVersion}).
   */
  @java.lang.Deprecated
  public A withNewApiVersion(java.lang.String original) {
    return (A) withApiVersion(new String(original));
  }
  /**
   * Inserts one endpoint at {@code index}, wrapped in a builder. The builder is added to
   * both the local {@code endpoints} list and the {@code _visitables} registry so the two
   * stay aligned; a negative index appends at the end.
   *
   * @param index insertion position; appended when negative
   * @param item endpoint to insert
   * @return this fluent, for chaining
   */
  public A addToEndpoints(
      java.lang.Integer index, io.kubernetes.client.openapi.models.V1beta1Endpoint item) {
    if (this.endpoints == null) {
      this.endpoints =
          new java.util.ArrayList<io.kubernetes.client.openapi.models.V1beta1EndpointBuilder>();
    }
    io.kubernetes.client.openapi.models.V1beta1EndpointBuilder builder =
        new io.kubernetes.client.openapi.models.V1beta1EndpointBuilder(item);
    _visitables
        .get("endpoints")
        .add(index >= 0 ? index : _visitables.get("endpoints").size(), builder);
    this.endpoints.add(index >= 0 ? index : endpoints.size(), builder);
    return (A) this;
  }
  /**
   * Replaces the endpoint builder at {@code index} in both the local list and the
   * {@code _visitables} registry; when the index is out of range the builder is appended
   * instead of replacing an element.
   *
   * @param index position to overwrite; out-of-range values cause an append
   * @param item endpoint whose builder is stored
   * @return this fluent, for chaining
   */
  public A setToEndpoints(
      java.lang.Integer index, io.kubernetes.client.openapi.models.V1beta1Endpoint item) {
    if (this.endpoints == null) {
      this.endpoints =
          new java.util.ArrayList<io.kubernetes.client.openapi.models.V1beta1EndpointBuilder>();
    }
    io.kubernetes.client.openapi.models.V1beta1EndpointBuilder builder =
        new io.kubernetes.client.openapi.models.V1beta1EndpointBuilder(item);
    // Registry and local list use the same bounds logic so they stay index-aligned.
    if (index < 0 || index >= _visitables.get("endpoints").size()) {
      _visitables.get("endpoints").add(builder);
    } else {
      _visitables.get("endpoints").set(index, builder);
    }
    if (index < 0 || index >= endpoints.size()) {
      endpoints.add(builder);
    } else {
      endpoints.set(index, builder);
    }
    return (A) this;
  }
  /**
   * Appends one builder per given endpoint to both the local list and the
   * {@code _visitables} registry.
   *
   * @param items endpoints to append
   * @return this fluent, for chaining
   */
  public A addToEndpoints(io.kubernetes.client.openapi.models.V1beta1Endpoint... items) {
    if (this.endpoints == null) {
      this.endpoints =
          new java.util.ArrayList<io.kubernetes.client.openapi.models.V1beta1EndpointBuilder>();
    }
    for (io.kubernetes.client.openapi.models.V1beta1Endpoint item : items) {
      io.kubernetes.client.openapi.models.V1beta1EndpointBuilder builder =
          new io.kubernetes.client.openapi.models.V1beta1EndpointBuilder(item);
      _visitables.get("endpoints").add(builder);
      this.endpoints.add(builder);
    }
    return (A) this;
  }
  /**
   * Appends one builder per endpoint in {@code items} to both the local list and the
   * {@code _visitables} registry (collection variant of {@code addToEndpoints}).
   *
   * @param items endpoints to append
   * @return this fluent, for chaining
   */
  public A addAllToEndpoints(
      java.util.Collection<io.kubernetes.client.openapi.models.V1beta1Endpoint> items) {
    if (this.endpoints == null) {
      this.endpoints =
          new java.util.ArrayList<io.kubernetes.client.openapi.models.V1beta1EndpointBuilder>();
    }
    for (io.kubernetes.client.openapi.models.V1beta1Endpoint item : items) {
      io.kubernetes.client.openapi.models.V1beta1EndpointBuilder builder =
          new io.kubernetes.client.openapi.models.V1beta1EndpointBuilder(item);
      _visitables.get("endpoints").add(builder);
      this.endpoints.add(builder);
    }
    return (A) this;
  }
public A removeFromEndpoints(io.kubernetes.client.openapi.models.V1beta1Endpoint... items) {
for (io.kubernetes.client.openapi.models.V1beta1Endpoint item : items) {
io.kubernetes.client.openapi.models.V1beta1EndpointBuilder builder =
new io.kubernetes.client.openapi.models.V1beta1EndpointBuilder(item);
_visitables.get("endpoints").remove(builder);
if (this.endpoints != null) {
this.endpoints.remove(builder);
}
}
return (A) this;
}
public A removeAllFromEndpoints(
java.util.Collection<io.kubernetes.client.openapi.models.V1beta1Endpoint> items) {
for (io.kubernetes.client.openapi.models.V1beta1Endpoint item : items) {
io.kubernetes.client.openapi.models.V1beta1EndpointBuilder builder =
new io.kubernetes.client.openapi.models.V1beta1EndpointBuilder(item);
_visitables.get("endpoints").remove(builder);
if (this.endpoints != null) {
this.endpoints.remove(builder);
}
}
return (A) this;
}
public A removeMatchingFromEndpoints(
java.util.function.Predicate<io.kubernetes.client.openapi.models.V1beta1EndpointBuilder>
predicate) {
if (endpoints == null) return (A) this;
final Iterator<io.kubernetes.client.openapi.models.V1beta1EndpointBuilder> each =
endpoints.iterator();
final List visitables = _visitables.get("endpoints");
while (each.hasNext()) {
io.kubernetes.client.openapi.models.V1beta1EndpointBuilder builder = each.next();
if (predicate.test(builder)) {
visitables.remove(builder);
each.remove();
}
}
return (A) this;
}
/**
* This method has been deprecated, please use method buildEndpoints instead.
*
* @return The buildable object.
*/
@java.lang.Deprecated
public java.util.List<io.kubernetes.client.openapi.models.V1beta1Endpoint> getEndpoints() {
return endpoints != null ? build(endpoints) : null;
}
public java.util.List<io.kubernetes.client.openapi.models.V1beta1Endpoint> buildEndpoints() {
return endpoints != null ? build(endpoints) : null;
}
public io.kubernetes.client.openapi.models.V1beta1Endpoint buildEndpoint(
java.lang.Integer index) {
return this.endpoints.get(index).build();
}
public io.kubernetes.client.openapi.models.V1beta1Endpoint buildFirstEndpoint() {
return this.endpoints.get(0).build();
}
public io.kubernetes.client.openapi.models.V1beta1Endpoint buildLastEndpoint() {
return this.endpoints.get(endpoints.size() - 1).build();
}
public io.kubernetes.client.openapi.models.V1beta1Endpoint buildMatchingEndpoint(
java.util.function.Predicate<io.kubernetes.client.openapi.models.V1beta1EndpointBuilder>
predicate) {
for (io.kubernetes.client.openapi.models.V1beta1EndpointBuilder item : endpoints) {
if (predicate.test(item)) {
return item.build();
}
}
return null;
}
public java.lang.Boolean hasMatchingEndpoint(
java.util.function.Predicate<io.kubernetes.client.openapi.models.V1beta1EndpointBuilder>
predicate) {
for (io.kubernetes.client.openapi.models.V1beta1EndpointBuilder item : endpoints) {
if (predicate.test(item)) {
return true;
}
}
return false;
}
public A withEndpoints(
java.util.List<io.kubernetes.client.openapi.models.V1beta1Endpoint> endpoints) {
if (this.endpoints != null) {
_visitables.get("endpoints").removeAll(this.endpoints);
}
if (endpoints != null) {
this.endpoints = new java.util.ArrayList();
for (io.kubernetes.client.openapi.models.V1beta1Endpoint item : endpoints) {
this.addToEndpoints(item);
}
} else {
this.endpoints = null;
}
return (A) this;
}
public A withEndpoints(io.kubernetes.client.openapi.models.V1beta1Endpoint... endpoints) {
if (this.endpoints != null) {
this.endpoints.clear();
}
if (endpoints != null) {
for (io.kubernetes.client.openapi.models.V1beta1Endpoint item : endpoints) {
this.addToEndpoints(item);
}
}
return (A) this;
}
public java.lang.Boolean hasEndpoints() {
return endpoints != null && !endpoints.isEmpty();
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.EndpointsNested<A>
addNewEndpoint() {
return new io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluentImpl
.EndpointsNestedImpl();
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.EndpointsNested<A>
addNewEndpointLike(io.kubernetes.client.openapi.models.V1beta1Endpoint item) {
return new io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluentImpl
.EndpointsNestedImpl(-1, item);
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.EndpointsNested<A>
setNewEndpointLike(
java.lang.Integer index, io.kubernetes.client.openapi.models.V1beta1Endpoint item) {
return new io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluentImpl
.EndpointsNestedImpl(index, item);
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.EndpointsNested<A>
editEndpoint(java.lang.Integer index) {
if (endpoints.size() <= index)
throw new RuntimeException("Can't edit endpoints. Index exceeds size.");
return setNewEndpointLike(index, buildEndpoint(index));
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.EndpointsNested<A>
editFirstEndpoint() {
if (endpoints.size() == 0)
throw new RuntimeException("Can't edit first endpoints. The list is empty.");
return setNewEndpointLike(0, buildEndpoint(0));
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.EndpointsNested<A>
editLastEndpoint() {
int index = endpoints.size() - 1;
if (index < 0) throw new RuntimeException("Can't edit last endpoints. The list is empty.");
return setNewEndpointLike(index, buildEndpoint(index));
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.EndpointsNested<A>
editMatchingEndpoint(
java.util.function.Predicate<io.kubernetes.client.openapi.models.V1beta1EndpointBuilder>
predicate) {
int index = -1;
for (int i = 0; i < endpoints.size(); i++) {
if (predicate.test(endpoints.get(i))) {
index = i;
break;
}
}
if (index < 0) throw new RuntimeException("Can't edit matching endpoints. No match found.");
return setNewEndpointLike(index, buildEndpoint(index));
}
public java.lang.String getKind() {
return this.kind;
}
public A withKind(java.lang.String kind) {
this.kind = kind;
return (A) this;
}
public java.lang.Boolean hasKind() {
return this.kind != null;
}
/** Method is deprecated. use withKind instead. */
@java.lang.Deprecated
public A withNewKind(java.lang.String original) {
return (A) withKind(new String(original));
}
/**
* This method has been deprecated, please use method buildMetadata instead.
*
* @return The buildable object.
*/
@java.lang.Deprecated
public io.kubernetes.client.openapi.models.V1ObjectMeta getMetadata() {
return this.metadata != null ? this.metadata.build() : null;
}
public io.kubernetes.client.openapi.models.V1ObjectMeta buildMetadata() {
return this.metadata != null ? this.metadata.build() : null;
}
public A withMetadata(io.kubernetes.client.openapi.models.V1ObjectMeta metadata) {
_visitables.get("metadata").remove(this.metadata);
if (metadata != null) {
this.metadata = new io.kubernetes.client.openapi.models.V1ObjectMetaBuilder(metadata);
_visitables.get("metadata").add(this.metadata);
}
return (A) this;
}
public java.lang.Boolean hasMetadata() {
return this.metadata != null;
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.MetadataNested<A>
withNewMetadata() {
return new io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluentImpl
.MetadataNestedImpl();
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.MetadataNested<A>
withNewMetadataLike(io.kubernetes.client.openapi.models.V1ObjectMeta item) {
return new io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluentImpl
.MetadataNestedImpl(item);
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.MetadataNested<A>
editMetadata() {
return withNewMetadataLike(getMetadata());
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.MetadataNested<A>
editOrNewMetadata() {
return withNewMetadataLike(
getMetadata() != null
? getMetadata()
: new io.kubernetes.client.openapi.models.V1ObjectMetaBuilder().build());
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.MetadataNested<A>
editOrNewMetadataLike(io.kubernetes.client.openapi.models.V1ObjectMeta item) {
return withNewMetadataLike(getMetadata() != null ? getMetadata() : item);
}
public A addToPorts(
java.lang.Integer index, io.kubernetes.client.openapi.models.V1beta1EndpointPort item) {
if (this.ports == null) {
this.ports =
new java.util.ArrayList<io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder>();
}
io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder builder =
new io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder(item);
_visitables.get("ports").add(index >= 0 ? index : _visitables.get("ports").size(), builder);
this.ports.add(index >= 0 ? index : ports.size(), builder);
return (A) this;
}
public A setToPorts(
java.lang.Integer index, io.kubernetes.client.openapi.models.V1beta1EndpointPort item) {
if (this.ports == null) {
this.ports =
new java.util.ArrayList<io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder>();
}
io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder builder =
new io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder(item);
if (index < 0 || index >= _visitables.get("ports").size()) {
_visitables.get("ports").add(builder);
} else {
_visitables.get("ports").set(index, builder);
}
if (index < 0 || index >= ports.size()) {
ports.add(builder);
} else {
ports.set(index, builder);
}
return (A) this;
}
public A addToPorts(io.kubernetes.client.openapi.models.V1beta1EndpointPort... items) {
if (this.ports == null) {
this.ports =
new java.util.ArrayList<io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder>();
}
for (io.kubernetes.client.openapi.models.V1beta1EndpointPort item : items) {
io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder builder =
new io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder(item);
_visitables.get("ports").add(builder);
this.ports.add(builder);
}
return (A) this;
}
public A addAllToPorts(
java.util.Collection<io.kubernetes.client.openapi.models.V1beta1EndpointPort> items) {
if (this.ports == null) {
this.ports =
new java.util.ArrayList<io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder>();
}
for (io.kubernetes.client.openapi.models.V1beta1EndpointPort item : items) {
io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder builder =
new io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder(item);
_visitables.get("ports").add(builder);
this.ports.add(builder);
}
return (A) this;
}
public A removeFromPorts(io.kubernetes.client.openapi.models.V1beta1EndpointPort... items) {
for (io.kubernetes.client.openapi.models.V1beta1EndpointPort item : items) {
io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder builder =
new io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder(item);
_visitables.get("ports").remove(builder);
if (this.ports != null) {
this.ports.remove(builder);
}
}
return (A) this;
}
public A removeAllFromPorts(
java.util.Collection<io.kubernetes.client.openapi.models.V1beta1EndpointPort> items) {
for (io.kubernetes.client.openapi.models.V1beta1EndpointPort item : items) {
io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder builder =
new io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder(item);
_visitables.get("ports").remove(builder);
if (this.ports != null) {
this.ports.remove(builder);
}
}
return (A) this;
}
public A removeMatchingFromPorts(
java.util.function.Predicate<io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder>
predicate) {
if (ports == null) return (A) this;
final Iterator<io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder> each =
ports.iterator();
final List visitables = _visitables.get("ports");
while (each.hasNext()) {
io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder builder = each.next();
if (predicate.test(builder)) {
visitables.remove(builder);
each.remove();
}
}
return (A) this;
}
/**
* This method has been deprecated, please use method buildPorts instead.
*
* @return The buildable object.
*/
@java.lang.Deprecated
public java.util.List<io.kubernetes.client.openapi.models.V1beta1EndpointPort> getPorts() {
return ports != null ? build(ports) : null;
}
public java.util.List<io.kubernetes.client.openapi.models.V1beta1EndpointPort> buildPorts() {
return ports != null ? build(ports) : null;
}
public io.kubernetes.client.openapi.models.V1beta1EndpointPort buildPort(
java.lang.Integer index) {
return this.ports.get(index).build();
}
public io.kubernetes.client.openapi.models.V1beta1EndpointPort buildFirstPort() {
return this.ports.get(0).build();
}
public io.kubernetes.client.openapi.models.V1beta1EndpointPort buildLastPort() {
return this.ports.get(ports.size() - 1).build();
}
public io.kubernetes.client.openapi.models.V1beta1EndpointPort buildMatchingPort(
java.util.function.Predicate<io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder>
predicate) {
for (io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder item : ports) {
if (predicate.test(item)) {
return item.build();
}
}
return null;
}
public java.lang.Boolean hasMatchingPort(
java.util.function.Predicate<io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder>
predicate) {
for (io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder item : ports) {
if (predicate.test(item)) {
return true;
}
}
return false;
}
public A withPorts(
java.util.List<io.kubernetes.client.openapi.models.V1beta1EndpointPort> ports) {
if (this.ports != null) {
_visitables.get("ports").removeAll(this.ports);
}
if (ports != null) {
this.ports = new java.util.ArrayList();
for (io.kubernetes.client.openapi.models.V1beta1EndpointPort item : ports) {
this.addToPorts(item);
}
} else {
this.ports = null;
}
return (A) this;
}
public A withPorts(io.kubernetes.client.openapi.models.V1beta1EndpointPort... ports) {
if (this.ports != null) {
this.ports.clear();
}
if (ports != null) {
for (io.kubernetes.client.openapi.models.V1beta1EndpointPort item : ports) {
this.addToPorts(item);
}
}
return (A) this;
}
public java.lang.Boolean hasPorts() {
return ports != null && !ports.isEmpty();
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.PortsNested<A>
addNewPort() {
return new io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluentImpl.PortsNestedImpl();
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.PortsNested<A>
addNewPortLike(io.kubernetes.client.openapi.models.V1beta1EndpointPort item) {
return new io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluentImpl.PortsNestedImpl(
-1, item);
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.PortsNested<A>
setNewPortLike(
java.lang.Integer index, io.kubernetes.client.openapi.models.V1beta1EndpointPort item) {
return new io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluentImpl.PortsNestedImpl(
index, item);
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.PortsNested<A> editPort(
java.lang.Integer index) {
if (ports.size() <= index) throw new RuntimeException("Can't edit ports. Index exceeds size.");
return setNewPortLike(index, buildPort(index));
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.PortsNested<A>
editFirstPort() {
if (ports.size() == 0) throw new RuntimeException("Can't edit first ports. The list is empty.");
return setNewPortLike(0, buildPort(0));
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.PortsNested<A>
editLastPort() {
int index = ports.size() - 1;
if (index < 0) throw new RuntimeException("Can't edit last ports. The list is empty.");
return setNewPortLike(index, buildPort(index));
}
public io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.PortsNested<A>
editMatchingPort(
java.util.function.Predicate<
io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder>
predicate) {
int index = -1;
for (int i = 0; i < ports.size(); i++) {
if (predicate.test(ports.get(i))) {
index = i;
break;
}
}
if (index < 0) throw new RuntimeException("Can't edit matching ports. No match found.");
return setNewPortLike(index, buildPort(index));
}
  /**
   * Structural equality over the slice fields: addressType, apiVersion, endpoints, kind,
   * metadata, and ports.
   *
   * <p>NOTE(review): superclass state is not compared here — confirm that is intentional
   * and consistent with {@code hashCode}.
   *
   * @param o the object to compare against
   * @return true if {@code o} is a {@code V1beta1EndpointSliceFluentImpl} with equal fields
   */
  public boolean equals(java.lang.Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    V1beta1EndpointSliceFluentImpl that = (V1beta1EndpointSliceFluentImpl) o;
    // Null-safe field-by-field comparison.
    if (addressType != null ? !addressType.equals(that.addressType) : that.addressType != null)
      return false;
    if (apiVersion != null ? !apiVersion.equals(that.apiVersion) : that.apiVersion != null)
      return false;
    if (endpoints != null ? !endpoints.equals(that.endpoints) : that.endpoints != null)
      return false;
    if (kind != null ? !kind.equals(that.kind) : that.kind != null) return false;
    if (metadata != null ? !metadata.equals(that.metadata) : that.metadata != null) return false;
    if (ports != null ? !ports.equals(that.ports) : that.ports != null) return false;
    return true;
  }
public int hashCode() {
return java.util.Objects.hash(
addressType, apiVersion, endpoints, kind, metadata, ports, super.hashCode());
}
  /**
   * Nested fluent for editing a single endpoint in place; {@link #and()} writes the built
   * endpoint back into the owning fluent at {@code index} via {@code setToEndpoints}
   * (an index of -1 appends).
   */
  public class EndpointsNestedImpl<N>
      extends io.kubernetes.client.openapi.models.V1beta1EndpointFluentImpl<
          io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.EndpointsNested<N>>
      implements io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.EndpointsNested<N>,
          io.kubernetes.client.fluent.Nested<N> {
    // Edit an existing endpoint at the given position (-1 appends on completion).
    EndpointsNestedImpl(
        java.lang.Integer index, io.kubernetes.client.openapi.models.V1beta1Endpoint item) {
      this.index = index;
      this.builder = new io.kubernetes.client.openapi.models.V1beta1EndpointBuilder(this, item);
    }
    // Start a brand-new endpoint, appended on completion.
    EndpointsNestedImpl() {
      this.index = -1;
      this.builder = new io.kubernetes.client.openapi.models.V1beta1EndpointBuilder(this);
    }
    io.kubernetes.client.openapi.models.V1beta1EndpointBuilder builder;
    java.lang.Integer index;
    // Builds the endpoint, stores it on the parent fluent, and returns the parent.
    public N and() {
      return (N) V1beta1EndpointSliceFluentImpl.this.setToEndpoints(index, builder.build());
    }
    public N endEndpoint() {
      return and();
    }
  }
  /**
   * Nested fluent for editing the slice's metadata; {@link #and()} writes the built
   * {@code V1ObjectMeta} back into the owning fluent via {@code withMetadata}.
   */
  public class MetadataNestedImpl<N>
      extends io.kubernetes.client.openapi.models.V1ObjectMetaFluentImpl<
          io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.MetadataNested<N>>
      implements io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.MetadataNested<N>,
          io.kubernetes.client.fluent.Nested<N> {
    // Edit a copy of an existing metadata object.
    MetadataNestedImpl(io.kubernetes.client.openapi.models.V1ObjectMeta item) {
      this.builder = new io.kubernetes.client.openapi.models.V1ObjectMetaBuilder(this, item);
    }
    // Start from an empty metadata object.
    MetadataNestedImpl() {
      this.builder = new io.kubernetes.client.openapi.models.V1ObjectMetaBuilder(this);
    }
    io.kubernetes.client.openapi.models.V1ObjectMetaBuilder builder;
    // Builds the metadata, stores it on the parent fluent, and returns the parent.
    public N and() {
      return (N) V1beta1EndpointSliceFluentImpl.this.withMetadata(builder.build());
    }
    public N endMetadata() {
      return and();
    }
  }
  /**
   * Nested fluent for editing a single port in place; {@link #and()} writes the built
   * port back into the owning fluent at {@code index} via {@code setToPorts}
   * (an index of -1 appends).
   */
  public class PortsNestedImpl<N>
      extends io.kubernetes.client.openapi.models.V1beta1EndpointPortFluentImpl<
          io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.PortsNested<N>>
      implements io.kubernetes.client.openapi.models.V1beta1EndpointSliceFluent.PortsNested<N>,
          io.kubernetes.client.fluent.Nested<N> {
    // Edit an existing port at the given position (-1 appends on completion).
    PortsNestedImpl(
        java.lang.Integer index, io.kubernetes.client.openapi.models.V1beta1EndpointPort item) {
      this.index = index;
      this.builder = new io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder(this, item);
    }
    // Start a brand-new port, appended on completion.
    PortsNestedImpl() {
      this.index = -1;
      this.builder = new io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder(this);
    }
    io.kubernetes.client.openapi.models.V1beta1EndpointPortBuilder builder;
    java.lang.Integer index;
    // Builds the port, stores it on the parent fluent, and returns the parent.
    public N and() {
      return (N) V1beta1EndpointSliceFluentImpl.this.setToPorts(index, builder.build());
    }
    public N endPort() {
      return and();
    }
  }
}
| |
package me.androidbox.busbymovies.movielist;
import android.animation.Animator;
import android.animation.AnimatorInflater;
import android.os.Bundle;
import android.support.annotation.IdRes;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.VisibleForTesting;
import android.support.design.widget.FloatingActionButton;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.widget.ContentLoadingProgressBar;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import com.google.common.base.Preconditions;
import javax.inject.Inject;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import butterknife.Unbinder;
import me.androidbox.busbymovies.R;
import me.androidbox.busbymovies.adapters.MovieAdapter;
import me.androidbox.busbymovies.data.MovieFavouritePresenterContract;
import me.androidbox.busbymovies.di.BusbyMoviesMainApplication;
import me.androidbox.busbymovies.models.Movie;
import me.androidbox.busbymovies.models.Movies;
import me.androidbox.busbymovies.models.Results;
import me.androidbox.busbymovies.moviesearch.MovieSearchDialog;
import me.androidbox.busbymovies.moviesearch.MovieSearchListener;
import timber.log.Timber;
/**
* A simple {@link Fragment} subclass.
*/
/**
 * Movie list screen: shows a grid of movies (popular, top rated, favourites, or search
 * results) plus an expandable sort FAB menu. A simple {@link Fragment} subclass that
 * delegates all data access to the injected MVP presenters and only renders results.
 */
public class MovieListViewImp
        extends
        Fragment
        implements
        MovieListViewContract,
        MovieFavouritePresenterContract.MovieFavouriteListListener,
        MovieSearchListener {

    public static final String TAG = MovieListViewImp.class.getSimpleName();

    /* Collaborators injected by Dagger in onCreate(). */
    @Inject MovieListPresenterContract mMovieListPresenterImp;
    @Inject MovieFavouritePresenterContract mMovieFavouritePresenterImp;
    @Inject MovieAdapter mMovieAdapter;

    /* Views bound by ButterKnife in onCreateView() and released in onDestroyView(). */
    @BindView(R.id.rvMovieList) RecyclerView mRvMovieList;
    @BindView(R.id.pbMovieList) ContentLoadingProgressBar mPbMovieList;
    @BindView(R.id.fabPopular) FloatingActionButton mFabPopular;
    @BindView(R.id.fabTopRated) FloatingActionButton mFabTopRated;
    @BindView(R.id.fabFavourite) FloatingActionButton mFabFavourite;
    @BindView(R.id.swipeContainer) SwipeRefreshLayout swipeRefreshLayout;
    @BindView(R.id.fabSearch) FloatingActionButton mFabSearch;

    @VisibleForTesting Unbinder mUnbinder;
    /* Tracks whether the sort FAB menu is currently expanded. */
    @VisibleForTesting boolean mIsSortFabOpen;

    public MovieListViewImp() {
        // Required empty public constructor
    }

    /* Factory method */
    public static MovieListViewImp newInstance() {
        return new MovieListViewImp();
    }

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        ((BusbyMoviesMainApplication)getActivity().getApplication())
                .getMovieListComponent(MovieListViewImp.this)
                .inject(this);

        mMovieListPresenterImp.attachView(MovieListViewImp.this);
    }

    @Override
    public void onDestroy() {
        // Mirrors onCreate(): the presenter was attached there, so it is detached here.
        mMovieListPresenterImp.detachView();
        super.onDestroy();
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        final View view = inflater.inflate(R.layout.movie_list_view, container, false);
        mUnbinder = ButterKnife.bind(MovieListViewImp.this, view);

        setupRecyclerView();
        setupSwipeToRefresh();

        // Popular movies are the default content each time the view is (re)created.
        mMovieListPresenterImp.getPopularMovies();

        return view;
    }

    @Override
    public void onDestroyView() {
        mMovieListPresenterImp.closeSortFab();
        // Bug fix: detachView() used to be invoked here AND in onDestroy(), detaching the
        // presenter twice per teardown. It is now called once, in onDestroy(), mirroring
        // the attachView() call in onCreate().
        // NOTE(review): if this fragment is ever restored from the back stack,
        // attach/detach should move to onCreateView()/onDestroyView() — confirm usage.
        mUnbinder.unbind();
        super.onDestroyView();
    }

    /** Collapses the sort FAB menu (popular / top rated / favourite / search) if open. */
    @Override
    public void onCloseSortFab() {
        if(mIsSortFabOpen) {
            final Animator closePopularFab = AnimatorInflater.loadAnimator(getActivity(), R.animator.close_popular_fab);
            closePopularFab.setTarget(mFabPopular);
            closePopularFab.start();

            final Animator closeTopRatedFab = AnimatorInflater.loadAnimator(getActivity(), R.animator.close_toprated_fab);
            closeTopRatedFab.setTarget(mFabTopRated);
            closeTopRatedFab.start();

            final Animator closeFavourite = AnimatorInflater.loadAnimator(getActivity(), R.animator.close_favourite_fab);
            closeFavourite.setTarget(mFabFavourite);
            closeFavourite.start();

            final Animator closeSearch = AnimatorInflater.loadAnimator(getActivity(), R.animator.close_search_fab);
            closeSearch.setTarget(mFabSearch);
            closeSearch.start();

            mIsSortFabOpen = false;
        }
    }

    /** Expands the sort FAB menu (popular / top rated / favourite / search) if closed. */
    @Override
    public void onOpenSortFab() {
        if(!mIsSortFabOpen) {
            final Animator openPopularFab = AnimatorInflater.loadAnimator(getActivity(), R.animator.open_popular_fab);
            openPopularFab.setTarget(mFabPopular);
            openPopularFab.start();

            final Animator openTopRatedTab = AnimatorInflater.loadAnimator(getActivity(), R.animator.open_toprated_fab);
            openTopRatedTab.setTarget(mFabTopRated);
            openTopRatedTab.start();

            final Animator openFavourite = AnimatorInflater.loadAnimator(getActivity(), R.animator.open_favourite_fab);
            openFavourite.setTarget(mFabFavourite);
            openFavourite.start();

            final Animator openSearch = AnimatorInflater.loadAnimator(getActivity(), R.animator.open_search_fab);
            openSearch.setTarget(mFabSearch);
            openSearch.start();

            mIsSortFabOpen = true;
        }
    }

    /** Toggle handler for the main sort FAB. */
    @SuppressWarnings("unused")
    @OnClick(R.id.fabSort)
    public void openSort() {
        mMovieListPresenterImp.openSortFab();
    }

    /** Fetches popular movies and collapses the FAB menu. */
    @SuppressWarnings("unused")
    @OnClick(R.id.fabPopular)
    public void getPopular() {
        mMovieListPresenterImp.getPopularMovies();
        mMovieListPresenterImp.closeSortFab();
    }

    /** Fetches top-rated movies and collapses the FAB menu. */
    @SuppressWarnings("unused")
    @OnClick(R.id.fabTopRated)
    public void getTopRated() {
        mMovieListPresenterImp.getTopRatedMovies();
        mMovieListPresenterImp.closeSortFab();
    }

    /** Fetches the locally stored favourite movies and collapses the FAB menu. */
    @SuppressWarnings("unused")
    @OnClick(R.id.fabFavourite)
    public void getFavourites() {
        mMovieFavouritePresenterImp.getFavouriteMovies();
        mMovieListPresenterImp.closeSortFab();
    }

    /** Opens the movie-search dialog, targeting this fragment for the result callback. */
    @SuppressWarnings("unused")
    @OnClick(R.id.fabSearch)
    public void searchMovie() {
        Timber.d("searchMovie");

        final FragmentManager fragmentManager = getActivity().getSupportFragmentManager();
        final MovieSearchDialog movieSearchDialog = MovieSearchDialog.newInstance();
        movieSearchDialog.setTargetFragment(MovieListViewImp.this, 0);
        movieSearchDialog.show(fragmentManager, "MovieSearchDialog");

        mMovieListPresenterImp.closeSortFab();
    }

    private void setupSwipeToRefresh() {
        swipeRefreshLayout.setColorSchemeResources(
                android.R.color.holo_blue_bright,
                android.R.color.holo_orange_light,
                android.R.color.holo_green_light,
                android.R.color.holo_red_light);

        /* TODO Swipe to refresh should fetch movies related to what is on the search i.e. popular, top rated, searched, or favourites */
        swipeRefreshLayout.setOnRefreshListener(() -> mMovieListPresenterImp.getPopularMovies());
    }

    private void setupRecyclerView() {
        /* Portrait mode 2 columns as there is less width to display movies
           Landscape mode 3 columns as there is more width to display movies
         */
        // Bug fix: the column count is an integer-resource VALUE, not a resource id, so
        // the previous @IdRes annotation on it was incorrect and has been removed.
        final int columnCount = getResources().getInteger(R.integer.recyclerview_column_count);

        final RecyclerView.LayoutManager gridLayoutManager
                = new GridLayoutManager(getActivity(), columnCount, LinearLayoutManager.VERTICAL, false);
        mRvMovieList.setLayoutManager(gridLayoutManager);
        mRvMovieList.setHasFixedSize(true);
        mRvMovieList.setAdapter(mMovieAdapter);
    }

    /** Renders the popular-movies result and stops the pull-to-refresh spinner. */
    @Override
    public void displayPopularMovies(Results<Movies> popularMovies) {
        mMovieAdapter.loadAdapter(popularMovies);
        swipeRefreshLayout.setRefreshing(false);
    }

    /** Renders the top-rated-movies result. */
    @Override
    public void displayTopRatedMovies(Results<Movies> topRatedMovies) {
        mMovieAdapter.loadAdapter(topRatedMovies);
    }

    /** Renders favourites, or tells the user when there are none stored. */
    @Override
    public void onGetFavouriteMoviesSuccess(Results<Movie> favouriteList) {
        if(favouriteList.getResults().size() > 0) {
            mMovieAdapter.loadAdapter(favouriteList);
        }
        else {
            Toast.makeText(getActivity(), "There are no favourites to display", Toast.LENGTH_SHORT)
                    .show();
        }
    }

    /** Surfaces a favourites-loading failure to the user and the log. */
    @Override
    public void onGetFavouriteMoviesFailure(String errorMessage) {
        Timber.e("onGetFavouriteMoviesFailure: %s", errorMessage);
        Toast.makeText(getActivity(), errorMessage, Toast.LENGTH_LONG).show();
    }

    /** Surfaces a popular-movies failure and stops the pull-to-refresh spinner. */
    @Override
    public void failedToDisplayPopularMovies(String errorMessage) {
        Toast.makeText(getActivity(), errorMessage, Toast.LENGTH_LONG).show();
        swipeRefreshLayout.setRefreshing(false);
        Timber.e("Failed to get popular movies %s", errorMessage);
    }

    /** Surfaces a top-rated-movies failure and hides the progress bar. */
    @Override
    public void failedToDisplayTopRatedMovies(String errorMessage) {
        Toast.makeText(getActivity(), errorMessage, Toast.LENGTH_LONG).show();
        mPbMovieList.hide();
        Timber.e("Failed to get top rated movies %s", errorMessage);
    }

    /** Callback from {@link MovieSearchDialog}; forwards a non-empty query to the presenter. */
    @Override
    public void onMovieSearch(@NonNull String movieName, final int movieYear) {
        final String nameOfMovie = Preconditions.checkNotNull(movieName, "Movie name hasn't been entered");
        if(!TextUtils.isEmpty(nameOfMovie)) {
            mMovieListPresenterImp.searchMovies(nameOfMovie, movieYear);
        }
    }

    /** Surfaces a search failure to the user and the log. */
    @Override
    public void failedToGetSearchMovies(String errorMessage) {
        Timber.w("failedToGetSearchMovies: %s", errorMessage);
        Toast.makeText(getActivity(), errorMessage, Toast.LENGTH_SHORT).show();
    }

    /** Renders the search result. */
    @Override
    public void successToGetSearchMovies(Results<Movies> movieSearch) {
        mMovieAdapter.loadAdapter(movieSearch);
    }

    /** Hides the loading indicator if it is currently visible. */
    @Override
    public void onHideProgressBar() {
        if(mPbMovieList.isShown()) {
            mPbMovieList.hide();
        }
    }

    /** Shows the loading indicator if it is not already visible. */
    @Override
    public void onShowProgressBar() {
        if(!mPbMovieList.isShown()) {
            mPbMovieList.show();
        }
    }
}
| |
/*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.pipelines.fabric.pipeliner;
import com.google.common.collect.ImmutableSet;
import org.onlab.packet.MacAddress;
import org.onlab.packet.VlanId;
import org.onlab.util.ImmutableByteSequence;
import org.onosproject.net.DeviceId;
import org.onosproject.net.flow.DefaultFlowRule;
import org.onosproject.net.flow.DefaultTrafficSelector;
import org.onosproject.net.flow.DefaultTrafficTreatment;
import org.onosproject.net.flow.FlowRule;
import org.onosproject.net.flow.TrafficSelector;
import org.onosproject.net.flow.TrafficTreatment;
import org.onosproject.net.flow.criteria.Criterion;
import org.onosproject.net.flow.criteria.EthCriterion;
import org.onosproject.net.flow.criteria.IPCriterion;
import org.onosproject.net.flow.criteria.MplsCriterion;
import org.onosproject.net.flow.criteria.VlanIdCriterion;
import org.onosproject.net.flowobjective.ForwardingObjective;
import org.onosproject.net.flowobjective.ObjectiveError;
import org.onosproject.net.pi.runtime.PiAction;
import org.onosproject.net.pi.runtime.PiActionParam;
import org.onosproject.pipelines.fabric.FabricConstants;
import org.slf4j.Logger;
import java.util.Set;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.slf4j.LoggerFactory.getLogger;
/**
* Handling forwarding objective for fabric pipeliner.
*/
/**
 * Translates {@link ForwardingObjective}s into flow rules for the fabric
 * pipeline.
 * <p>
 * VERSATILE objectives are installed into the ACL table unchanged; SPECIFIC
 * objectives are classified by {@link ForwardingFunctionType} and installed
 * into the bridging, IPv4-unicast or MPLS table accordingly.
 */
public class FabricForwardingPipeliner {
    private static final Logger log = getLogger(FabricForwardingPipeliner.class);

    /** Device for which this pipeliner builds rules. */
    protected DeviceId deviceId;

    public FabricForwardingPipeliner(DeviceId deviceId) {
        this.deviceId = deviceId;
    }

    /**
     * Translates a forwarding objective into flow rules.
     *
     * @param forwardObjective objective to translate
     * @return translation result carrying the generated rules, or an error
     */
    public PipelinerTranslationResult forward(ForwardingObjective forwardObjective) {
        PipelinerTranslationResult.Builder resultBuilder = PipelinerTranslationResult.builder();
        if (forwardObjective.flag() == ForwardingObjective.Flag.VERSATILE) {
            processVersatileFwd(forwardObjective, resultBuilder);
        } else {
            processSpecificFwd(forwardObjective, resultBuilder);
        }
        return resultBuilder.build();
    }

    /**
     * VERSATILE objective: program the ACL table only, copying the
     * objective's selector and treatment through unchanged.
     */
    private void processVersatileFwd(ForwardingObjective fwd,
                                     PipelinerTranslationResult.Builder resultBuilder) {
        FlowRule flowRule = DefaultFlowRule.builder()
                .withSelector(fwd.selector())
                .withTreatment(fwd.treatment())
                .forTable(FabricConstants.TBL_ACL_ID)
                .withPriority(fwd.priority())
                .forDevice(deviceId)
                .makePermanent()
                .fromApp(fwd.appId())
                .build();
        resultBuilder.addFlowRule(flowRule);
    }

    /**
     * SPECIFIC objective: gather criteria from both the selector and the
     * meta selector, classify the forwarding function type, and dispatch to
     * the matching rule builder.
     */
    private void processSpecificFwd(ForwardingObjective fwd,
                                    PipelinerTranslationResult.Builder resultBuilder) {
        TrafficSelector selector = fwd.selector();
        TrafficSelector meta = fwd.meta();
        ImmutableSet.Builder<Criterion> criterionSetBuilder = ImmutableSet.builder();
        criterionSetBuilder.addAll(selector.criteria());
        if (meta != null) {
            criterionSetBuilder.addAll(meta.criteria());
        }
        Set<Criterion> criteria = criterionSetBuilder.build();

        // Pick out the criteria each forwarding function type may need.
        VlanIdCriterion vlanIdCriterion = null;
        EthCriterion ethDstCriterion = null;
        IPCriterion ipDstCriterion = null;
        MplsCriterion mplsCriterion = null;
        for (Criterion criterion : criteria) {
            switch (criterion.type()) {
                case ETH_DST:
                    ethDstCriterion = (EthCriterion) criterion;
                    break;
                case VLAN_VID:
                    vlanIdCriterion = (VlanIdCriterion) criterion;
                    break;
                case IPV4_DST:
                    ipDstCriterion = (IPCriterion) criterion;
                    break;
                case MPLS_LABEL:
                    mplsCriterion = (MplsCriterion) criterion;
                    break;
                case ETH_TYPE:
                case MPLS_BOS:
                    // Used only for classification; no value to extract here.
                    break;
                default:
                    log.warn("Unsupported criterion {}", criterion);
                    break;
            }
        }

        ForwardingFunctionType forwardingFunctionType =
                ForwardingFunctionType.getForwardingFunctionType(fwd);
        switch (forwardingFunctionType) {
            case L2_UNICAST:
                processL2UnicastRule(vlanIdCriterion, ethDstCriterion, fwd, resultBuilder);
                break;
            case L2_BROADCAST:
                processL2BroadcastRule(vlanIdCriterion, fwd, resultBuilder);
                break;
            case IPV4_UNICAST:
                processIpv4UnicastRule(ipDstCriterion, fwd, resultBuilder);
                break;
            case MPLS:
                processMplsRule(mplsCriterion, fwd, resultBuilder);
                break;
            case IPV4_MULTICAST:
            case IPV6_UNICAST:
            case IPV6_MULTICAST:
            default:
                // Fix: log the unsupported function type itself; the original
                // logged the criteria set, hiding what was actually unsupported.
                log.warn("Unsupported forwarding function type {}", forwardingFunctionType);
                resultBuilder.setError(ObjectiveError.UNSUPPORTED);
                break;
        }
    }

    /**
     * L2 unicast: matches learnt MAC address + VLAN in the bridging table
     * and hands off to the next objective via a set-next-id action.
     * Sets BADPARAMS when the objective carries no next id.
     */
    private void processL2UnicastRule(VlanIdCriterion vlanIdCriterion,
                                      EthCriterion ethDstCriterion,
                                      ForwardingObjective fwd,
                                      PipelinerTranslationResult.Builder resultBuilder) {
        checkNotNull(vlanIdCriterion, "VlanId criterion should not be null");
        checkNotNull(ethDstCriterion, "EthDst criterion should not be null");
        if (fwd.nextId() == null) {
            log.warn("Forwarding objective for L2 unicast should contain next id");
            resultBuilder.setError(ObjectiveError.BADPARAMS);
            return;
        }
        VlanId vlanId = vlanIdCriterion.vlanId();
        MacAddress ethDst = ethDstCriterion.mac();
        TrafficSelector selector = DefaultTrafficSelector.builder()
                .matchVlanId(vlanId)
                .matchEthDst(ethDst)
                .build();
        TrafficTreatment treatment = buildSetNextIdTreatment(fwd.nextId());
        FlowRule flowRule = DefaultFlowRule.builder()
                .withSelector(selector)
                .withTreatment(treatment)
                .fromApp(fwd.appId())
                .withPriority(fwd.priority())
                .makePermanent()
                .forDevice(deviceId)
                .forTable(FabricConstants.TBL_BRIDGING_ID)
                .build();
        resultBuilder.addFlowRule(flowRule);
    }

    /**
     * L2 broadcast: matches VLAN only in the bridging table and hands off to
     * the next objective. Sets BADPARAMS when no next id is present.
     */
    private void processL2BroadcastRule(VlanIdCriterion vlanIdCriterion,
                                        ForwardingObjective fwd,
                                        PipelinerTranslationResult.Builder resultBuilder) {
        checkNotNull(vlanIdCriterion, "VlanId criterion should not be null");
        if (fwd.nextId() == null) {
            log.warn("Forwarding objective for L2 broadcast should contain next id");
            resultBuilder.setError(ObjectiveError.BADPARAMS);
            return;
        }
        VlanId vlanId = vlanIdCriterion.vlanId();
        TrafficSelector selector = DefaultTrafficSelector.builder()
                .matchVlanId(vlanId)
                .build();
        TrafficTreatment treatment = buildSetNextIdTreatment(fwd.nextId());
        FlowRule flowRule = DefaultFlowRule.builder()
                .withSelector(selector)
                .withTreatment(treatment)
                .fromApp(fwd.appId())
                .withPriority(fwd.priority())
                .makePermanent()
                .forDevice(deviceId)
                .forTable(FabricConstants.TBL_BRIDGING_ID)
                .build();
        resultBuilder.addFlowRule(flowRule);
    }

    /**
     * IPv4 unicast: matches the destination prefix in the IPv4 unicast table
     * and hands off to the next objective. Sets BADPARAMS when no next id is
     * present.
     */
    private void processIpv4UnicastRule(IPCriterion ipDstCriterion, ForwardingObjective fwd,
                                        PipelinerTranslationResult.Builder resultBuilder) {
        checkNotNull(ipDstCriterion, "IP dst criterion should not be null");
        if (fwd.nextId() == null) {
            log.warn("Forwarding objective for IPv4 unicast should contain next id");
            resultBuilder.setError(ObjectiveError.BADPARAMS);
            return;
        }
        TrafficSelector selector = DefaultTrafficSelector.builder()
                .matchIPDst(ipDstCriterion.ip())
                .build();
        TrafficTreatment treatment = buildSetNextIdTreatment(fwd.nextId());
        FlowRule flowRule = DefaultFlowRule.builder()
                .withSelector(selector)
                .withTreatment(treatment)
                .fromApp(fwd.appId())
                .withPriority(fwd.priority())
                .makePermanent()
                .forDevice(deviceId)
                .forTable(FabricConstants.TBL_UNICAST_V4_ID)
                .build();
        resultBuilder.addFlowRule(flowRule);
    }

    /**
     * MPLS: matches the label in the MPLS table and invokes the
     * pop-MPLS-and-next-id action. Sets BADPARAMS when no next id is present.
     */
    private void processMplsRule(MplsCriterion mplsCriterion, ForwardingObjective fwd,
                                 PipelinerTranslationResult.Builder resultBuilder) {
        checkNotNull(mplsCriterion, "Mpls criterion should not be null");
        if (fwd.nextId() == null) {
            log.warn("Forwarding objective for MPLS should contain next id");
            resultBuilder.setError(ObjectiveError.BADPARAMS);
            return;
        }
        TrafficSelector selector = DefaultTrafficSelector.builder()
                .add(mplsCriterion)
                .build();
        // NOTE(review): byteValue() keeps only the low 8 bits of the next id,
        // so ids above 255 are silently truncated — confirm the P4 action
        // parameter bit width before widening this.
        PiActionParam nextIdParam = new PiActionParam(FabricConstants.ACT_PRM_NEXT_ID_ID,
                ImmutableByteSequence.copyFrom(fwd.nextId().byteValue()));
        PiAction nextIdAction = PiAction.builder()
                .withId(FabricConstants.ACT_POP_MPLS_AND_NEXT_ID)
                .withParameter(nextIdParam)
                .build();
        TrafficTreatment treatment = DefaultTrafficTreatment.builder()
                .piTableAction(nextIdAction)
                .build();
        FlowRule flowRule = DefaultFlowRule.builder()
                .withSelector(selector)
                .withTreatment(treatment)
                .fromApp(fwd.appId())
                .withPriority(fwd.priority())
                .makePermanent()
                .forDevice(deviceId)
                .forTable(FabricConstants.TBL_MPLS_ID)
                .build();
        resultBuilder.addFlowRule(flowRule);
    }

    /**
     * Builds a treatment invoking the set-next-id PI action with the given id.
     *
     * @param nextId next objective id to encode in the action parameter
     * @return treatment carrying the PI action
     */
    private static TrafficTreatment buildSetNextIdTreatment(Integer nextId) {
        // NOTE(review): byteValue() keeps only the low 8 bits — next ids above
        // 255 are silently truncated; confirm the action-parameter bit width.
        PiActionParam nextIdParam = new PiActionParam(FabricConstants.ACT_PRM_NEXT_ID_ID,
                ImmutableByteSequence.copyFrom(nextId.byteValue()));
        PiAction nextIdAction = PiAction.builder()
                .withId(FabricConstants.ACT_SET_NEXT_ID_ID)
                .withParameter(nextIdParam)
                .build();
        return DefaultTrafficTreatment.builder()
                .piTableAction(nextIdAction)
                .build();
    }
}
| |
package com.winningsmiledental;
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import javax.swing.event.*;
import javax.swing.tree.*;
import org.swixml.SwingEngine;
import java.io.*;
import java.util.*;
import java.net.*;
import org.jdom.*;
/**
* This is the Main Frame of the Application.
*/
/**
 * Main frame of the application. Renders the window from a SwiXML
 * descriptor, wires the application menu, shows the login screen, and runs a
 * keep-alive loop until exit is requested.
 */
public class JDentProFrame implements ApplicationFrame {
    /** SwiXML engine; renders /xml/AppWindow.xml and injects the public widget fields below. */
    public SwingEngine swix = null;

    // Polled by run() on the runner thread but set from Swing event handlers
    // (exitGUI). Fix: the field must be volatile — without it the runner
    // thread may never observe the exit request and spin forever.
    private volatile boolean CONTINUE_RUNNING;

    /*
     *====================
     *BEGIN SwiXML fields (names must match ids in AppWindow.xml)
     *====================
     */
    public JMenuBar menubar;
    public JMenu menuFile;
    // application menu items
    public JMenuItem mi_Options;
    public JMenuItem mi_About;
    public JMenuItem mi_Help;
    public JMenuItem mi_Exit;

    private Application application;
    /*
     *====================
     *END SwiXML fields
     *====================
     */

    public JDentProFrame(Application a) {
        setApplication(a);
        configure();
    }

    public void setApplication(Application a) {
        application = a;
    }

    public Application getApplication() {
        return application;
    }

    /** Builds the GUI; on any setup failure, exits the application with status 1. */
    public void configure() {
        try {
            setupGUI();
        }
        catch (Exception e) {
            e.printStackTrace(System.out);
            getApplication().exit(1);
        }
    }

    /** @return the SwiXML root frame, or {@code null} if the GUI is not yet rendered */
    public JFrame getFrame() {
        JFrame frame = null;
        if (swix != null) {
            frame = (JFrame) swix.getRootComponent();
        }
        return frame;
    }

    /** Renders the main window, wires menu actions, and shows the login screen. */
    private void setupGUI() throws Exception {
        System.out.println("creating new SwingEngine");
        swix = new SwingEngine(this);
        String urlString = "/xml/AppWindow.xml";
        URL url = getClass().getResource(urlString);
        swix.render(url);
        System.out.println("created new SwingEngine");
        connectActionListeners();
        LoginScreenGUI loginScreen = new LoginScreenGUI(this);
        loadScreen(loginScreen);
        getFrame().setVisible(true);
    }

    /** Replaces the frame's content with the given screen and repaints. */
    public void loadScreen(GUI gui) {
        ((AbstractGUI) gui).refresh();
        getFrame().getContentPane().removeAll();
        getFrame().getContentPane().add(gui.getPane(), 0);
        getFrame().getContentPane().validate();
        getFrame().getContentPane().repaint();
    }

    /** Registers this frame as the action listener for the File menu items. */
    private void connectActionListeners() {
        // File Menu
        mi_Exit.addActionListener(this);
        mi_Options.addActionListener(this);
        mi_Help.addActionListener(this);
        mi_About.addActionListener(this);
    }

    /*
     * ============================
     * BEGIN ACTIONLISTENER IMPLEMENTATION
     * ============================
     */
    /** Dispatches menu action commands (set in AppWindow.xml) to handlers. */
    public void actionPerformed(ActionEvent ae) {
        String command = ae.getActionCommand();
        // Application Menu
        if (command.equals("AC_Help")) {
            /** Help menu item action needs a help file to open */
        }
        else if (command.equals("AC_About")) {
            onActionAbout();
        }
        else if (command.equals("AC_Exit")) {
            onActionExit();
        }
    }
    /*
     * ============================
     * END ACTIONLISTENER IMPLEMENTATION
     * ============================
     */

    /*
     * ============================
     * BEGIN WINDOWLISTENER IMPLEMENTATION
     * ============================
     */
    public void windowActivated(WindowEvent e) {
        // should redraw everything
    }

    public void windowClosed(WindowEvent e) {
    }

    /**
     * Invoked when a window is in the process of being closed.
     * The close operation can be overridden at this point.
     */
    public void windowClosing(WindowEvent e) {
        onActionExit();
    }

    public void windowDeactivated(WindowEvent e) {
    }

    public void windowDeiconified(WindowEvent e) {
    }

    public void windowIconified(WindowEvent e) {
    }

    public void windowOpened(WindowEvent e) {
    }
    /*
     * ============================
     * END WINDOWLISTENER IMPLEMENTATION
     * ============================
     */

    /** Signals the run() loop to stop (which then triggers cleanup/exit). */
    private void exitGUI() {
        CONTINUE_RUNNING = false;
    }

    public void onActionExit() {
        exitGUI();
    }

    /** pops up a dialog box specifying the
     * information about the application.
     */
    public void onActionAbout() {
        JDialog dialog =
                new JDialog(getFrame(),
                            "About");
        JScrollPane scrollPane =
                new JScrollPane(new AboutPane(this));
        dialog.getContentPane().add("Center", scrollPane);
        JPanel panel = new JPanel();
        dialog.getContentPane().add("South", panel);
        dialog.setSize(400, 200);
        dialog.setVisible(true);
    }

    public SwingEngine getSwingEngine() {
        return swix;
    }

    /** Exits the application normally once the run loop stops. */
    private void cleanup() {
        getApplication().exit(0);
    }

    private boolean continueRunning() {
        return CONTINUE_RUNNING;
    }

    /*
     * ============================
     * BEGIN RUNNABLE IMPLEMENTATION
     * ============================
     */
    /** Keep-alive loop: sleeps until exit is requested, then cleans up. */
    public void run() {
        CONTINUE_RUNNING = true;
        while (continueRunning()) {
            try {
                Thread.sleep(1);
            }
            catch (InterruptedException ie) {
                // Fix: the original swallowed the interrupt silently. Restore
                // the interrupt status and treat interruption as a shutdown
                // request so the thread can terminate promptly.
                Thread.currentThread().interrupt();
                exitGUI();
            }
        }
        cleanup();
    }
    /*
     * ============================
     * END RUNNABLE IMPLEMENTATION
     * ============================
     */
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.jdbc.impl;
import static org.junit.Assert.*;
import org.apache.drill.common.types.Types;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.exec.vector.accessor.AbstractSqlAccessor;
import org.apache.drill.exec.vector.accessor.InvalidAccessException;
import org.apache.drill.exec.vector.accessor.SqlAccessor;
import org.apache.drill.jdbc.SQLConversionOverflowException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.fail;
import static org.junit.Assert.assertThat;
import static org.hamcrest.CoreMatchers.*;
/**
 * Class-level unit test for {@link TypeConvertingSqlAccessor}.
 * (Also see {@link org.apache.drill.jdbc.ResultSetGetMethodConversionsTest}.)
 */
public class TypeConvertingSqlAccessorTest {
/**
* Base test stub(?) for accessors underlying TypeConvertingSqlAccessor.
* Carries type and (Object form of) one value.
*/
/**
 * Minimal accessor stub: reports a fixed {@link MajorType} and carries one
 * canned value, exposed to subclasses via {@link #getValue()}. Any accessor
 * operation the tests are not supposed to reach throws.
 */
private static abstract class BaseStubAccessor extends AbstractSqlAccessor
    implements SqlAccessor {

    private final MajorType type;
    private final Object value;

    BaseStubAccessor(final MajorType type, final Object value) {
        this.type = type;
        this.value = value;
    }

    @Override
    public MajorType getType() {
        return type;
    }

    /** The single canned value carried by this stub. */
    protected Object getValue() {
        return value;
    }

    @Override
    public boolean isNull(final int rowOffset) {
        // Stubs never model SQL NULL.
        return false;
    }

    @Override
    public Class<?> getObjectClass() {
        throw new RuntimeException("Unexpected use of getObjectClass(...)");
    }

    @Override
    public Object getObject(final int rowOffset) throws InvalidAccessException {
        throw new RuntimeException("Unexpected use of getObject(...)");
    }
}
// Byte? TinyInt? TINYINT?
/** Stub carrying one required TINYINT (Java {@code byte}) value. */
private static class TinyIntStubAccessor extends BaseStubAccessor {

    TinyIntStubAccessor(final byte v) {
        super(Types.required(MinorType.TINYINT), v);
    }

    public byte getByte(final int rowOffset) {
        return (Byte) getValue();
    }
} // TinyIntStubAccessor
// Short? SmallInt? SMALLINT?
/** Stub carrying one required SMALLINT (Java {@code short}) value. */
private static class SmallIntStubAccessor extends BaseStubAccessor {

    SmallIntStubAccessor(final short v) {
        super(Types.required(MinorType.SMALLINT), v);
    }

    public short getShort(final int rowOffset) {
        return (Short) getValue();
    }
} // SmallIntStubAccessor
// Int? Int? INT?
/** Stub carrying one required INT (Java {@code int}) value. */
private static class IntegerStubAccessor extends BaseStubAccessor {

    IntegerStubAccessor(final int v) {
        super(Types.required(MinorType.INT), v);
    }

    public int getInt(final int rowOffset) {
        return (Integer) getValue();
    }
} // IntegerStubAccessor
// Long? Bigint? BIGINT?
/** Stub carrying one required BIGINT (Java {@code long}) value. */
private static class BigIntStubAccessor extends BaseStubAccessor {

    BigIntStubAccessor(final long v) {
        super(Types.required(MinorType.BIGINT), v);
    }

    public long getLong(final int rowOffset) {
        return (Long) getValue();
    }
} // BigIntStubAccessor
// Float? Float4? FLOAT? (REAL?)
/** Stub carrying one required FLOAT4 (Java {@code float}) value. */
private static class FloatStubAccessor extends BaseStubAccessor {

    FloatStubAccessor(final float v) {
        super(Types.required(MinorType.FLOAT4), v);
    }

    public float getFloat(final int rowOffset) {
        return (Float) getValue();
    }
} // FloatStubAccessor
// Double? Float8? DOUBLE?
/** Stub carrying one required FLOAT8 (Java {@code double}) value. */
private static class DoubleStubAccessor extends BaseStubAccessor {

    DoubleStubAccessor(double value) {
        super(Types.required(MinorType.FLOAT8), value);
    }

    public double getDouble(int rowOffset) {
        // Consistency fix: unbox via (Double) like every sibling stub. The
        // original cast Object straight to the primitive ("(double) getValue()"),
        // which compiles to the same checkcast+unbox but reads inconsistently.
        return (Double) getValue();
    }
} // DoubleStubAccessor
//////////////////////////////////////////////////////////////////////
// Column accessor (getXxx(...)) methods, in same order as in JDBC 4.2 spec.
// TABLE B-6 ("Use of ResultSet getter Methods to Retrieve JDBC Data Types"):
////////////////////////////////////////
// - getByte:
// - TINYINT, SMALLINT, INTEGER, BIGINT; REAL, FLOAT, DOUBLE; DECIMAL, NUMERIC;
// - BIT, BOOLEAN;
// - CHAR, VARCHAR, LONGVARCHAR;
// - ROWID;
// getByte conversions: each numeric source type must pass through values in
// [-128, 127] unchanged, and must reject out-of-range sources with a
// SQLConversionOverflowException whose message names the offending value,
// the getter, and the source type.
@Test
public void test_getByte_on_TINYINT_getsIt() throws InvalidAccessException {
// Both byte extremes (Byte.MAX_VALUE / Byte.MIN_VALUE) survive unchanged.
final SqlAccessor uut1 =
new TypeConvertingSqlAccessor( new TinyIntStubAccessor( (byte) 127 ) );
assertThat( uut1.getByte( 0 ), equalTo( (byte) 127 ) );
final SqlAccessor uut2 =
new TypeConvertingSqlAccessor( new TinyIntStubAccessor( (byte) -128 ) );
assertThat( uut2.getByte( 0 ), equalTo( (byte) -128 ) );
}
@Test
public void test_getByte_on_SMALLINT_thatFits_getsIt()
throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new SmallIntStubAccessor( (short) 127 ) );
assertThat( uut.getByte( 0 ), equalTo( (byte) 127 ) );
}
@Test( expected = SQLConversionOverflowException.class )
public void test_getByte_on_SMALLINT_thatOverflows_rejectsIt()
throws InvalidAccessException {
// 128 is the smallest short that no longer fits in a byte.
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new SmallIntStubAccessor( (short) 128 ) );
try {
uut.getByte( 0 );
}
catch ( Throwable e ) {
// Expect the too-big source value in error message:
assertThat( e.getMessage(), containsString( "128" ) );
// Probably expect the method name:
assertThat( e.getMessage(), containsString( "getByte" ) );
// Expect something about source type (original SQL type and default Java
// type, currently).
assertThat( e.getMessage(), allOf( containsString( "short" ),
containsString( "SMALLINT" ) ) );
// Re-throw so @Test(expected = ...) still sees the exception.
throw e;
}
}
@Test
public void test_getByte_on_INTEGER_thatFits_getsIt()
throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new IntegerStubAccessor( -128 ) );
assertThat( uut.getByte( 0 ), equalTo( (byte) -128 ) );
}
@Test( expected = SQLConversionOverflowException.class )
public void test_getByte_on_INTEGER_thatOverflows_rejectsIt()
throws InvalidAccessException {
// -129 is the largest-magnitude negative int that no longer fits in a byte.
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new IntegerStubAccessor( -129 ) );
try {
uut.getByte( 0 );
}
catch ( Throwable e ) {
assertThat( e.getMessage(), containsString( "-129" ) );
assertThat( e.getMessage(), containsString( "getByte" ) );
assertThat( e.getMessage(), allOf( containsString( "int" ),
containsString( "INTEGER" ) ) );
throw e;
}
}
@Test
public void test_getByte_on_BIGINT_thatFits_getsIt()
throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new BigIntStubAccessor( -128 ) );
assertThat( uut.getByte( 0 ), equalTo( (byte) -128 ) );
}
@Test( expected = SQLConversionOverflowException.class )
public void test_getByte_on_BIGINT_thatOverflows_rejectsIt()
throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new BigIntStubAccessor( 129 ) );
try {
uut.getByte( 0 );
}
catch ( Throwable e ) {
assertThat( e.getMessage(), containsString( "129" ) );
assertThat( e.getMessage(), containsString( "getByte" ) );
assertThat( e.getMessage(), allOf( containsString( "long" ),
containsString( "BIGINT" ) ) );
throw e;
}
}
@Test
public void test_getByte_on_FLOAT_thatFits_getsIt()
throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new FloatStubAccessor( -128.0f ) );
assertThat( uut.getByte( 0 ), equalTo( (byte) -128 ) );
}
@Test( expected = SQLConversionOverflowException.class )
public void test_getByte_on_FLOAT_thatOverflows_rejectsIt()
throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new FloatStubAccessor( -130f ) );
try {
uut.getByte( 0 );
}
catch ( Throwable e ) {
assertThat( e.getMessage(), containsString( "-130" ) );
assertThat( e.getMessage(), containsString( "getByte" ) );
// FLOAT4 may be reported as either SQL REAL or FLOAT.
assertThat( e.getMessage(), allOf( containsString( "float" ),
anyOf( containsString( "REAL" ),
containsString( "FLOAT" ) ) ) );
throw e;
}
}
@Test
public void test_getByte_on_DOUBLE_thatFits_getsIt()
throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new DoubleStubAccessor( 127.0d ) );
assertThat( uut.getByte( 0 ), equalTo( (byte) 127) );
}
@Test( expected = SQLConversionOverflowException.class )
public void test_getByte_on_DOUBLE_thatOverflows_rejectsIt()
throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new DoubleStubAccessor( -130 ) );
try {
uut.getByte( 0 );
}
catch ( Throwable e ) {
assertThat( e.getMessage(), containsString( "-130" ) );
assertThat( e.getMessage(), containsString( "getByte" ) );
assertThat( e.getMessage(), allOf( containsString( "double" ),
anyOf( containsString( "DOUBLE PRECISION" ),
containsString( "FLOAT(" ) ) ) );
throw e;
}
}
////////////////////////////////////////
// - getShort:
// - TINYINT, SMALLINT, INTEGER, BIGINT; REAL, FLOAT, DOUBLE; DECIMAL, NUMERIC;
// - BIT, BOOLEAN;
// - CHAR, VARCHAR, LONGVARCHAR;
// getShort conversions: values in [-32768, 32767] pass through from every
// numeric source type; out-of-range sources raise
// SQLConversionOverflowException with value/getter/source-type in the message.
@Test
public void test_getShort_on_TINYINT_getsIt() throws InvalidAccessException {
final SqlAccessor uut1 =
new TypeConvertingSqlAccessor( new TinyIntStubAccessor( (byte) 127 ) );
assertThat( uut1.getShort( 0 ), equalTo( (short) 127 ) );
final SqlAccessor uut2 =
new TypeConvertingSqlAccessor( new TinyIntStubAccessor( (byte) -128 ) );
assertThat( uut2.getShort( 0 ), equalTo( (short) -128 ) );
}
@Test
public void test_getShort_on_SMALLINT_getsIt() throws InvalidAccessException {
// Both short extremes (Short.MAX_VALUE / Short.MIN_VALUE) survive unchanged.
final SqlAccessor uut1 =
new TypeConvertingSqlAccessor( new SmallIntStubAccessor( (short) 32767 ) );
assertThat( uut1.getShort( 0 ), equalTo( (short) 32767 ) );
final SqlAccessor uut2 =
new TypeConvertingSqlAccessor( new SmallIntStubAccessor( (short) -32768 ) );
assertThat( uut2.getShort( 0 ), equalTo( (short) -32768 ) );
}
@Test
public void test_getShort_on_INTEGER_thatFits_getsIt()
throws InvalidAccessException {
final SqlAccessor uut1 =
new TypeConvertingSqlAccessor( new IntegerStubAccessor( 32767 ) );
assertThat( uut1.getShort( 0 ), equalTo( (short) 32767 ) );
final SqlAccessor uut2 =
new TypeConvertingSqlAccessor( new IntegerStubAccessor( -32768 ) );
assertThat( uut2.getShort( 0 ), equalTo( (short) -32768 ) );
}
@Test( expected = SQLConversionOverflowException.class )
public void test_getShort_on_INTEGER_thatOverflows_throws()
throws InvalidAccessException {
// -32769 is one past Short.MIN_VALUE.
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new IntegerStubAccessor( -32769 ) );
try {
uut.getShort( 0 );
}
catch ( Throwable e ) {
assertThat( e.getMessage(), containsString( "-32769" ) );
assertThat( e.getMessage(), containsString( "getShort" ) );
assertThat( e.getMessage(), allOf( containsString( "int" ),
containsString( "INTEGER" ) ) );
throw e;
}
}
// NOTE(review): name is missing "_on_" (cf. siblings), and -32678 looks like
// a transposed -32768; the value still fits in a short, so the test is valid
// as written.
@Test
public void test_getShort_BIGINT_thatFits_getsIt() throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new BigIntStubAccessor( -32678 ) );
assertThat( uut.getShort( 0 ), equalTo( (short) -32678 ) );
}
@Test( expected = SQLConversionOverflowException.class )
public void test_getShort_on_BIGINT_thatOverflows_throws()
throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new BigIntStubAccessor( 65535 ) );
try {
uut.getShort( 0 );
}
catch ( Throwable e ) {
assertThat( e.getMessage(), containsString( "65535" ) );
assertThat( e.getMessage(), containsString( "getShort" ) );
assertThat( e.getMessage(), allOf( containsString( "long" ),
containsString( "BIGINT" ) ) );
throw e;
}
}
@Test
public void test_getShort_on_FLOAT_thatFits_getsIt() throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new FloatStubAccessor( -32768f ) );
assertThat( uut.getShort( 0 ), equalTo( (short) -32768 ) );
}
@Test( expected = SQLConversionOverflowException.class )
public void test_getShort_on_FLOAT_thatOverflows_rejectsIt()
throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new FloatStubAccessor( -32769f ) );
try {
uut.getShort( 0 );
}
catch ( Throwable e ) {
assertThat( e.getMessage(), containsString( "-32769" ) );
assertThat( e.getMessage(), containsString( "getShort" ) );
assertThat( e.getMessage(), allOf( containsString( "float" ),
anyOf( containsString( "REAL" ),
containsString( "FLOAT" ) ) ) );
throw e;
}
}
@Test
public void test_getShort_on_DOUBLE_thatFits_getsIt() throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new DoubleStubAccessor( 32767d ) );
assertThat( uut.getShort( 0 ), equalTo( (short) 32767) );
}
@Test( expected = SQLConversionOverflowException.class )
public void test_getShort_on_DOUBLE_thatOverflows_rejectsIt()
throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new DoubleStubAccessor( 32768 ) );
try {
uut.getShort( 0 );
}
catch ( Throwable e ) {
assertThat( e.getMessage(), containsString( "32768" ) );
assertThat( e.getMessage(), containsString( "getShort" ) );
assertThat( e.getMessage(), allOf( containsString( "double" ),
anyOf( containsString( "DOUBLE PRECISION" ),
containsString( "FLOAT" ) ) ) );
throw e;
}
}
////////////////////////////////////////
// - getInt:
// - TINYINT, SMALLINT, INTEGER, BIGINT; REAL, FLOAT, DOUBLE; DECIMAL, NUMERIC;
// - BIT, BOOLEAN;
// - CHAR, VARCHAR, LONGVARCHAR;
// getInt conversions: values in [Integer.MIN_VALUE, Integer.MAX_VALUE] pass
// through; larger BIGINT/FLOAT/DOUBLE sources must raise
// SQLConversionOverflowException with value/getter/source-type in the message.
@Test
public void test_getInt_on_TINYINT_getsIt() throws InvalidAccessException {
final SqlAccessor uut1 =
new TypeConvertingSqlAccessor( new TinyIntStubAccessor( (byte) 127 ) );
assertThat( uut1.getInt( 0 ), equalTo( 127 ) );
final SqlAccessor uut2 =
new TypeConvertingSqlAccessor( new TinyIntStubAccessor( (byte) -128 ) );
assertThat( uut2.getInt( 0 ), equalTo( -128 ) );
}
@Test
public void test_getInt_on_SMALLINT_getsIt() throws InvalidAccessException {
final SqlAccessor uut1 =
new TypeConvertingSqlAccessor( new SmallIntStubAccessor( (short) 32767 ) );
assertThat( uut1.getInt( 0 ), equalTo( 32767 ) );
final SqlAccessor uut2 =
new TypeConvertingSqlAccessor( new SmallIntStubAccessor( (short) -32768 ) );
assertThat( uut2.getInt( 0 ), equalTo( -32768 ) );
}
@Test
public void test_getInt_on_INTEGER_getsIt() throws InvalidAccessException {
// Both int extremes survive unchanged.
final SqlAccessor uut1 =
new TypeConvertingSqlAccessor( new IntegerStubAccessor( 2147483647 ) );
assertThat( uut1.getInt( 0 ), equalTo( 2147483647 ) );
final SqlAccessor uut2 =
new TypeConvertingSqlAccessor( new IntegerStubAccessor( -2147483648 ) );
assertThat( uut2.getInt( 0 ), equalTo( -2147483648 ) );
}
@Test
public void test_getInt_on_BIGINT_thatFits_getsIt() throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new BigIntStubAccessor( 2147483647 ) );
assertThat( uut.getInt( 0 ), equalTo( 2147483647 ) );
}
@Test( expected = SQLConversionOverflowException.class )
public void test_getInt_on_BIGINT_thatOverflows_throws() throws
InvalidAccessException {
// 2^31 — one past Integer.MAX_VALUE.
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new BigIntStubAccessor( 2147483648L ) );
try {
uut.getInt( 0 );
}
catch ( Throwable e ) {
assertThat( e.getMessage(), containsString( "2147483648" ) );
assertThat( e.getMessage(), containsString( "getInt" ) );
assertThat( e.getMessage(), allOf( containsString( "long" ),
containsString( "BIGINT" ) ) );
throw e;
}
}
@Test
public void test_getInt_on_FLOAT_thatFits_getsIt() throws InvalidAccessException {
// 1e9 is exactly representable as a float, so the round trip is exact.
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new FloatStubAccessor( 1e9f ) );
assertThat( uut.getInt( 0 ), equalTo( 1_000_000_000 ) );
}
@Test( expected = SQLConversionOverflowException.class )
public void test_getInt_on_FLOAT_thatOverflows_rejectsIt() throws
InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new FloatStubAccessor( 1e10f ) );
try {
uut.getInt( 0 );
}
catch ( Throwable e ) {
assertThat( e.getMessage(), containsString( "1.0E10" ) );
assertThat( e.getMessage(), containsString( "getInt" ) );
assertThat( e.getMessage(), allOf( containsString( "float" ),
anyOf( containsString( "REAL" ),
containsString( "FLOAT" ) ) ) );
throw e;
}
}
@Test
public void test_getInt_on_DOUBLE_thatFits_getsIt() throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new DoubleStubAccessor( -2147483648.0d ) );
assertThat( uut.getInt( 0 ), equalTo( -2147483648 ) );
}
@Test( expected = SQLConversionOverflowException.class )
public void test_getInt_on_DOUBLE_thatOverflows_rejectsIt() throws
InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new DoubleStubAccessor( -2147483649.0d ) );
try {
uut.getInt( 0 );
}
catch ( Throwable e ) {
assertThat( e.getMessage(), containsString( "-2.147483649E9" ) );
assertThat( e.getMessage(), containsString( "getInt" ) );
assertThat( e.getMessage(), allOf( containsString( "double" ),
containsString( "DOUBLE PRECISION" ) ) );
throw e;
}
}
////////////////////////////////////////
// - getLong:
// - TINYINT, SMALLINT, INTEGER, BIGINT; REAL, FLOAT, DOUBLE; DECIMAL, NUMERIC;
// - BIT, BOOLEAN;
// - CHAR, VARCHAR, LONGVARCHAR;
// getLong conversions: every integral source widens exactly; FLOAT/DOUBLE
// sources beyond long range must raise SQLConversionOverflowException.
@Test
public void test_getLong_on_TINYINT_getsIt() throws InvalidAccessException {
final SqlAccessor uut1 =
new TypeConvertingSqlAccessor( new TinyIntStubAccessor( (byte) 127 ) );
assertThat( uut1.getLong( 0 ), equalTo( 127L ) );
final SqlAccessor uut2 =
new TypeConvertingSqlAccessor( new TinyIntStubAccessor( (byte) -128 ) );
assertThat( uut2.getLong( 0 ), equalTo( -128L ) );
}
@Test
public void test_getLong_on_SMALLINT_getsIt() throws InvalidAccessException {
final SqlAccessor uut1 =
new TypeConvertingSqlAccessor( new SmallIntStubAccessor( (short) 32767 ) );
assertThat( uut1.getLong( 0 ), equalTo( 32767L ) );
final SqlAccessor uut2 =
new TypeConvertingSqlAccessor( new SmallIntStubAccessor( (short) -32768 ) );
assertThat( uut2.getLong( 0 ), equalTo( -32768L ) );
}
@Test
public void test_getLong_on_INTEGER_getsIt() throws InvalidAccessException {
final SqlAccessor uut1 =
new TypeConvertingSqlAccessor( new IntegerStubAccessor( 2147483647 ) );
assertThat( uut1.getLong( 0 ), equalTo( 2147483647L ) );
final SqlAccessor uut2 =
new TypeConvertingSqlAccessor( new IntegerStubAccessor( -2147483648 ) );
assertThat( uut2.getLong( 0 ), equalTo( -2147483648L ) );
}
@Test
public void test_getLong_on_BIGINT_getsIt() throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new BigIntStubAccessor( 2147483648L ) );
assertThat( uut.getLong( 0 ), equalTo( 2147483648L ) );
}
@Test
public void test_getLong_on_FLOAT_thatFits_getsIt() throws InvalidAccessException {
// NOTE(review): Long.MAX_VALUE * 1.0f rounds up to exactly 2^63; the assert
// holds only because float-to-long narrowing saturates at Long.MAX_VALUE
// (JLS 5.1.3) — confirm this is the intended boundary behavior.
final SqlAccessor uut =
new TypeConvertingSqlAccessor(
new FloatStubAccessor( 9223372036854775807L * 1.0f ) );
assertThat( uut.getLong( 0 ), equalTo( 9223372036854775807L ) );
}
@Test( expected = SQLConversionOverflowException.class )
public void test_getLong_on_FLOAT_thatOverflows_rejectsIt()
throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new FloatStubAccessor( 1.5e20f ) );
try {
uut.getLong( 0 );
}
catch ( Throwable e ) {
assertThat( e.getMessage(), containsString( "1.5000" ) );
assertThat( e.getMessage(), containsString( "getLong" ) );
assertThat( e.getMessage(), allOf( containsString( "float" ),
anyOf( containsString( "REAL" ),
containsString( "FLOAT" ) ) ) );
throw e;
}
}
@Test
public void test_getLong_on_DOUBLE_thatFits_getsIt() throws InvalidAccessException {
// NOTE(review): same boundary reliance as the FLOAT case above — the
// product rounds to exactly 2^63.
final SqlAccessor uut =
new TypeConvertingSqlAccessor(
new DoubleStubAccessor( 9223372036854775807L * 1.0d ) );
assertThat( uut.getLong( 0 ), equalTo( 9223372036854775807L ) );
}
@Test( expected = SQLConversionOverflowException.class )
public void test_getLong_on_DOUBLE_thatOverflows_rejectsIt()
throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new DoubleStubAccessor( 1e20 ) );
try {
uut.getLong( 0 );
}
catch ( Throwable e ) {
assertThat( e.getMessage(), containsString( "1.0E20" ) );
assertThat( e.getMessage(), containsString( "getLong" ) );
assertThat( e.getMessage(), allOf( containsString( "double" ),
containsString( "DOUBLE PRECISION" ) ) );
throw e;
}
}
////////////////////////////////////////
// - getFloat:
// - TINYINT, SMALLINT, INTEGER, BIGINT; REAL, FLOAT, DOUBLE; DECIMAL, NUMERIC;
// - BIT, BOOLEAN;
// - CHAR, VARCHAR, LONGVARCHAR;
// getFloat conversions: FLOAT passes through exactly (including the
// representable extremes); DOUBLE sources beyond float range must raise
// SQLConversionOverflowException.
@Test
public void test_getFloat_on_FLOAT_getsIt() throws InvalidAccessException {
final SqlAccessor uut1 =
new TypeConvertingSqlAccessor( new FloatStubAccessor( 1.23f ) );
assertThat( uut1.getFloat( 0 ), equalTo( 1.23f ) );
final SqlAccessor uut2 =
new TypeConvertingSqlAccessor( new FloatStubAccessor( Float.MAX_VALUE ) );
assertThat( uut2.getFloat( 0 ), equalTo( Float.MAX_VALUE ) );
final SqlAccessor uut3 =
new TypeConvertingSqlAccessor( new FloatStubAccessor( Float.MIN_VALUE ) );
assertThat( uut3.getFloat( 0 ), equalTo( Float.MIN_VALUE ) );
}
@Test
public void test_getFloat_on_DOUBLE_thatFits_getsIt() throws InvalidAccessException {
// 1.125 is exactly representable in both float and double, so the
// narrowing conversion is exact.
final SqlAccessor uut1 =
new TypeConvertingSqlAccessor( new DoubleStubAccessor( 1.125 ) );
assertThat( uut1.getFloat( 0 ), equalTo( 1.125f ) );
final SqlAccessor uut2 =
new TypeConvertingSqlAccessor( new DoubleStubAccessor( Float.MAX_VALUE ) );
assertThat( uut2.getFloat( 0 ), equalTo( Float.MAX_VALUE ) );
}
@Test( expected = SQLConversionOverflowException.class )
public void test_getFloat_on_DOUBLE_thatOverflows_throws()
throws InvalidAccessException {
final SqlAccessor uut =
new TypeConvertingSqlAccessor( new DoubleStubAccessor( 1e100 ) );
try {
uut.getFloat( 0 );
}
catch ( Throwable e ) {
assertThat( e.getMessage(), containsString( "1.0E100" ) );
assertThat( e.getMessage(), containsString( "getFloat" ) );
assertThat( e.getMessage(), allOf( containsString( "double" ),
anyOf ( containsString( "DOUBLE PRECISION" ),
containsString( "FLOAT" ) ) ) );
throw e;
}
}
////////////////////////////////////////
// - getDouble:
// - TINYINT, SMALLINT, INTEGER, BIGINT; REAL, FLOAT, DOUBLE; DECIMAL, NUMERIC;
// - BIT, BOOLEAN;
// - CHAR, VARCHAR, LONGVARCHAR;
@Test
public void test_getDouble_on_FLOAT_getsIt() throws InvalidAccessException {
  // Every float widens exactly to double, including the extremes.
  final float[] sourceValues = { 6.02e23f, Float.MAX_VALUE, Float.MIN_VALUE };
  for ( final float value : sourceValues ) {
    final SqlAccessor converter =
        new TypeConvertingSqlAccessor( new FloatStubAccessor( value ) );
    assertThat( converter.getDouble( 0 ), equalTo( (double) value ) );
  }
}
@Test
public void test_getDouble_on_DOUBLE_getsIt() throws InvalidAccessException {
  // DOUBLE-to-double is the identity conversion; boundary values included.
  final double[] sourceValues = { -1e100, Double.MAX_VALUE, Double.MIN_VALUE };
  for ( final double value : sourceValues ) {
    final SqlAccessor converter =
        new TypeConvertingSqlAccessor( new DoubleStubAccessor( value ) );
    assertThat( converter.getDouble( 0 ), equalTo( value ) );
  }
}
////////////////////////////////////////
// - getBigDecimal:
// - TINYINT, SMALLINT, INTEGER, BIGINT; REAL, FLOAT, DOUBLE; DECIMAL, NUMERIC;
// - BIT, BOOLEAN;
// - CHAR, VARCHAR, LONGVARCHAR;
////////////////////////////////////////
// - getBoolean:
// - TINYINT, SMALLINT, INTEGER, BIGINT; REAL, FLOAT, DOUBLE; DECIMAL, NUMERIC;
// - BIT, BOOLEAN;
// - CHAR, VARCHAR, LONGVARCHAR;
////////////////////////////////////////
// - getString:
// - TINYINT, SMALLINT, INTEGER, BIGINT; REAL, FLOAT, DOUBLE; DECIMAL, NUMERIC;
// - BIT, BOOLEAN;
// - CHAR, VARCHAR, LONGVARCHAR;
// - NCHAR, NVARCHAR, LONGNVARCHAR;
// - BINARY, VARBINARY, LONGVARBINARY;
// - DATE, TIME, TIMESTAMP;
// - DATALINK;
////////////////////////////////////////
// - getNString:
// - TINYINT, SMALLINT, INTEGER, BIGINT; REAL, FLOAT, DOUBLE; DECIMAL, NUMERIC;
// - BIT, BOOLEAN;
// - CHAR, VARCHAR, LONGVARCHAR;
// - NCHAR, NVARCHAR, LONGNVARCHAR;
// - BINARY, VARBINARY, LONGVARBINARY;
// - DATE, TIME, TIMESTAMP;
// - DATALINK;
////////////////////////////////////////
// - getBytes:
// - BINARY, VARBINARY, LONGVARBINARY;
////////////////////////////////////////
// - getDate:
// - CHAR, VARCHAR, LONGVARCHAR;
// - DATE, TIMESTAMP;
////////////////////////////////////////
// - getTime:
// - CHAR, VARCHAR, LONGVARCHAR;
// - TIME, TIMESTAMP;
////////////////////////////////////////
// - getTimestamp:
// - CHAR, VARCHAR, LONGVARCHAR;
// - DATE, TIME, TIMESTAMP;
////////////////////////////////////////
// - getAsciiStream:
// - CHAR, VARCHAR, LONGVARCHAR;
// - BINARY, VARBINARY, LONGVARBINARY;
// - CLOB, NCLOB;
////////////////////////////////////////
// - getBinaryStream:
// - BINARY, VARBINARY, LONGVARBINARY;
////////////////////////////////////////
// - getCharacterStream:
// - CHAR, VARCHAR, LONGVARCHAR;
// - NCHAR, NVARCHAR, LONGNVARCHAR;
// - BINARY, VARBINARY, LONGVARBINARY;
// - CLOB, NCLOB;
// - SQLXML;
////////////////////////////////////////
// - getNCharacterStream:
// - CHAR, VARCHAR, LONGVARCHAR;
// - NCHAR, NVARCHAR, LONGNVARCHAR;
// - BINARY, VARBINARY, LONGVARBINARY;
// - CLOB, NCLOB;
// - SQLXML;
////////////////////////////////////////
// - getClob:
// - CLOB, NCLOB;
////////////////////////////////////////
// - getNClob:
// - CLOB, NCLOB;
////////////////////////////////////////
// - getBlob:
// - BLOB;
////////////////////////////////////////
// - getArray:
// - ARRAY;
////////////////////////////////////////
// - getRef:
// - REF;
////////////////////////////////////////
// - getURL:
// - DATALINK;
////////////////////////////////////////
// - getObject:
// - TINYINT, SMALLINT, INTEGER, BIGINT; REAL, FLOAT, DOUBLE; DECIMAL, NUMERIC;
// - BIT, BOOLEAN;
// - CHAR, VARCHAR, LONGVARCHAR;
// - NCHAR, NVARCHAR, LONGNVARCHAR;
// - BINARY, VARBINARY, LONGVARBINARY;
// - CLOB, NCLOB;
// - BLOB;
// - DATE, TIME, TIMESTAMP;
// - TIME_WITH_TIMEZONE;
// - TIMESTAMP_WITH_TIMEZONE;
// - DATALINK;
// - ROWID;
// - SQLXML;
// - ARRAY;
// - REF;
// - STRUCT;
// - JAVA_OBJECT;
////////////////////////////////////////
// - getRowId:
// - ROWID;
////////////////////////////////////////
// - getSQLXML:
// - SQLXML;
}
| |
import java.util.Arrays;
import java.util.List;
/**
 * Static lookup tables mapping time-period UI slots to their {@link TimePeriods}
 * content-item keys, plus a small validity check on a key's backing int value.
 *
 * <p>All lists are built with {@link Arrays#asList}, so they are fixed-size;
 * the fields are {@code final} so the tables cannot be reassigned. This class
 * is a non-instantiable holder of constants.
 */
public final class TimeContentItemHelper {

    /** Keys for the six period slots of time block 1. */
    public static final List<TimePeriods> TIME_PERIOD_1_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIME_PERIOD1_1,
            TimePeriods.TIME_PERIOD1_2,
            TimePeriods.TIME_PERIOD1_3,
            TimePeriods.TIME_PERIOD1_4,
            TimePeriods.TIME_PERIOD1_5,
            TimePeriods.TIME_PERIOD1_6
    );

    /** Keys for the six period slots of time block 2. */
    public static final List<TimePeriods> TIME_PERIOD_2_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIME_PERIOD2_1,
            TimePeriods.TIME_PERIOD2_2,
            TimePeriods.TIME_PERIOD2_3,
            TimePeriods.TIME_PERIOD2_4,
            TimePeriods.TIME_PERIOD2_5,
            TimePeriods.TIME_PERIOD2_6
    );

    /** Keys for the six period slots of time block 3. */
    public static final List<TimePeriods> TIME_PERIOD_3_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIME_PERIOD3_1,
            TimePeriods.TIME_PERIOD3_2,
            TimePeriods.TIME_PERIOD3_3,
            TimePeriods.TIME_PERIOD3_4,
            TimePeriods.TIME_PERIOD3_5,
            TimePeriods.TIME_PERIOD3_6
    );

    /** Keys for the six period slots of time block 4. */
    public static final List<TimePeriods> TIME_PERIOD_4_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIME_PERIOD4_1,
            TimePeriods.TIME_PERIOD4_2,
            TimePeriods.TIME_PERIOD4_3,
            TimePeriods.TIME_PERIOD4_4,
            TimePeriods.TIME_PERIOD4_5,
            TimePeriods.TIME_PERIOD4_6
    );

    /** Keys for the six period slots of time block 5. */
    public static final List<TimePeriods> TIME_PERIOD_5_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIME_PERIOD5_1,
            TimePeriods.TIME_PERIOD5_2,
            TimePeriods.TIME_PERIOD5_3,
            TimePeriods.TIME_PERIOD5_4,
            TimePeriods.TIME_PERIOD5_5,
            TimePeriods.TIME_PERIOD5_6
    );

    /** Keys for the six period slots of time block 6. */
    public static final List<TimePeriods> TIME_PERIOD_6_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIME_PERIOD6_1,
            TimePeriods.TIME_PERIOD6_2,
            TimePeriods.TIME_PERIOD6_3,
            TimePeriods.TIME_PERIOD6_4,
            TimePeriods.TIME_PERIOD6_5,
            TimePeriods.TIME_PERIOD6_6
    );

    /** 6x6 matrix: row = time block (1-6), column = period slot (1-6). */
    public static final List<List<TimePeriods>> TIME_PERIOD_CONTENT_ITEM_KEY_MATRIX = Arrays.asList(
            TIME_PERIOD_1_CONTENT_ITEM_KEYS,
            TIME_PERIOD_2_CONTENT_ITEM_KEYS,
            TIME_PERIOD_3_CONTENT_ITEM_KEYS,
            TIME_PERIOD_4_CONTENT_ITEM_KEYS,
            TIME_PERIOD_5_CONTENT_ITEM_KEYS,
            TIME_PERIOD_6_CONTENT_ITEM_KEYS
    );

    /** Average keys, one per time block. */
    public static final List<TimePeriods> TIME_AVERAGE_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIME_AVERAGE1,
            TimePeriods.TIME_AVERAGE2,
            TimePeriods.TIME_AVERAGE3,
            TimePeriods.TIME_AVERAGE4,
            TimePeriods.TIME_AVERAGE5,
            TimePeriods.TIME_AVERAGE6
    );

    /** Average-description keys, one per time block. */
    public static final List<TimePeriods> TIME_ROOM_DESCRIPTION_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIME_AVERAGE_DESCRIPTION1,
            TimePeriods.TIME_AVERAGE_DESCRIPTION2,
            TimePeriods.TIME_AVERAGE_DESCRIPTION3,
            TimePeriods.TIME_AVERAGE_DESCRIPTION4,
            TimePeriods.TIME_AVERAGE_DESCRIPTION5,
            TimePeriods.TIME_AVERAGE_DESCRIPTION6
    );

    /** Average-price keys for the six slots of time block 1. */
    public static final List<TimePeriods> TIME_1_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIME_AVERAGE_PRICE1_1,
            TimePeriods.TIME_AVERAGE_PRICE1_2,
            TimePeriods.TIME_AVERAGE_PRICE1_3,
            TimePeriods.TIME_AVERAGE_PRICE1_4,
            TimePeriods.TIME_AVERAGE_PRICE1_5,
            TimePeriods.TIME_AVERAGE_PRICE1_6
    );

    /** Average-price keys for the six slots of time block 2. */
    public static final List<TimePeriods> TIME_2_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIME_AVERAGE_PRICE2_1,
            TimePeriods.TIME_AVERAGE_PRICE2_2,
            TimePeriods.TIME_AVERAGE_PRICE2_3,
            TimePeriods.TIME_AVERAGE_PRICE2_4,
            TimePeriods.TIME_AVERAGE_PRICE2_5,
            TimePeriods.TIME_AVERAGE_PRICE2_6
    );

    /** Average-price keys for the six slots of time block 3. */
    public static final List<TimePeriods> TIME_3_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIME_AVERAGE_PRICE3_1,
            TimePeriods.TIME_AVERAGE_PRICE3_2,
            TimePeriods.TIME_AVERAGE_PRICE3_3,
            TimePeriods.TIME_AVERAGE_PRICE3_4,
            TimePeriods.TIME_AVERAGE_PRICE3_5,
            TimePeriods.TIME_AVERAGE_PRICE3_6
    );

    /** Average-price keys for the six slots of time block 4. */
    public static final List<TimePeriods> TIME_4_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIME_AVERAGE_PRICE4_1,
            TimePeriods.TIME_AVERAGE_PRICE4_2,
            TimePeriods.TIME_AVERAGE_PRICE4_3,
            TimePeriods.TIME_AVERAGE_PRICE4_4,
            TimePeriods.TIME_AVERAGE_PRICE4_5,
            TimePeriods.TIME_AVERAGE_PRICE4_6
    );

    /** Average-price keys for the six slots of time block 5. */
    public static final List<TimePeriods> TIME_5_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIME_AVERAGE_PRICE5_1,
            TimePeriods.TIME_AVERAGE_PRICE5_2,
            TimePeriods.TIME_AVERAGE_PRICE5_3,
            TimePeriods.TIME_AVERAGE_PRICE5_4,
            TimePeriods.TIME_AVERAGE_PRICE5_5,
            TimePeriods.TIME_AVERAGE_PRICE5_6
    );

    /** Average-price keys for the six slots of time block 6. */
    public static final List<TimePeriods> TIME_6_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIME_AVERAGE_PRICE6_1,
            TimePeriods.TIME_AVERAGE_PRICE6_2,
            TimePeriods.TIME_AVERAGE_PRICE6_3,
            TimePeriods.TIME_AVERAGE_PRICE6_4,
            TimePeriods.TIME_AVERAGE_PRICE6_5,
            TimePeriods.TIME_AVERAGE_PRICE6_6
    );

    /** 6x6 matrix of average-price keys: row = time block, column = slot. */
    public static final List<List<TimePeriods>> TIME_CONTENT_ITEM_KEY_MATRIX = Arrays.asList(
            TIME_1_CONTENT_ITEM_KEYS,
            TIME_2_CONTENT_ITEM_KEYS,
            TIME_3_CONTENT_ITEM_KEYS,
            TIME_4_CONTENT_ITEM_KEYS,
            TIME_5_CONTENT_ITEM_KEYS,
            TIME_6_CONTENT_ITEM_KEYS
    );

    /** The ten note-label keys. */
    public static final List<TimePeriods> TIME_NOTE_LABELS_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIME_NOTE_LABEL_1,
            TimePeriods.TIME_NOTE_LABEL_2,
            TimePeriods.TIME_NOTE_LABEL_3,
            TimePeriods.TIME_NOTE_LABEL_4,
            TimePeriods.TIME_NOTE_LABEL_5,
            TimePeriods.TIME_NOTE_LABEL_6,
            TimePeriods.TIME_NOTE_LABEL_7,
            TimePeriods.TIME_NOTE_LABEL_8,
            TimePeriods.TIME_NOTE_LABEL_9,
            TimePeriods.TIME_NOTE_LABEL_10
    );

    /** The ten note keys. */
    public static final List<TimePeriods> TIME_NOTES_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIME_NOTE_1,
            TimePeriods.TIME_NOTE_2,
            TimePeriods.TIME_NOTE_3,
            TimePeriods.TIME_NOTE_4,
            TimePeriods.TIME_NOTE_5,
            TimePeriods.TIME_NOTE_6,
            TimePeriods.TIME_NOTE_7,
            TimePeriods.TIME_NOTE_8,
            TimePeriods.TIME_NOTE_9,
            TimePeriods.TIME_NOTE_10
    );

    /**
     * Every time-related content-item key, flattened: global flags/counts,
     * names, periods, averages, average prices, descriptions, note labels,
     * and notes, in that order.
     */
    public static final List<TimePeriods> TIME_CONTENT_ITEM_KEYS = Arrays.asList(
            TimePeriods.TIMES_ENABLED,
            TimePeriods.TIMES_COUNT_OF_TIMES,
            TimePeriods.TIMES_COUNT_OF_TIME_PERIODS,
            TimePeriods.TIMES_COUNT_OF_AVERAGES,
            TimePeriods.TIME_NAME1,
            TimePeriods.TIME_NAME2,
            TimePeriods.TIME_NAME3,
            TimePeriods.TIME_NAME4,
            TimePeriods.TIME_NAME5,
            TimePeriods.TIME_NAME6,
            TimePeriods.TIME_PERIOD1_1,
            TimePeriods.TIME_PERIOD1_2,
            TimePeriods.TIME_PERIOD1_3,
            TimePeriods.TIME_PERIOD1_4,
            TimePeriods.TIME_PERIOD1_5,
            TimePeriods.TIME_PERIOD1_6,
            TimePeriods.TIME_PERIOD2_1,
            TimePeriods.TIME_PERIOD2_2,
            TimePeriods.TIME_PERIOD2_3,
            TimePeriods.TIME_PERIOD2_4,
            TimePeriods.TIME_PERIOD2_5,
            TimePeriods.TIME_PERIOD2_6,
            TimePeriods.TIME_PERIOD3_1,
            TimePeriods.TIME_PERIOD3_2,
            TimePeriods.TIME_PERIOD3_3,
            TimePeriods.TIME_PERIOD3_4,
            TimePeriods.TIME_PERIOD3_5,
            TimePeriods.TIME_PERIOD3_6,
            TimePeriods.TIME_PERIOD4_1,
            TimePeriods.TIME_PERIOD4_2,
            TimePeriods.TIME_PERIOD4_3,
            TimePeriods.TIME_PERIOD4_4,
            TimePeriods.TIME_PERIOD4_5,
            TimePeriods.TIME_PERIOD4_6,
            TimePeriods.TIME_PERIOD5_1,
            TimePeriods.TIME_PERIOD5_2,
            TimePeriods.TIME_PERIOD5_3,
            TimePeriods.TIME_PERIOD5_4,
            TimePeriods.TIME_PERIOD5_5,
            TimePeriods.TIME_PERIOD5_6,
            TimePeriods.TIME_PERIOD6_1,
            TimePeriods.TIME_PERIOD6_2,
            TimePeriods.TIME_PERIOD6_3,
            TimePeriods.TIME_PERIOD6_4,
            TimePeriods.TIME_PERIOD6_5,
            TimePeriods.TIME_PERIOD6_6,
            TimePeriods.TIME_AVERAGE1,
            TimePeriods.TIME_AVERAGE2,
            TimePeriods.TIME_AVERAGE3,
            TimePeriods.TIME_AVERAGE4,
            TimePeriods.TIME_AVERAGE5,
            TimePeriods.TIME_AVERAGE6,
            TimePeriods.TIME_AVERAGE_PRICE1_1,
            TimePeriods.TIME_AVERAGE_PRICE1_2,
            TimePeriods.TIME_AVERAGE_PRICE1_3,
            TimePeriods.TIME_AVERAGE_PRICE1_4,
            TimePeriods.TIME_AVERAGE_PRICE1_5,
            TimePeriods.TIME_AVERAGE_PRICE1_6,
            TimePeriods.TIME_AVERAGE_PRICE2_1,
            TimePeriods.TIME_AVERAGE_PRICE2_2,
            TimePeriods.TIME_AVERAGE_PRICE2_3,
            TimePeriods.TIME_AVERAGE_PRICE2_4,
            TimePeriods.TIME_AVERAGE_PRICE2_5,
            TimePeriods.TIME_AVERAGE_PRICE2_6,
            TimePeriods.TIME_AVERAGE_PRICE3_1,
            TimePeriods.TIME_AVERAGE_PRICE3_2,
            TimePeriods.TIME_AVERAGE_PRICE3_3,
            TimePeriods.TIME_AVERAGE_PRICE3_4,
            TimePeriods.TIME_AVERAGE_PRICE3_5,
            TimePeriods.TIME_AVERAGE_PRICE3_6,
            TimePeriods.TIME_AVERAGE_PRICE4_1,
            TimePeriods.TIME_AVERAGE_PRICE4_2,
            TimePeriods.TIME_AVERAGE_PRICE4_3,
            TimePeriods.TIME_AVERAGE_PRICE4_4,
            TimePeriods.TIME_AVERAGE_PRICE4_5,
            TimePeriods.TIME_AVERAGE_PRICE4_6,
            TimePeriods.TIME_AVERAGE_PRICE5_1,
            TimePeriods.TIME_AVERAGE_PRICE5_2,
            TimePeriods.TIME_AVERAGE_PRICE5_3,
            TimePeriods.TIME_AVERAGE_PRICE5_4,
            TimePeriods.TIME_AVERAGE_PRICE5_5,
            TimePeriods.TIME_AVERAGE_PRICE5_6,
            TimePeriods.TIME_AVERAGE_PRICE6_1,
            TimePeriods.TIME_AVERAGE_PRICE6_2,
            TimePeriods.TIME_AVERAGE_PRICE6_3,
            TimePeriods.TIME_AVERAGE_PRICE6_4,
            TimePeriods.TIME_AVERAGE_PRICE6_5,
            TimePeriods.TIME_AVERAGE_PRICE6_6,
            TimePeriods.TIME_AVERAGE_DESCRIPTION1,
            TimePeriods.TIME_AVERAGE_DESCRIPTION2,
            TimePeriods.TIME_AVERAGE_DESCRIPTION3,
            TimePeriods.TIME_AVERAGE_DESCRIPTION4,
            TimePeriods.TIME_AVERAGE_DESCRIPTION5,
            TimePeriods.TIME_AVERAGE_DESCRIPTION6,
            TimePeriods.TIME_NOTE_LABEL_1,
            TimePeriods.TIME_NOTE_LABEL_2,
            TimePeriods.TIME_NOTE_LABEL_3,
            TimePeriods.TIME_NOTE_LABEL_4,
            TimePeriods.TIME_NOTE_LABEL_5,
            TimePeriods.TIME_NOTE_LABEL_6,
            TimePeriods.TIME_NOTE_LABEL_7,
            TimePeriods.TIME_NOTE_LABEL_8,
            TimePeriods.TIME_NOTE_LABEL_9,
            TimePeriods.TIME_NOTE_LABEL_10,
            TimePeriods.TIME_NOTE_1,
            TimePeriods.TIME_NOTE_2,
            TimePeriods.TIME_NOTE_3,
            TimePeriods.TIME_NOTE_4,
            TimePeriods.TIME_NOTE_5,
            TimePeriods.TIME_NOTE_6,
            TimePeriods.TIME_NOTE_7,
            TimePeriods.TIME_NOTE_8,
            TimePeriods.TIME_NOTE_9,
            TimePeriods.TIME_NOTE_10
    );

    /**
     * Reports whether the given key carries a valid (non-negative) int value.
     *
     * @param key the time-period key to check; must not be {@code null}
     * @return {@code true} if {@code key.getIntValue()} is zero or positive
     */
    public static boolean check(TimePeriods key) {
        return key.getIntValue() >= 0;
    }

    /** Non-instantiable: this class only holds static lookup tables. */
    private TimeContentItemHelper() {
    }
}
| |
package org.bbs.apklauncher.emb.auto_gen;
import java.io.FileDescriptor;
import java.io.PrintWriter;
import java.util.List;
import org.bbs.osgi.activity.ReflectUtil.ActivityReflectUtil;
import org.bbs.osgi.activity.TargetContext;
import android.annotation.SuppressLint;
import android.app.ActionBar;
import android.app.Activity;
import android.app.ActivityGroup;
import android.app.ActivityManager.TaskDescription;
import android.app.Application;
import android.app.Dialog;
import android.app.Fragment;
import android.app.TaskStackBuilder;
import android.app.FragmentManager;
import android.app.LoaderManager;
import android.app.PendingIntent;
import android.app.SharedElementCallback;
import android.app.TabActivity;
import android.app.TaskStackBuilder;
import android.content.ComponentName;
import android.content.Context;
import android.content.ContextWrapper;
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
import android.content.Intent;
import android.content.IntentSender;
import android.content.IntentSender.SendIntentException;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.content.res.Resources.Theme;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.drawable.Drawable;
import android.media.session.MediaController;
import android.net.Uri;
import android.os.Bundle;
import android.os.PersistableBundle;
import android.preference.Preference;
import android.preference.PreferenceActivity.Header;
import android.preference.PreferenceFragment;
import android.preference.PreferenceManager;
import android.preference.PreferenceScreen;
import android.transition.Scene;
import android.transition.TransitionManager;
import android.util.AttributeSet;
import android.util.Log;
import android.view.ActionMode;
import android.view.ActionMode.Callback;
import android.view.ContextMenu;
import android.view.ContextMenu.ContextMenuInfo;
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup.LayoutParams;
import android.view.Window;
import android.view.WindowManager;
import android.view.accessibility.AccessibilityEvent;
import android.widget.ExpandableListAdapter;
import android.widget.ExpandableListView;
import android.widget.ListAdapter;
import android.widget.ListView;
import android.widget.TabHost;
import android.widget.TabWidget;
import android.widget.Toolbar;
/**
*
* provide consistent interface with {@link Activity}, but actually this
* is NOT a {@link Activity}, so when u want a Activity context, use
* {@link #getHostActivity}.
*/
@SuppressLint("NewApi")
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
public class Target_Activity extends
//Activity
ContextWrapper
{
private static final String TAG = Target_Activity.class.getSimpleName();
public static final int RESULT_CANCELED = Activity.RESULT_CANCELED;
public static final int RESULT_OK = Activity.RESULT_OK;
public static final int DEFAULT_KEYS_DISABLE = Activity.DEFAULT_KEYS_DISABLE;
public static final int DEFAULT_KEYS_SEARCH_LOCAL = Activity.DEFAULT_KEYS_SEARCH_LOCAL;
protected StubBase_Activity mHostActivity;
public Target_Activity() {
this(null);
}
public Target_Activity(Context base) {
super(base);
mHostActivity = (StubBase_Activity) base;
}
void setHostActivity(Stub_Activity hostActivity) {
mHostActivity = hostActivity;
}
public StubBase_Activity getHostActivity() {
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
return mHostActivity;
}
//private StubBase_Activity getHostActivityFromBaseContext(Context base) {
//return (StubBase_Activity) base;
// if (base instanceof StubBase_Activity) {
// return (StubBase_Activity) base;
// }
// return (StubBase_Activity) ((TargetContext)base).getHostActivity();
//}
protected void attachBaseContext(Context base) {
//mHostActivity = getHostActivityFromBaseContext(base);
super.attachBaseContext(base);
}
public final Application getApplication() {
return mHostActivity.getApplication();
}
public final boolean isChild() {
return mHostActivity.isChild();
}
public final Activity getParent() {
return mHostActivity.getParent();
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
public final Cursor managedQuery(Uri uri, String[] projection, String selection,
String[] selectionArgs, String sortOrder) {
return mHostActivity.managedQuery(uri, projection, selection, selectionArgs, sortOrder);
}
public final void startManagingCursor(Cursor c) {
mHostActivity.startManagingCursor(c);
}
public final void stopManagingCursor(Cursor c) {
mHostActivity.stopManagingCursor(c);
}
public final void setDefaultKeyMode(int mode) {
mHostActivity.setDefaultKeyMode(mode);
}
public final boolean requestWindowFeature(int featureId) {
return mHostActivity.requestWindowFeature(featureId);
}
public final void setFeatureDrawableResource(int featureId, int resId) {
mHostActivity.setFeatureDrawableResource(featureId, resId);
}
public final void setFeatureDrawableUri(int featureId, Uri uri) {
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
mHostActivity.setFeatureDrawableUri(featureId, uri);
}
public final void setFeatureDrawable(int featureId, Drawable drawable) {
mHostActivity.setFeatureDrawable(featureId, drawable);
}
public final void setFeatureDrawableAlpha(int featureId, int alpha) {
mHostActivity.setFeatureDrawableAlpha(featureId, alpha);
}
public final void setResult(int resultCode) {
mHostActivity.setResult(resultCode);
}
public final void setResult(int resultCode, Intent data) {
mHostActivity.setResult(resultCode, data);
}
public CharSequence getTitle() {
return mHostActivity.getTitle();
}
public int getTitleColor() {
return mHostActivity.getTitleColor();
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
public final void setProgressBarVisibility(boolean visible) {
mHostActivity.setProgressBarVisibility(visible);
}
public final void setProgressBarIndeterminateVisibility(boolean visible) {
mHostActivity.setProgressBarIndeterminateVisibility(visible);
}
public final void setProgressBarIndeterminate(boolean indeterminate) {
mHostActivity.setProgressBarIndeterminate(indeterminate);
}
public final void setProgress(int progress) {
mHostActivity.setProgress(progress);
}
public final void setSecondaryProgress(int secondaryProgress) {
mHostActivity.setSecondaryProgress(secondaryProgress);
}
public final void setVolumeControlStream(int streamType) {
mHostActivity.setVolumeControlStream(streamType);
}
public final int getVolumeControlStream() {
return mHostActivity.getVolumeControlStream();
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
public final void setMediaController(MediaController controller) {
mHostActivity.setMediaController(controller);
}
public final MediaController getMediaController() {
return mHostActivity.getMediaController();
}
public final void runOnUiThread(Runnable action) {
mHostActivity.runOnUiThread(action);
}
public Intent getIntent() {
return mHostActivity.getIntent();
}
public void setIntent(Intent newIntent) {
mHostActivity.setIntent(newIntent);
}
public WindowManager getWindowManager() {
return mHostActivity.getWindowManager();
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
public Window getWindow() {
return mHostActivity.getWindow();
}
public LoaderManager getLoaderManager() {
return mHostActivity.getLoaderManager();
}
public View getCurrentFocus() {
return mHostActivity.getCurrentFocus();
}
protected void onCreate(Bundle savedInstanceState) {
}
public void onCreate(Bundle savedInstanceState,
PersistableBundle persistentState) {
}
protected void onRestoreInstanceState(Bundle savedInstanceState) {
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
public void onRestoreInstanceState(Bundle savedInstanceState,
PersistableBundle persistentState) {
}
protected void onPostCreate(Bundle savedInstanceState) {
}
public void onPostCreate(Bundle savedInstanceState,
PersistableBundle persistentState) {
}
protected void onStart() {
}
protected void onRestart() {
}
protected void onResume() {
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
protected void onPostResume() {
}
protected void onNewIntent(Intent intent) {
}
protected void onSaveInstanceState(Bundle outState) {
}
public void onSaveInstanceState(Bundle outState,
PersistableBundle outPersistentState) {
}
protected void onPause() {
}
protected void onUserLeaveHint() {
}
public boolean onCreateThumbnail(Bitmap outBitmap, Canvas canvas) {
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
return false;
}
public CharSequence onCreateDescription() {
return null;
}
public void onProvideAssistData(Bundle data) {
}
protected void onStop() {
}
protected void onDestroy() {
}
public void reportFullyDrawn() {
mHostActivity.reportFullyDrawn();
}
public void onConfigurationChanged(Configuration newConfig) {
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
}
public int getChangingConfigurations() {
return mHostActivity.getChangingConfigurations();
}
public Object getLastNonConfigurationInstance() {
return null;
}
public Object onRetainNonConfigurationInstance() {
return null;
}
public void onLowMemory() {
}
public void onTrimMemory(int level) {
}
public FragmentManager getFragmentManager() {
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
return mHostActivity.getFragmentManager();
}
public void onAttachFragment(Fragment fragment) {
}
public View findViewById(int id) {
return mHostActivity.findViewById(id);
}
public ActionBar getActionBar() {
return mHostActivity.getActionBar();
}
public void setActionBar(Toolbar toolbar) {
mHostActivity.setActionBar(toolbar);
}
public void setContentView(int layoutResID) {
mHostActivity.setContentView(layoutResID);
}
public void setContentView(View view) {
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
mHostActivity.setContentView(view);
}
public void setContentView(View view, LayoutParams params) {
mHostActivity.setContentView(view, params);
}
public void addContentView(View view, LayoutParams params) {
mHostActivity.addContentView(view, params);
}
public TransitionManager getContentTransitionManager() {
return mHostActivity.getContentTransitionManager();
}
public void setContentTransitionManager(TransitionManager tm) {
mHostActivity.setContentTransitionManager(tm);
}
public Scene getContentScene() {
return mHostActivity.getContentScene();
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
public void setFinishOnTouchOutside(boolean finish) {
mHostActivity.setFinishOnTouchOutside(finish);
}
public boolean onKeyDown(int keyCode, KeyEvent event) {
return false;
}
public boolean onKeyLongPress(int keyCode, KeyEvent event) {
return false;
}
public boolean onKeyUp(int keyCode, KeyEvent event) {
return false;
}
public boolean onKeyMultiple(int keyCode, int repeatCount, KeyEvent event) {
return false;
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
public void onBackPressed() {
}
public boolean onKeyShortcut(int keyCode, KeyEvent event) {
return false;
}
public boolean onTouchEvent(MotionEvent event) {
return false;
}
public boolean onTrackballEvent(MotionEvent event) {
return false;
}
public boolean onGenericMotionEvent(MotionEvent event) {
return false;
}
public void onUserInteraction() {
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
public void onWindowAttributesChanged(
android.view.WindowManager.LayoutParams params) {
}
public void onContentChanged() {
}
public void onWindowFocusChanged(boolean hasFocus) {
}
public void onAttachedToWindow() {
}
public void onDetachedFromWindow() {
}
public boolean hasWindowFocus() {
return mHostActivity.hasWindowFocus();
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
public boolean dispatchKeyEvent(KeyEvent event) {
return false;
}
public boolean dispatchKeyShortcutEvent(KeyEvent event) {
return false;
}
public boolean dispatchTouchEvent(MotionEvent ev) {
return false;
}
public boolean dispatchTrackballEvent(MotionEvent ev) {
return false;
}
public boolean dispatchGenericMotionEvent(MotionEvent ev) {
return false;
}
public boolean dispatchPopulateAccessibilityEvent(AccessibilityEvent event) {
return false;
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
}
public View onCreatePanelView(int featureId) {
return null;
}
public boolean onCreatePanelMenu(int featureId, Menu menu) {
return false;
}
public boolean onPreparePanel(int featureId, View view, Menu menu) {
return false;
}
public boolean onMenuOpened(int featureId, Menu menu) {
return false;
}
public boolean onMenuItemSelected(int featureId, MenuItem item) {
return false;
}
public void onPanelClosed(int featureId, Menu menu) {
}
public void invalidateOptionsMenu() {
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
mHostActivity.invalidateOptionsMenu();
}
public boolean onCreateOptionsMenu(Menu menu) {
return false;
}
public boolean onPrepareOptionsMenu(Menu menu) {
return false;
}
public boolean onOptionsItemSelected(MenuItem item) {
return false;
}
public boolean onNavigateUp() {
return false;
}
public boolean onNavigateUpFromChild(Activity child) {
return false;
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
public void onCreateNavigateUpTaskStack(TaskStackBuilder builder) {
}
public void onPrepareNavigateUpTaskStack(TaskStackBuilder builder) {
}
public void onOptionsMenuClosed(Menu menu) {
}
public void openOptionsMenu() {
mHostActivity.openOptionsMenu();
}
public void closeOptionsMenu() {
mHostActivity.closeOptionsMenu();
}
public void onCreateContextMenu(ContextMenu menu, View v,
ContextMenuInfo menuInfo) {
}
public void registerForContextMenu(View view) {
mHostActivity.registerForContextMenu(view);
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
}
public void unregisterForContextMenu(View view) {
mHostActivity.unregisterForContextMenu(view);
}
public void openContextMenu(View view) {
}
public void closeContextMenu() {
mHostActivity.closeContextMenu();
}
public boolean onContextItemSelected(MenuItem item) {
// return super.onContextItemSelected(item);
return false;
}
public void onContextMenuClosed(Menu menu) {
}
public final void showDialog(int id) {
mHostActivity.showDialog(id, null);
}
public final boolean showDialog(int id, Bundle args) {
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
return mHostActivity.showDialog(id, args);
}
public final void dismissDialog(int id) {
mHostActivity.dismissDialog(id);
}
public final void removeDialog(int id) {
mHostActivity.removeDialog(id);
}
protected Dialog onCreateDialog(int id) {
// return super.onCreateDialog(id);
return null;
}
protected Dialog onCreateDialog(int id, Bundle args) {
// return super.onCreateDialog(id, args);
return null;
}
protected void onPrepareDialog(int id, Dialog dialog) {
}
protected void onPrepareDialog(int id, Dialog dialog, Bundle args) {
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
public boolean onSearchRequested() {
return false;
}
public void startSearch(String initialQuery, boolean selectInitialQuery,
Bundle appSearchData, boolean globalSearch) {
mHostActivity.startSearch(initialQuery, selectInitialQuery, appSearchData, globalSearch);
}
public void triggerSearch(String query, Bundle appSearchData) {
mHostActivity.triggerSearch(query, appSearchData);
}
public void takeKeyEvents(boolean get) {
mHostActivity.takeKeyEvents(get);
}
public LayoutInflater getLayoutInflater() {
return mHostActivity.getLayoutInflater();
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
// Generated delegation shim: every method below forwards verbatim to the real
// hosting Activity (mHostActivity). Per the generator banner, edits belong in
// Target_Activity.java.template, not in this file.
public MenuInflater getMenuInflater() {
return mHostActivity.getMenuInflater();
}
// Deliberate no-op override of theme application; presumably the host Activity
// applies the theme itself — TODO confirm against the template.
protected void onApplyThemeResource(Theme theme, int resid, boolean first) {
}
// startActivity* / startIntentSender* overloads: plain pass-throughs to the host.
public void startActivityForResult(Intent intent, int requestCode) {
mHostActivity.startActivityForResult(intent, requestCode);
}
public void startActivityForResult(Intent intent, int requestCode,
Bundle options) {
mHostActivity.startActivityForResult(intent, requestCode, options);
}
public void startIntentSenderForResult(IntentSender intent,
int requestCode, Intent fillInIntent, int flagsMask,
int flagsValues, int extraFlags) throws SendIntentException {
mHostActivity.startIntentSenderForResult(intent, requestCode, fillInIntent, flagsMask,
flagsValues, extraFlags);
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
public void startIntentSenderForResult(IntentSender intent,
int requestCode, Intent fillInIntent, int flagsMask,
int flagsValues, int extraFlags, Bundle options)
throws SendIntentException {
mHostActivity.startIntentSenderForResult(intent, requestCode, fillInIntent, flagsMask,
flagsValues, extraFlags, options);
}
public void startActivity(Intent intent) {
mHostActivity.startActivity(intent);
}
public void startActivity(Intent intent, Bundle options) {
mHostActivity.startActivity(intent, options);
}
public void startActivities(Intent[] intents) {
mHostActivity.startActivities(intents);
}
public void startActivities(Intent[] intents, Bundle options) {
mHostActivity.startActivities(intents, options);
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
}
public void startIntentSender(IntentSender intent, Intent fillInIntent,
int flagsMask, int flagsValues, int extraFlags)
throws SendIntentException {
mHostActivity.startIntentSender(intent, fillInIntent, flagsMask, flagsValues,
extraFlags);
}
public void startIntentSender(IntentSender intent, Intent fillInIntent,
int flagsMask, int flagsValues, int extraFlags, Bundle options)
throws SendIntentException {
mHostActivity.startIntentSender(intent, fillInIntent, flagsMask, flagsValues,
extraFlags, options);
}
public boolean startActivityIfNeeded(Intent intent, int requestCode) {
return mHostActivity.startActivityIfNeeded(intent, requestCode);
}
public boolean startActivityIfNeeded(Intent intent, int requestCode,
Bundle options) {
return mHostActivity.startActivityIfNeeded(intent, requestCode, options);
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
}
public boolean startNextMatchingActivity(Intent intent) {
return mHostActivity.startNextMatchingActivity(intent);
}
public boolean startNextMatchingActivity(Intent intent, Bundle options) {
return mHostActivity.startNextMatchingActivity(intent, options);
}
// Child/fragment variants: the host stands in as the parent for child activities
// and fragments started from this shim.
public void startActivityFromChild(Activity child, Intent intent,
int requestCode) {
mHostActivity.startActivityFromChild(child, intent, requestCode);
}
public void startActivityFromChild(Activity child, Intent intent,
int requestCode, Bundle options) {
mHostActivity.startActivityFromChild(child, intent, requestCode, options);
}
public void startActivityFromFragment(Fragment fragment, Intent intent,
int requestCode) {
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
mHostActivity.startActivityFromFragment(fragment, intent, requestCode);
}
public void startActivityFromFragment(Fragment fragment, Intent intent,
int requestCode, Bundle options) {
mHostActivity.startActivityFromFragment(fragment, intent, requestCode, options);
}
public void startIntentSenderFromChild(Activity child, IntentSender intent,
int requestCode, Intent fillInIntent, int flagsMask,
int flagsValues, int extraFlags) throws SendIntentException {
mHostActivity.startIntentSenderFromChild(child, intent, requestCode, fillInIntent,
flagsMask, flagsValues, extraFlags);
}
public void startIntentSenderFromChild(Activity child, IntentSender intent,
int requestCode, Intent fillInIntent, int flagsMask,
int flagsValues, int extraFlags, Bundle options)
throws SendIntentException {
mHostActivity.startIntentSenderFromChild(child, intent, requestCode, fillInIntent,
flagsMask, flagsValues, extraFlags, options);
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
// Generated delegation shim (continued): lifecycle/finish/result plumbing,
// all forwarded to the hosting Activity. Edit the .template, not this file.
public void overridePendingTransition(int enterAnim, int exitAnim) {
// Logs a warning yet still forwards the call; the warning suggests the plugin
// framework does not fully support transition animations — TODO confirm.
Log.w(TAG, "overridePendingTransition() is not impled.");
mHostActivity.overridePendingTransition(enterAnim, exitAnim);
}
public String getCallingPackage() {
return mHostActivity.getCallingPackage();
}
public ComponentName getCallingActivity() {
return mHostActivity.getCallingActivity();
}
public void setVisible(boolean visible) {
mHostActivity.setVisible(visible);
}
public boolean isFinishing() {
return mHostActivity.isFinishing();
}
public boolean isDestroyed() {
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
return mHostActivity.isDestroyed();
}
public boolean isChangingConfigurations() {
return mHostActivity.isChangingConfigurations();
}
public void recreate() {
mHostActivity.recreate();
}
public void finish() {
mHostActivity.finish();
}
public void finishAffinity() {
mHostActivity.finishAffinity();
}
public void finishFromChild(Activity child) {
mHostActivity.finishFromChild(child);
}
public void finishAfterTransition() {
mHostActivity.finishAfterTransition();
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
public void finishActivity(int requestCode) {
mHostActivity.finishActivity(requestCode);
}
public void finishActivityFromChild(Activity child, int requestCode) {
mHostActivity.finishActivityFromChild(child, requestCode);
}
public void finishAndRemoveTask() {
mHostActivity.finishAndRemoveTask();
}
public boolean releaseInstance() {
return mHostActivity.releaseInstance();
}
// Empty callback hooks: subclasses override these; the shim itself does nothing.
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
}
public void onActivityReenter(int resultCode, Intent data) {
}
public PendingIntent createPendingResult(int requestCode, Intent data,
int flags) {
return mHostActivity.createPendingResult(requestCode, data, flags);
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
// Generated delegation shim (continued): orientation/task/title/preferences
// accessors, all forwarded to the hosting Activity.
public void setRequestedOrientation(int requestedOrientation) {
mHostActivity.setRequestedOrientation(requestedOrientation);
}
public int getRequestedOrientation() {
return mHostActivity.getRequestedOrientation();
}
public int getTaskId() {
return mHostActivity.getTaskId();
}
public boolean isTaskRoot() {
return mHostActivity.isTaskRoot();
}
public boolean moveTaskToBack(boolean nonRoot) {
return mHostActivity.moveTaskToBack(nonRoot);
}
// NOTE(review): these return the HOST's class/component name, not the plugin's
// — callers that expect the plugin identity should be aware.
public String getLocalClassName() {
return mHostActivity.getLocalClassName();
}
public ComponentName getComponentName() {
return mHostActivity.getComponentName();
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
public SharedPreferences getPreferences(int mode) {
return mHostActivity.getPreferences(mode);
}
public Object getSystemService(String name) {
return mHostActivity.getSystemService(name);
}
public void setTitle(CharSequence title) {
mHostActivity.setTitle(title);
}
public void setTitle(int titleId) {
mHostActivity.setTitle(titleId);
}
public void setTitleColor(int textColor) {
mHostActivity.setTitleColor(textColor);
}
// Empty callback hooks for subclasses.
protected void onTitleChanged(CharSequence title, int color) {
}
protected void onChildTitleChanged(Activity childActivity,
CharSequence title) {
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
}
public void setTaskDescription(TaskDescription taskDescription) {
mHostActivity.setTaskDescription(taskDescription);
}
// Layout-inflation factory hooks: returning null defers to default inflation.
public View onCreateView(String name, Context context, AttributeSet attrs) {
return null;
}
public View onCreateView(View parent, String name, Context context,
AttributeSet attrs) {
return null;
}
public void dump(String prefix, FileDescriptor fd, PrintWriter writer,
String[] args) {
mHostActivity.dump(prefix, fd, writer, args);
}
public boolean isImmersive() {
return mHostActivity.isImmersive();
}
public boolean requestVisibleBehind(boolean visible) {
return mHostActivity.requestVisibleBehind(visible);
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
// Generated delegation shim (continued): action-mode, up-navigation, shared
// element transition, and lock-task forwarding. Empty bodies are subclass hooks.
public void onVisibleBehindCanceled() {
}
public void onEnterAnimationComplete() {
}
public void setImmersive(boolean i) {
mHostActivity.setImmersive(i);
}
public ActionMode startActionMode(Callback callback) {
return mHostActivity.startActionMode(callback);
}
// Returning null declines to provide a window-level action mode here.
public ActionMode onWindowStartingActionMode(Callback callback) {
return null;
}
public void onActionModeStarted(ActionMode mode) {
}
public void onActionModeFinished(ActionMode mode) {
}
public boolean shouldUpRecreateTask(Intent targetIntent) {
return mHostActivity.shouldUpRecreateTask(targetIntent);
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
}
public boolean navigateUpTo(Intent upIntent) {
return mHostActivity.navigateUpTo(upIntent);
}
public boolean navigateUpToFromChild(Activity child, Intent upIntent) {
return mHostActivity.navigateUpToFromChild(child, upIntent);
}
public Intent getParentActivityIntent() {
return mHostActivity.getParentActivityIntent();
}
public void setEnterSharedElementCallback(SharedElementCallback callback) {
mHostActivity.setEnterSharedElementCallback(callback);
}
public void setExitSharedElementCallback(SharedElementCallback callback) {
mHostActivity.setExitSharedElementCallback(callback);
}
public void postponeEnterTransition() {
mHostActivity.postponeEnterTransition();
}
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
public void startPostponedEnterTransition() {
mHostActivity.startPostponedEnterTransition();
}
public void startLockTask() {
mHostActivity.startLockTask();
}
public void stopLockTask() {
mHostActivity.stopLockTask();
}
// tag_start:ActivityGroup
// tag_end:ActivityGroup
// tag_start:ListActivity
// tag_end:ListActivity
// tag_start:FragmentActivity
// tag_end:FragmentActivity
// tag_start:PreferenceActivity
// tag_end:PreferenceActivity
// tag_start:ExpandableListActivity
// tag_end:ExpandableListActivity
//do NOT edit this file, auto-generated by host_target.groovy from Target_Activity.java.template
// tag_start:TabActivity
// tag_end:TabActivity
// tag_start:ActionBarActivity
// tag_end:ActionBarActivity
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.analyzer;
import com.facebook.presto.Session;
import com.facebook.presto.metadata.FunctionRegistry;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.metadata.OperatorNotFoundException;
import com.facebook.presto.metadata.OperatorType;
import com.facebook.presto.metadata.Signature;
import com.facebook.presto.security.AccessControl;
import com.facebook.presto.security.DenyAllAccessControl;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.StandardErrorCode;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.TypeManager;
import com.facebook.presto.spi.type.TypeSignature;
import com.facebook.presto.spi.type.VarcharType;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.planner.DependencyExtractor;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.tree.ArithmeticBinaryExpression;
import com.facebook.presto.sql.tree.ArithmeticUnaryExpression;
import com.facebook.presto.sql.tree.ArrayConstructor;
import com.facebook.presto.sql.tree.BetweenPredicate;
import com.facebook.presto.sql.tree.BinaryLiteral;
import com.facebook.presto.sql.tree.BooleanLiteral;
import com.facebook.presto.sql.tree.Cast;
import com.facebook.presto.sql.tree.CoalesceExpression;
import com.facebook.presto.sql.tree.ComparisonExpression;
import com.facebook.presto.sql.tree.CurrentTime;
import com.facebook.presto.sql.tree.DereferenceExpression;
import com.facebook.presto.sql.tree.DoubleLiteral;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.Extract;
import com.facebook.presto.sql.tree.FunctionCall;
import com.facebook.presto.sql.tree.GenericLiteral;
import com.facebook.presto.sql.tree.IfExpression;
import com.facebook.presto.sql.tree.InListExpression;
import com.facebook.presto.sql.tree.InPredicate;
import com.facebook.presto.sql.tree.InputReference;
import com.facebook.presto.sql.tree.IntervalLiteral;
import com.facebook.presto.sql.tree.IsNotNullPredicate;
import com.facebook.presto.sql.tree.IsNullPredicate;
import com.facebook.presto.sql.tree.LikePredicate;
import com.facebook.presto.sql.tree.LogicalBinaryExpression;
import com.facebook.presto.sql.tree.LongLiteral;
import com.facebook.presto.sql.tree.Node;
import com.facebook.presto.sql.tree.NotExpression;
import com.facebook.presto.sql.tree.NullIfExpression;
import com.facebook.presto.sql.tree.NullLiteral;
import com.facebook.presto.sql.tree.QualifiedName;
import com.facebook.presto.sql.tree.QualifiedNameReference;
import com.facebook.presto.sql.tree.Row;
import com.facebook.presto.sql.tree.SearchedCaseExpression;
import com.facebook.presto.sql.tree.SimpleCaseExpression;
import com.facebook.presto.sql.tree.SortItem;
import com.facebook.presto.sql.tree.StackableAstVisitor;
import com.facebook.presto.sql.tree.StringLiteral;
import com.facebook.presto.sql.tree.SubqueryExpression;
import com.facebook.presto.sql.tree.SubscriptExpression;
import com.facebook.presto.sql.tree.TimeLiteral;
import com.facebook.presto.sql.tree.TimestampLiteral;
import com.facebook.presto.sql.tree.WhenClause;
import com.facebook.presto.sql.tree.WindowFrame;
import com.facebook.presto.type.RowType;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import io.airlift.slice.SliceUtf8;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import static com.facebook.presto.metadata.OperatorType.SUBSCRIPT;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.DateType.DATE;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.IntervalDayTimeType.INTERVAL_DAY_TIME;
import static com.facebook.presto.spi.type.IntervalYearMonthType.INTERVAL_YEAR_MONTH;
import static com.facebook.presto.spi.type.TimeType.TIME;
import static com.facebook.presto.spi.type.TimeWithTimeZoneType.TIME_WITH_TIME_ZONE;
import static com.facebook.presto.spi.type.TimestampType.TIMESTAMP;
import static com.facebook.presto.spi.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE;
import static com.facebook.presto.spi.type.TypeSignature.parseTypeSignature;
import static com.facebook.presto.spi.type.VarbinaryType.VARBINARY;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.AMBIGUOUS_ATTRIBUTE;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.EXPRESSION_NOT_CONSTANT;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.INVALID_LITERAL;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_ATTRIBUTE;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MULTIPLE_FIELDS_FROM_SUBQUERY;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NOT_SUPPORTED;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.TYPE_MISMATCH;
import static com.facebook.presto.sql.tree.Extract.Field.TIMEZONE_HOUR;
import static com.facebook.presto.sql.tree.Extract.Field.TIMEZONE_MINUTE;
import static com.facebook.presto.type.ArrayParametricType.ARRAY;
import static com.facebook.presto.type.JsonType.JSON;
import static com.facebook.presto.type.RowType.RowField;
import static com.facebook.presto.type.TypeRegistry.canCoerce;
import static com.facebook.presto.type.TypeRegistry.getCommonSuperTypeSignature;
import static com.facebook.presto.type.UnknownType.UNKNOWN;
import static com.facebook.presto.util.DateTimeUtils.parseTimestampLiteral;
import static com.facebook.presto.util.DateTimeUtils.timeHasTimeZone;
import static com.facebook.presto.util.DateTimeUtils.timestampHasTimeZone;
import static com.facebook.presto.util.ImmutableCollectors.toImmutableList;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Sets.newIdentityHashSet;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
public class ExpressionAnalyzer
{
// Collaborators injected at construction time.
private final FunctionRegistry functionRegistry;
private final TypeManager typeManager;
private final Function<Node, StatementAnalyzer> statementAnalyzerFactory;
// Analysis side-channel outputs, keyed by AST node IDENTITY (not equals):
// two structurally equal expressions at different positions may carry
// different resolutions, so IdentityHashMap/newIdentityHashSet are required.
private final IdentityHashMap<FunctionCall, Signature> resolvedFunctions = new IdentityHashMap<>();
// Expression -> field index within the relation it resolved against.
private final IdentityHashMap<Expression, Integer> resolvedNames = new IdentityHashMap<>();
// Every analyzed expression's inferred type.
private final IdentityHashMap<Expression, Type> expressionTypes = new IdentityHashMap<>();
private final Set<SubqueryExpression> scalarSubqueries = newIdentityHashSet();
// Expressions that need an implicit cast, mapped to the target type.
private final IdentityHashMap<Expression, Type> expressionCoercions = new IdentityHashMap<>();
// IN predicates whose value list is a subquery (planned differently).
private final Set<InPredicate> subqueryInPredicates = newIdentityHashSet();
private final Session session;
/**
 * Creates an analyzer; all collaborators are mandatory.
 *
 * @throws NullPointerException if any argument is null
 */
public ExpressionAnalyzer(FunctionRegistry functionRegistry, TypeManager typeManager, Function<Node, StatementAnalyzer> statementAnalyzerFactory, Session session)
{
this.functionRegistry = requireNonNull(functionRegistry, "functionRegistry is null");
this.typeManager = requireNonNull(typeManager, "typeManager is null");
this.statementAnalyzerFactory = requireNonNull(statementAnalyzerFactory, "statementAnalyzerFactory is null");
this.session = requireNonNull(session, "session is null");
}
// Accessors expose the live internal maps/sets (no defensive copy); callers
// are expected to treat them as read-only views of the analysis results.
public Map<Expression, Integer> getResolvedNames()
{
return resolvedNames;
}
public IdentityHashMap<FunctionCall, Signature> getResolvedFunctions()
{
return resolvedFunctions;
}
public IdentityHashMap<Expression, Type> getExpressionTypes()
{
return expressionTypes;
}
public IdentityHashMap<Expression, Type> getExpressionCoercions()
{
return expressionCoercions;
}
public Set<InPredicate> getSubqueryInPredicates()
{
return subqueryInPredicates;
}
public Set<Expression> getColumnReferences()
{
// NOTE(review): copyOf builds an equals()-based set from an identity-based
// keySet, so structurally equal column references collapse to one entry —
// verify that callers only need the distinct expressions.
return ImmutableSet.copyOf(resolvedNames.keySet());
}
/**
 * Computes the type of {@code expression} resolved against the given relation,
 * recording types, coercions, and name resolutions as side effects on this
 * analyzer's maps.
 *
 * @param tupleDescriptor the tuple descriptor to use to resolve QualifiedNames
 * @param context the namespace context of the surrounding query
 */
public Type analyze(Expression expression, RelationType tupleDescriptor, AnalysisContext context)
{
    StackableAstVisitor.StackableAstVisitorContext<AnalysisContext> visitorContext =
            new StackableAstVisitor.StackableAstVisitorContext<>(context);
    return new Visitor(tupleDescriptor).process(expression, visitorContext);
}
// Subqueries used in scalar position (must produce exactly one row/column).
public Set<SubqueryExpression> getScalarSubqueries()
{
return scalarSubqueries;
}
private class Visitor
extends StackableAstVisitor<Type, AnalysisContext>
{
// The relation against which QualifiedNames in the expression are resolved.
private final RelationType tupleDescriptor;
private Visitor(RelationType tupleDescriptor)
{
this.tupleDescriptor = requireNonNull(tupleDescriptor, "tupleDescriptor is null");
}
@SuppressWarnings("SuspiciousMethodCalls")
@Override
public Type process(Node node, @Nullable StackableAstVisitorContext<AnalysisContext> context)
{
// don't double process a node: expressionTypes doubles as a memo table, so
// re-visiting an already-typed subexpression is O(1) and side-effect free.
Type type = expressionTypes.get(node);
if (type != null) {
return type;
}
return super.process(node, context);
}
@Override
protected Type visitRow(Row node, StackableAstVisitorContext<AnalysisContext> context)
{
    // Type each item in order, then model the row as an anonymous (unnamed) ROW type.
    ImmutableList.Builder<Type> itemTypes = ImmutableList.builder();
    for (Expression item : node.getItems()) {
        itemTypes.add(process(item, context));
    }
    Type rowType = new RowType(itemTypes.build(), Optional.empty());
    expressionTypes.put(node, rowType);
    return rowType;
}
@Override
protected Type visitCurrentTime(CurrentTime node, StackableAstVisitorContext<AnalysisContext> context)
{
// Precision arguments (e.g. CURRENT_TIME(3)) are parsed but not implemented.
if (node.getPrecision() != null) {
throw new SemanticException(NOT_SUPPORTED, node, "non-default precision not yet supported");
}
Type type;
// Per SQL semantics here: CURRENT_TIME/CURRENT_TIMESTAMP carry a time zone,
// while LOCALTIME/LOCALTIMESTAMP use the zone-less types.
switch (node.getType()) {
case DATE:
type = DATE;
break;
case TIME:
type = TIME_WITH_TIME_ZONE;
break;
case LOCALTIME:
type = TIME;
break;
case TIMESTAMP:
type = TIMESTAMP_WITH_TIME_ZONE;
break;
case LOCALTIMESTAMP:
type = TIMESTAMP;
break;
default:
throw new SemanticException(NOT_SUPPORTED, node, "%s not yet supported", node.getType().getName());
}
expressionTypes.put(node, type);
return type;
}
@Override
protected Type visitQualifiedNameReference(QualifiedNameReference node, StackableAstVisitorContext<AnalysisContext> context)
{
    // A bare name must resolve to exactly one field of the input relation.
    List<Field> candidates = tupleDescriptor.resolveFields(node.getName());
    if (candidates.size() > 1) {
        throw new SemanticException(AMBIGUOUS_ATTRIBUTE, node, "Column '%s' is ambiguous", node.getName());
    }
    if (candidates.isEmpty()) {
        throw createMissingAttributeException(node);
    }
    Field resolved = candidates.get(0);
    // Record both the field index (for planning) and the type (for memoization).
    resolvedNames.put(node, tupleDescriptor.indexOf(resolved));
    expressionTypes.put(node, resolved.getType());
    return resolved.getType();
}
@Override
protected Type visitDereferenceExpression(DereferenceExpression node, StackableAstVisitorContext<AnalysisContext> context)
{
// A dereference chain like a.b.c may be either a qualified column reference
// or a ROW-field access; column interpretation wins when it resolves.
QualifiedName qualifiedName = DereferenceExpression.getQualifiedName(node);
// If this Dereference looks like column reference, try match it to column first.
if (qualifiedName != null) {
List<Field> matches = tupleDescriptor.resolveFields(qualifiedName);
if (matches.size() > 1) {
throw new SemanticException(AMBIGUOUS_ATTRIBUTE, node, "Column '%s' is ambiguous", node);
}
if (matches.size() == 1) {
Field field = Iterables.getOnlyElement(matches);
int fieldIndex = tupleDescriptor.indexOf(field);
resolvedNames.put(node, fieldIndex);
expressionTypes.put(node, field.getType());
return field.getType();
}
// No direct match: require that SOME prefix is a column before falling
// through to row-field access, otherwise report a missing attribute.
assertColumnPrefix(qualifiedName, node);
}
// Row-field access path: base must type to ROW.
Type baseType = process(node.getBase(), context);
if (!(baseType instanceof RowType)) {
throw new SemanticException(TYPE_MISMATCH, node.getBase(), "Expression %s is not of type ROW", node.getBase());
}
RowType rowType = (RowType) baseType;
Type rowFieldType = null;
// NOTE(review): this field-name match is exact (case-sensitive Optional
// equality) — confirm whether identifiers should compare case-insensitively.
for (RowField rowField : rowType.getFields()) {
if (rowField.getName().equals(Optional.of(node.getFieldName()))) {
rowFieldType = rowField.getType();
break;
}
}
if (rowFieldType == null) {
throw createMissingAttributeException(node);
}
expressionTypes.put(node, rowFieldType);
return rowFieldType;
}
// Walks up the prefix chain of a dotted name; succeeds as soon as any prefix
// resolves to at least one column, otherwise reports the attribute as missing.
private void assertColumnPrefix(QualifiedName qualifiedName, Expression node)
{
    for (QualifiedName prefix = qualifiedName.getPrefix().orElse(null);
            prefix != null;
            prefix = prefix.getPrefix().orElse(null)) {
        if (!tupleDescriptor.resolveFields(prefix).isEmpty()) {
            // The AMBIGUOUS_ATTRIBUTE exception will be thrown later with the right node if matches.size() > 1
            return;
        }
    }
    throw createMissingAttributeException(node);
}
// Shared factory for the "unresolvable column" error so both the qualified-name
// and dereference paths report it identically.
private SemanticException createMissingAttributeException(Expression node)
{
return new SemanticException(MISSING_ATTRIBUTE, node, "Column '%s' cannot be resolved", node);
}
@Override
protected Type visitNotExpression(NotExpression node, StackableAstVisitorContext<AnalysisContext> context)
{
coerceType(context, node.getValue(), BOOLEAN, "Value of logical NOT expression");
expressionTypes.put(node, BOOLEAN);
return BOOLEAN;
}
@Override
protected Type visitLogicalBinaryExpression(LogicalBinaryExpression node, StackableAstVisitorContext<AnalysisContext> context)
{
coerceType(context, node.getLeft(), BOOLEAN, "Left side of logical expression");
coerceType(context, node.getRight(), BOOLEAN, "Right side of logical expression");
expressionTypes.put(node, BOOLEAN);
return BOOLEAN;
}
@Override
protected Type visitComparisonExpression(ComparisonExpression node, StackableAstVisitorContext<AnalysisContext> context)
{
OperatorType operatorType;
if (node.getType() == ComparisonExpression.Type.IS_DISTINCT_FROM) {
operatorType = OperatorType.EQUAL;
}
else {
operatorType = OperatorType.valueOf(node.getType().name());
}
return getOperator(context, node, operatorType, node.getLeft(), node.getRight());
}
@Override
protected Type visitIsNullPredicate(IsNullPredicate node, StackableAstVisitorContext<AnalysisContext> context)
{
process(node.getValue(), context);
expressionTypes.put(node, BOOLEAN);
return BOOLEAN;
}
@Override
protected Type visitIsNotNullPredicate(IsNotNullPredicate node, StackableAstVisitorContext<AnalysisContext> context)
{
process(node.getValue(), context);
expressionTypes.put(node, BOOLEAN);
return BOOLEAN;
}
@Override
protected Type visitNullIfExpression(NullIfExpression node, StackableAstVisitorContext<AnalysisContext> context)
{
Type firstType = process(node.getFirst(), context);
Type secondType = process(node.getSecond(), context);
if (!getCommonSuperTypeSignature(firstType.getTypeSignature(), secondType.getTypeSignature()).isPresent()) {
throw new SemanticException(TYPE_MISMATCH, node, "Types are not comparable with NULLIF: %s vs %s", firstType, secondType);
}
expressionTypes.put(node, firstType);
return firstType;
}
@Override
protected Type visitIfExpression(IfExpression node, StackableAstVisitorContext<AnalysisContext> context)
{
coerceType(context, node.getCondition(), BOOLEAN, "IF condition");
Type type;
if (node.getFalseValue().isPresent()) {
type = coerceToSingleType(context, node, "Result types for IF must be the same: %s vs %s", node.getTrueValue(), node.getFalseValue().get());
}
else {
type = process(node.getTrueValue(), context);
}
expressionTypes.put(node, type);
return type;
}
@Override
protected Type visitSearchedCaseExpression(SearchedCaseExpression node, StackableAstVisitorContext<AnalysisContext> context)
{
for (WhenClause whenClause : node.getWhenClauses()) {
coerceType(context, whenClause.getOperand(), BOOLEAN, "CASE WHEN clause");
}
Type type = coerceToSingleType(context,
"All CASE results must be the same type: %s",
getCaseResultExpressions(node.getWhenClauses(), node.getDefaultValue()));
expressionTypes.put(node, type);
for (WhenClause whenClause : node.getWhenClauses()) {
Type whenClauseType = process(whenClause.getResult(), context);
requireNonNull(whenClauseType, format("Expression types does not contain an entry for %s", whenClause));
expressionTypes.put(whenClause, whenClauseType);
}
return type;
}
@Override
protected Type visitSimpleCaseExpression(SimpleCaseExpression node, StackableAstVisitorContext<AnalysisContext> context)
{
for (WhenClause whenClause : node.getWhenClauses()) {
coerceToSingleType(context, whenClause, "CASE operand type does not match WHEN clause operand type: %s vs %s", node.getOperand(), whenClause.getOperand());
}
Type type = coerceToSingleType(context,
"All CASE results must be the same type: %s",
getCaseResultExpressions(node.getWhenClauses(), node.getDefaultValue()));
expressionTypes.put(node, type);
for (WhenClause whenClause : node.getWhenClauses()) {
Type whenClauseType = process(whenClause.getResult(), context);
requireNonNull(whenClauseType, format("Expression types does not contain an entry for %s", whenClause));
expressionTypes.put(whenClause, whenClauseType);
}
return type;
}
private List<Expression> getCaseResultExpressions(List<WhenClause> whenClauses, Optional<Expression> defaultValue)
{
List<Expression> resultExpressions = new ArrayList<>();
for (WhenClause whenClause : whenClauses) {
resultExpressions.add(whenClause.getResult());
}
defaultValue.ifPresent(resultExpressions::add);
return resultExpressions;
}
@Override
protected Type visitCoalesceExpression(CoalesceExpression node, StackableAstVisitorContext<AnalysisContext> context)
{
Type type = coerceToSingleType(context, "All COALESCE operands must be the same type: %s", node.getOperands());
expressionTypes.put(node, type);
return type;
}
@Override
protected Type visitArithmeticUnary(ArithmeticUnaryExpression node, StackableAstVisitorContext<AnalysisContext> context)
{
switch (node.getSign()) {
case PLUS:
Type type = process(node.getValue(), context);
if (!type.equals(BIGINT) && !type.equals(DOUBLE)) {
// TODO: figure out a type-agnostic way of dealing with this. Maybe add a special unary operator
// that types can chose to implement, or piggyback on the existence of the negation operator
throw new SemanticException(TYPE_MISMATCH, node, "Unary '+' operator cannot by applied to %s type", type);
}
expressionTypes.put(node, type);
return type;
case MINUS:
return getOperator(context, node, OperatorType.NEGATION, node.getValue());
}
throw new UnsupportedOperationException("Unsupported unary operator: " + node.getSign());
}
@Override
protected Type visitArithmeticBinary(ArithmeticBinaryExpression node, StackableAstVisitorContext<AnalysisContext> context)
{
return getOperator(context, node, OperatorType.valueOf(node.getType().name()), node.getLeft(), node.getRight());
}
@Override
protected Type visitLikePredicate(LikePredicate node, StackableAstVisitorContext<AnalysisContext> context)
{
Type valueType = getVarcharType(node.getValue(), context);
Type patternType = getVarcharType(node.getPattern(), context);
coerceType(context, node.getValue(), valueType, "Left side of LIKE expression");
coerceType(context, node.getPattern(), patternType, "Pattern for LIKE expression");
if (node.getEscape() != null) {
Type escapeType = getVarcharType(node.getEscape(), context);
coerceType(context, node.getEscape(), escapeType, "Escape for LIKE expression");
}
expressionTypes.put(node, BOOLEAN);
return BOOLEAN;
}
private Type getVarcharType(Expression value, StackableAstVisitorContext<AnalysisContext> context)
{
Type type = process(value, context);
if (!(type instanceof VarcharType)) {
return VARCHAR;
}
return type;
}
@Override
protected Type visitSubscriptExpression(SubscriptExpression node, StackableAstVisitorContext<AnalysisContext> context)
{
return getOperator(context, node, SUBSCRIPT, node.getBase(), node.getIndex());
}
@Override
protected Type visitArrayConstructor(ArrayConstructor node, StackableAstVisitorContext<AnalysisContext> context)
{
Type type = coerceToSingleType(context, "All ARRAY elements must be the same type: %s", node.getValues());
Type arrayType = typeManager.getParameterizedType(ARRAY.getName(), ImmutableList.of(type.getTypeSignature()), ImmutableList.of());
expressionTypes.put(node, arrayType);
return arrayType;
}
@Override
protected Type visitStringLiteral(StringLiteral node, StackableAstVisitorContext<AnalysisContext> context)
{
VarcharType type = VarcharType.createVarcharType(SliceUtf8.countCodePoints(node.getSlice()));
expressionTypes.put(node, type);
return type;
}
@Override
protected Type visitBinaryLiteral(BinaryLiteral node, StackableAstVisitorContext<AnalysisContext> context)
{
expressionTypes.put(node, VARBINARY);
return VARBINARY;
}
@Override
protected Type visitLongLiteral(LongLiteral node, StackableAstVisitorContext<AnalysisContext> context)
{
expressionTypes.put(node, BIGINT);
return BIGINT;
}
@Override
protected Type visitDoubleLiteral(DoubleLiteral node, StackableAstVisitorContext<AnalysisContext> context)
{
expressionTypes.put(node, DOUBLE);
return DOUBLE;
}
@Override
protected Type visitBooleanLiteral(BooleanLiteral node, StackableAstVisitorContext<AnalysisContext> context)
{
expressionTypes.put(node, BOOLEAN);
return BOOLEAN;
}
@Override
protected Type visitGenericLiteral(GenericLiteral node, StackableAstVisitorContext<AnalysisContext> context)
{
Type type = typeManager.getType(parseTypeSignature(node.getType()));
if (type == null) {
throw new SemanticException(TYPE_MISMATCH, node, "Unknown type: " + node.getType());
}
if (!JSON.equals(type)) {
try {
functionRegistry.getCoercion(VARCHAR, type);
}
catch (IllegalArgumentException e) {
throw new SemanticException(TYPE_MISMATCH, node, "No literal form for type %s", type);
}
}
expressionTypes.put(node, type);
return type;
}
@Override
protected Type visitTimeLiteral(TimeLiteral node, StackableAstVisitorContext<AnalysisContext> context)
{
Type type;
if (timeHasTimeZone(node.getValue())) {
type = TIME_WITH_TIME_ZONE;
}
else {
type = TIME;
}
expressionTypes.put(node, type);
return type;
}
@Override
protected Type visitTimestampLiteral(TimestampLiteral node, StackableAstVisitorContext<AnalysisContext> context)
{
try {
parseTimestampLiteral(session.getTimeZoneKey(), node.getValue());
}
catch (Exception e) {
throw new SemanticException(INVALID_LITERAL, node, "'%s' is not a valid timestamp literal", node.getValue());
}
Type type;
if (timestampHasTimeZone(node.getValue())) {
type = TIMESTAMP_WITH_TIME_ZONE;
}
else {
type = TIMESTAMP;
}
expressionTypes.put(node, type);
return type;
}
@Override
protected Type visitIntervalLiteral(IntervalLiteral node, StackableAstVisitorContext<AnalysisContext> context)
{
Type type;
if (node.isYearToMonth()) {
type = INTERVAL_YEAR_MONTH;
}
else {
type = INTERVAL_DAY_TIME;
}
expressionTypes.put(node, type);
return type;
}
@Override
protected Type visitNullLiteral(NullLiteral node, StackableAstVisitorContext<AnalysisContext> context)
{
expressionTypes.put(node, UNKNOWN);
return UNKNOWN;
}
@Override
protected Type visitFunctionCall(FunctionCall node, StackableAstVisitorContext<AnalysisContext> context)
{
// Window (OVER) clause checks: PARTITION BY items must be comparable,
// ORDER BY items must be orderable, and frame bounds must be BIGINT.
if (node.getWindow().isPresent()) {
for (Expression expression : node.getWindow().get().getPartitionBy()) {
process(expression, context);
Type type = expressionTypes.get(expression);
if (!type.isComparable()) {
throw new SemanticException(TYPE_MISMATCH, node, "%s is not comparable, and therefore cannot be used in window function PARTITION BY", type);
}
}
for (SortItem sortItem : node.getWindow().get().getOrderBy()) {
process(sortItem.getSortKey(), context);
Type type = expressionTypes.get(sortItem.getSortKey());
if (!type.isOrderable()) {
throw new SemanticException(TYPE_MISMATCH, node, "%s is not orderable, and therefore cannot be used in window function ORDER BY", type);
}
}
if (node.getWindow().get().getFrame().isPresent()) {
WindowFrame frame = node.getWindow().get().getFrame().get();
if (frame.getStart().getValue().isPresent()) {
Type type = process(frame.getStart().getValue().get(), context);
if (!type.equals(BIGINT)) {
throw new SemanticException(TYPE_MISMATCH, node, "Window frame start value type must be BIGINT (actual %s)", type);
}
}
if (frame.getEnd().isPresent() && frame.getEnd().get().getValue().isPresent()) {
Type type = process(frame.getEnd().get().getValue().get(), context);
if (!type.equals(BIGINT)) {
throw new SemanticException(TYPE_MISMATCH, node, "Window frame end value type must be BIGINT (actual %s)", type);
}
}
}
}
// Analyze the argument expressions first; their types drive overload resolution.
ImmutableList.Builder<TypeSignature> argumentTypes = ImmutableList.builder();
for (Expression expression : node.getArguments()) {
argumentTypes.add(process(expression, context).getTypeSignature());
}
// Resolve the function signature, translating "function not found" into a
// semantic error so it carries the node's source location.
Signature function;
try {
function = functionRegistry.resolveFunction(node.getName(), argumentTypes.build(), context.getContext().isApproximate());
}
catch (PrestoException e) {
if (e.getErrorCode().getCode() == StandardErrorCode.FUNCTION_NOT_FOUND.toErrorCode().getCode()) {
throw new SemanticException(SemanticErrorCode.FUNCTION_NOT_FOUND, node, e.getMessage());
}
throw e;
}
// Record a coercion for every argument whose type differs from the
// resolved signature's declared argument type.
for (int i = 0; i < node.getArguments().size(); i++) {
Expression expression = node.getArguments().get(i);
Type type = typeManager.getType(function.getArgumentTypes().get(i));
requireNonNull(type, format("Type %s not found", function.getArgumentTypes().get(i)));
// DISTINCT aggregation needs comparable argument types.
if (node.isDistinct() && !type.isComparable()) {
throw new SemanticException(TYPE_MISMATCH, node, "DISTINCT can only be applied to comparable types (actual: %s)", type);
}
coerceType(context, expression, type, format("Function %s argument %d", function, i));
}
resolvedFunctions.put(node, function);
Type type = typeManager.getType(function.getReturnType());
expressionTypes.put(node, type);
return type;
}
@Override
protected Type visitExtract(Extract node, StackableAstVisitorContext<AnalysisContext> context)
{
    // EXTRACT only operates on temporal values.
    Type sourceType = process(node.getExpression(), context);
    if (!isDateTimeType(sourceType)) {
        throw new SemanticException(TYPE_MISMATCH, node.getExpression(), "Type of argument to extract must be DATE, TIME, TIMESTAMP, or INTERVAL (actual %s)", sourceType);
    }
    // Time-zone fields additionally require a type that actually carries a zone.
    Extract.Field field = node.getField();
    boolean zoned = sourceType.equals(TIME_WITH_TIME_ZONE) || sourceType.equals(TIMESTAMP_WITH_TIME_ZONE);
    if ((field == TIMEZONE_HOUR || field == TIMEZONE_MINUTE) && !zoned) {
        throw new SemanticException(TYPE_MISMATCH, node.getExpression(), "Type of argument to extract time zone field must have a time zone (actual %s)", sourceType);
    }
    expressionTypes.put(node, BIGINT);
    return BIGINT;
}
// True for every temporal type that EXTRACT understands.
private boolean isDateTimeType(Type candidate)
{
    return candidate.equals(DATE)
            || candidate.equals(TIME)
            || candidate.equals(TIME_WITH_TIME_ZONE)
            || candidate.equals(TIMESTAMP)
            || candidate.equals(TIMESTAMP_WITH_TIME_ZONE)
            || candidate.equals(INTERVAL_DAY_TIME)
            || candidate.equals(INTERVAL_YEAR_MONTH);
}
@Override
protected Type visitBetweenPredicate(BetweenPredicate node, StackableAstVisitorContext<AnalysisContext> context)
{
    // BETWEEN is type-checked exactly like its underlying ternary operator.
    Expression value = node.getValue();
    return getOperator(context, node, OperatorType.BETWEEN, value, node.getMin(), node.getMax());
}
@Override
public Type visitCast(Cast node, StackableAstVisitorContext<AnalysisContext> context)
{
// Resolve the CAST's target type name.
Type type = typeManager.getType(parseTypeSignature(node.getType()));
if (type == null) {
throw new SemanticException(TYPE_MISMATCH, node, "Unknown type: " + node.getType());
}
if (type.equals(UNKNOWN)) {
throw new SemanticException(TYPE_MISMATCH, node, "UNKNOWN is not a valid type");
}
// Analyze the operand. Casting from UNKNOWN (NULL) is always allowed, and
// "type only" casts skip the coercion-existence check.
Type value = process(node.getExpression(), context);
if (!value.equals(UNKNOWN) && !node.isTypeOnly()) {
try {
functionRegistry.getCoercion(value, type);
}
catch (OperatorNotFoundException e) {
throw new SemanticException(TYPE_MISMATCH, node, "Cannot cast %s to %s", value, type);
}
}
expressionTypes.put(node, type);
return type;
}
@Override
protected Type visitInPredicate(InPredicate node, StackableAstVisitorContext<AnalysisContext> context)
{
// Analyze the tested value and the value list (an explicit IN list or a subquery).
Expression value = node.getValue();
process(value, context);
Expression valueList = node.getValueList();
process(valueList, context);
// Explicit list: unify the value's type with every list item's type.
if (valueList instanceof InListExpression) {
InListExpression inListExpression = (InListExpression) valueList;
coerceToSingleType(context,
"IN value and list items must be the same type: %s",
ImmutableList.<Expression>builder().add(value).addAll(inListExpression.getValues()).build());
}
// Subquery: unify the value's type with the subquery's single output column.
else if (valueList instanceof SubqueryExpression) {
coerceToSingleType(context, node, "value and result of subquery must be of the same type for IN expression: %s vs %s", value, valueList);
}
expressionTypes.put(node, BOOLEAN);
return BOOLEAN;
}
@Override
protected Type visitInListExpression(InListExpression node, StackableAstVisitorContext<AnalysisContext> context)
{
    // All items in the IN list must share a single common type.
    Type common = coerceToSingleType(context, "All IN list values must be the same type: %s", node.getValues());
    expressionTypes.put(node, common);
    // TODO: this should really be a relation type
    return common;
}
@Override
protected Type visitSubqueryExpression(SubqueryExpression node, StackableAstVisitorContext<AnalysisContext> context)
{
// Delegate analysis of the subquery's statement to a fresh StatementAnalyzer.
StatementAnalyzer analyzer = statementAnalyzerFactory.apply(node);
RelationType descriptor = analyzer.process(node.getQuery(), context.getContext());
// Subquery should only produce one column
if (descriptor.getVisibleFieldCount() != 1) {
throw new SemanticException(MULTIPLE_FIELDS_FROM_SUBQUERY,
node,
"Multiple columns returned by subquery are not yet supported. Found %s",
descriptor.getVisibleFieldCount());
}
// Classify the subquery: the value-list side of an IN predicate is tracked
// separately from scalar subqueries (the IN *value* itself stays scalar).
Optional<Node> previousNode = context.getPreviousNode();
if (previousNode.isPresent() && previousNode.get() instanceof InPredicate && ((InPredicate) previousNode.get()).getValue() != node) {
subqueryInPredicates.add((InPredicate) previousNode.get());
}
else {
scalarSubqueries.add(node);
}
// The expression's type is the type of the subquery's single visible column.
Type type = Iterables.getOnlyElement(descriptor.getVisibleFields()).getType();
expressionTypes.put(node, type);
return type;
}
@Override
public Type visitInputReference(InputReference node, StackableAstVisitorContext<AnalysisContext> context)
{
    // Channel references take their type directly from the input descriptor.
    Field field = tupleDescriptor.getFieldByIndex(node.getChannel());
    Type fieldType = field.getType();
    expressionTypes.put(node, fieldType);
    return fieldType;
}
@Override
protected Type visitExpression(Expression node, StackableAstVisitorContext<AnalysisContext> context)
{
    // Fallback for AST nodes without a dedicated visit method.
    String nodeClass = node.getClass().getName();
    throw new SemanticException(NOT_SUPPORTED, node, "not yet implemented: " + nodeClass);
}
// Resolves the operator signature for the given argument expressions, records
// any argument coercions required by the resolved signature, and returns (and
// records) the operator's result type for this node.
private Type getOperator(StackableAstVisitorContext<AnalysisContext> context, Expression node, OperatorType operatorType, Expression... arguments)
{
ImmutableList.Builder<Type> argumentTypes = ImmutableList.builder();
for (Expression expression : arguments) {
argumentTypes.add(process(expression, context));
}
// Translate a failed lookup into a semantic error carrying the node location.
Signature operatorSignature;
try {
operatorSignature = functionRegistry.resolveOperator(operatorType, argumentTypes.build());
}
catch (OperatorNotFoundException e) {
throw new SemanticException(TYPE_MISMATCH, node, "%s", e.getMessage());
}
// Coerce each argument to the type declared by the resolved signature.
for (int i = 0; i < arguments.length; i++) {
Expression expression = arguments[i];
Type type = typeManager.getType(operatorSignature.getArgumentTypes().get(i));
coerceType(context, expression, type, format("Operator %s argument %d", operatorSignature, i));
}
Type type = typeManager.getType(operatorSignature.getReturnType());
expressionTypes.put(node, type);
return type;
}
// Analyzes the expression and records an implicit coercion when its actual
// type differs from (but is coercible to) the expected type.
private void coerceType(StackableAstVisitorContext<AnalysisContext> context, Expression expression, Type expectedType, String message)
{
    Type actualType = process(expression, context);
    if (actualType.equals(expectedType)) {
        return;
    }
    if (!canCoerce(actualType, expectedType)) {
        throw new SemanticException(TYPE_MISMATCH, expression, message + " must evaluate to a %s (actual: %s)", expectedType, actualType);
    }
    expressionCoercions.put(expression, expectedType);
}
// Unifies the types of two (possibly absent) expressions, recording an
// implicit coercion on whichever side needs one.
private Type coerceToSingleType(StackableAstVisitorContext<AnalysisContext> context, Node node, String message, Expression first, Expression second)
{
    // Analyze both sides; either may be null. (first is processed before second.)
    Type firstType = (first == null) ? null : process(first, context);
    Type secondType = (second == null) ? null : process(second, context);
    // With at most one side present there is nothing to reconcile.
    if (firstType == null) {
        return secondType;
    }
    if (secondType == null) {
        return firstType;
    }
    if (firstType.equals(secondType)) {
        return firstType;
    }
    // Otherwise coerce in whichever direction is allowed, first side preferred.
    if (canCoerce(firstType, secondType)) {
        expressionCoercions.put(first, secondType);
        return secondType;
    }
    if (canCoerce(secondType, firstType)) {
        expressionCoercions.put(second, firstType);
        return firstType;
    }
    throw new SemanticException(TYPE_MISMATCH, node, message, firstType, secondType);
}
// Unifies a list of expressions to their common super type, recording an
// implicit coercion for every expression whose type differs from it.
private Type coerceToSingleType(StackableAstVisitorContext<AnalysisContext> context, String message, List<Expression> expressions)
{
// determine super type
Type superType = UNKNOWN;
for (Expression expression : expressions) {
Optional<Type> newSuperType = typeManager.getCommonSuperType(superType, process(expression, context));
if (!newSuperType.isPresent()) {
throw new SemanticException(TYPE_MISMATCH, expression, message, superType);
}
superType = newSuperType.get();
}
// verify all expressions can be coerced to the superType
for (Expression expression : expressions) {
// Re-processing is cheap here: expression types were memoized on the first pass.
Type type = process(expression, context);
if (!type.equals(superType)) {
if (!canCoerce(type, superType)) {
throw new SemanticException(TYPE_MISMATCH, expression, message, superType);
}
expressionCoercions.put(expression, superType);
}
}
return superType;
}
}
/**
 * Convenience overload of {@link #getExpressionTypes} for a single expression.
 */
public static IdentityHashMap<Expression, Type> getExpressionTypes(
        Session session,
        Metadata metadata,
        SqlParser sqlParser,
        Map<Symbol, Type> types,
        Expression expression)
{
    Iterable<Expression> expressions = ImmutableList.of(expression);
    return getExpressionTypes(session, metadata, sqlParser, types, expressions);
}
/**
 * Analyzes the expressions against symbol types and returns the resulting
 * identity map from each (sub)expression to its type.
 */
public static IdentityHashMap<Expression, Type> getExpressionTypes(
        Session session,
        Metadata metadata,
        SqlParser sqlParser,
        Map<Symbol, Type> types,
        Iterable<? extends Expression> expressions)
{
    ExpressionAnalysis analysis = analyzeExpressionsWithSymbols(session, metadata, sqlParser, types, expressions);
    return analysis.getExpressionTypes();
}
/**
 * Convenience overload of {@link #getExpressionTypesFromInput} for a single expression.
 */
public static IdentityHashMap<Expression, Type> getExpressionTypesFromInput(
        Session session,
        Metadata metadata,
        SqlParser sqlParser,
        Map<Integer, Type> types,
        Expression expression)
{
    Iterable<Expression> expressions = ImmutableList.of(expression);
    return getExpressionTypesFromInput(session, metadata, sqlParser, types, expressions);
}
/**
 * Analyzes the expressions against positional input channel types and returns
 * the resulting identity map from each (sub)expression to its type.
 */
public static IdentityHashMap<Expression, Type> getExpressionTypesFromInput(
        Session session,
        Metadata metadata,
        SqlParser sqlParser,
        Map<Integer, Type> types,
        Iterable<? extends Expression> expressions)
{
    ExpressionAnalysis analysis = analyzeExpressionsWithInputs(session, metadata, sqlParser, types, expressions);
    return analysis.getExpressionTypes();
}
/**
 * Analyzes the expressions against a synthetic relation that exposes one
 * unqualified field per symbol referenced by the expressions.
 */
public static ExpressionAnalysis analyzeExpressionsWithSymbols(
        Session session,
        Metadata metadata,
        SqlParser sqlParser,
        Map<Symbol, Type> types,
        Iterable<? extends Expression> expressions)
{
    ImmutableList.Builder<Field> fields = ImmutableList.builder();
    for (Symbol symbol : DependencyExtractor.extractUnique(expressions)) {
        Type type = types.get(symbol);
        checkArgument(type != null, "No type for symbol %s", symbol);
        fields.add(Field.newUnqualified(symbol.getName(), type));
    }
    return analyzeExpressions(session, metadata, sqlParser, new RelationType(fields.build()), expressions);
}
// Analyzes the expressions against a positional relation built from the
// channel-index -> type map.
private static ExpressionAnalysis analyzeExpressionsWithInputs(
Session session,
Metadata metadata,
SqlParser sqlParser,
Map<Integer, Type> types,
Iterable<? extends Expression> expressions)
{
// NOTE(review): assumes the keys form a dense 0..size-1 range — a gap would
// leave a null Field slot; confirm against callers.
Field[] fields = new Field[types.size()];
for (Entry<Integer, Type> entry : types.entrySet()) {
fields[entry.getKey()] = Field.newUnqualified(Optional.empty(), entry.getValue());
}
RelationType tupleDescriptor = new RelationType(fields);
return analyzeExpressions(session, metadata, sqlParser, tupleDescriptor, expressions);
}
// Runs one analyzer over all expressions against the given relation and
// bundles its accumulated results into an ExpressionAnalysis.
private static ExpressionAnalysis analyzeExpressions(
Session session,
Metadata metadata,
SqlParser sqlParser,
RelationType tupleDescriptor,
Iterable<? extends Expression> expressions)
{
// expressions at this point can not have sub queries so deny all access checks
// in the future, we will need a full access controller here to verify access to functions
ExpressionAnalyzer analyzer = create(new Analysis(), session, metadata, sqlParser, new DenyAllAccessControl(), false);
for (Expression expression : expressions) {
analyzer.analyze(expression, tupleDescriptor, new AnalysisContext());
}
return new ExpressionAnalysis(
analyzer.getExpressionTypes(),
analyzer.getExpressionCoercions(),
analyzer.getSubqueryInPredicates(),
analyzer.getScalarSubqueries(),
analyzer.getResolvedNames().keySet());
}
// Analyzes a single expression against the given relation, merges the results
// (types, coercions, function signatures, resolved names) back into the shared
// Analysis, and returns them as an ExpressionAnalysis.
public static ExpressionAnalysis analyzeExpression(
Session session,
Metadata metadata,
AccessControl accessControl,
SqlParser sqlParser,
RelationType tupleDescriptor,
Analysis analysis,
boolean approximateQueriesEnabled,
AnalysisContext context,
Expression expression)
{
ExpressionAnalyzer analyzer = create(analysis, session, metadata, sqlParser, accessControl, approximateQueriesEnabled);
analyzer.analyze(expression, tupleDescriptor, context);
IdentityHashMap<Expression, Type> expressionTypes = analyzer.getExpressionTypes();
IdentityHashMap<Expression, Type> expressionCoercions = analyzer.getExpressionCoercions();
IdentityHashMap<FunctionCall, Signature> resolvedFunctions = analyzer.getResolvedFunctions();
// Publish this expression's results into the statement-wide analysis.
analysis.addTypes(expressionTypes);
analysis.addCoercions(expressionCoercions);
analysis.addFunctionSignatures(resolvedFunctions);
analysis.addResolvedNames(analyzer.getResolvedNames());
return new ExpressionAnalysis(
expressionTypes,
expressionCoercions,
analyzer.getSubqueryInPredicates(),
analyzer.getScalarSubqueries(),
analyzer.getColumnReferences());
}
// Creates an analyzer whose subqueries are handled by a full StatementAnalyzer
// bound to the given analysis, metadata and access control.
public static ExpressionAnalyzer create(
Analysis analysis,
Session session,
Metadata metadata,
SqlParser sqlParser,
AccessControl accessControl,
boolean experimentalSyntaxEnabled)
{
return new ExpressionAnalyzer(
metadata.getFunctionRegistry(),
metadata.getTypeManager(),
node -> new StatementAnalyzer(analysis, metadata, sqlParser, accessControl, session, experimentalSyntaxEnabled, Optional.empty()),
session);
}
/**
 * Creates an analyzer for constant expressions; any subquery encountered is
 * rejected with {@code EXPRESSION_NOT_CONSTANT}.
 */
public static ExpressionAnalyzer createConstantAnalyzer(Metadata metadata, Session session)
{
    FunctionRegistry registry = metadata.getFunctionRegistry();
    TypeManager typeManager = metadata.getTypeManager();
    return createWithoutSubqueries(
            registry,
            typeManager,
            session,
            EXPRESSION_NOT_CONSTANT,
            "Constant expression cannot contain a subquery");
}
/**
 * Creates an analyzer whose statement-analyzer factory always throws, so any
 * subquery fails with the supplied error code and message.
 */
public static ExpressionAnalyzer createWithoutSubqueries(FunctionRegistry functionRegistry, TypeManager typeManager, Session session, SemanticErrorCode errorCode, String message)
{
    return new ExpressionAnalyzer(
            functionRegistry,
            typeManager,
            node -> {
                throw new SemanticException(errorCode, node, message);
            },
            session);
}
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.master;
import static alluxio.util.network.NetworkAddressUtils.ServiceType;
import alluxio.AlluxioURI;
import alluxio.executor.ExecutorServiceBuilder;
import alluxio.RuntimeConstants;
import alluxio.conf.PropertyKey;
import alluxio.conf.ServerConfiguration;
import alluxio.grpc.GrpcServer;
import alluxio.grpc.GrpcServerAddress;
import alluxio.grpc.GrpcServerBuilder;
import alluxio.grpc.GrpcService;
import alluxio.grpc.JournalDomain;
import alluxio.master.journal.DefaultJournalMaster;
import alluxio.master.journal.JournalMasterClientServiceHandler;
import alluxio.master.journal.JournalSystem;
import alluxio.master.journal.JournalUtils;
import alluxio.master.journal.raft.RaftJournalSystem;
import alluxio.metrics.MetricKey;
import alluxio.metrics.MetricsSystem;
import alluxio.resource.CloseableResource;
import alluxio.security.user.ServerUserState;
import alluxio.underfs.MasterUfsManager;
import alluxio.underfs.UnderFileSystem;
import alluxio.underfs.UnderFileSystemConfiguration;
import alluxio.util.CommonUtils;
import alluxio.util.CommonUtils.ProcessType;
import alluxio.util.JvmPauseMonitor;
import alluxio.util.URIUtils;
import alluxio.util.WaitForOptions;
import alluxio.util.network.NetworkAddressUtils;
import alluxio.web.MasterWebServer;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.URI;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import javax.annotation.Nullable;
import javax.annotation.concurrent.NotThreadSafe;
import javax.annotation.concurrent.ThreadSafe;
/**
 * An Alluxio Master which runs a web and rpc server to handle FileSystem operations.
 */
@NotThreadSafe
public class AlluxioMasterProcess extends MasterProcess {
private static final Logger LOG = LoggerFactory.getLogger(AlluxioMasterProcess.class);
/** The master registry. */
private final MasterRegistry mRegistry;
/** The JVMMonitor Progress. */
private JvmPauseMonitor mJvmPauseMonitor;
/** The connect address for the rpc server. */
final InetSocketAddress mRpcConnectAddress;
/** The manager of safe mode state. */
protected final SafeModeManager mSafeModeManager;
/** Master context. */
protected final MasterContext mContext;
/** The manager for creating and restoring backups. */
private final BackupManager mBackupManager;
/** The manager of all ufs. */
private final MasterUfsManager mUfsManager;
/** RPC executor; created in createRPCServer(), shut down in stopLeaderServing(). */
private AlluxioExecutorService mRPCExecutor = null;
/**
 * Creates a new {@link AlluxioMasterProcess}.
 *
 * @param journalSystem the journal system backing this master
 */
AlluxioMasterProcess(JournalSystem journalSystem) {
super(journalSystem, ServiceType.MASTER_RPC, ServiceType.MASTER_WEB);
mRpcConnectAddress = NetworkAddressUtils.getConnectAddress(ServiceType.MASTER_RPC,
ServerConfiguration.global());
try {
// Fail fast: the process cannot run against an unformatted journal.
if (!mJournalSystem.isFormatted()) {
throw new RuntimeException(
String.format("Journal %s has not been formatted!", mJournalSystem));
}
// Create masters.
mRegistry = new MasterRegistry();
mSafeModeManager = new DefaultSafeModeManager();
mBackupManager = new BackupManager(mRegistry);
String baseDir = ServerConfiguration.get(PropertyKey.MASTER_METASTORE_DIR);
mUfsManager = new MasterUfsManager();
mContext = CoreMasterContext.newBuilder()
.setJournalSystem(mJournalSystem)
.setSafeModeManager(mSafeModeManager)
.setBackupManager(mBackupManager)
.setBlockStoreFactory(MasterUtils.getBlockStoreFactory(baseDir))
.setInodeStoreFactory(MasterUtils.getInodeStoreFactory(baseDir))
.setStartTimeMs(mStartTimeMs)
.setPort(NetworkAddressUtils
.getPort(ServiceType.MASTER_RPC, ServerConfiguration.global()))
.setUfsManager(mUfsManager)
.build();
MasterUtils.createMasters(mRegistry, mContext);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
public <T extends Master> T getMaster(Class<T> clazz) {
// Delegates master lookup to the registry populated in the constructor.
return mRegistry.get(clazz);
}
/**
 * @return true if Alluxio is running in safe mode, false otherwise
 */
public boolean isInSafeMode() {
return mSafeModeManager.isInSafeMode();
}
@Override
@Nullable
public InetSocketAddress getWebAddress() {
// Null until the web server has been started.
if (mWebServer != null) {
return new InetSocketAddress(mWebServer.getBindHost(), mWebServer.getLocalPort());
}
return null;
}
@Override
public InetSocketAddress getRpcAddress() {
return mRpcConnectAddress;
}
@Override
public void start() throws Exception {
LOG.info("Starting...");
// Order matters: journal first, then masters, then the serving endpoints.
mJournalSystem.start();
mJournalSystem.gainPrimacy();
startMasters(true);
startServing();
}
@Override
public void stop() throws Exception {
LOG.info("Stopping...");
// Reverse of start(): stop serving before stopping the journal and masters.
stopRejectingServers();
stopServing();
mJournalSystem.stop();
closeMasters();
LOG.info("Stopped.");
}
/**
 * Restores master metadata from the given backup.
 *
 * @param backup URI of the backup to read
 * @throws IOException if the backup cannot be read or applied
 */
private void initFromBackup(AlluxioURI backup) throws IOException {
CloseableResource<UnderFileSystem> ufsResource;
if (URIUtils.isLocalFilesystem(backup.toString())) {
// Local backups are read through a throwaway local UFS whose close is a no-op.
UnderFileSystem ufs = UnderFileSystem.Factory.create("/",
UnderFileSystemConfiguration.defaults(ServerConfiguration.global()));
ufsResource = new CloseableResource<UnderFileSystem>(ufs) {
@Override
public void closeResource() { }
};
} else {
ufsResource = mUfsManager.getRoot().acquireUfsResource();
}
try (CloseableResource<UnderFileSystem> closeUfs = ufsResource;
InputStream ufsIn = closeUfs.get().open(backup.getPath())) {
LOG.info("Initializing metadata from backup {}", backup);
mBackupManager.initFromBackup(ufsIn);
}
}
/**
 * Starts all masters, including block master, FileSystem master, and additional masters.
 *
 * @param isLeader if the Master is leader
 */
protected void startMasters(boolean isLeader) throws IOException {
LOG.info("Starting all masters as: {}.", (isLeader) ? "leader" : "follower");
if (isLeader) {
// Leaders may bootstrap state from a configured backup, but only when the
// journal is empty (a formatted journal with entries wins over the backup).
if (ServerConfiguration.isSet(PropertyKey.MASTER_JOURNAL_INIT_FROM_BACKUP)) {
AlluxioURI backup =
new AlluxioURI(ServerConfiguration.get(PropertyKey.MASTER_JOURNAL_INIT_FROM_BACKUP));
if (mJournalSystem.isEmpty()) {
initFromBackup(backup);
} else {
LOG.info("The journal system is not freshly formatted, skipping restoring backup from "
+ backup);
}
}
mSafeModeManager.notifyPrimaryMasterStarted();
} else {
startRejectingServers();
}
mRegistry.start(isLeader);
// Signal state-lock-manager that masters are ready.
mContext.getStateLockManager().mastersStartedCallback();
LOG.info("All masters started.");
}
/**
 * Stops all masters, including block master, fileSystem master and additional masters.
 */
protected void stopMasters() {
try {
LOG.info("Stopping all masters.");
mRegistry.stop();
LOG.info("All masters stopped.");
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
 * Closes all masters, including block master, fileSystem master and additional masters.
 */
protected void closeMasters() {
try {
LOG.info("Closing all masters.");
mRegistry.close();
LOG.info("Closed all masters.");
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
 * Starts serving web ui server, resetting master web port, adding the metrics servlet to the web
 * server and starting web ui.
 */
protected void startServingWebServer() {
LOG.info("Alluxio master web server version {}. webAddress={}",
RuntimeConstants.VERSION, mWebBindAddress);
stopRejectingWebServer();
mWebServer =
new MasterWebServer(ServiceType.MASTER_WEB.getServiceName(), mWebBindAddress, this);
// reset master web port
// start web ui
mWebServer.start();
}
/**
 * Starts jvm monitor process, to monitor jvm.
 */
protected void startJvmMonitorProcess() {
if (ServerConfiguration.getBoolean(PropertyKey.MASTER_JVM_MONITOR_ENABLED)) {
mJvmPauseMonitor = new JvmPauseMonitor(
ServerConfiguration.getMs(PropertyKey.JVM_MONITOR_SLEEP_INTERVAL_MS),
ServerConfiguration.getMs(PropertyKey.JVM_MONITOR_WARN_THRESHOLD_MS),
ServerConfiguration.getMs(PropertyKey.JVM_MONITOR_INFO_THRESHOLD_MS));
mJvmPauseMonitor.start();
// Expose pause-monitor counters as metrics gauges.
MetricsSystem.registerGaugeIfAbsent(
MetricsSystem.getMetricName(MetricKey.TOTAL_EXTRA_TIME.getName()),
mJvmPauseMonitor::getTotalExtraTime);
MetricsSystem.registerGaugeIfAbsent(
MetricsSystem.getMetricName(MetricKey.INFO_TIME_EXCEEDED.getName()),
mJvmPauseMonitor::getInfoTimeExceeded);
MetricsSystem.registerGaugeIfAbsent(
MetricsSystem.getMetricName(MetricKey.WARN_TIME_EXCEEDED.getName()),
mJvmPauseMonitor::getWarnTimeExceeded);
}
}
/**
 * Starts serving, letting {@link MetricsSystem} start sink and starting the web ui server and RPC
 * Server.
 *
 * @param startMessage empty string or the message that the master gains the leadership
 * @param stopMessage empty string or the message that the master loses the leadership
 */
protected void startServing(String startMessage, String stopMessage) {
// start all common services for non-ha master or leader master
startCommonServices();
startJvmMonitorProcess();
startLeaderServing(startMessage, stopMessage);
}
/**
 * Starts the RPC server and blocks until it is shut down (via {@link #stopServing()}).
 *
 * @param startMessage empty string or the message that the master gains the leadership
 * @param stopMessage empty string or the message that the master loses the leadership
 */
protected void startLeaderServing(String startMessage, String stopMessage) {
startServingRPCServer();
LOG.info(
"Alluxio master version {} started{}. bindAddress={}, connectAddress={}, webAddress={}",
RuntimeConstants.VERSION, startMessage, mRpcBindAddress, mRpcConnectAddress,
mWebBindAddress);
// Blocks until RPC server is shut down. (via #stopServing)
mGrpcServer.awaitTermination();
LOG.info("Alluxio master ended {}", stopMessage);
}
/**
 * Entrance of the services that can run whether the master state is the primary or standby.
 */
protected void startCommonServices() {
MetricsSystem.startSinks(
ServerConfiguration.get(PropertyKey.METRICS_CONF_FILE));
startServingWebServer();
}
/**
 * Starts the gRPC server. The AlluxioMaster registers the Services of registered
 * {@link Master}s and meta services.
 */
protected void startServingRPCServer() {
stopRejectingRpcServer();
LOG.info("Starting gRPC server on address:{}", mRpcBindAddress);
mGrpcServer = createRPCServer();
try {
// Start serving.
mGrpcServer.start();
mSafeModeManager.notifyRpcServerStarted();
// Acquire and log bind port from newly started server.
InetSocketAddress listeningAddress = InetSocketAddress
.createUnresolved(mRpcBindAddress.getHostName(), mGrpcServer.getBindPort());
LOG.info("gRPC server listening on: {}", listeningAddress);
} catch (IOException e) {
LOG.error("gRPC serving failed.", e);
throw new RuntimeException("gRPC serving failed");
}
}
/**
 * Builds (but does not start) the gRPC server, registering the services of all
 * masters in the registry plus the journal master service.
 *
 * @return the un-started server
 */
private GrpcServer createRPCServer() {
// Create an executor for Master RPC server.
mRPCExecutor = ExecutorServiceBuilder.buildExecutorService(
ExecutorServiceBuilder.RpcExecutorHost.MASTER);
MetricsSystem.registerGaugeIfAbsent(MetricKey.MASTER_RPC_QUEUE_LENGTH.getName(),
mRPCExecutor::getRpcQueueLength);
// Create underlying gRPC server.
GrpcServerBuilder builder = GrpcServerBuilder
.forAddress(GrpcServerAddress.create(mRpcConnectAddress.getHostName(), mRpcBindAddress),
ServerConfiguration.global(), ServerUserState.global())
.executor(mRPCExecutor)
.flowControlWindow(
(int) ServerConfiguration.getBytes(PropertyKey.MASTER_NETWORK_FLOWCONTROL_WINDOW))
.keepAliveTime(
ServerConfiguration.getMs(PropertyKey.MASTER_NETWORK_KEEPALIVE_TIME_MS),
TimeUnit.MILLISECONDS)
.keepAliveTimeout(
ServerConfiguration.getMs(PropertyKey.MASTER_NETWORK_KEEPALIVE_TIMEOUT_MS),
TimeUnit.MILLISECONDS)
.permitKeepAlive(
ServerConfiguration.getMs(PropertyKey.MASTER_NETWORK_PERMIT_KEEPALIVE_TIME_MS),
TimeUnit.MILLISECONDS)
.maxInboundMessageSize((int) ServerConfiguration.getBytes(
PropertyKey.MASTER_NETWORK_MAX_INBOUND_MESSAGE_SIZE));
// Bind manifests of each Alluxio master to RPC server.
for (Master master : mRegistry.getServers()) {
registerServices(builder, master.getServices());
}
// Bind manifest of Alluxio JournalMaster service.
// TODO(ggezer) Merge this with registerServices() logic.
builder.addService(alluxio.grpc.ServiceType.JOURNAL_MASTER_CLIENT_SERVICE,
new GrpcService(new JournalMasterClientServiceHandler(
new DefaultJournalMaster(JournalDomain.MASTER, mJournalSystem))));
// Builds a server that is not started yet.
return builder.build();
}
/**
 * Shuts down the gRPC server (if serving) and its RPC executor.
 */
protected void stopLeaderServing() {
if (isGrpcServing()) {
if (!mGrpcServer.shutdown()) {
LOG.warn("Alluxio master RPC server shutdown timed out.");
}
}
if (mRPCExecutor != null) {
mRPCExecutor.shutdownNow();
try {
mRPCExecutor.awaitTermination(
ServerConfiguration.getMs(PropertyKey.NETWORK_CONNECTION_SERVER_SHUTDOWN_TIMEOUT),
TimeUnit.MILLISECONDS);
} catch (InterruptedException ie) {
// Preserve the interrupt status for callers higher up the stack.
Thread.currentThread().interrupt();
}
}
}
/**
 * Stops the services shared by primary and standby masters (metric sinks, web server).
 */
protected void stopCommonServices() throws Exception {
MetricsSystem.stopSinks();
stopServingWebServer();
}
/**
 * Stops all services.
 */
protected void stopServing() throws Exception {
stopLeaderServing();
stopCommonServices();
stopJvmMonitorProcess();
}
/**
 * Stops the web server if it is running.
 */
protected void stopServingWebServer() throws Exception {
if (mWebServer != null) {
mWebServer.stop();
mWebServer = null;
}
}
/**
 * Stops the JVM pause monitor if it was started.
 */
protected void stopJvmMonitorProcess() {
if (mJvmPauseMonitor != null) {
mJvmPauseMonitor.stop();
}
}
/**
 * Waits until the web server is ready to serve requests.
 *
 * @param timeoutMs how long to wait in milliseconds
 * @return whether the web server became ready before the specified timeout
 */
@VisibleForTesting
public boolean waitForWebServerReady(int timeoutMs) {
try {
CommonUtils.waitFor(this + " to start",
this::isWebServing, WaitForOptions.defaults().setTimeoutMs(timeoutMs));
return true;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return false;
} catch (TimeoutException e) {
return false;
}
}
@Override
public String toString() {
return "Alluxio master @" + mRpcConnectAddress;
}
/**
 * Factory for creating {@link AlluxioMasterProcess}.
 */
@ThreadSafe
public static final class Factory {
/**
 * Creates a new {@link AlluxioMasterProcess}.
 *
 * @return a new instance of {@link MasterProcess} using the given sockets for the master
 */
public static AlluxioMasterProcess create() {
URI journalLocation = JournalUtils.getJournalLocation();
JournalSystem journalSystem = new JournalSystem.Builder()
.setLocation(journalLocation).build(ProcessType.MASTER);
// HA deployments (Zookeeper or Raft-based embedded journal) get the
// fault-tolerant variant with a primary selector.
if (ServerConfiguration.getBoolean(PropertyKey.ZOOKEEPER_ENABLED)) {
Preconditions.checkState(!(journalSystem instanceof RaftJournalSystem),
"Raft-based embedded journal and Zookeeper cannot be used at the same time.");
PrimarySelector primarySelector = PrimarySelector.Factory.createZkPrimarySelector();
return new FaultTolerantAlluxioMasterProcess(journalSystem, primarySelector);
} else if (journalSystem instanceof RaftJournalSystem) {
PrimarySelector primarySelector = ((RaftJournalSystem) journalSystem).getPrimarySelector();
return new FaultTolerantAlluxioMasterProcess(journalSystem, primarySelector);
}
return new AlluxioMasterProcess(journalSystem);
}
private Factory() {} // prevent instantiation
}
}
| |
package net.deltaplay.tweener;
import com.badlogic.gdx.math.Interpolation;
import com.badlogic.gdx.utils.GdxRuntimeException;
import net.deltaplay.tweener.Tweener.TweenAccessor;
import java.util.Arrays;
public class ValueTween extends TimeTween<ValueTween> {
private int size = 0;
private float[] from;
private float[] to;
private float[] current;
private float[] start;
private boolean hasFrom, hasTo, isRelative;
private boolean initialized;
private TweenAccessor<Object> accessor;
private Interpolation interpolation = Interpolation.linear;
private Object object;
/**
 * Creates an unconfigured tween; it is configured later via {@code size},
 * {@code accessor}, {@code object} and the from/to setters.
 */
public ValueTween() {
}
/** (Re)allocates the value buffers when the component count changes or on first use. */
ValueTween size(int count) {
    boolean needsAlloc = this.size != count || this.from == null;
    if (needsAlloc) {
        this.size = count;
        from = new float[count];
        to = new float[count];
        current = new float[count];
        start = new float[count];
    }
    return this;
}
/** Sets the target object whose values this tween animates. */
ValueTween object(Object object) {
this.object = object;
return this;
}
/**
 * Lazily captures endpoint and start values from the target object on the
 * first update after (re)start.
 */
private void initialize() {
    // The two captures are independent: each endpoint that was not set
    // explicitly defaults to the object's current values. The previous
    // "else if" skipped filling "from" when BOTH endpoints were unset,
    // leaving it all zeros and making the tween animate from 0.
    if (!hasTo) {
        accessor.get(object, to);
    }
    if (!hasFrom) {
        accessor.get(object, from);
    }
    // Remember the starting values for relative tweens.
    accessor.get(object, start);
    initialized = true;
}
/** Sets a one-component start value; the accessor must expose exactly one value. */
public ValueTween from(float v1) {
    if (accessor.getCount() != 1) {
        throw new GdxRuntimeException("Wrong value count!");
    }
    from[0] = v1;
    hasFrom = true;
    return this;
}
/** Sets a two-component start value; the accessor must expose exactly two values. */
public ValueTween from(float v1, float v2) {
    if (accessor.getCount() != 2) {
        throw new GdxRuntimeException("Wrong value count!");
    }
    from[0] = v1;
    from[1] = v2;
    hasFrom = true;
    return this;
}
/** Sets a three-component start value; the accessor must expose exactly three values. */
public ValueTween from(float v1, float v2, float v3) {
    if (accessor.getCount() != 3) {
        throw new GdxRuntimeException("Wrong value count!");
    }
    from[0] = v1;
    from[1] = v2;
    // Fixed copy-paste bug: was "from[1] = v3", which overwrote v2 and left
    // the third component stale.
    from[2] = v3;
    hasFrom = true;
    return this;
}
public ValueTween from(float... values) {
if (values.length != accessor.getCount())
throw new GdxRuntimeException("Wrong value count!");
System.arraycopy(values, 0, from, 0, accessor.getCount());
hasFrom = true;
return this;
}
public ValueTween to(float v1) {
if (accessor.getCount() != 1)
throw new GdxRuntimeException("Wrong value count!");
to[0] = v1;
hasTo = true;
return this;
}
public ValueTween to(float v1, float v2) {
if (accessor.getCount() != 2)
throw new GdxRuntimeException("Wrong value count!");
to[0] = v1;
to[1] = v2;
hasTo = true;
return this;
}
public ValueTween to(float v1, float v2, float v3) {
if (accessor.getCount() != 3)
throw new GdxRuntimeException("Wrong value count!");
to[0] = v1;
to[1] = v2;
to[1] = v3;
hasTo = true;
return this;
}
public ValueTween to(float... values) {
if (values.length != accessor.getCount())
throw new GdxRuntimeException("Wrong value count!");
System.arraycopy(values, 0, to, 0, accessor.getCount());
hasTo = true;
return this;
}
public ValueTween relative() {
this.isRelative = true;
return this;
}
ValueTween accessor(TweenAccessor accessor) {
this.accessor = (TweenAccessor<Object>) accessor;
return this;
}
public ValueTween interp(Interpolation interpolation) {
this.interpolation = interpolation;
return this;
}
public void update(float delta) {
super.update(delta);
if (!initialized) initialize();
float percent = duration == 0 ? 1f : Math.min(1f, time / duration);
if (interpolation != null) percent = interpolation.apply(percent);
accessor.get(object, current);
float absFrom, absTo;
for (int i = 0; i < accessor.getCount(); i++) {
absFrom = from[i];
absTo = to[i];
if (isRelative) {
if (hasFrom) {
absFrom += start[i];
}
if (hasTo) {
absTo += start[i];
}
}
current[i] = absFrom + (absTo - absFrom) * percent;
}
accessor.set(object, current);
}
@Override
public ValueTween getThis() {
return this;
}
@Override
public void restart() {
super.restart();
initialized = false;
}
@Override
public void reset() {
super.reset();
accessor = null;
object = null;
initialized = false;
isRelative = false;
hasFrom = false;
hasTo = false;
interpolation = null;
Arrays.fill(from, 0);
Arrays.fill(to, 0);
Arrays.fill(current, 0);
Arrays.fill(start, 0);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.broker;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.jms.DeliveryMode;
import junit.framework.Test;
import org.apache.activemq.command.ActiveMQDestination;
import org.apache.activemq.command.ActiveMQQueue;
import org.apache.activemq.command.ActiveMQTopic;
import org.apache.activemq.command.ConnectionInfo;
import org.apache.activemq.command.ConsumerInfo;
import org.apache.activemq.command.LocalTransactionId;
import org.apache.activemq.command.Message;
import org.apache.activemq.command.MessageAck;
import org.apache.activemq.command.ProducerInfo;
import org.apache.activemq.command.SessionInfo;
public class BrokerTest extends BrokerTestSupport {
/** Destination under test; set via combination values or createDestinationInfo(). */
public ActiveMQDestination destination;
/** JMS delivery mode for produced messages (DeliveryMode.PERSISTENT / NON_PERSISTENT); combination value. */
public int deliveryMode;
// Combination field; not read by the tests visible in this chunk — presumably used elsewhere. TODO confirm.
public int prefetch;
/** ActiveMQDestination type code (QUEUE_TYPE, TOPIC_TYPE, TEMP_*); combination value. */
public byte destinationType;
/** Whether topic tests register a durable subscription; combination value. */
public boolean durableConsumer;
// Timeout passed to receiveMessage() when asserting that no message arrives (presumably ms — TODO confirm).
protected static final int MAX_NULL_WAIT=500;
/** Runs the only-once-delivery test for both JMS delivery modes. */
public void initCombosForTestQueueOnlyOnceDeliveryWith2Consumers() {
    // Autoboxing compiles to the same Integer.valueOf() calls as the explicit form.
    addCombinationValues("deliveryMode",
            new Object[] {DeliveryMode.NON_PERSISTENT, DeliveryMode.PERSISTENT});
}
public void testQueueOnlyOnceDeliveryWith2Consumers() throws Exception {
ActiveMQDestination destination = new ActiveMQQueue("TEST");
// Setup a first connection
StubConnection connection1 = createConnection();
ConnectionInfo connectionInfo1 = createConnectionInfo();
SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
ProducerInfo producerInfo = createProducerInfo(sessionInfo1);
connection1.send(connectionInfo1);
connection1.send(sessionInfo1);
connection1.send(producerInfo);
ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
consumerInfo1.setPrefetchSize(1);
connection1.request(consumerInfo1);
// Setup a second connection
StubConnection connection2 = createConnection();
ConnectionInfo connectionInfo2 = createConnectionInfo();
SessionInfo sessionInfo2 = createSessionInfo(connectionInfo2);
ConsumerInfo consumerInfo2 = createConsumerInfo(sessionInfo2, destination);
consumerInfo2.setPrefetchSize(1);
connection2.send(connectionInfo2);
connection2.send(sessionInfo2);
connection2.request(consumerInfo2);
// Send the messages
connection1.send(createMessage(producerInfo, destination, deliveryMode));
connection1.send(createMessage(producerInfo, destination, deliveryMode));
connection1.send(createMessage(producerInfo, destination, deliveryMode));
connection1.request(createMessage(producerInfo, destination, deliveryMode));
for (int i = 0; i < 2; i++) {
Message m1 = receiveMessage(connection1);
Message m2 = receiveMessage(connection2);
assertNotNull("m1 is null for index: " + i, m1);
assertNotNull("m2 is null for index: " + i, m2);
assertNotSame(m1.getMessageId(), m2.getMessageId());
connection1.send(createAck(consumerInfo1, m1, 1, MessageAck.STANDARD_ACK_TYPE));
connection2.send(createAck(consumerInfo2, m2, 1, MessageAck.STANDARD_ACK_TYPE));
}
assertNoMessagesLeft(connection1);
assertNoMessagesLeft(connection2);
}
/** Runs the queue-browser test for both JMS delivery modes. */
public void initCombosForTestQueueBrowserWith2Consumers() {
    // Autoboxing compiles to the same Integer.valueOf() calls as the explicit form.
    addCombinationValues("deliveryMode",
            new Object[] {DeliveryMode.NON_PERSISTENT, DeliveryMode.PERSISTENT});
}
public void testQueueBrowserWith2Consumers() throws Exception {
ActiveMQDestination destination = new ActiveMQQueue("TEST");
// Setup a first connection
StubConnection connection1 = createConnection();
ConnectionInfo connectionInfo1 = createConnectionInfo();
SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
ProducerInfo producerInfo = createProducerInfo(sessionInfo1);
connection1.send(connectionInfo1);
connection1.send(sessionInfo1);
connection1.send(producerInfo);
ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
consumerInfo1.setPrefetchSize(10);
connection1.request(consumerInfo1);
// Send the messages
connection1.send(createMessage(producerInfo, destination, deliveryMode));
connection1.send(createMessage(producerInfo, destination, deliveryMode));
connection1.send(createMessage(producerInfo, destination, deliveryMode));
//as the messages are sent async - need to synchronize the last
//one to ensure they arrive in the order we want
connection1.request(createMessage(producerInfo, destination, deliveryMode));
// Setup a second connection with a queue browser.
StubConnection connection2 = createConnection();
ConnectionInfo connectionInfo2 = createConnectionInfo();
SessionInfo sessionInfo2 = createSessionInfo(connectionInfo2);
ConsumerInfo consumerInfo2 = createConsumerInfo(sessionInfo2, destination);
consumerInfo2.setPrefetchSize(1);
consumerInfo2.setBrowser(true);
connection2.send(connectionInfo2);
connection2.send(sessionInfo2);
connection2.request(consumerInfo2);
List<Message> messages = new ArrayList<Message>();
for (int i = 0; i < 4; i++) {
Message m1 = receiveMessage(connection1);
assertNotNull("m1 is null for index: " + i, m1);
messages.add(m1);
}
for (int i = 0; i < 4; i++) {
Message m1 = messages.get(i);
Message m2 = receiveMessage(connection2);
assertNotNull("m2 is null for index: " + i, m2);
assertEquals(m1.getMessageId(), m2.getMessageId());
connection2.send(createAck(consumerInfo2, m2, 1, MessageAck.DELIVERED_ACK_TYPE));
}
assertNoMessagesLeft(connection1);
assertNoMessagesLeft(connection2);
}
/**
 * Same scenario as testQueueBrowserWith2Consumers but with the order reversed:
 * the browser is created BEFORE any messages exist, so it sees nothing.
 */
public void testQueueBrowserWith2ConsumersBrowseFirst() throws Exception {
ActiveMQDestination destination = new ActiveMQQueue("TEST");
deliveryMode = DeliveryMode.NON_PERSISTENT;
// Setup a second connection with a queue browser.
StubConnection connection2 = createConnection();
ConnectionInfo connectionInfo2 = createConnectionInfo();
SessionInfo sessionInfo2 = createSessionInfo(connectionInfo2);
ConsumerInfo consumerInfo2 = createConsumerInfo(sessionInfo2, destination);
consumerInfo2.setPrefetchSize(10);
consumerInfo2.setBrowser(true);
connection2.send(connectionInfo2);
connection2.send(sessionInfo2);
connection2.request(consumerInfo2);
// Setup a first connection
StubConnection connection1 = createConnection();
ConnectionInfo connectionInfo1 = createConnectionInfo();
SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
ProducerInfo producerInfo = createProducerInfo(sessionInfo1);
connection1.send(connectionInfo1);
connection1.send(sessionInfo1);
connection1.send(producerInfo);
ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
consumerInfo1.setPrefetchSize(10);
connection1.request(consumerInfo1);
// Send the messages
connection1.send(createMessage(producerInfo, destination, deliveryMode));
connection1.send(createMessage(producerInfo, destination, deliveryMode));
connection1.send(createMessage(producerInfo, destination, deliveryMode));
//as the messages are sent async - need to synchronize the last
//one to ensure they arrive in the order we want
connection1.request(createMessage(producerInfo, destination, deliveryMode));
// The regular consumer receives all 4 messages.
List<Message> messages = new ArrayList<Message>();
for (int i = 0; i < 4; i++) {
Message m1 = receiveMessage(connection1);
assertNotNull("m1 is null for index: " + i, m1);
messages.add(m1);
}
// no messages present in queue browser as there were no messages when it
// was created
assertNoMessagesLeft(connection1);
assertNoMessagesLeft(connection2);
}
public void testQueueBrowserWith2ConsumersInterleaved() throws Exception {
ActiveMQDestination destination = new ActiveMQQueue("TEST");
deliveryMode = DeliveryMode.NON_PERSISTENT;
// Setup a first connection
StubConnection connection1 = createConnection();
ConnectionInfo connectionInfo1 = createConnectionInfo();
SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
ProducerInfo producerInfo = createProducerInfo(sessionInfo1);
connection1.send(connectionInfo1);
connection1.send(sessionInfo1);
connection1.send(producerInfo);
ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
consumerInfo1.setPrefetchSize(10);
connection1.request(consumerInfo1);
// Send the messages
connection1.request(createMessage(producerInfo, destination, deliveryMode));
// Setup a second connection with a queue browser.
StubConnection connection2 = createConnection();
ConnectionInfo connectionInfo2 = createConnectionInfo();
SessionInfo sessionInfo2 = createSessionInfo(connectionInfo2);
ConsumerInfo consumerInfo2 = createConsumerInfo(sessionInfo2, destination);
consumerInfo2.setPrefetchSize(1);
consumerInfo2.setBrowser(true);
connection2.send(connectionInfo2);
connection2.send(sessionInfo2);
connection2.request(consumerInfo2);
connection1.send(createMessage(producerInfo, destination, deliveryMode));
connection1.send(createMessage(producerInfo, destination, deliveryMode));
//as the messages are sent async - need to synchronize the last
//one to ensure they arrive in the order we want
connection1.request(createMessage(producerInfo, destination, deliveryMode));
List<Message> messages = new ArrayList<Message>();
for (int i = 0; i < 4; i++) {
Message m1 = receiveMessage(connection1);
assertNotNull("m1 is null for index: " + i, m1);
messages.add(m1);
}
for (int i = 0; i < 4; i++) {
Message m1 = messages.get(i);
Message m2 = receiveMessage(connection2);
assertNotNull("m2 is null for index: " + i, m2);
assertEquals(m1.getMessageId(), m2.getMessageId());
connection2.send(createAck(consumerInfo2, m2, 1, MessageAck.DELIVERED_ACK_TYPE));
}
assertNoMessagesLeft(connection1);
assertNoMessagesLeft(connection2);
}
/** Runs the prefetch/ack test over every destination type; PERSISTENT mode only. */
public void initCombosForTestConsumerPrefetchAndStandardAck() {
    // NON_PERSISTENT was deliberately disabled (commented out) in the original matrix.
    addCombinationValues("deliveryMode", new Object[] {DeliveryMode.PERSISTENT});
    // Byte constants autobox to the same Byte.valueOf() results as the explicit form.
    addCombinationValues("destinationType", new Object[] {
            ActiveMQDestination.QUEUE_TYPE,
            ActiveMQDestination.TOPIC_TYPE,
            ActiveMQDestination.TEMP_QUEUE_TYPE,
            ActiveMQDestination.TEMP_TOPIC_TYPE});
}
public void testConsumerPrefetchAndStandardAck() throws Exception {
// Start a producer and consumer
StubConnection connection = createConnection();
ConnectionInfo connectionInfo = createConnectionInfo();
SessionInfo sessionInfo = createSessionInfo(connectionInfo);
ProducerInfo producerInfo = createProducerInfo(sessionInfo);
connection.send(connectionInfo);
connection.send(sessionInfo);
connection.send(producerInfo);
destination = createDestinationInfo(connection, connectionInfo, destinationType);
ConsumerInfo consumerInfo = createConsumerInfo(sessionInfo, destination);
consumerInfo.setPrefetchSize(1);
connection.send(consumerInfo);
// Send 3 messages to the broker.
connection.send(createMessage(producerInfo, destination, deliveryMode));
connection.send(createMessage(producerInfo, destination, deliveryMode));
connection.request(createMessage(producerInfo, destination, deliveryMode));
// Make sure only 1 message was delivered.
Message m1 = receiveMessage(connection);
assertNotNull(m1);
assertNoMessagesLeft(connection);
// Acknowledge the first message. This should cause the next message to
// get dispatched.
connection.send(createAck(consumerInfo, m1, 1, MessageAck.STANDARD_ACK_TYPE));
Message m2 = receiveMessage(connection);
assertNotNull(m2);
connection.send(createAck(consumerInfo, m2, 1, MessageAck.STANDARD_ACK_TYPE));
Message m3 = receiveMessage(connection);
assertNotNull(m3);
connection.send(createAck(consumerInfo, m3, 1, MessageAck.STANDARD_ACK_TYPE));
connection.send(closeConnectionInfo(connectionInfo));
}
/** Runs the transacted-ack test for both delivery modes across every destination type. */
public void initCombosForTestTransactedAckWithPrefetchOfOne() {
    // Autoboxing compiles to the same Integer/Byte.valueOf() calls as the explicit form.
    addCombinationValues("deliveryMode",
            new Object[] {DeliveryMode.NON_PERSISTENT, DeliveryMode.PERSISTENT});
    addCombinationValues("destinationType", new Object[] {
            ActiveMQDestination.QUEUE_TYPE,
            ActiveMQDestination.TOPIC_TYPE,
            ActiveMQDestination.TEMP_QUEUE_TYPE,
            ActiveMQDestination.TEMP_TOPIC_TYPE});
}
public void testTransactedAckWithPrefetchOfOne() throws Exception {
// Setup a first connection
StubConnection connection1 = createConnection();
ConnectionInfo connectionInfo1 = createConnectionInfo();
SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
connection1.send(connectionInfo1);
connection1.send(sessionInfo1);
connection1.send(producerInfo1);
destination = createDestinationInfo(connection1, connectionInfo1, destinationType);
ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
consumerInfo1.setPrefetchSize(1);
connection1.send(consumerInfo1);
// Send the messages
for (int i = 0; i < 4; i++) {
Message message = createMessage(producerInfo1, destination, deliveryMode);
connection1.send(message);
}
// Now get the messages.
for (int i = 0; i < 4; i++) {
// Begin the transaction.
LocalTransactionId txid = createLocalTransaction(sessionInfo1);
connection1.send(createBeginTransaction(connectionInfo1, txid));
Message m1 = receiveMessage(connection1);
assertNotNull(m1);
MessageAck ack = createAck(consumerInfo1, m1, 1, MessageAck.STANDARD_ACK_TYPE);
ack.setTransactionId(txid);
connection1.send(ack);
// Commit the transaction.
connection1.send(createCommitTransaction1Phase(connectionInfo1, txid));
}
assertNoMessagesLeft(connection1);
}
/** Runs the transacted-send test for both delivery modes across every destination type. */
public void initCombosForTestTransactedSend() {
    // Autoboxing compiles to the same Integer/Byte.valueOf() calls as the explicit form.
    addCombinationValues("deliveryMode",
            new Object[] {DeliveryMode.NON_PERSISTENT, DeliveryMode.PERSISTENT});
    addCombinationValues("destinationType", new Object[] {
            ActiveMQDestination.QUEUE_TYPE,
            ActiveMQDestination.TOPIC_TYPE,
            ActiveMQDestination.TEMP_QUEUE_TYPE,
            ActiveMQDestination.TEMP_TOPIC_TYPE});
}
public void testTransactedSend() throws Exception {
// Setup a first connection
StubConnection connection1 = createConnection();
ConnectionInfo connectionInfo1 = createConnectionInfo();
SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
connection1.send(connectionInfo1);
connection1.send(sessionInfo1);
connection1.send(producerInfo1);
destination = createDestinationInfo(connection1, connectionInfo1, destinationType);
ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
consumerInfo1.setPrefetchSize(100);
connection1.send(consumerInfo1);
// Begin the transaction.
LocalTransactionId txid = createLocalTransaction(sessionInfo1);
connection1.send(createBeginTransaction(connectionInfo1, txid));
// Send the messages
for (int i = 0; i < 4; i++) {
Message message = createMessage(producerInfo1, destination, deliveryMode);
message.setTransactionId(txid);
connection1.request(message);
}
// The point of this test is that message should not be delivered until
// send is committed.
assertNull(receiveMessage(connection1,MAX_NULL_WAIT));
// Commit the transaction.
connection1.send(createCommitTransaction1Phase(connectionInfo1, txid));
// Now get the messages.
for (int i = 0; i < 4; i++) {
Message m1 = receiveMessage(connection1);
assertNotNull(m1);
}
assertNoMessagesLeft(connection1);
}
/** Runs the queue transacted-ack test for both delivery modes on queue-type destinations only. */
public void initCombosForTestQueueTransactedAck() {
    // Autoboxing compiles to the same Integer/Byte.valueOf() calls as the explicit form.
    addCombinationValues("deliveryMode",
            new Object[] {DeliveryMode.NON_PERSISTENT, DeliveryMode.PERSISTENT});
    addCombinationValues("destinationType", new Object[] {
            ActiveMQDestination.QUEUE_TYPE,
            ActiveMQDestination.TEMP_QUEUE_TYPE});
}
public void testQueueTransactedAck() throws Exception {
// Setup a first connection
StubConnection connection1 = createConnection();
ConnectionInfo connectionInfo1 = createConnectionInfo();
SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
connection1.send(connectionInfo1);
connection1.send(sessionInfo1);
connection1.send(producerInfo1);
destination = createDestinationInfo(connection1, connectionInfo1, destinationType);
ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
consumerInfo1.setPrefetchSize(100);
connection1.send(consumerInfo1);
// Send the messages
for (int i = 0; i < 4; i++) {
Message message = createMessage(producerInfo1, destination, deliveryMode);
connection1.send(message);
}
// Begin the transaction.
LocalTransactionId txid = createLocalTransaction(sessionInfo1);
connection1.send(createBeginTransaction(connectionInfo1, txid));
// Acknowledge the first 2 messages.
for (int i = 0; i < 2; i++) {
Message m1 = receiveMessage(connection1);
assertNotNull("m1 is null for index: " + i, m1);
MessageAck ack = createAck(consumerInfo1, m1, 1, MessageAck.STANDARD_ACK_TYPE);
ack.setTransactionId(txid);
connection1.request(ack);
}
// Commit the transaction.
connection1.send(createCommitTransaction1Phase(connectionInfo1, txid));
// The queue should now only have the remaining 2 messages
assertEquals(2, countMessagesInQueue(connection1, connectionInfo1, destination));
}
/** Runs the redelivery-on-close test for both delivery modes against a fixed queue. */
public void initCombosForTestConsumerCloseCausesRedelivery() {
    // Autoboxing compiles to the same Integer.valueOf() calls as the explicit form.
    addCombinationValues("deliveryMode",
            new Object[] {DeliveryMode.NON_PERSISTENT, DeliveryMode.PERSISTENT});
    addCombinationValues("destination", new Object[] {new ActiveMQQueue("TEST")});
}
public void testConsumerCloseCausesRedelivery() throws Exception {
// Setup a first connection
StubConnection connection1 = createConnection();
ConnectionInfo connectionInfo1 = createConnectionInfo();
SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
connection1.send(connectionInfo1);
connection1.send(sessionInfo1);
connection1.send(producerInfo1);
ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
consumerInfo1.setPrefetchSize(100);
connection1.request(consumerInfo1);
// Send the messages
connection1.send(createMessage(producerInfo1, destination, deliveryMode));
connection1.send(createMessage(producerInfo1, destination, deliveryMode));
connection1.send(createMessage(producerInfo1, destination, deliveryMode));
connection1.send(createMessage(producerInfo1, destination, deliveryMode));
// Receive the messages.
for (int i = 0; i < 4; i++) {
Message m1 = receiveMessage(connection1);
assertNotNull("m1 is null for index: " + i, m1);
assertFalse(m1.isRedelivered());
}
// Close the consumer without acking.. this should cause re-delivery of
// the messages.
connection1.send(consumerInfo1.createRemoveCommand());
// Create another consumer that should get the messages again.
ConsumerInfo consumerInfo2 = createConsumerInfo(sessionInfo1, destination);
consumerInfo2.setPrefetchSize(100);
connection1.request(consumerInfo2);
// Receive the messages.
for (int i = 0; i < 4; i++) {
Message m1 = receiveMessage(connection1);
assertNotNull("m1 is null for index: " + i, m1);
assertTrue(m1.isRedelivered());
}
assertNoMessagesLeft(connection1);
}
public void testTopicDurableSubscriptionCanBeRestored() throws Exception {
ActiveMQDestination destination = new ActiveMQTopic("TEST");
// Setup a first connection
StubConnection connection1 = createConnection();
ConnectionInfo connectionInfo1 = createConnectionInfo();
connectionInfo1.setClientId("clientid1");
SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
connection1.send(connectionInfo1);
connection1.send(sessionInfo1);
connection1.send(producerInfo1);
ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
consumerInfo1.setPrefetchSize(100);
consumerInfo1.setSubscriptionName("test");
connection1.send(consumerInfo1);
// Send the messages
connection1.send(createMessage(producerInfo1, destination, DeliveryMode.PERSISTENT));
connection1.send(createMessage(producerInfo1, destination, DeliveryMode.PERSISTENT));
connection1.send(createMessage(producerInfo1, destination, DeliveryMode.PERSISTENT));
connection1.request(createMessage(producerInfo1, destination, DeliveryMode.PERSISTENT));
// Get the messages
Message m = null;
for (int i = 0; i < 2; i++) {
m = receiveMessage(connection1);
assertNotNull(m);
}
// Ack the last message.
connection1.send(createAck(consumerInfo1, m, 2, MessageAck.STANDARD_ACK_TYPE));
// Close the connection.
connection1.request(closeConnectionInfo(connectionInfo1));
connection1.stop();
// Setup a second connection
StubConnection connection2 = createConnection();
ConnectionInfo connectionInfo2 = createConnectionInfo();
connectionInfo2.setClientId("clientid1");
SessionInfo sessionInfo2 = createSessionInfo(connectionInfo2);
ConsumerInfo consumerInfo2 = createConsumerInfo(sessionInfo2, destination);
consumerInfo2.setPrefetchSize(100);
consumerInfo2.setSubscriptionName("test");
connection2.send(connectionInfo2);
connection2.send(sessionInfo2);
connection2.send(consumerInfo2);
// Get the rest of the messages
for (int i = 0; i < 2; i++) {
Message m1 = receiveMessage(connection2);
assertNotNull("m1 is null for index: " + i, m1);
}
assertNoMessagesLeft(connection2);
}
/** Runs the message-group test for both JMS delivery modes. */
public void initCombosForTestGroupedMessagesDeliveredToOnlyOneConsumer() {
    // Autoboxing compiles to the same Integer.valueOf() calls as the explicit form.
    addCombinationValues("deliveryMode",
            new Object[] {DeliveryMode.NON_PERSISTENT, DeliveryMode.PERSISTENT});
}
public void testGroupedMessagesDeliveredToOnlyOneConsumer() throws Exception {
ActiveMQDestination destination = new ActiveMQQueue("TEST");
// Setup a first connection
StubConnection connection1 = createConnection();
ConnectionInfo connectionInfo1 = createConnectionInfo();
SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
ProducerInfo producerInfo = createProducerInfo(sessionInfo1);
connection1.send(connectionInfo1);
connection1.send(sessionInfo1);
connection1.send(producerInfo);
ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
consumerInfo1.setPrefetchSize(1);
connection1.send(consumerInfo1);
// Send the messages.
for (int i = 0; i < 4; i++) {
Message message = createMessage(producerInfo, destination, deliveryMode);
message.setGroupID("TEST-GROUP");
message.setGroupSequence(i + 1);
connection1.request(message);
}
// Setup a second connection
StubConnection connection2 = createConnection();
ConnectionInfo connectionInfo2 = createConnectionInfo();
SessionInfo sessionInfo2 = createSessionInfo(connectionInfo2);
connection2.send(connectionInfo2);
connection2.send(sessionInfo2);
ConsumerInfo consumerInfo2 = createConsumerInfo(sessionInfo2, destination);
consumerInfo2.setPrefetchSize(1);
connection2.send(consumerInfo2);
// All the messages should have been sent down connection 1.. just get
// the first 3
for (int i = 0; i < 3; i++) {
Message m1 = receiveMessage(connection1);
assertNotNull("m1 is null for index: " + i, m1);
connection1.send(createAck(consumerInfo1, m1, 1, MessageAck.STANDARD_ACK_TYPE));
}
// Close the first consumer.
connection1.request(closeConsumerInfo(consumerInfo1));
// The last messages should now go the the second consumer.
for (int i = 0; i < 1; i++) {
Message m1 = receiveMessage(connection2);
assertNotNull("m1 is null for index: " + i, m1);
connection2.request(createAck(consumerInfo2, m1, 1, MessageAck.STANDARD_ACK_TYPE));
}
assertNoMessagesLeft(connection2);
}
/** Runs the topic-visibility test over both delivery modes and durable/non-durable subscriptions. */
public void initCombosForTestTopicConsumerOnlySeeMessagesAfterCreation() {
    // Autoboxing compiles to the same Integer.valueOf() calls as the explicit form.
    addCombinationValues("deliveryMode",
            new Object[] {DeliveryMode.NON_PERSISTENT, DeliveryMode.PERSISTENT});
    addCombinationValues("durableConsumer", new Object[] {Boolean.TRUE, Boolean.FALSE});
}
public void testTopicConsumerOnlySeeMessagesAfterCreation() throws Exception {
ActiveMQDestination destination = new ActiveMQTopic("TEST");
// Setup a first connection
StubConnection connection1 = createConnection();
ConnectionInfo connectionInfo1 = createConnectionInfo();
connectionInfo1.setClientId("A");
SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
connection1.send(connectionInfo1);
connection1.send(sessionInfo1);
connection1.send(producerInfo1);
// Send the 1st message
connection1.send(createMessage(producerInfo1, destination, deliveryMode));
// Create the durable subscription.
ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
if (durableConsumer) {
consumerInfo1.setSubscriptionName("test");
}
consumerInfo1.setPrefetchSize(100);
connection1.send(consumerInfo1);
Message m = createMessage(producerInfo1, destination, deliveryMode);
connection1.send(m);
connection1.send(createMessage(producerInfo1, destination, deliveryMode));
// Subscription should skip over the first message
Message m2 = receiveMessage(connection1);
assertNotNull(m2);
assertEquals(m.getMessageId(), m2.getMessageId());
m2 = receiveMessage(connection1);
assertNotNull(m2);
assertNoMessagesLeft(connection1);
}
/** Runs the retroactive-subscription test over both delivery modes and durable/non-durable subscriptions. */
public void initCombosForTestTopicRetroactiveConsumerSeeMessagesBeforeCreation() {
    // Autoboxing compiles to the same Integer.valueOf() calls as the explicit form.
    addCombinationValues("deliveryMode",
            new Object[] {DeliveryMode.NON_PERSISTENT, DeliveryMode.PERSISTENT});
    addCombinationValues("durableConsumer", new Object[] {Boolean.TRUE, Boolean.FALSE});
}
public void testTopicRetroactiveConsumerSeeMessagesBeforeCreation() throws Exception {
ActiveMQDestination destination = new ActiveMQTopic("TEST");
// Setup a first connection
StubConnection connection1 = createConnection();
ConnectionInfo connectionInfo1 = createConnectionInfo();
connectionInfo1.setClientId("A");
SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
connection1.send(connectionInfo1);
connection1.send(sessionInfo1);
connection1.send(producerInfo1);
// Send the messages
Message m = createMessage(producerInfo1, destination, deliveryMode);
connection1.send(m);
// Create the durable subscription.
ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
if (durableConsumer) {
consumerInfo1.setSubscriptionName("test");
}
consumerInfo1.setPrefetchSize(100);
consumerInfo1.setRetroactive(true);
connection1.send(consumerInfo1);
connection1.send(createMessage(producerInfo1, destination, deliveryMode));
connection1.request(createMessage(producerInfo1, destination, deliveryMode));
// the behavior is VERY dependent on the recovery policy used.
// But the default broker settings try to make it as consistent as
// possible
// Subscription should see all messages sent.
Message m2 = receiveMessage(connection1);
assertNotNull(m2);
assertEquals(m.getMessageId(), m2.getMessageId());
for (int i = 0; i < 2; i++) {
m2 = receiveMessage(connection1);
assertNotNull(m2);
}
assertNoMessagesLeft(connection1);
}
//
// TODO: need to reimplement this since we don't fail when we send to a
// non-existent
// destination. But if we can access the Region directly then we should be
// able to
// check that if the destination was removed.
//
// public void initCombosForTestTempDestinationsRemovedOnConnectionClose() {
// addCombinationValues( "deliveryMode", new Object[]{
// Integer.valueOf(DeliveryMode.NON_PERSISTENT),
// Integer.valueOf(DeliveryMode.PERSISTENT)} );
// addCombinationValues( "destinationType", new Object[]{
// Byte.valueOf(ActiveMQDestination.TEMP_QUEUE_TYPE),
// Byte.valueOf(ActiveMQDestination.TEMP_TOPIC_TYPE)} );
// }
//
// public void testTempDestinationsRemovedOnConnectionClose() throws
// Exception {
//
// // Setup a first connection
// StubConnection connection1 = createConnection();
// ConnectionInfo connectionInfo1 = createConnectionInfo();
// SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
// ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
// connection1.send(connectionInfo1);
// connection1.send(sessionInfo1);
// connection1.send(producerInfo1);
//
// destination = createDestinationInfo(connection1, connectionInfo1,
// destinationType);
//
// StubConnection connection2 = createConnection();
// ConnectionInfo connectionInfo2 = createConnectionInfo();
// SessionInfo sessionInfo2 = createSessionInfo(connectionInfo2);
// ProducerInfo producerInfo2 = createProducerInfo(sessionInfo2);
// connection2.send(connectionInfo2);
// connection2.send(sessionInfo2);
// connection2.send(producerInfo2);
//
// // Send from connection2 to connection1's temp destination. Should
// succeed.
// connection2.send(createMessage(producerInfo2, destination,
// deliveryMode));
//
// // Close connection 1
// connection1.request(closeConnectionInfo(connectionInfo1));
//
// try {
// // Send from connection2 to connection1's temp destination. Should not
// succeed.
// connection2.request(createMessage(producerInfo2, destination,
// deliveryMode));
// fail("Expected JMSException.");
// } catch ( JMSException success ) {
// }
//
// }
// public void initCombosForTestTempDestinationsAreNotAutoCreated() {
// addCombinationValues( "deliveryMode", new Object[]{
// Integer.valueOf(DeliveryMode.NON_PERSISTENT),
// Integer.valueOf(DeliveryMode.PERSISTENT)} );
// addCombinationValues( "destinationType", new Object[]{
// Byte.valueOf(ActiveMQDestination.TEMP_QUEUE_TYPE),
// Byte.valueOf(ActiveMQDestination.TEMP_TOPIC_TYPE)} );
// }
//
//
// We create temp destination on demand now so this test case is no longer
// valid.
//
// public void testTempDestinationsAreNotAutoCreated() throws Exception {
//
// // Setup a first connection
// StubConnection connection1 = createConnection();
// ConnectionInfo connectionInfo1 = createConnectionInfo();
// SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
// ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
// connection1.send(connectionInfo1);
// connection1.send(sessionInfo1);
// connection1.send(producerInfo1);
//
// destination =
// ActiveMQDestination.createDestination(connectionInfo1.getConnectionId()+":1",
// destinationType);
//
// // Should not be able to send to a non-existent temp destination.
// try {
// connection1.request(createMessage(producerInfo1, destination,
// deliveryMode));
// fail("Expected JMSException.");
// } catch ( JMSException success ) {
// }
//
// }
public void initCombosForTestExclusiveQueueDeliversToOnlyOneConsumer() {
addCombinationValues("deliveryMode", new Object[] {Integer.valueOf(DeliveryMode.NON_PERSISTENT),
Integer.valueOf(DeliveryMode.PERSISTENT)});
}
        /**
         * Exclusive queue semantics: while an exclusive consumer is active it
         * receives every message even if a second exclusive consumer is ready;
         * once the first consumer closes, dispatch fails over to the second.
         */
        public void testExclusiveQueueDeliversToOnlyOneConsumer() throws Exception {
            ActiveMQDestination destination = new ActiveMQQueue("TEST");
            // Setup a first connection
            StubConnection connection1 = createConnection();
            ConnectionInfo connectionInfo1 = createConnectionInfo();
            SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
            ProducerInfo producerInfo = createProducerInfo(sessionInfo1);
            connection1.send(connectionInfo1);
            connection1.send(sessionInfo1);
            connection1.send(producerInfo);
            ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
            consumerInfo1.setPrefetchSize(1);
            consumerInfo1.setExclusive(true);
            connection1.send(consumerInfo1);
            // Send a message.. this should make consumer 1 the exclusive owner.
            connection1.request(createMessage(producerInfo, destination, deliveryMode));
            // Setup a second connection
            StubConnection connection2 = createConnection();
            ConnectionInfo connectionInfo2 = createConnectionInfo();
            SessionInfo sessionInfo2 = createSessionInfo(connectionInfo2);
            ConsumerInfo consumerInfo2 = createConsumerInfo(sessionInfo2, destination);
            consumerInfo2.setPrefetchSize(1);
            consumerInfo2.setExclusive(true);
            connection2.send(connectionInfo2);
            connection2.send(sessionInfo2);
            connection2.request(consumerInfo2);
            // Second message should go to consumer 1 even though consumer 2 is
            // ready
            // for dispatch.
            connection1.send(createMessage(producerInfo, destination, deliveryMode));
            connection1.send(createMessage(producerInfo, destination, deliveryMode));
            // Acknowledge the first 2 messages
            for (int i = 0; i < 2; i++) {
                Message m1 = receiveMessage(connection1);
                assertNotNull(m1);
                connection1.send(createAck(consumerInfo1, m1, 1, MessageAck.STANDARD_ACK_TYPE));
            }
            // Close the first consumer.
            connection1.send(closeConsumerInfo(consumerInfo1));
            // The last two messages should now go to the second consumer.
            connection1.send(createMessage(producerInfo, destination, deliveryMode));
            for (int i = 0; i < 2; i++) {
                Message m1 = receiveMessage(connection2);
                assertNotNull(m1);
                connection2.send(createAck(consumerInfo2, m1, 1, MessageAck.STANDARD_ACK_TYPE));
            }
            assertNoMessagesLeft(connection2);
        }
public void initCombosForTestWildcardConsume() {
addCombinationValues("deliveryMode", new Object[] {Integer.valueOf(DeliveryMode.NON_PERSISTENT),
Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("destinationType", new Object[] {Byte.valueOf(ActiveMQDestination.QUEUE_TYPE),
Byte.valueOf(ActiveMQDestination.TOPIC_TYPE)});
}
        /**
         * Wildcard subscription: a consumer on {@code WILD.*.TEST} must receive
         * only messages whose destination matches the pattern, and none of the
         * non-matching ones.
         */
        public void testWildcardConsume() throws Exception {
            // Setup a first connection
            StubConnection connection1 = createConnection();
            ConnectionInfo connectionInfo1 = createConnectionInfo();
            SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
            ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
            connection1.send(connectionInfo1);
            connection1.send(sessionInfo1);
            connection1.send(producerInfo1);
            // setup the wildcard consumer.
            ActiveMQDestination compositeDestination = ActiveMQDestination.createDestination("WILD.*.TEST",
                                                                                             destinationType);
            ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, compositeDestination);
            consumerInfo1.setPrefetchSize(100);
            connection1.send(consumerInfo1);
            // These two messages should NOT match the wild card (wrong segment count).
            connection1.send(createMessage(producerInfo1, ActiveMQDestination.createDestination("WILD.CARD",
                                                                                                destinationType),
                                           deliveryMode));
            connection1.send(createMessage(producerInfo1, ActiveMQDestination.createDestination("WILD.TEST",
                                                                                                destinationType),
                                           deliveryMode));
            // These two messages should match the wild card.
            ActiveMQDestination d1 = ActiveMQDestination.createDestination("WILD.CARD.TEST", destinationType);
            connection1.send(createMessage(producerInfo1, d1, deliveryMode));
            Message m = receiveMessage(connection1);
            assertNotNull(m);
            assertEquals(d1, m.getDestination());
            ActiveMQDestination d2 = ActiveMQDestination.createDestination("WILD.FOO.TEST", destinationType);
            connection1.request(createMessage(producerInfo1, d2, deliveryMode));
            m = receiveMessage(connection1);
            assertNotNull(m);
            assertEquals(d2, m.getDestination());
            assertNoMessagesLeft(connection1);
            connection1.send(closeConnectionInfo(connectionInfo1));
        }
public void initCombosForTestCompositeConsume() {
addCombinationValues("deliveryMode", new Object[] {Integer.valueOf(DeliveryMode.NON_PERSISTENT),
Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("destinationType", new Object[] {Byte.valueOf(ActiveMQDestination.QUEUE_TYPE),
Byte.valueOf(ActiveMQDestination.TOPIC_TYPE)});
}
        /**
         * Composite subscription: a single consumer subscribed to "A,B" receives
         * messages published to either destination.
         */
        public void testCompositeConsume() throws Exception {
            // Setup a first connection
            StubConnection connection1 = createConnection();
            ConnectionInfo connectionInfo1 = createConnectionInfo();
            SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
            ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
            connection1.send(connectionInfo1);
            connection1.send(sessionInfo1);
            connection1.send(producerInfo1);
            // setup the composite consumer.
            ActiveMQDestination compositeDestination = ActiveMQDestination.createDestination("A,B",
                                                                                             destinationType);
            ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, compositeDestination);
            // retroactive so topic messages published before dispatch are still seen
            consumerInfo1.setRetroactive(true);
            consumerInfo1.setPrefetchSize(100);
            connection1.send(consumerInfo1);
            // Publish to the two destinations
            ActiveMQDestination destinationA = ActiveMQDestination.createDestination("A", destinationType);
            ActiveMQDestination destinationB = ActiveMQDestination.createDestination("B", destinationType);
            // Send a message to each destination .
            connection1.send(createMessage(producerInfo1, destinationA, deliveryMode));
            connection1.send(createMessage(producerInfo1, destinationB, deliveryMode));
            // The consumer should get both messages.
            for (int i = 0; i < 2; i++) {
                Message m1 = receiveMessage(connection1);
                assertNotNull(m1);
            }
            assertNoMessagesLeft(connection1);
            connection1.send(closeConnectionInfo(connectionInfo1));
        }
public void initCombosForTestCompositeSend() {
addCombinationValues("deliveryMode", new Object[] {Integer.valueOf(DeliveryMode.NON_PERSISTENT),
Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("destinationType", new Object[] {Byte.valueOf(ActiveMQDestination.QUEUE_TYPE),
Byte.valueOf(ActiveMQDestination.TOPIC_TYPE)});
}
        /**
         * Composite send fan-out: a message sent to "A,B" is delivered to both
         * destinations, carrying the same message id on each copy with the
         * composite destination preserved as the original destination.
         */
        public void testCompositeSend() throws Exception {
            // Setup a first connection
            StubConnection connection1 = createConnection();
            ConnectionInfo connectionInfo1 = createConnectionInfo();
            SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
            ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
            connection1.send(connectionInfo1);
            connection1.send(sessionInfo1);
            connection1.send(producerInfo1);
            ActiveMQDestination destinationA = ActiveMQDestination.createDestination("A", destinationType);
            ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destinationA);
            consumerInfo1.setRetroactive(true);
            consumerInfo1.setPrefetchSize(100);
            connection1.request(consumerInfo1);
            // Setup a second connection
            StubConnection connection2 = createConnection();
            ConnectionInfo connectionInfo2 = createConnectionInfo();
            SessionInfo sessionInfo2 = createSessionInfo(connectionInfo2);
            connection2.send(connectionInfo2);
            connection2.send(sessionInfo2);
            ActiveMQDestination destinationB = ActiveMQDestination.createDestination("B", destinationType);
            ConsumerInfo consumerInfo2 = createConsumerInfo(sessionInfo2, destinationB);
            consumerInfo2.setRetroactive(true);
            consumerInfo2.setPrefetchSize(100);
            connection2.request(consumerInfo2);
            // Send the messages to the composite destination.
            ActiveMQDestination compositeDestination = ActiveMQDestination.createDestination("A,B",
                                                                                             destinationType);
            for (int i = 0; i < 4; i++) {
                connection1.request(createMessage(producerInfo1, compositeDestination, deliveryMode));
            }
            // The messages should have been delivered to both the A and B
            // destination.
            for (int i = 0; i < 4; i++) {
                Message m1 = receiveMessage(connection1);
                Message m2 = receiveMessage(connection2);
                assertNotNull(m1);
                assertNotNull(m2);
                assertEquals(m1.getMessageId(), m2.getMessageId());
                assertEquals(compositeDestination, m1.getOriginalDestination());
                assertEquals(compositeDestination, m2.getOriginalDestination());
                connection1.request(createAck(consumerInfo1, m1, 1, MessageAck.STANDARD_ACK_TYPE));
                connection2.request(createAck(consumerInfo2, m2, 1, MessageAck.STANDARD_ACK_TYPE));
            }
            assertNoMessagesLeft(connection1);
            assertNoMessagesLeft(connection2);
            connection1.send(closeConnectionInfo(connectionInfo1));
            connection2.send(closeConnectionInfo(connectionInfo2));
        }
public void initCombosForTestConnectionCloseCascades() {
addCombinationValues("deliveryMode", new Object[] {Integer.valueOf(DeliveryMode.NON_PERSISTENT),
Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("destination", new Object[] {new ActiveMQTopic("TEST"),
new ActiveMQQueue("TEST")});
}
        /**
         * Closing a connection must cascade to its consumers: once connection1 is
         * closed, messages sent by connection2 are no longer dispatched to it.
         */
        public void testConnectionCloseCascades() throws Exception {
            // Setup a first connection
            StubConnection connection1 = createConnection();
            ConnectionInfo connectionInfo1 = createConnectionInfo();
            SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
            ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
            connection1.send(connectionInfo1);
            connection1.send(sessionInfo1);
            connection1.send(producerInfo1);
            ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
            consumerInfo1.setPrefetchSize(100);
            // noLocal: connection1 should only see what connection2 publishes (topic case)
            consumerInfo1.setNoLocal(true);
            connection1.request(consumerInfo1);
            // Setup a second connection
            StubConnection connection2 = createConnection();
            ConnectionInfo connectionInfo2 = createConnectionInfo();
            SessionInfo sessionInfo2 = createSessionInfo(connectionInfo2);
            ProducerInfo producerInfo2 = createProducerInfo(sessionInfo2);
            connection2.send(connectionInfo2);
            connection2.send(sessionInfo2);
            connection2.send(producerInfo2);
            // Send the messages
            connection2.send(createMessage(producerInfo2, destination, deliveryMode));
            connection2.send(createMessage(producerInfo2, destination, deliveryMode));
            connection2.send(createMessage(producerInfo2, destination, deliveryMode));
            connection2.send(createMessage(producerInfo2, destination, deliveryMode));
            for (int i = 0; i < 4; i++) {
                Message m1 = receiveMessage(connection1);
                assertNotNull(m1);
                connection1.send(createAck(consumerInfo1, m1, 1, MessageAck.STANDARD_ACK_TYPE));
            }
            // give the async ack a chance to percolate and validate all are currently consumed
            Message msg = receiveMessage(connection1, MAX_NULL_WAIT);
            assertNull("all messages were received " + msg, msg);
            // Close the connection, this should in turn close the consumer.
            connection1.request(closeConnectionInfo(connectionInfo1));
            // Send another message, connection1 should not get the message.
            connection2.request(createMessage(producerInfo2, destination, deliveryMode));
            assertNull("no message received", receiveMessage(connection1, MAX_NULL_WAIT));
        }
public void initCombosForTestSessionCloseCascades() {
addCombinationValues("deliveryMode", new Object[] {Integer.valueOf(DeliveryMode.NON_PERSISTENT),
Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("destination", new Object[] {new ActiveMQTopic("TEST"),
new ActiveMQQueue("TEST")});
}
        /**
         * Closing a session must cascade to its consumers: once session1 is
         * closed, messages sent by connection2 are no longer dispatched to
         * connection1.
         */
        public void testSessionCloseCascades() throws Exception {
            // Setup a first connection
            StubConnection connection1 = createConnection();
            ConnectionInfo connectionInfo1 = createConnectionInfo();
            SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
            ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
            connection1.send(connectionInfo1);
            connection1.send(sessionInfo1);
            connection1.send(producerInfo1);
            ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
            consumerInfo1.setPrefetchSize(100);
            // noLocal: connection1 should only see what connection2 publishes (topic case)
            consumerInfo1.setNoLocal(true);
            connection1.request(consumerInfo1);
            // Setup a second connection
            StubConnection connection2 = createConnection();
            ConnectionInfo connectionInfo2 = createConnectionInfo();
            SessionInfo sessionInfo2 = createSessionInfo(connectionInfo2);
            ProducerInfo producerInfo2 = createProducerInfo(sessionInfo2);
            connection2.send(connectionInfo2);
            connection2.send(sessionInfo2);
            connection2.send(producerInfo2);
            // Send the messages
            connection2.send(createMessage(producerInfo2, destination, deliveryMode));
            connection2.send(createMessage(producerInfo2, destination, deliveryMode));
            connection2.send(createMessage(producerInfo2, destination, deliveryMode));
            connection2.send(createMessage(producerInfo2, destination, deliveryMode));
            for (int i = 0; i < 4; i++) {
                Message m1 = receiveMessage(connection1);
                assertNotNull(m1);
                connection1.send(createAck(consumerInfo1, m1, 1, MessageAck.STANDARD_ACK_TYPE));
            }
            // Close the session, this should in turn close the consumer.
            connection1.request(closeSessionInfo(sessionInfo1));
            // Send another message, connection1 should not get the message.
            connection2.request(createMessage(producerInfo2, destination, deliveryMode));
            Message msg = receiveMessage(connection1,MAX_NULL_WAIT);
            assertNull("no message received from connection1 after session close", msg);
        }
public void initCombosForTestConsumerClose() {
addCombinationValues("deliveryMode", new Object[] {Integer.valueOf(DeliveryMode.NON_PERSISTENT),
Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("destination", new Object[] {new ActiveMQTopic("TEST"),
new ActiveMQQueue("TEST")});
}
        /**
         * Closing a consumer stops dispatch to it: after consumerInfo1 is closed,
         * further messages sent by connection2 must not reach connection1.
         */
        public void testConsumerClose() throws Exception {
            // Setup a first connection
            StubConnection connection1 = createConnection();
            ConnectionInfo connectionInfo1 = createConnectionInfo();
            SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
            ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
            connection1.send(connectionInfo1);
            connection1.send(sessionInfo1);
            connection1.send(producerInfo1);
            ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
            consumerInfo1.setPrefetchSize(100);
            // noLocal: connection1 should only see what connection2 publishes (topic case)
            consumerInfo1.setNoLocal(true);
            connection1.request(consumerInfo1);
            // Setup a second connection
            StubConnection connection2 = createConnection();
            ConnectionInfo connectionInfo2 = createConnectionInfo();
            SessionInfo sessionInfo2 = createSessionInfo(connectionInfo2);
            ProducerInfo producerInfo2 = createProducerInfo(sessionInfo2);
            connection2.send(connectionInfo2);
            connection2.send(sessionInfo2);
            connection2.send(producerInfo2);
            // Send the messages
            connection2.send(createMessage(producerInfo2, destination, deliveryMode));
            connection2.send(createMessage(producerInfo2, destination, deliveryMode));
            connection2.send(createMessage(producerInfo2, destination, deliveryMode));
            connection2.send(createMessage(producerInfo2, destination, deliveryMode));
            for (int i = 0; i < 4; i++) {
                Message m1 = receiveMessage(connection1);
                assertNotNull(m1);
                connection1.send(createAck(consumerInfo1, m1, 1, MessageAck.STANDARD_ACK_TYPE));
            }
            // give the async ack a chance to percolate and validate all are currently consumed
            // use receive rather than poll as broker info is sent async and may still need to be dequeued
            Message result = receiveMessage(connection1, MAX_NULL_WAIT);
            assertNull("no more messages " + result, result);
            // Close the consumer.
            connection1.request(closeConsumerInfo(consumerInfo1));
            // Send another message, connection1 should not get the message.
            connection2.request(createMessage(producerInfo2, destination, deliveryMode));
            result = receiveMessage(connection1, MAX_NULL_WAIT);
            assertNull("no message received after close " + result, result);
        }
public void initCombosForTestTopicNoLocal() {
addCombinationValues("deliveryMode", new Object[] {Integer.valueOf(DeliveryMode.NON_PERSISTENT),
Integer.valueOf(DeliveryMode.PERSISTENT)});
}
        /**
         * noLocal topic subscription: a consumer must not receive messages
         * published on its own connection, only those published by other
         * connections.
         */
        public void testTopicNoLocal() throws Exception {
            ActiveMQDestination destination = new ActiveMQTopic("TEST");
            // Setup a first connection
            StubConnection connection1 = createConnection();
            ConnectionInfo connectionInfo1 = createConnectionInfo();
            SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
            ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
            connection1.send(connectionInfo1);
            connection1.send(sessionInfo1);
            connection1.send(producerInfo1);
            ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
            consumerInfo1.setRetroactive(true);
            consumerInfo1.setPrefetchSize(100);
            consumerInfo1.setNoLocal(true);
            connection1.send(consumerInfo1);
            // Setup a second connection
            StubConnection connection2 = createConnection();
            ConnectionInfo connectionInfo2 = createConnectionInfo();
            SessionInfo sessionInfo2 = createSessionInfo(connectionInfo2);
            ProducerInfo producerInfo2 = createProducerInfo(sessionInfo2);
            connection2.send(connectionInfo2);
            connection2.send(sessionInfo2);
            connection2.send(producerInfo2);
            ConsumerInfo consumerInfo2 = createConsumerInfo(sessionInfo2, destination);
            consumerInfo2.setRetroactive(true);
            consumerInfo2.setPrefetchSize(100);
            consumerInfo2.setNoLocal(true);
            connection2.send(consumerInfo2);
            // Send the messages
            connection1.send(createMessage(producerInfo1, destination, deliveryMode));
            connection1.send(createMessage(producerInfo1, destination, deliveryMode));
            connection1.send(createMessage(producerInfo1, destination, deliveryMode));
            connection1.send(createMessage(producerInfo1, destination, deliveryMode));
            // The 2nd connection should get the messages.
            for (int i = 0; i < 4; i++) {
                Message m1 = receiveMessage(connection2);
                assertNotNull(m1);
            }
            // Send a message with the 2nd connection
            Message message = createMessage(producerInfo2, destination, deliveryMode);
            connection2.send(message);
            // The first connection should not see the initial 4 local messages sent
            // but should
            // see the messages from connection 2.
            Message m = receiveMessage(connection1);
            assertNotNull(m);
            assertEquals(message.getMessageId(), m.getMessageId());
            assertNoMessagesLeft(connection1);
            assertNoMessagesLeft(connection2);
        }
        // NOTE(review): the combination framework pairs "initCombosForX" with a test
        // method named "X"; this name lacks the "Test" prefix needed to match
        // testTopicDispatchIsBroadcast, so these combos are presumably never applied.
        // Confirm against CombinationTestSupport's lookup before renaming.
        public void initCombosForTopicDispatchIsBroadcast() {
            addCombinationValues("deliveryMode", new Object[] {Integer.valueOf(DeliveryMode.NON_PERSISTENT),
                                                               Integer.valueOf(DeliveryMode.PERSISTENT)});
        }
        /**
         * Topic dispatch is broadcast: every subscriber receives a copy of every
         * message published to the topic.
         */
        public void testTopicDispatchIsBroadcast() throws Exception {
            ActiveMQDestination destination = new ActiveMQTopic("TEST");
            // Setup a first connection
            StubConnection connection1 = createConnection();
            ConnectionInfo connectionInfo1 = createConnectionInfo();
            SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
            ProducerInfo producerInfo1 = createProducerInfo(sessionInfo1);
            connection1.send(connectionInfo1);
            connection1.send(sessionInfo1);
            connection1.send(producerInfo1);
            ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
            consumerInfo1.setRetroactive(true);
            consumerInfo1.setPrefetchSize(100);
            connection1.send(consumerInfo1);
            // Setup a second connection
            StubConnection connection2 = createConnection();
            ConnectionInfo connectionInfo2 = createConnectionInfo();
            SessionInfo sessionInfo2 = createSessionInfo(connectionInfo2);
            ConsumerInfo consumerInfo2 = createConsumerInfo(sessionInfo2, destination);
            consumerInfo2.setRetroactive(true);
            consumerInfo2.setPrefetchSize(100);
            connection2.send(connectionInfo2);
            connection2.send(sessionInfo2);
            connection2.send(consumerInfo2);
            // Send the messages
            connection1.send(createMessage(producerInfo1, destination, deliveryMode));
            connection1.send(createMessage(producerInfo1, destination, deliveryMode));
            connection1.send(createMessage(producerInfo1, destination, deliveryMode));
            connection1.send(createMessage(producerInfo1, destination, deliveryMode));
            // Get the messages
            for (int i = 0; i < 4; i++) {
                Message m1 = receiveMessage(connection1);
                assertNotNull(m1);
                m1 = receiveMessage(connection2);
                assertNotNull(m1);
            }
        }
public void initCombosForTestQueueDispatchedAreRedeliveredOnConsumerClose() {
addCombinationValues("deliveryMode", new Object[] {Integer.valueOf(DeliveryMode.NON_PERSISTENT),
Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("destinationType",
new Object[] {Byte.valueOf(ActiveMQDestination.QUEUE_TYPE),
Byte.valueOf(ActiveMQDestination.TEMP_QUEUE_TYPE)});
}
        /**
         * Queue redelivery: messages dispatched but never acknowledged must be
         * redelivered (with the redelivered flag set) to the next consumer after
         * the original consumer closes.
         */
        public void testQueueDispatchedAreRedeliveredOnConsumerClose() throws Exception {
            // Setup a first connection
            StubConnection connection1 = createConnection();
            ConnectionInfo connectionInfo1 = createConnectionInfo();
            SessionInfo sessionInfo1 = createSessionInfo(connectionInfo1);
            ProducerInfo producerInfo = createProducerInfo(sessionInfo1);
            connection1.send(connectionInfo1);
            connection1.send(sessionInfo1);
            connection1.send(producerInfo);
            destination = createDestinationInfo(connection1, connectionInfo1, destinationType);
            ConsumerInfo consumerInfo1 = createConsumerInfo(sessionInfo1, destination);
            consumerInfo1.setPrefetchSize(100);
            connection1.send(consumerInfo1);
            // Send the messages
            connection1.send(createMessage(producerInfo, destination, deliveryMode));
            connection1.send(createMessage(producerInfo, destination, deliveryMode));
            connection1.send(createMessage(producerInfo, destination, deliveryMode));
            connection1.send(createMessage(producerInfo, destination, deliveryMode));
            // Get the messages
            for (int i = 0; i < 4; i++) {
                Message m1 = receiveMessage(connection1);
                assertNotNull(m1);
                // first delivery, so not yet flagged as redelivered
                assertFalse(m1.isRedelivered());
            }
            // Close the consumer without sending any ACKS.
            connection1.send(closeConsumerInfo(consumerInfo1));
            // Drain any in flight messages..
            while (connection1.getDispatchQueue().poll(0, TimeUnit.MILLISECONDS) != null) {
            }
            // Add the second consumer
            ConsumerInfo consumerInfo2 = createConsumerInfo(sessionInfo1, destination);
            consumerInfo2.setPrefetchSize(100);
            connection1.send(consumerInfo2);
            // Make sure the messages were re delivered to the 2nd consumer.
            for (int i = 0; i < 4; i++) {
                Message m1 = receiveMessage(connection1);
                assertNotNull(m1);
                assertTrue(m1.isRedelivered());
            }
        }
public void initCombosForTestQueueBrowseMessages() {
addCombinationValues("deliveryMode", new Object[] {Integer.valueOf(DeliveryMode.NON_PERSISTENT),
Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("destinationType",
new Object[] {Byte.valueOf(ActiveMQDestination.QUEUE_TYPE),
Byte.valueOf(ActiveMQDestination.TEMP_QUEUE_TYPE)});
}
        /**
         * Queue browsing: a browser consumer must see all queued messages without
         * consuming them (DELIVERED acks only, no STANDARD acks).
         */
        public void testQueueBrowseMessages() throws Exception {
            // Start a producer and consumer
            StubConnection connection = createConnection();
            ConnectionInfo connectionInfo = createConnectionInfo();
            SessionInfo sessionInfo = createSessionInfo(connectionInfo);
            ProducerInfo producerInfo = createProducerInfo(sessionInfo);
            connection.send(connectionInfo);
            connection.send(sessionInfo);
            connection.send(producerInfo);
            destination = createDestinationInfo(connection, connectionInfo, destinationType);
            connection.send(createMessage(producerInfo, destination, deliveryMode));
            connection.send(createMessage(producerInfo, destination, deliveryMode));
            connection.send(createMessage(producerInfo, destination, deliveryMode));
            connection.send(createMessage(producerInfo, destination, deliveryMode));
            // Browse the queue with a browser consumer (non-destructive).
            ConsumerInfo consumerInfo = createConsumerInfo(sessionInfo, destination);
            consumerInfo.setBrowser(true);
            connection.send(consumerInfo);
            for (int i = 0; i < 4; i++) {
                Message m = receiveMessage(connection);
                assertNotNull(m);
                connection.send(createAck(consumerInfo, m, 1, MessageAck.DELIVERED_ACK_TYPE));
            }
            assertNoMessagesLeft(connection);
        }
public void initCombosForTestQueueSendThenAddConsumer() {
addCombinationValues("deliveryMode", new Object[] {Integer.valueOf(DeliveryMode.NON_PERSISTENT),
Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("destinationType",
new Object[] {Byte.valueOf(ActiveMQDestination.QUEUE_TYPE),
Byte.valueOf(ActiveMQDestination.TEMP_QUEUE_TYPE)});
}
        /**
         * Queue retention: a message sent before any consumer exists is delivered
         * once a consumer is later added.
         */
        public void testQueueSendThenAddConsumer() throws Exception {
            // Start a producer
            StubConnection connection = createConnection();
            ConnectionInfo connectionInfo = createConnectionInfo();
            SessionInfo sessionInfo = createSessionInfo(connectionInfo);
            ProducerInfo producerInfo = createProducerInfo(sessionInfo);
            connection.send(connectionInfo);
            connection.send(sessionInfo);
            connection.send(producerInfo);
            destination = createDestinationInfo(connection, connectionInfo, destinationType);
            // Send a message to the broker.
            connection.send(createMessage(producerInfo, destination, deliveryMode));
            // Start the consumer
            ConsumerInfo consumerInfo = createConsumerInfo(sessionInfo, destination);
            connection.send(consumerInfo);
            // Make sure the message was delivered.
            Message m = receiveMessage(connection);
            assertNotNull(m);
        }
public void initCombosForTestQueueAckRemovesMessage() {
addCombinationValues("deliveryMode", new Object[] {Integer.valueOf(DeliveryMode.NON_PERSISTENT),
Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("destinationType",
new Object[] {Byte.valueOf(ActiveMQDestination.QUEUE_TYPE),
Byte.valueOf(ActiveMQDestination.TEMP_QUEUE_TYPE)});
}
public void testQueueAckRemovesMessage() throws Exception {
// Start a producer and consumer
StubConnection connection = createConnection();
ConnectionInfo connectionInfo = createConnectionInfo();
SessionInfo sessionInfo = createSessionInfo(connectionInfo);
ProducerInfo producerInfo = createProducerInfo(sessionInfo);
connection.send(connectionInfo);
connection.send(sessionInfo);
connection.send(producerInfo);
destination = createDestinationInfo(connection, connectionInfo, destinationType);
Message message1 = createMessage(producerInfo, destination, deliveryMode);
Message message2 = createMessage(producerInfo, destination, deliveryMode);
connection.send(message1);
connection.send(message2);
// Make sure the message was delivered.
ConsumerInfo consumerInfo = createConsumerInfo(sessionInfo, destination);
connection.request(consumerInfo);
Message m = receiveMessage(connection);
assertNotNull(m);
assertEquals(m.getMessageId(), message1.getMessageId());
assertTrue(countMessagesInQueue(connection, connectionInfo, destination) == 2);
connection.send(createAck(consumerInfo, m, 1, MessageAck.DELIVERED_ACK_TYPE));
assertTrue(countMessagesInQueue(connection, connectionInfo, destination) == 2);
connection.send(createAck(consumerInfo, m, 1, MessageAck.STANDARD_ACK_TYPE));
assertTrue(countMessagesInQueue(connection, connectionInfo, destination) == 1);
}
public void initCombosForTestSelectorSkipsMessages() {
addCombinationValues("destination", new Object[] {new ActiveMQTopic("TEST_TOPIC"),
new ActiveMQQueue("TEST_QUEUE")});
addCombinationValues("destinationType",
new Object[] {Byte.valueOf(ActiveMQDestination.QUEUE_TYPE),
Byte.valueOf(ActiveMQDestination.TOPIC_TYPE),
Byte.valueOf(ActiveMQDestination.TEMP_QUEUE_TYPE),
Byte.valueOf(ActiveMQDestination.TEMP_TOPIC_TYPE)});
}
        /**
         * Selector filtering: a consumer with selector {@code JMSType='last'}
         * must skip non-matching messages and receive only the matching one.
         */
        public void testSelectorSkipsMessages() throws Exception {
            // Start a producer and consumer
            StubConnection connection = createConnection();
            ConnectionInfo connectionInfo = createConnectionInfo();
            SessionInfo sessionInfo = createSessionInfo(connectionInfo);
            ProducerInfo producerInfo = createProducerInfo(sessionInfo);
            connection.send(connectionInfo);
            connection.send(sessionInfo);
            connection.send(producerInfo);
            destination = createDestinationInfo(connection, connectionInfo, destinationType);
            ConsumerInfo consumerInfo = createConsumerInfo(sessionInfo, destination);
            consumerInfo.setSelector("JMSType='last'");
            connection.send(consumerInfo);
            Message message1 = createMessage(producerInfo, destination, deliveryMode);
            message1.setType("first");
            Message message2 = createMessage(producerInfo, destination, deliveryMode);
            message2.setType("last");
            connection.send(message1);
            connection.send(message2);
            // Use selector to skip first message.
            Message m = receiveMessage(connection);
            assertNotNull(m);
            assertEquals(m.getMessageId(), message2.getMessageId());
            connection.send(createAck(consumerInfo, m, 1, MessageAck.STANDARD_ACK_TYPE));
            connection.send(closeConsumerInfo(consumerInfo));
            assertNoMessagesLeft(connection);
        }
public void initCombosForTestAddConsumerThenSend() {
addCombinationValues("deliveryMode", new Object[] {Integer.valueOf(DeliveryMode.NON_PERSISTENT),
Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("destinationType",
new Object[] {Byte.valueOf(ActiveMQDestination.QUEUE_TYPE),
Byte.valueOf(ActiveMQDestination.TOPIC_TYPE),
Byte.valueOf(ActiveMQDestination.TEMP_QUEUE_TYPE),
Byte.valueOf(ActiveMQDestination.TEMP_TOPIC_TYPE)});
}
        /**
         * Basic dispatch: a message sent after a consumer subscribes is delivered
         * to that consumer.
         */
        public void testAddConsumerThenSend() throws Exception {
            // Start a producer and consumer
            StubConnection connection = createConnection();
            ConnectionInfo connectionInfo = createConnectionInfo();
            SessionInfo sessionInfo = createSessionInfo(connectionInfo);
            ProducerInfo producerInfo = createProducerInfo(sessionInfo);
            connection.send(connectionInfo);
            connection.send(sessionInfo);
            connection.send(producerInfo);
            destination = createDestinationInfo(connection, connectionInfo, destinationType);
            ConsumerInfo consumerInfo = createConsumerInfo(sessionInfo, destination);
            connection.send(consumerInfo);
            connection.send(createMessage(producerInfo, destination, deliveryMode));
            // Make sure the message was delivered.
            Message m = receiveMessage(connection);
            assertNotNull(m);
        }
public void initCombosForTestConsumerPrefetchAtOne() {
addCombinationValues("deliveryMode", new Object[] {Integer.valueOf(DeliveryMode.NON_PERSISTENT),
Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("destinationType",
new Object[] {Byte.valueOf(ActiveMQDestination.QUEUE_TYPE),
Byte.valueOf(ActiveMQDestination.TOPIC_TYPE),
Byte.valueOf(ActiveMQDestination.TEMP_QUEUE_TYPE),
Byte.valueOf(ActiveMQDestination.TEMP_TOPIC_TYPE)});
}
        /**
         * Prefetch window of 1: with two messages queued, the broker dispatches
         * only one message until it is acknowledged.
         */
        public void testConsumerPrefetchAtOne() throws Exception {
            // Start a producer and consumer
            StubConnection connection = createConnection();
            ConnectionInfo connectionInfo = createConnectionInfo();
            SessionInfo sessionInfo = createSessionInfo(connectionInfo);
            ProducerInfo producerInfo = createProducerInfo(sessionInfo);
            connection.send(connectionInfo);
            connection.send(sessionInfo);
            connection.send(producerInfo);
            destination = createDestinationInfo(connection, connectionInfo, destinationType);
            ConsumerInfo consumerInfo = createConsumerInfo(sessionInfo, destination);
            consumerInfo.setPrefetchSize(1);
            connection.send(consumerInfo);
            // Send 2 messages to the broker.
            connection.send(createMessage(producerInfo, destination, deliveryMode));
            connection.send(createMessage(producerInfo, destination, deliveryMode));
            // Make sure only 1 message was delivered.
            Message m = receiveMessage(connection);
            assertNotNull(m);
            assertNoMessagesLeft(connection);
        }
public void initCombosForTestConsumerPrefetchAtTwo() {
addCombinationValues("deliveryMode", new Object[] {Integer.valueOf(DeliveryMode.NON_PERSISTENT),
Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("destinationType",
new Object[] {Byte.valueOf(ActiveMQDestination.QUEUE_TYPE),
Byte.valueOf(ActiveMQDestination.TOPIC_TYPE),
Byte.valueOf(ActiveMQDestination.TEMP_QUEUE_TYPE),
Byte.valueOf(ActiveMQDestination.TEMP_TOPIC_TYPE)});
}
        /**
         * Prefetch window of 2: with three messages queued, the broker dispatches
         * only two messages until one is acknowledged.
         */
        public void testConsumerPrefetchAtTwo() throws Exception {
            // Start a producer and consumer
            StubConnection connection = createConnection();
            ConnectionInfo connectionInfo = createConnectionInfo();
            SessionInfo sessionInfo = createSessionInfo(connectionInfo);
            ProducerInfo producerInfo = createProducerInfo(sessionInfo);
            connection.send(connectionInfo);
            connection.send(sessionInfo);
            connection.send(producerInfo);
            destination = createDestinationInfo(connection, connectionInfo, destinationType);
            ConsumerInfo consumerInfo = createConsumerInfo(sessionInfo, destination);
            consumerInfo.setPrefetchSize(2);
            connection.send(consumerInfo);
            // Send 3 messages to the broker.
            connection.send(createMessage(producerInfo, destination, deliveryMode));
            connection.send(createMessage(producerInfo, destination, deliveryMode));
            connection.send(createMessage(producerInfo, destination, deliveryMode));
            // Make sure only 2 messages were delivered (the prefetch window).
            Message m = receiveMessage(connection);
            assertNotNull(m);
            m = receiveMessage(connection);
            assertNotNull(m);
            assertNoMessagesLeft(connection);
        }
/** Runs the delivered-ack test across both delivery modes and all four destination types. */
public void initCombosForTestConsumerPrefetchAndDeliveredAck() {
    addCombinationValues("deliveryMode",
        new Object[] {DeliveryMode.NON_PERSISTENT, DeliveryMode.PERSISTENT});
    addCombinationValues("destinationType", new Object[] {
        ActiveMQDestination.QUEUE_TYPE,
        ActiveMQDestination.TOPIC_TYPE,
        ActiveMQDestination.TEMP_QUEUE_TYPE,
        ActiveMQDestination.TEMP_TOPIC_TYPE});
}
/**
 * With a prefetch window of one, acknowledging a message with a DELIVERED ack
 * should open the window so the next of the three queued messages dispatches.
 */
public void testConsumerPrefetchAndDeliveredAck() throws Exception {
// Start a producer and consumer
StubConnection connection = createConnection();
ConnectionInfo connectionInfo = createConnectionInfo();
SessionInfo sessionInfo = createSessionInfo(connectionInfo);
ProducerInfo producerInfo = createProducerInfo(sessionInfo);
connection.send(connectionInfo);
connection.send(sessionInfo);
connection.send(producerInfo);
destination = createDestinationInfo(connection, connectionInfo, destinationType);
ConsumerInfo consumerInfo = createConsumerInfo(sessionInfo, destination);
consumerInfo.setPrefetchSize(1);
connection.request(consumerInfo);
// Send 3 messages to the broker; request() on the last one blocks until the
// broker has processed all three, so they are queued before we receive.
connection.send(createMessage(producerInfo, destination, deliveryMode));
connection.send(createMessage(producerInfo, destination, deliveryMode));
connection.request(createMessage(producerInfo, destination, deliveryMode));
// Make sure only 1 message was delivered.
Message m1 = receiveMessage(connection);
assertNotNull(m1);
assertNoMessagesLeft(connection);
// Acknowledge the first message. This should cause the next message to
// get dispatched.
connection.request(createAck(consumerInfo, m1, 1, MessageAck.DELIVERED_ACK_TYPE));
Message m2 = receiveMessage(connection);
assertNotNull(m2);
connection.request(createAck(consumerInfo, m2, 1, MessageAck.DELIVERED_ACK_TYPE));
Message m3 = receiveMessage(connection);
assertNotNull(m3);
connection.request(createAck(consumerInfo, m3, 1, MessageAck.DELIVERED_ACK_TYPE));
}
/** The broker should report at least one attached service. */
public void testGetServices() throws Exception {
    assertTrue(broker.getServices().length > 0);
}
/** Builds the combination test suite for {@link BrokerTest}. */
public static Test suite() {
return suite(BrokerTest.class);
}
/** Command-line entry point: runs the suite with the text UI test runner. */
public static void main(String[] args) {
junit.textui.TestRunner.run(suite());
}
}
| |
/*
* Copyright 2015 Adaptris Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.adaptris.core;
import java.util.UUID;
import com.adaptris.core.event.AdapterCloseEvent;
import com.adaptris.core.jms.JmsConnection;
import com.adaptris.core.jms.PtpProducer;
import com.adaptris.core.jms.jndi.StandardJndiImplementation;
import com.adaptris.core.stubs.FailFirstMockMessageProducer;
import com.adaptris.core.stubs.MockMessageProducer;
import com.adaptris.core.util.LifecycleHelper;
public class DefaultEventHandlerTest extends ExampleEventHandlerCase {

    public DefaultEventHandlerTest(java.lang.String testName) {
        super(testName);
    }

    @Override
    protected void setUp() throws CoreException {
        // No shared fixture; each test builds and configures its own handler.
    }

    @Override
    protected void tearDown() throws Exception {
        // Nothing to tear down; tests close the handlers they start.
    }

    /** Creates an unconfigured handler carrying only the given unique id. */
    @Override
    protected DefaultEventHandler newEventHandler(String uniqueId) throws CoreException {
        DefaultEventHandler result = new DefaultEventHandler();
        result.setUniqueId(uniqueId);
        return result;
    }

    /**
     * Gives the handler a no-op connection and an in-memory mock producer so
     * tests can observe what was sent without any external broker.
     */
    @Override
    protected DefaultEventHandler applyConfiguration(EventHandler eh) throws CoreException {
        DefaultEventHandler eventHandler = (DefaultEventHandler) eh;
        eventHandler.setUniqueId(UUID.randomUUID().toString());
        eventHandler.setConnection(new NullConnection());
        eventHandler.setProducer(new MockMessageProducer());
        eventHandler.setMarshaller(DefaultMarshaller.getDefaultMarshaller());
        return eventHandler;
    }

    /** Null connection/producer arguments must be rejected outright. */
    public void testSetters() throws Exception {
        DefaultEventHandler eventHandler = newEventHandler("testSetters");
        try {
            eventHandler.setConnection(null);
            fail();
        }
        catch (IllegalArgumentException expected) {
        }
        try {
            eventHandler.setProducer(null);
            fail();
        }
        catch (IllegalArgumentException expected) {
        }
    }

    /**
     * A producer that fails its first attempt must not surface an error to the
     * caller; the event is simply not delivered (hence the expected count 0).
     */
    public void testSendEventWithException() throws Exception {
        Event e = EventFactory.create(AdapterCloseEvent.class);
        DefaultEventHandler eh = applyConfiguration(newEventHandler("testSendEventWithException"));
        eh.setProducer(new FailFirstMockMessageProducer());
        eh.requestStart();
        eh.send(e);
        eh.requestClose();
        doAssertions(eh, 0, e.getClass());
    }

    /** Getters must return the same (and roundtrip-equal) components after a full lifecycle. */
    public void testGettersWhenClosed() throws Exception {
        DefaultEventHandler input = applyConfiguration(newEventHandler("testGettersWhenClosed"));
        AdaptrisMessageProducer p1 = input.getProducer();
        AdaptrisConnection con1 = input.getConnection();
        input.requestStart();
        input.requestClose();
        AdaptrisMessageProducer p2 = input.getProducer();
        AdaptrisConnection con2 = input.getConnection();
        assertEquals("Producers when closed", p1, p2);
        assertEquals("Connections when closed", con1, con2);
        assertRoundtripEquality(p1, p2);
        assertRoundtripEquality(con1, con2);
    }

    /** Once closed, re-applying the original components is allowed again. */
    public void testSettersWhenClosed() throws Exception {
        DefaultEventHandler input = applyConfiguration(newEventHandler("testSettersWhenClosed"));
        AdaptrisMessageProducer p1 = input.getProducer();
        AdaptrisConnection con1 = input.getConnection();
        input.requestStart();
        input.requestClose();
        input.setConnection(con1);
        input.setProducer(p1);
    }

    public void testSettersWhenInitialised() throws Exception {
        DefaultEventHandler input = applyConfiguration(newEventHandler("testSettersWhenInitialised"));
        AdaptrisMessageProducer p1 = input.getProducer();
        AdaptrisConnection con1 = input.getConnection();
        input.requestInit();
        assertSettersRejected(input, con1, p1, "initialised");
    }

    public void testSettersWhenStarted() throws Exception {
        DefaultEventHandler input = applyConfiguration(newEventHandler("testSettersWhenStarted"));
        AdaptrisMessageProducer p1 = input.getProducer();
        AdaptrisConnection con1 = input.getConnection();
        input.requestStart();
        assertSettersRejected(input, con1, p1, "started");
    }

    public void testSettersWhenStopped() throws Exception {
        DefaultEventHandler input = applyConfiguration(newEventHandler("testSettersWhenStopped"));
        AdaptrisMessageProducer p1 = input.getProducer();
        AdaptrisConnection con1 = input.getConnection();
        input.requestStart();
        input.requestStop();
        assertSettersRejected(input, con1, p1, "stopped");
    }

    /**
     * Shared assertion: while the handler is in the given lifecycle state,
     * neither the connection nor the producer may be replaced.
     */
    private void assertSettersRejected(DefaultEventHandler input, AdaptrisConnection con,
                                       AdaptrisMessageProducer p, String state) {
        try {
            input.setConnection(con);
            fail("Should not be able to reset connection when " + state);
        }
        catch (IllegalStateException expected) {
        }
        try {
            input.setProducer(p);
            fail("Should not be able to reset producer when " + state);
        }
        catch (IllegalStateException expected) {
        }
    }

    /** Back references (exception listeners) are only registered at init time. */
    public void testBackReferences() throws Exception {
        DefaultEventHandler evh = new DefaultEventHandler();
        NullConnection conn = new NullConnection();
        evh.setConnection(conn);
        assertEquals(conn, evh.getConnection());
        // Back references don't exist until "init".
        try {
            LifecycleHelper.init(evh);
            assertEquals(1, conn.retrieveExceptionListeners().size());
            assertTrue(evh == conn.retrieveExceptionListeners().toArray()[0]);
        }
        finally {
            LifecycleHelper.close(evh);
        }
    }

    /**
     * @see com.adaptris.core.ExampleConfigCase#retrieveObjectForSampleConfig()
     */
    @Override
    protected Object retrieveObjectForSampleConfig() {
        Adapter result = null;
        try {
            AdaptrisMessageProducer p = new PtpProducer();
            p.setDestination(new ConfiguredProduceDestination("publishEventsTo"));
            DefaultEventHandler eh = new DefaultEventHandler();
            eh.setConnection(new JmsConnection(new StandardJndiImplementation("MyConnectionFactoryName")));
            eh.setProducer(p);
            eh.setMarshaller(DefaultMarshaller.getDefaultMarshaller());
            result = new Adapter();
            result.setChannelList(new ChannelList());
            result.setEventHandler(eh);
            result.setUniqueId(UUID.randomUUID().toString());
        }
        catch (Exception e) {
            throw new RuntimeException(e);
        }
        return result;
    }
}
| |
package com.androidsdk.snaphy.snaphyandroidsdk.repository;
import com.google.common.collect.ImmutableMap;
/*
Replacing with custom Snaphy callback methods
import com.strongloop.android.loopback.callbacks.ListCallback;
import com.strongloop.android.loopback.callbacks.ObjectCallback;
import com.strongloop.android.loopback.callbacks.VoidCallback;
*/
import com.androidsdk.snaphy.snaphyandroidsdk.callbacks.ObjectCallback;
import com.androidsdk.snaphy.snaphyandroidsdk.callbacks.DataListCallback;
import com.androidsdk.snaphy.snaphyandroidsdk.callbacks.VoidCallback;
import com.androidsdk.snaphy.snaphyandroidsdk.list.DataList;
import com.androidsdk.snaphy.snaphyandroidsdk.list.Util;
import com.strongloop.android.remoting.adapters.Adapter;
import com.strongloop.android.remoting.adapters.RestContract;
import com.strongloop.android.remoting.adapters.RestContractItem;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.lang.reflect.Method;
import android.util.Log;
import android.content.ContentValues;
import android.content.pm.PackageManager;
import android.content.Context;
import android.content.pm.ApplicationInfo;
//Replaced by Custom ModelRepository method
//import com.strongloop.android.loopback.ModelRepository;
import org.json.JSONArray;
import org.json.JSONObject;
//Import its models too.
import com.androidsdk.snaphy.snaphyandroidsdk.models.Order;
import android.content.Context;
import com.androidsdk.snaphy.snaphyandroidsdk.db.OrderDb;
//Now import model of related models..
import com.androidsdk.snaphy.snaphyandroidsdk.models.Product;
import com.androidsdk.snaphy.snaphyandroidsdk.repository.ProductRepository;
import com.androidsdk.snaphy.snaphyandroidsdk.models.ProductOrder;
import com.androidsdk.snaphy.snaphyandroidsdk.repository.ProductOrderRepository;
import com.androidsdk.snaphy.snaphyandroidsdk.models.Delivery;
import com.androidsdk.snaphy.snaphyandroidsdk.repository.DeliveryRepository;
import com.androidsdk.snaphy.snaphyandroidsdk.models.Invoice;
import com.androidsdk.snaphy.snaphyandroidsdk.repository.InvoiceRepository;
import com.androidsdk.snaphy.snaphyandroidsdk.models.Customer;
import com.androidsdk.snaphy.snaphyandroidsdk.repository.CustomerRepository;
public class OrderRepository extends ModelRepository<Order> {
private Context context;
private String METADATA_DATABASE_NAME_KEY = "snaphy.database.name";
private static String DATABASE_NAME;
/** Builds a repository bound to the remote "Order" model. */
public OrderRepository(){
super("Order", null, Order.class);
}
/** @return the Android context supplied via {@link #addStorage}, or null if storage was never attached. */
public Context getContext(){
return context;
}
/** @return the local DB helper; null until {@link #addStorage} has been called. */
public OrderDb getDb() {
return orderDb;
}
/** Injects the local DB helper used for persisting fetched models. */
public void setOrderDb(OrderDb orderDb) {
this.orderDb = orderDb;
}
private OrderDb orderDb;
//Flag to check either to store data locally or not..
private boolean STORE_LOCALLY = true;
/** @return true when fetched models should also be written to the local DB. */
public boolean isSTORE_LOCALLY() {
return STORE_LOCALLY;
}
/** Turns local DB persistence of fetched models on or off. */
public void persistData(boolean enable){
    this.STORE_LOCALLY = enable;
}
/** Clears the local Order table, provided local persistence is active. */
public void reset__db(){
    if (!isSTORE_LOCALLY()) {
        return;
    }
    getDb().reset__db();
}
/**
 * Wires this repository to a local SQLite-backed store. The database name is
 * read from the manifest meta-data key "snaphy.database.name", after which
 * local persistence is switched on.
 *
 * @param context context used to resolve meta-data and open the database
 */
public void addStorage(Context context){
try{
ApplicationInfo ai = context.getPackageManager().getApplicationInfo(context.getPackageName(), PackageManager.GET_META_DATA);
// NOTE(review): DATABASE_NAME is static but assigned per instance — last caller wins; confirm intended.
DATABASE_NAME = (String) ai.metaData.get(METADATA_DATABASE_NAME_KEY);
}
catch (Exception e){
// Broad catch also covers a null metaData bundle; the DB below is then created with a null name.
Log.e("Snaphy", e.toString());
}
setOrderDb(new OrderDb(context, DATABASE_NAME, getRestAdapter()));
//allow data storage locally..
persistData(true);
this.context = context;
}
/**
 * Extends the base REST contract with the Order model's relation endpoints
 * (customer, invoices, deliveries, products) plus the standard CRUD and
 * schema routes.
 *
 * Fix: the "Order.create" item was previously registered twice; the duplicate
 * registration has been removed.
 *
 * @return the populated contract for this repository
 */
public RestContract createContract() {
RestContract contract = super.createContract();
// belongsTo customer
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/customer", "GET"), "Order.prototype.__get__customer");
// hasOne invoices
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/invoices", "GET"), "Order.prototype.__get__invoices");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/invoices", "POST"), "Order.prototype.__create__invoices");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/invoices", "PUT"), "Order.prototype.__update__invoices");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/invoices", "DELETE"), "Order.prototype.__destroy__invoices");
// hasOne deliveries
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/deliveries", "GET"), "Order.prototype.__get__deliveries");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/deliveries", "POST"), "Order.prototype.__create__deliveries");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/deliveries", "PUT"), "Order.prototype.__update__deliveries");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/deliveries", "DELETE"), "Order.prototype.__destroy__deliveries");
// hasMany products (by-id, link/unlink, bulk and count routes)
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/products/:fk", "GET"), "Order.prototype.__findById__products");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/products/:fk", "DELETE"), "Order.prototype.__destroyById__products");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/products/:fk", "PUT"), "Order.prototype.__updateById__products");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/products/rel/:fk", "PUT"), "Order.prototype.__link__products");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/products/rel/:fk", "DELETE"), "Order.prototype.__unlink__products");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/products/rel/:fk", "HEAD"), "Order.prototype.__exists__products");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/products", "GET"), "Order.prototype.__get__products");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/products", "POST"), "Order.prototype.__create__products");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/products", "DELETE"), "Order.prototype.__delete__products");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId/products/count", "GET"), "Order.prototype.__count__products");
// standard model CRUD
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/", "POST"), "Order.create");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/", "PUT"), "Order.upsert");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:id/exists", "GET"), "Order.exists");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:id", "GET"), "Order.findById");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/", "GET"), "Order.find");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/findOne", "GET"), "Order.findOne");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/update", "POST"), "Order.updateAll");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:id", "DELETE"), "Order.deleteById");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/count", "GET"), "Order.count");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/:orderId", "PUT"), "Order.prototype.updateAttributes");
// schema helpers
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/getSchema", "POST"), "Order.getSchema");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/getAbsoluteSchema", "POST"), "Order.getAbsoluteSchema");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/getDetailSchema", "POST"), "Order.getDetailSchema");
contract.addItem(new RestContractItem("/" + getNameForRestUrl() + "/getModelRelationSchema", "POST"), "Order.getModelRelationSchema");
return contract;
}
//override getNameForRestUrlMethod
/**
 * Redundant override kept for generated-code symmetry; simply delegates to
 * the parent implementation.
 */
public String getNameForRestUrl() {
//call super method instead..
return super.getNameForRestUrl();
}
//Method get__customer definition
/**
 * Fetches the Customer related to the given order via the remote
 * "Order.prototype.__get__customer" endpoint. When local persistence is
 * enabled, the result is also saved to the local DB.
 *
 * @param orderId  id of the order whose customer is fetched
 * @param refresh  forwarded to the server as the "refresh" argument
 * @param callback receives the created Customer, or null when the server
 *                 returned no body; onBefore fires first and onFinally last
 */
public void get__customer( String orderId, Boolean refresh, final ObjectCallback<Customer> callback){
/**
Call the onBefore event
*/
callback.onBefore();
//Defining hashMap for data conversion
Map<String, Object> hashMapObject = new HashMap<>();
//Now add the arguments...
hashMapObject.put("orderId", orderId);
hashMapObject.put("refresh", refresh);
invokeStaticMethod("prototype.__get__customer", hashMapObject, new Adapter.JsonObjectCallback() {
@Override
public void onError(Throwable t) {
callback.onError(t);
//Call the finally method..
callback.onFinally();
}
@Override
public void onSuccess(JSONObject response) {
try{
if(response != null){
CustomerRepository customerRepo = getRestAdapter().createRepository(CustomerRepository.class);
if(context != null){
// Reflection avoids a compile-time dependency on addStorage(Context) — TODO confirm
try {
Method method = customerRepo.getClass().getMethod("addStorage", Context.class);
method.invoke(customerRepo, context);
} catch (Exception e) {
Log.e("Database Error", e.toString());
}
//customerRepo.addStorage(context);
}
Map<String, Object> result = Util.fromJson(response);
Customer customer = customerRepo.createObject(result);
//Add to database if persistent storage required..
if(isSTORE_LOCALLY()){
//http://stackoverflow.com/questions/160970/how-do-i-invoke-a-java-method-when-given-the-method-name-as-a-string
try {
Method method = customer.getClass().getMethod("save__db");
method.invoke(customer);
} catch (Exception e) {
Log.e("Database Error", e.toString());
}
}
callback.onSuccess(customer);
}else{
callback.onSuccess(null);
}
}catch(Exception e){
Log.e("Snaphy", e.toString());
}
//Call the finally method..
callback.onFinally();
}
});
}//Method get__customer definition ends here..
//Method get__invoices definition
/**
 * Fetches the Invoice attached to the given order via the remote
 * "Order.prototype.__get__invoices" endpoint. When local persistence is
 * enabled, the result is also saved to the local DB.
 *
 * @param orderId  id of the order whose invoice is fetched
 * @param refresh  forwarded to the server as the "refresh" argument
 * @param callback receives the created Invoice, or null when the server
 *                 returned no body; onBefore fires first and onFinally last
 */
public void get__invoices( String orderId, Boolean refresh, final ObjectCallback<Invoice> callback){
/**
Call the onBefore event
*/
callback.onBefore();
//Defining hashMap for data conversion
Map<String, Object> hashMapObject = new HashMap<>();
//Now add the arguments...
hashMapObject.put("orderId", orderId);
hashMapObject.put("refresh", refresh);
invokeStaticMethod("prototype.__get__invoices", hashMapObject, new Adapter.JsonObjectCallback() {
@Override
public void onError(Throwable t) {
callback.onError(t);
//Call the finally method..
callback.onFinally();
}
@Override
public void onSuccess(JSONObject response) {
try{
if(response != null){
InvoiceRepository invoiceRepo = getRestAdapter().createRepository(InvoiceRepository.class);
if(context != null){
try {
Method method = invoiceRepo.getClass().getMethod("addStorage", Context.class);
method.invoke(invoiceRepo, context);
} catch (Exception e) {
Log.e("Database Error", e.toString());
}
//invoiceRepo.addStorage(context);
}
Map<String, Object> result = Util.fromJson(response);
Invoice invoice = invoiceRepo.createObject(result);
//Add to database if persistent storage required..
if(isSTORE_LOCALLY()){
//http://stackoverflow.com/questions/160970/how-do-i-invoke-a-java-method-when-given-the-method-name-as-a-string
try {
Method method = invoice.getClass().getMethod("save__db");
method.invoke(invoice);
} catch (Exception e) {
Log.e("Database Error", e.toString());
}
}
callback.onSuccess(invoice);
}else{
callback.onSuccess(null);
}
}catch(Exception e){
Log.e("Snaphy", e.toString());
}
//Call the finally method..
callback.onFinally();
}
});
}//Method get__invoices definition ends here..
//Method create__invoices definition
/**
 * Creates an Invoice for the given order via the remote
 * "Order.prototype.__create__invoices" endpoint. When local persistence is
 * enabled, the result is also saved to the local DB.
 *
 * @param orderId  id of the owning order
 * @param data     invoice attributes, merged into the request arguments
 * @param callback receives the created Invoice, or null when the server
 *                 returned no body; onBefore fires first and onFinally last
 */
public void create__invoices( String orderId, Map<String, ? extends Object> data, final ObjectCallback<Invoice> callback){
/**
Call the onBefore event
*/
callback.onBefore();
//Defining hashMap for data conversion
Map<String, Object> hashMapObject = new HashMap<>();
//Now add the arguments...
hashMapObject.put("orderId", orderId);
hashMapObject.putAll(data);
invokeStaticMethod("prototype.__create__invoices", hashMapObject, new Adapter.JsonObjectCallback() {
@Override
public void onError(Throwable t) {
callback.onError(t);
//Call the finally method..
callback.onFinally();
}
@Override
public void onSuccess(JSONObject response) {
try{
if(response != null){
InvoiceRepository invoiceRepo = getRestAdapter().createRepository(InvoiceRepository.class);
if(context != null){
try {
Method method = invoiceRepo.getClass().getMethod("addStorage", Context.class);
method.invoke(invoiceRepo, context);
} catch (Exception e) {
Log.e("Database Error", e.toString());
}
//invoiceRepo.addStorage(context);
}
Map<String, Object> result = Util.fromJson(response);
Invoice invoice = invoiceRepo.createObject(result);
//Add to database if persistent storage required..
if(isSTORE_LOCALLY()){
//http://stackoverflow.com/questions/160970/how-do-i-invoke-a-java-method-when-given-the-method-name-as-a-string
try {
Method method = invoice.getClass().getMethod("save__db");
method.invoke(invoice);
} catch (Exception e) {
Log.e("Database Error", e.toString());
}
}
callback.onSuccess(invoice);
}else{
callback.onSuccess(null);
}
}catch(Exception e){
Log.e("Snaphy", e.toString());
}
//Call the finally method..
callback.onFinally();
}
});
}//Method create__invoices definition ends here..
//Method update__invoices definition
/**
 * Updates the Invoice of the given order via the remote
 * "Order.prototype.__update__invoices" endpoint. When local persistence is
 * enabled, the result is also saved to the local DB.
 *
 * @param orderId  id of the owning order
 * @param data     invoice attributes to change, merged into the request arguments
 * @param callback receives the updated Invoice, or null when the server
 *                 returned no body; onBefore fires first and onFinally last
 */
public void update__invoices( String orderId, Map<String, ? extends Object> data, final ObjectCallback<Invoice> callback){
/**
Call the onBefore event
*/
callback.onBefore();
//Defining hashMap for data conversion
Map<String, Object> hashMapObject = new HashMap<>();
//Now add the arguments...
hashMapObject.put("orderId", orderId);
hashMapObject.putAll(data);
invokeStaticMethod("prototype.__update__invoices", hashMapObject, new Adapter.JsonObjectCallback() {
@Override
public void onError(Throwable t) {
callback.onError(t);
//Call the finally method..
callback.onFinally();
}
@Override
public void onSuccess(JSONObject response) {
try{
if(response != null){
InvoiceRepository invoiceRepo = getRestAdapter().createRepository(InvoiceRepository.class);
if(context != null){
try {
Method method = invoiceRepo.getClass().getMethod("addStorage", Context.class);
method.invoke(invoiceRepo, context);
} catch (Exception e) {
Log.e("Database Error", e.toString());
}
//invoiceRepo.addStorage(context);
}
Map<String, Object> result = Util.fromJson(response);
Invoice invoice = invoiceRepo.createObject(result);
//Add to database if persistent storage required..
if(isSTORE_LOCALLY()){
//http://stackoverflow.com/questions/160970/how-do-i-invoke-a-java-method-when-given-the-method-name-as-a-string
try {
Method method = invoice.getClass().getMethod("save__db");
method.invoke(invoice);
} catch (Exception e) {
Log.e("Database Error", e.toString());
}
}
callback.onSuccess(invoice);
}else{
callback.onSuccess(null);
}
}catch(Exception e){
Log.e("Snaphy", e.toString());
}
//Call the finally method..
callback.onFinally();
}
});
}//Method update__invoices definition ends here..
//Method destroy__invoices definition
/**
 * Deletes the Invoice attached to the given order via the remote
 * "Order.prototype.__destroy__invoices" endpoint.
 *
 * @param orderId  id of the order whose invoice is removed
 * @param callback onBefore fires first; onSuccess/onError then onFinally follow
 */
public void destroy__invoices( String orderId, final VoidCallback callback){
    // Tell the caller the remote call is about to start.
    callback.onBefore();
    // Remote-method arguments, keyed by parameter name.
    Map<String, Object> params = new HashMap<>();
    params.put("orderId", orderId);
    invokeStaticMethod("prototype.__destroy__invoices", params, new Adapter.Callback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            callback.onFinally();
        }

        @Override
        public void onSuccess(String response) {
            callback.onSuccess();
            callback.onFinally();
        }
    });
}//Method destroy__invoices definition ends here..
//Method get__deliveries definition
/**
 * Fetches the Delivery attached to the given order via the remote
 * "Order.prototype.__get__deliveries" endpoint. When local persistence is
 * enabled, the result is also saved to the local DB.
 *
 * @param orderId  id of the order whose delivery is fetched
 * @param refresh  forwarded to the server as the "refresh" argument
 * @param callback receives the created Delivery, or null when the server
 *                 returned no body; onBefore fires first and onFinally last
 */
public void get__deliveries( String orderId, Boolean refresh, final ObjectCallback<Delivery> callback){
/**
Call the onBefore event
*/
callback.onBefore();
//Defining hashMap for data conversion
Map<String, Object> hashMapObject = new HashMap<>();
//Now add the arguments...
hashMapObject.put("orderId", orderId);
hashMapObject.put("refresh", refresh);
invokeStaticMethod("prototype.__get__deliveries", hashMapObject, new Adapter.JsonObjectCallback() {
@Override
public void onError(Throwable t) {
callback.onError(t);
//Call the finally method..
callback.onFinally();
}
@Override
public void onSuccess(JSONObject response) {
try{
if(response != null){
DeliveryRepository deliveryRepo = getRestAdapter().createRepository(DeliveryRepository.class);
if(context != null){
try {
Method method = deliveryRepo.getClass().getMethod("addStorage", Context.class);
method.invoke(deliveryRepo, context);
} catch (Exception e) {
Log.e("Database Error", e.toString());
}
//deliveryRepo.addStorage(context);
}
Map<String, Object> result = Util.fromJson(response);
Delivery delivery = deliveryRepo.createObject(result);
//Add to database if persistent storage required..
if(isSTORE_LOCALLY()){
//http://stackoverflow.com/questions/160970/how-do-i-invoke-a-java-method-when-given-the-method-name-as-a-string
try {
Method method = delivery.getClass().getMethod("save__db");
method.invoke(delivery);
} catch (Exception e) {
Log.e("Database Error", e.toString());
}
}
callback.onSuccess(delivery);
}else{
callback.onSuccess(null);
}
}catch(Exception e){
Log.e("Snaphy", e.toString());
}
//Call the finally method..
callback.onFinally();
}
});
}//Method get__deliveries definition ends here..
//Method create__deliveries definition
/**
 * Creates a Delivery for the given order via the remote
 * "Order.prototype.__create__deliveries" endpoint. When local persistence is
 * enabled, the result is also saved to the local DB.
 *
 * @param orderId  id of the owning order
 * @param data     delivery attributes, merged into the request arguments
 * @param callback receives the created Delivery, or null when the server
 *                 returned no body; onBefore fires first and onFinally last
 */
public void create__deliveries( String orderId, Map<String, ? extends Object> data, final ObjectCallback<Delivery> callback){
/**
Call the onBefore event
*/
callback.onBefore();
//Defining hashMap for data conversion
Map<String, Object> hashMapObject = new HashMap<>();
//Now add the arguments...
hashMapObject.put("orderId", orderId);
hashMapObject.putAll(data);
invokeStaticMethod("prototype.__create__deliveries", hashMapObject, new Adapter.JsonObjectCallback() {
@Override
public void onError(Throwable t) {
callback.onError(t);
//Call the finally method..
callback.onFinally();
}
@Override
public void onSuccess(JSONObject response) {
try{
if(response != null){
DeliveryRepository deliveryRepo = getRestAdapter().createRepository(DeliveryRepository.class);
if(context != null){
try {
Method method = deliveryRepo.getClass().getMethod("addStorage", Context.class);
method.invoke(deliveryRepo, context);
} catch (Exception e) {
Log.e("Database Error", e.toString());
}
//deliveryRepo.addStorage(context);
}
Map<String, Object> result = Util.fromJson(response);
Delivery delivery = deliveryRepo.createObject(result);
//Add to database if persistent storage required..
if(isSTORE_LOCALLY()){
//http://stackoverflow.com/questions/160970/how-do-i-invoke-a-java-method-when-given-the-method-name-as-a-string
try {
Method method = delivery.getClass().getMethod("save__db");
method.invoke(delivery);
} catch (Exception e) {
Log.e("Database Error", e.toString());
}
}
callback.onSuccess(delivery);
}else{
callback.onSuccess(null);
}
}catch(Exception e){
Log.e("Snaphy", e.toString());
}
//Call the finally method..
callback.onFinally();
}
});
}//Method create__deliveries definition ends here..
//Method update__deliveries definition
/**
 * Updates the Delivery of the given order via the remote
 * "Order.prototype.__update__deliveries" endpoint. When local persistence is
 * enabled, the result is also saved to the local DB.
 *
 * @param orderId  id of the owning order
 * @param data     delivery attributes to change, merged into the request arguments
 * @param callback receives the updated Delivery, or null when the server
 *                 returned no body; onBefore fires first and onFinally last
 */
public void update__deliveries( String orderId, Map<String, ? extends Object> data, final ObjectCallback<Delivery> callback){
/**
Call the onBefore event
*/
callback.onBefore();
//Defining hashMap for data conversion
Map<String, Object> hashMapObject = new HashMap<>();
//Now add the arguments...
hashMapObject.put("orderId", orderId);
hashMapObject.putAll(data);
invokeStaticMethod("prototype.__update__deliveries", hashMapObject, new Adapter.JsonObjectCallback() {
@Override
public void onError(Throwable t) {
callback.onError(t);
//Call the finally method..
callback.onFinally();
}
@Override
public void onSuccess(JSONObject response) {
try{
if(response != null){
DeliveryRepository deliveryRepo = getRestAdapter().createRepository(DeliveryRepository.class);
if(context != null){
try {
Method method = deliveryRepo.getClass().getMethod("addStorage", Context.class);
method.invoke(deliveryRepo, context);
} catch (Exception e) {
Log.e("Database Error", e.toString());
}
//deliveryRepo.addStorage(context);
}
Map<String, Object> result = Util.fromJson(response);
Delivery delivery = deliveryRepo.createObject(result);
//Add to database if persistent storage required..
if(isSTORE_LOCALLY()){
//http://stackoverflow.com/questions/160970/how-do-i-invoke-a-java-method-when-given-the-method-name-as-a-string
try {
Method method = delivery.getClass().getMethod("save__db");
method.invoke(delivery);
} catch (Exception e) {
Log.e("Database Error", e.toString());
}
}
callback.onSuccess(delivery);
}else{
callback.onSuccess(null);
}
}catch(Exception e){
Log.e("Snaphy", e.toString());
}
//Call the finally method..
callback.onFinally();
}
});
}//Method update__deliveries definition ends here..
//Method destroy__deliveries definition
/**
 * Deletes the Delivery attached to the given order via the remote
 * "Order.prototype.__destroy__deliveries" endpoint.
 *
 * @param orderId  id of the order whose delivery is removed
 * @param callback onBefore fires first; onSuccess/onError then onFinally follow
 */
public void destroy__deliveries( String orderId, final VoidCallback callback){
    // Tell the caller the remote call is about to start.
    callback.onBefore();
    // Remote-method arguments, keyed by parameter name.
    Map<String, Object> params = new HashMap<>();
    params.put("orderId", orderId);
    invokeStaticMethod("prototype.__destroy__deliveries", params, new Adapter.Callback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            callback.onFinally();
        }

        @Override
        public void onSuccess(String response) {
            callback.onSuccess();
            callback.onFinally();
        }
    });
}//Method destroy__deliveries definition ends here..
//Method findById__products definition
/**
 * Fetches a single related Product of the given order by its foreign key via
 * the remote "Order.prototype.__findById__products" endpoint. When local
 * persistence is enabled, the result is also saved to the local DB.
 *
 * @param orderId  id of the owning order
 * @param fk       foreign key of the product within the relation
 * @param callback receives the created Product, or null when the server
 *                 returned no body; onBefore fires first and onFinally last
 */
public void findById__products( String orderId, String fk, final ObjectCallback<Product> callback){
/**
Call the onBefore event
*/
callback.onBefore();
//Defining hashMap for data conversion
Map<String, Object> hashMapObject = new HashMap<>();
//Now add the arguments...
hashMapObject.put("orderId", orderId);
hashMapObject.put("fk", fk);
invokeStaticMethod("prototype.__findById__products", hashMapObject, new Adapter.JsonObjectCallback() {
@Override
public void onError(Throwable t) {
callback.onError(t);
//Call the finally method..
callback.onFinally();
}
@Override
public void onSuccess(JSONObject response) {
try{
if(response != null){
ProductRepository productRepo = getRestAdapter().createRepository(ProductRepository.class);
if(context != null){
try {
Method method = productRepo.getClass().getMethod("addStorage", Context.class);
method.invoke(productRepo, context);
} catch (Exception e) {
Log.e("Database Error", e.toString());
}
//productRepo.addStorage(context);
}
Map<String, Object> result = Util.fromJson(response);
Product product = productRepo.createObject(result);
//Add to database if persistent storage required..
if(isSTORE_LOCALLY()){
//http://stackoverflow.com/questions/160970/how-do-i-invoke-a-java-method-when-given-the-method-name-as-a-string
try {
Method method = product.getClass().getMethod("save__db");
method.invoke(product);
} catch (Exception e) {
Log.e("Database Error", e.toString());
}
}
callback.onSuccess(product);
}else{
callback.onSuccess(null);
}
}catch(Exception e){
Log.e("Snaphy", e.toString());
}
//Call the finally method..
callback.onFinally();
}
});
}//Method findById__products definition ends here..
//Method destroyById__products definition
/**
 * Removes the related Product with the given foreign key from the Order on the server.
 *
 * @param orderId  id of the owning Order instance
 * @param fk       foreign key of the Product to destroy
 * @param callback lifecycle callback: onBefore -> (onSuccess | onError) -> onFinally
 */
public void destroyById__products( String orderId, String fk, final VoidCallback callback){
    // Fire the pre-request hook first.
    callback.onBefore();
    // Gather the remote-method arguments.
    Map<String, Object> params = new HashMap<>();
    params.put("orderId", orderId);
    params.put("fk", fk);
    invokeStaticMethod("prototype.__destroyById__products", params, new Adapter.Callback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // The completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(String response) {
            callback.onSuccess();
            // The completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method updateById__products definition
/**
 * Updates a related Product of an Order (selected by foreign key) with the given
 * data via the remote "prototype.__updateById__products" method.
 *
 * Flow: callback.onBefore() -> remote call -> onSuccess(product)/onError -> onFinally().
 * A null server response is reported as onSuccess(null).
 *
 * @param orderId  id of the owning Order instance
 * @param fk       foreign key of the Product to update
 * @param data     fields to update (merged into the request arguments)
 * @param callback receives the updated Product (persisted locally when STORE_LOCALLY)
 */
public void updateById__products( String orderId, String fk, Map<String, ? extends Object> data, final ObjectCallback<Product> callback){
    // Pre-request hook.
    callback.onBefore();
    // Remote-method arguments; update fields are flattened into the same map.
    Map<String, Object> hashMapObject = new HashMap<>();
    hashMapObject.put("orderId", orderId);
    hashMapObject.put("fk", fk);
    hashMapObject.putAll(data);
    invokeStaticMethod("prototype.__updateById__products", hashMapObject, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // Completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            Product product = null;
            boolean parsed = false;
            try {
                if (response != null) {
                    ProductRepository productRepo = getRestAdapter().createRepository(ProductRepository.class);
                    if (context != null) {
                        // Attach local-storage support via reflection (method is code-generated).
                        try {
                            Method method = productRepo.getClass().getMethod("addStorage", Context.class);
                            method.invoke(productRepo, context);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                    Map<String, Object> result = Util.fromJson(response);
                    product = productRepo.createObject(result);
                    // Persist locally when requested; failures here are non-fatal.
                    if (isSTORE_LOCALLY()) {
                        try {
                            Method method = product.getClass().getMethod("save__db");
                            method.invoke(product);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                }
                parsed = true;
            } catch (Exception e) {
                // FIX: parse failures were previously only logged, leaving the caller
                // with neither onSuccess nor onError; now the error is propagated.
                Log.e("Snaphy", e.toString());
                callback.onError(e);
            }
            if (parsed) {
                try {
                    callback.onSuccess(product);
                } catch (Exception e) {
                    // Guard against exceptions thrown by the user-supplied callback.
                    Log.e("Snaphy", e.toString());
                }
            }
            // Completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method link__products definition
/**
 * Links an existing Product to an Order via the remote "prototype.__link__products"
 * method, returning the through-model (ProductOrder) instance.
 *
 * Flow: callback.onBefore() -> remote call -> onSuccess(productOrder)/onError -> onFinally().
 * A null server response is reported as onSuccess(null).
 *
 * @param orderId  id of the owning Order instance
 * @param fk       foreign key of the Product to link
 * @param data     extra through-model fields (merged into the request arguments)
 * @param callback receives the created ProductOrder (persisted locally when STORE_LOCALLY)
 */
public void link__products( String orderId, String fk, Map<String, ? extends Object> data, final ObjectCallback<ProductOrder> callback){
    // Pre-request hook.
    callback.onBefore();
    // Remote-method arguments; extra fields are flattened into the same map.
    Map<String, Object> hashMapObject = new HashMap<>();
    hashMapObject.put("orderId", orderId);
    hashMapObject.put("fk", fk);
    hashMapObject.putAll(data);
    invokeStaticMethod("prototype.__link__products", hashMapObject, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // Completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            ProductOrder productOrder = null;
            boolean parsed = false;
            try {
                if (response != null) {
                    ProductOrderRepository productOrderRepo = getRestAdapter().createRepository(ProductOrderRepository.class);
                    if (context != null) {
                        // Attach local-storage support via reflection (method is code-generated).
                        try {
                            Method method = productOrderRepo.getClass().getMethod("addStorage", Context.class);
                            method.invoke(productOrderRepo, context);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                    Map<String, Object> result = Util.fromJson(response);
                    productOrder = productOrderRepo.createObject(result);
                    // Persist locally when requested; failures here are non-fatal.
                    if (isSTORE_LOCALLY()) {
                        try {
                            Method method = productOrder.getClass().getMethod("save__db");
                            method.invoke(productOrder);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                }
                parsed = true;
            } catch (Exception e) {
                // FIX: parse failures were previously only logged, leaving the caller
                // with neither onSuccess nor onError; now the error is propagated.
                Log.e("Snaphy", e.toString());
                callback.onError(e);
            }
            if (parsed) {
                try {
                    callback.onSuccess(productOrder);
                } catch (Exception e) {
                    // Guard against exceptions thrown by the user-supplied callback.
                    Log.e("Snaphy", e.toString());
                }
            }
            // Completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method unlink__products definition
/**
 * Unlinks (but does not delete) the Product with the given foreign key from the Order.
 *
 * @param orderId  id of the owning Order instance
 * @param fk       foreign key of the Product to unlink
 * @param callback lifecycle callback: onBefore -> (onSuccess | onError) -> onFinally
 */
public void unlink__products( String orderId, String fk, final VoidCallback callback){
    // Fire the pre-request hook first.
    callback.onBefore();
    // Gather the remote-method arguments.
    Map<String, Object> params = new HashMap<>();
    params.put("orderId", orderId);
    params.put("fk", fk);
    invokeStaticMethod("prototype.__unlink__products", params, new Adapter.Callback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // The completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(String response) {
            callback.onSuccess();
            // The completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method exists__products definition
/**
 * Checks whether a Product relation with the given foreign key exists for the Order.
 * The raw JSON answer from the server is handed to the callback untouched.
 *
 * @param orderId  id of the owning Order instance
 * @param fk       foreign key of the Product to check
 * @param callback receives the raw JSON result
 */
public void exists__products( String orderId, String fk, final ObjectCallback<JSONObject> callback ){
    // Fire the pre-request hook first.
    callback.onBefore();
    // Gather the remote-method arguments.
    Map<String, Object> params = new HashMap<>();
    params.put("orderId", orderId);
    params.put("fk", fk);
    invokeStaticMethod("prototype.__exists__products", params, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // The completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            try {
                callback.onSuccess(response);
            } catch (Exception e) {
                // Guard against exceptions thrown by the user-supplied callback.
                Log.e("Snaphy", e.toString());
            }
            // The completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method get__products definition
/**
 * Fetches the Products related to an Order, optionally filtered, via the remote
 * "prototype.__get__products" method.
 *
 * Flow: callback.onBefore() -> remote call -> onSuccess(list)/onError -> onFinally().
 * A null server response is reported as onSuccess(null).
 *
 * @param orderId  id of the owning Order instance
 * @param filter   LoopBack-style filter map (where/limit/order/...)
 * @param callback receives the parsed Product list (persisted locally when STORE_LOCALLY)
 */
public void get__products( String orderId, Map<String, ? extends Object> filter, final DataListCallback<Product> callback){
    // Pre-request hook.
    callback.onBefore();
    // Remote-method arguments.
    Map<String, Object> hashMapObject = new HashMap<>();
    hashMapObject.put("orderId", orderId);
    hashMapObject.put("filter", filter);
    invokeStaticMethod("prototype.__get__products", hashMapObject, new Adapter.JsonArrayCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // Completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONArray response) {
            DataList<Product> productList = null;
            boolean parsed = false;
            try {
                if (response != null) {
                    // Convert the JSON array into a list of plain maps first.
                    DataList<Map<String, Object>> result = (DataList) Util.fromJson(response);
                    productList = new DataList<Product>();
                    ProductRepository productRepo = getRestAdapter().createRepository(ProductRepository.class);
                    if (context != null) {
                        // Attach local-storage support via reflection (method is code-generated).
                        try {
                            Method method = productRepo.getClass().getMethod("addStorage", Context.class);
                            method.invoke(productRepo, context);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                    for (Map<String, Object> obj : result) {
                        Product product = productRepo.createObject(obj);
                        // Persist locally when requested; failures here are non-fatal.
                        if (isSTORE_LOCALLY()) {
                            try {
                                Method method = product.getClass().getMethod("save__db");
                                method.invoke(product);
                            } catch (Exception e) {
                                Log.e("Database Error", e.toString());
                            }
                        }
                        productList.add(product);
                    }
                }
                parsed = true;
            } catch (Exception e) {
                // FIX: parse failures were previously only logged, leaving the caller
                // with neither onSuccess nor onError; now the error is propagated.
                Log.e("Snaphy", e.toString());
                callback.onError(e);
            }
            if (parsed) {
                try {
                    callback.onSuccess(productList);
                } catch (Exception e) {
                    // Guard against exceptions thrown by the user-supplied callback.
                    Log.e("Snaphy", e.toString());
                }
            }
            // Completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method create__products definition
/**
 * Creates a new Product related to an Order via the remote
 * "prototype.__create__products" method.
 *
 * Flow: callback.onBefore() -> remote call -> onSuccess(product)/onError -> onFinally().
 * A null server response is reported as onSuccess(null).
 *
 * @param orderId  id of the owning Order instance
 * @param data     fields of the Product to create (merged into the request arguments)
 * @param callback receives the created Product (persisted locally when STORE_LOCALLY)
 */
public void create__products( String orderId, Map<String, ? extends Object> data, final ObjectCallback<Product> callback){
    // Pre-request hook.
    callback.onBefore();
    // Remote-method arguments; creation fields are flattened into the same map.
    Map<String, Object> hashMapObject = new HashMap<>();
    hashMapObject.put("orderId", orderId);
    hashMapObject.putAll(data);
    invokeStaticMethod("prototype.__create__products", hashMapObject, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // Completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            Product product = null;
            boolean parsed = false;
            try {
                if (response != null) {
                    ProductRepository productRepo = getRestAdapter().createRepository(ProductRepository.class);
                    if (context != null) {
                        // Attach local-storage support via reflection (method is code-generated).
                        try {
                            Method method = productRepo.getClass().getMethod("addStorage", Context.class);
                            method.invoke(productRepo, context);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                    Map<String, Object> result = Util.fromJson(response);
                    product = productRepo.createObject(result);
                    // Persist locally when requested; failures here are non-fatal.
                    if (isSTORE_LOCALLY()) {
                        try {
                            Method method = product.getClass().getMethod("save__db");
                            method.invoke(product);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                }
                parsed = true;
            } catch (Exception e) {
                // FIX: parse failures were previously only logged, leaving the caller
                // with neither onSuccess nor onError; now the error is propagated.
                Log.e("Snaphy", e.toString());
                callback.onError(e);
            }
            if (parsed) {
                try {
                    callback.onSuccess(product);
                } catch (Exception e) {
                    // Guard against exceptions thrown by the user-supplied callback.
                    Log.e("Snaphy", e.toString());
                }
            }
            // Completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method delete__products definition
/**
 * Deletes every Product related to the given Order on the server.
 *
 * @param orderId  id of the owning Order instance
 * @param callback lifecycle callback: onBefore -> (onSuccess | onError) -> onFinally
 */
public void delete__products( String orderId, final VoidCallback callback){
    // Fire the pre-request hook first.
    callback.onBefore();
    // Gather the remote-method arguments.
    Map<String, Object> params = new HashMap<>();
    params.put("orderId", orderId);
    invokeStaticMethod("prototype.__delete__products", params, new Adapter.Callback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // The completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(String response) {
            callback.onSuccess();
            // The completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method count__products definition
/**
 * Counts the Products related to an Order that match the given where-clause.
 * The raw JSON answer from the server is handed to the callback untouched.
 *
 * @param orderId  id of the owning Order instance
 * @param where    LoopBack-style where clause
 * @param callback receives the raw JSON result (e.g. {"count": n})
 */
public void count__products( String orderId, Map<String, ? extends Object> where, final ObjectCallback<JSONObject> callback ){
    // Fire the pre-request hook first.
    callback.onBefore();
    // Gather the remote-method arguments.
    Map<String, Object> params = new HashMap<>();
    params.put("orderId", orderId);
    params.put("where", where);
    invokeStaticMethod("prototype.__count__products", params, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // The completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            try {
                callback.onSuccess(response);
            } catch (Exception e) {
                // Guard against exceptions thrown by the user-supplied callback.
                Log.e("Snaphy", e.toString());
            }
            // The completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method create definition
/**
 * Creates a new Order on the server via the remote "create" method.
 *
 * Flow: callback.onBefore() -> remote call -> onSuccess(order)/onError -> onFinally().
 * A null server response is reported as onSuccess(null).
 *
 * @param data     fields of the Order to create
 * @param callback receives the created Order (persisted locally when STORE_LOCALLY)
 */
public void create( Map<String, ? extends Object> data, final ObjectCallback<Order> callback){
    // Pre-request hook.
    callback.onBefore();
    // Remote-method arguments: the creation fields themselves.
    Map<String, Object> hashMapObject = new HashMap<>();
    hashMapObject.putAll(data);
    invokeStaticMethod("create", hashMapObject, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // Completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            Order order = null;
            boolean parsed = false;
            try {
                if (response != null) {
                    OrderRepository orderRepo = getRestAdapter().createRepository(OrderRepository.class);
                    if (context != null) {
                        // Attach local-storage support via reflection (method is code-generated).
                        try {
                            Method method = orderRepo.getClass().getMethod("addStorage", Context.class);
                            method.invoke(orderRepo, context);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                    Map<String, Object> result = Util.fromJson(response);
                    order = orderRepo.createObject(result);
                    // Persist locally when requested; failures here are non-fatal.
                    if (isSTORE_LOCALLY()) {
                        try {
                            Method method = order.getClass().getMethod("save__db");
                            method.invoke(order);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                }
                parsed = true;
            } catch (Exception e) {
                // FIX: parse failures were previously only logged, leaving the caller
                // with neither onSuccess nor onError; now the error is propagated.
                Log.e("Snaphy", e.toString());
                callback.onError(e);
            }
            if (parsed) {
                try {
                    callback.onSuccess(order);
                } catch (Exception e) {
                    // Guard against exceptions thrown by the user-supplied callback.
                    Log.e("Snaphy", e.toString());
                }
            }
            // Completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method upsert definition
/**
 * Creates or updates an Order on the server via the remote "upsert" method.
 *
 * Flow: callback.onBefore() -> remote call -> onSuccess(order)/onError -> onFinally().
 * A null server response is reported as onSuccess(null).
 *
 * @param data     fields of the Order to upsert (id present -> update, absent -> create)
 * @param callback receives the resulting Order (persisted locally when STORE_LOCALLY)
 */
public void upsert( Map<String, ? extends Object> data, final ObjectCallback<Order> callback){
    // Pre-request hook.
    callback.onBefore();
    // Remote-method arguments: the upsert fields themselves.
    Map<String, Object> hashMapObject = new HashMap<>();
    hashMapObject.putAll(data);
    invokeStaticMethod("upsert", hashMapObject, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // Completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            Order order = null;
            boolean parsed = false;
            try {
                if (response != null) {
                    OrderRepository orderRepo = getRestAdapter().createRepository(OrderRepository.class);
                    if (context != null) {
                        // Attach local-storage support via reflection (method is code-generated).
                        try {
                            Method method = orderRepo.getClass().getMethod("addStorage", Context.class);
                            method.invoke(orderRepo, context);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                    Map<String, Object> result = Util.fromJson(response);
                    order = orderRepo.createObject(result);
                    // Persist locally when requested; failures here are non-fatal.
                    if (isSTORE_LOCALLY()) {
                        try {
                            Method method = order.getClass().getMethod("save__db");
                            method.invoke(order);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                }
                parsed = true;
            } catch (Exception e) {
                // FIX: parse failures were previously only logged, leaving the caller
                // with neither onSuccess nor onError; now the error is propagated.
                Log.e("Snaphy", e.toString());
                callback.onError(e);
            }
            if (parsed) {
                try {
                    callback.onSuccess(order);
                } catch (Exception e) {
                    // Guard against exceptions thrown by the user-supplied callback.
                    Log.e("Snaphy", e.toString());
                }
            }
            // Completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method exists definition
/**
 * Checks whether an Order with the given id exists on the server.
 * The raw JSON answer from the server is handed to the callback untouched.
 *
 * @param id       id of the Order to check
 * @param callback receives the raw JSON result
 */
public void exists( String id, final ObjectCallback<JSONObject> callback ){
    // Fire the pre-request hook first.
    callback.onBefore();
    // Gather the remote-method arguments.
    Map<String, Object> params = new HashMap<>();
    params.put("id", id);
    invokeStaticMethod("exists", params, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // The completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            try {
                callback.onSuccess(response);
            } catch (Exception e) {
                // Guard against exceptions thrown by the user-supplied callback.
                Log.e("Snaphy", e.toString());
            }
            // The completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method findById definition
/**
 * Fetches a single Order by id via the remote "findById" method.
 *
 * Flow: callback.onBefore() -> remote call -> onSuccess(order)/onError -> onFinally().
 * A null server response is reported as onSuccess(null).
 *
 * @param id       id of the Order to fetch
 * @param filter   LoopBack-style filter map (fields/include/...)
 * @param callback receives the parsed Order (persisted locally when STORE_LOCALLY)
 */
public void findById( String id, Map<String, ? extends Object> filter, final ObjectCallback<Order> callback){
    // Pre-request hook.
    callback.onBefore();
    // Remote-method arguments.
    Map<String, Object> hashMapObject = new HashMap<>();
    hashMapObject.put("id", id);
    hashMapObject.put("filter", filter);
    invokeStaticMethod("findById", hashMapObject, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // Completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            Order order = null;
            boolean parsed = false;
            try {
                if (response != null) {
                    OrderRepository orderRepo = getRestAdapter().createRepository(OrderRepository.class);
                    if (context != null) {
                        // Attach local-storage support via reflection (method is code-generated).
                        try {
                            Method method = orderRepo.getClass().getMethod("addStorage", Context.class);
                            method.invoke(orderRepo, context);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                    Map<String, Object> result = Util.fromJson(response);
                    order = orderRepo.createObject(result);
                    // Persist locally when requested; failures here are non-fatal.
                    if (isSTORE_LOCALLY()) {
                        try {
                            Method method = order.getClass().getMethod("save__db");
                            method.invoke(order);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                }
                parsed = true;
            } catch (Exception e) {
                // FIX: parse failures were previously only logged, leaving the caller
                // with neither onSuccess nor onError; now the error is propagated.
                Log.e("Snaphy", e.toString());
                callback.onError(e);
            }
            if (parsed) {
                try {
                    callback.onSuccess(order);
                } catch (Exception e) {
                    // Guard against exceptions thrown by the user-supplied callback.
                    Log.e("Snaphy", e.toString());
                }
            }
            // Completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method find definition
/**
 * Fetches all Orders matching the given filter via the remote "find" method.
 *
 * Flow: callback.onBefore() -> remote call -> onSuccess(list)/onError -> onFinally().
 * A null server response is reported as onSuccess(null).
 *
 * @param filter   LoopBack-style filter map (where/limit/order/...)
 * @param callback receives the parsed Order list (persisted locally when STORE_LOCALLY)
 */
public void find( Map<String, ? extends Object> filter, final DataListCallback<Order> callback){
    // Pre-request hook.
    callback.onBefore();
    // Remote-method arguments.
    Map<String, Object> hashMapObject = new HashMap<>();
    hashMapObject.put("filter", filter);
    invokeStaticMethod("find", hashMapObject, new Adapter.JsonArrayCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // Completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONArray response) {
            DataList<Order> orderList = null;
            boolean parsed = false;
            try {
                if (response != null) {
                    // Convert the JSON array into a list of plain maps first.
                    DataList<Map<String, Object>> result = (DataList) Util.fromJson(response);
                    orderList = new DataList<Order>();
                    OrderRepository orderRepo = getRestAdapter().createRepository(OrderRepository.class);
                    if (context != null) {
                        // Attach local-storage support via reflection (method is code-generated).
                        try {
                            Method method = orderRepo.getClass().getMethod("addStorage", Context.class);
                            method.invoke(orderRepo, context);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                    for (Map<String, Object> obj : result) {
                        Order order = orderRepo.createObject(obj);
                        // Persist locally when requested; failures here are non-fatal.
                        if (isSTORE_LOCALLY()) {
                            try {
                                Method method = order.getClass().getMethod("save__db");
                                method.invoke(order);
                            } catch (Exception e) {
                                Log.e("Database Error", e.toString());
                            }
                        }
                        orderList.add(order);
                    }
                }
                parsed = true;
            } catch (Exception e) {
                // FIX: parse failures were previously only logged, leaving the caller
                // with neither onSuccess nor onError; now the error is propagated.
                Log.e("Snaphy", e.toString());
                callback.onError(e);
            }
            if (parsed) {
                try {
                    callback.onSuccess(orderList);
                } catch (Exception e) {
                    // Guard against exceptions thrown by the user-supplied callback.
                    Log.e("Snaphy", e.toString());
                }
            }
            // Completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method findOne definition
/**
 * Fetches the first Order matching the given filter via the remote "findOne" method.
 *
 * Flow: callback.onBefore() -> remote call -> onSuccess(order)/onError -> onFinally().
 * A null server response is reported as onSuccess(null).
 *
 * @param filter   LoopBack-style filter map
 * @param callback receives the parsed Order (persisted locally when STORE_LOCALLY)
 */
public void findOne( Map<String, ? extends Object> filter, final ObjectCallback<Order> callback){
    // Pre-request hook.
    callback.onBefore();
    // Remote-method arguments.
    Map<String, Object> hashMapObject = new HashMap<>();
    hashMapObject.put("filter", filter);
    invokeStaticMethod("findOne", hashMapObject, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // Completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            Order order = null;
            boolean parsed = false;
            try {
                if (response != null) {
                    OrderRepository orderRepo = getRestAdapter().createRepository(OrderRepository.class);
                    if (context != null) {
                        // Attach local-storage support via reflection (method is code-generated).
                        try {
                            Method method = orderRepo.getClass().getMethod("addStorage", Context.class);
                            method.invoke(orderRepo, context);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                    Map<String, Object> result = Util.fromJson(response);
                    order = orderRepo.createObject(result);
                    // Persist locally when requested; failures here are non-fatal.
                    if (isSTORE_LOCALLY()) {
                        try {
                            Method method = order.getClass().getMethod("save__db");
                            method.invoke(order);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                }
                parsed = true;
            } catch (Exception e) {
                // FIX: parse failures were previously only logged, leaving the caller
                // with neither onSuccess nor onError; now the error is propagated.
                Log.e("Snaphy", e.toString());
                callback.onError(e);
            }
            if (parsed) {
                try {
                    callback.onSuccess(order);
                } catch (Exception e) {
                    // Guard against exceptions thrown by the user-supplied callback.
                    Log.e("Snaphy", e.toString());
                }
            }
            // Completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method updateAll definition
/**
 * Updates every Order matching the where-clause with the given data.
 * The raw JSON answer from the server is handed to the callback untouched.
 *
 * @param where    LoopBack-style where clause selecting the Orders to update
 * @param data     fields to update (merged into the request arguments)
 * @param callback receives the raw JSON result (e.g. {"count": n})
 */
public void updateAll( Map<String, ? extends Object> where, Map<String, ? extends Object> data, final ObjectCallback<JSONObject> callback ){
    // Fire the pre-request hook first.
    callback.onBefore();
    // Gather the remote-method arguments; update fields are flattened in.
    Map<String, Object> params = new HashMap<>();
    params.put("where", where);
    params.putAll(data);
    invokeStaticMethod("updateAll", params, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // The completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            try {
                callback.onSuccess(response);
            } catch (Exception e) {
                // Guard against exceptions thrown by the user-supplied callback.
                Log.e("Snaphy", e.toString());
            }
            // The completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method deleteById definition
/**
 * Deletes the Order with the given id on the server.
 * The raw JSON answer from the server is handed to the callback untouched.
 *
 * @param id       id of the Order to delete
 * @param callback receives the raw JSON result
 */
public void deleteById( String id, final ObjectCallback<JSONObject> callback ){
    // Fire the pre-request hook first.
    callback.onBefore();
    // Gather the remote-method arguments.
    Map<String, Object> params = new HashMap<>();
    params.put("id", id);
    invokeStaticMethod("deleteById", params, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // The completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            try {
                callback.onSuccess(response);
            } catch (Exception e) {
                // Guard against exceptions thrown by the user-supplied callback.
                Log.e("Snaphy", e.toString());
            }
            // The completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method count definition
/**
 * Counts the Orders matching the given where-clause.
 * The raw JSON answer from the server is handed to the callback untouched.
 *
 * @param where    LoopBack-style where clause
 * @param callback receives the raw JSON result (e.g. {"count": n})
 */
public void count( Map<String, ? extends Object> where, final ObjectCallback<JSONObject> callback ){
    // Fire the pre-request hook first.
    callback.onBefore();
    // Gather the remote-method arguments.
    Map<String, Object> params = new HashMap<>();
    params.put("where", where);
    invokeStaticMethod("count", params, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // The completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            try {
                callback.onSuccess(response);
            } catch (Exception e) {
                // Guard against exceptions thrown by the user-supplied callback.
                Log.e("Snaphy", e.toString());
            }
            // The completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method updateAttributes definition
/**
 * Updates attributes of an existing Order via the remote
 * "prototype.updateAttributes" method.
 *
 * Flow: callback.onBefore() -> remote call -> onSuccess(order)/onError -> onFinally().
 * A null server response is reported as onSuccess(null).
 *
 * @param orderId  id of the Order to update
 * @param data     attributes to change (merged into the request arguments)
 * @param callback receives the updated Order (persisted locally when STORE_LOCALLY)
 */
public void updateAttributes( String orderId, Map<String, ? extends Object> data, final ObjectCallback<Order> callback){
    // Pre-request hook.
    callback.onBefore();
    // Remote-method arguments; update fields are flattened into the same map.
    Map<String, Object> hashMapObject = new HashMap<>();
    hashMapObject.put("orderId", orderId);
    hashMapObject.putAll(data);
    invokeStaticMethod("prototype.updateAttributes", hashMapObject, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // Completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            Order order = null;
            boolean parsed = false;
            try {
                if (response != null) {
                    OrderRepository orderRepo = getRestAdapter().createRepository(OrderRepository.class);
                    if (context != null) {
                        // Attach local-storage support via reflection (method is code-generated).
                        try {
                            Method method = orderRepo.getClass().getMethod("addStorage", Context.class);
                            method.invoke(orderRepo, context);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                    Map<String, Object> result = Util.fromJson(response);
                    order = orderRepo.createObject(result);
                    // Persist locally when requested; failures here are non-fatal.
                    if (isSTORE_LOCALLY()) {
                        try {
                            Method method = order.getClass().getMethod("save__db");
                            method.invoke(order);
                        } catch (Exception e) {
                            Log.e("Database Error", e.toString());
                        }
                    }
                }
                parsed = true;
            } catch (Exception e) {
                // FIX: parse failures were previously only logged, leaving the caller
                // with neither onSuccess nor onError; now the error is propagated.
                Log.e("Snaphy", e.toString());
                callback.onError(e);
            }
            if (parsed) {
                try {
                    callback.onSuccess(order);
                } catch (Exception e) {
                    // Guard against exceptions thrown by the user-supplied callback.
                    Log.e("Snaphy", e.toString());
                }
            }
            // Completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method getSchema definition
/**
 * Retrieves the model schema from the server; the raw JSON answer is handed
 * to the callback untouched.
 *
 * @param callback receives the raw JSON schema
 */
public void getSchema( final ObjectCallback<JSONObject> callback ){
    // Fire the pre-request hook first.
    callback.onBefore();
    // This remote method takes no arguments; pass an empty map.
    Map<String, Object> params = new HashMap<>();
    invokeStaticMethod("getSchema", params, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // The completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            try {
                callback.onSuccess(response);
            } catch (Exception e) {
                // Guard against exceptions thrown by the user-supplied callback.
                Log.e("Snaphy", e.toString());
            }
            // The completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method getAbsoluteSchema definition
/**
 * Retrieves the absolute model schema from the server; the raw JSON answer is
 * handed to the callback untouched.
 *
 * @param callback receives the raw JSON schema
 */
public void getAbsoluteSchema( final ObjectCallback<JSONObject> callback ){
    // Fire the pre-request hook first.
    callback.onBefore();
    // This remote method takes no arguments; pass an empty map.
    Map<String, Object> params = new HashMap<>();
    invokeStaticMethod("getAbsoluteSchema", params, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // The completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            try {
                callback.onSuccess(response);
            } catch (Exception e) {
                // Guard against exceptions thrown by the user-supplied callback.
                Log.e("Snaphy", e.toString());
            }
            // The completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method getDetailSchema definition
/**
 * Retrieves the detail model schema from the server; the raw JSON answer is
 * handed to the callback untouched.
 *
 * @param callback receives the raw JSON schema
 */
public void getDetailSchema( final ObjectCallback<JSONObject> callback ){
    // Fire the pre-request hook first.
    callback.onBefore();
    // This remote method takes no arguments; pass an empty map.
    Map<String, Object> params = new HashMap<>();
    invokeStaticMethod("getDetailSchema", params, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // The completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            try {
                callback.onSuccess(response);
            } catch (Exception e) {
                // Guard against exceptions thrown by the user-supplied callback.
                Log.e("Snaphy", e.toString());
            }
            // The completion hook always runs.
            callback.onFinally();
        }
    });
}
//Method getModelRelationSchema definition
/**
 * Retrieves the model-relation schema from the server; the raw JSON answer is
 * handed to the callback untouched.
 *
 * @param callback receives the raw JSON schema
 */
public void getModelRelationSchema( final ObjectCallback<JSONObject> callback ){
    // Fire the pre-request hook first.
    callback.onBefore();
    // This remote method takes no arguments; pass an empty map.
    Map<String, Object> params = new HashMap<>();
    invokeStaticMethod("getModelRelationSchema", params, new Adapter.JsonObjectCallback() {
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            // The completion hook always runs.
            callback.onFinally();
        }
        @Override
        public void onSuccess(JSONObject response) {
            try {
                callback.onSuccess(response);
            } catch (Exception e) {
                // Guard against exceptions thrown by the user-supplied callback.
                Log.e("Snaphy", e.toString());
            }
            // The completion hook always runs.
            callback.onFinally();
        }
    });
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.lightsail.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/lightsail-2016-11-28/UpdateRelationalDatabaseParameters"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateRelationalDatabaseParametersRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The name of your database for which to update parameters.
* </p>
*/
private String relationalDatabaseName;
/**
* <p>
* The database parameters to update.
* </p>
*/
private java.util.List<RelationalDatabaseParameter> parameters;
/**
* <p>
* The name of your database for which to update parameters.
* </p>
*
* @param relationalDatabaseName
* The name of your database for which to update parameters.
*/
public void setRelationalDatabaseName(String relationalDatabaseName) {
this.relationalDatabaseName = relationalDatabaseName;
}
/**
* <p>
* The name of your database for which to update parameters.
* </p>
*
* @return The name of your database for which to update parameters.
*/
public String getRelationalDatabaseName() {
return this.relationalDatabaseName;
}
/**
* <p>
* The name of your database for which to update parameters.
* </p>
*
* @param relationalDatabaseName
* The name of your database for which to update parameters.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public UpdateRelationalDatabaseParametersRequest withRelationalDatabaseName(String relationalDatabaseName) {
setRelationalDatabaseName(relationalDatabaseName);
return this;
}
/**
* <p>
* The database parameters to update.
* </p>
*
* @return The database parameters to update.
*/
public java.util.List<RelationalDatabaseParameter> getParameters() {
return parameters;
}
/**
* <p>
* The database parameters to update.
* </p>
*
* @param parameters
* The database parameters to update.
*/
public void setParameters(java.util.Collection<RelationalDatabaseParameter> parameters) {
if (parameters == null) {
this.parameters = null;
return;
}
this.parameters = new java.util.ArrayList<RelationalDatabaseParameter>(parameters);
}
/**
* <p>
* The database parameters to update.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setParameters(java.util.Collection)} or {@link #withParameters(java.util.Collection)} if you want to
* override the existing values.
* </p>
*
* @param parameters
* The database parameters to update.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public UpdateRelationalDatabaseParametersRequest withParameters(RelationalDatabaseParameter... parameters) {
if (this.parameters == null) {
setParameters(new java.util.ArrayList<RelationalDatabaseParameter>(parameters.length));
}
for (RelationalDatabaseParameter ele : parameters) {
this.parameters.add(ele);
}
return this;
}
/**
* <p>
* The database parameters to update.
* </p>
*
* @param parameters
* The database parameters to update.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public UpdateRelationalDatabaseParametersRequest withParameters(java.util.Collection<RelationalDatabaseParameter> parameters) {
setParameters(parameters);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getRelationalDatabaseName() != null)
sb.append("RelationalDatabaseName: ").append(getRelationalDatabaseName()).append(",");
if (getParameters() != null)
sb.append("Parameters: ").append(getParameters());
sb.append("}");
return sb.toString();
}
/**
 * Two requests are equal when both members (database name and parameter list)
 * are pairwise equal, with nulls only matching nulls.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // instanceof is false for null, so no separate null check is needed.
    if (!(obj instanceof UpdateRelationalDatabaseParametersRequest)) {
        return false;
    }
    UpdateRelationalDatabaseParametersRequest other = (UpdateRelationalDatabaseParametersRequest) obj;
    // Objects.equals reproduces the original null-xor / equals comparison per field.
    return java.util.Objects.equals(other.getRelationalDatabaseName(), this.getRelationalDatabaseName())
            && java.util.Objects.equals(other.getParameters(), this.getParameters());
}
/**
 * Hash code over the same members used by {@link #equals(Object)}.
 * {@code Objects.hash} performs the identical 31-based accumulation
 * (null contributes 0) as the hand-rolled prime loop it replaces.
 */
@Override
public int hashCode() {
    return java.util.Objects.hash(getRelationalDatabaseName(), getParameters());
}
@Override
public UpdateRelationalDatabaseParametersRequest clone() {
    // Delegates to super.clone() — presumably the SDK base request performs a
    // shallow copy of the fields; TODO confirm against the base class.
    return (UpdateRelationalDatabaseParametersRequest) super.clone();
}
}
| |
/**
* This code is released under the
* Apache License Version 2.0 http://www.apache.org/licenses/.
*
* (c) Daniel Lemire, http://lemire.me/en/
*/
package me.lemire.integercompression;
/**
* This is an implementation of the popular Simple9 scheme. It is limited to
* 28-bit integers (between 0 and 2^28-1).
*
* Note that this does not use differential coding: if you are working on sorted
* lists, you must compute the deltas separately.
*
* @author Daniel Lemire
*
*/
public final class Simple9 implements IntegerCODEC, SkippableIntegerCODEC {
    /**
     * Compresses {@code inlength} integers from {@code in} into {@code out} without
     * writing a length header. Each 32-bit output word carries a 4-bit selector in
     * its high nibble plus up to 28 data bits packed at one of nine fixed widths
     * (see {@link #bitLength} / {@link #codeNum}).
     *
     * @param in       input values (each must fit in 28 bits)
     * @param inpos    read cursor; advanced past the consumed input
     * @param inlength number of integers to compress
     * @param out      output buffer for the packed words
     * @param outpos   write cursor; advanced past the produced words
     */
    @Override
    public void headlessCompress(int[] in, IntWrapper inpos, int inlength, int out[], IntWrapper outpos) {
        int tmpoutpos = outpos.get();
        int currentPos = inpos.get();
        final int finalin = currentPos + inlength;
        // Fast path: more than 28 inputs remain, so even the densest selector
        // (28 x 1-bit) can read a full group without bounds clamping.
        outer: while (currentPos < finalin - 28) {
            // Try selectors from densest (28 x 1-bit) to sparsest (2 x 14-bit);
            // the first one whose values all fit wins.
            mainloop: for (int selector = 0; selector < 8; selector++) {
                int res = 0;
                int compressedNum = codeNum[selector];
                int b = bitLength[selector];
                int max = 1 << b;
                int i = 0;
                for (; i < compressedNum; i++) {
                    // A value too wide for this selector: try the next (wider) one.
                    if (max <= in[currentPos + i])
                        continue mainloop;
                    res = (res << b) + in[currentPos + i];
                }
                // Stamp the selector into the top 4 bits of the packed word.
                res |= selector << 28;
                out[tmpoutpos++] = res;
                currentPos += compressedNum;
                continue outer;
            }
            // No multi-value selector fit: store a single 28-bit value (selector 8).
            final int selector = 8;
            if (in[currentPos] >= 1 << bitLength[selector])
                throw new RuntimeException("Too big a number");
            out[tmpoutpos++] = in[currentPos++] | (selector << 28);
        }
        // Tail: 28 or fewer inputs remain, so group sizes must be clamped to the
        // remaining count before packing.
        outer: while (currentPos < finalin) {
            mainloop: for (int selector = 0; selector < 8; selector++) {
                int res = 0;
                int compressedNum = codeNum[selector];
                // Clamp the group so the last read stays inside the input.
                if (finalin <= currentPos + compressedNum - 1)
                    compressedNum = finalin - currentPos;
                int b = bitLength[selector];
                int max = 1 << b;
                int i = 0;
                for (; i < compressedNum; i++) {
                    if (max <= in[currentPos + i])
                        continue mainloop;
                    res = (res << b) + in[currentPos + i];
                }
                // Left-align a short final group so the decoder's fixed shifts
                // still find each value in its expected slot.
                if (compressedNum != codeNum[selector])
                    res <<= (codeNum[selector] - compressedNum) * b;
                res |= selector << 28;
                out[tmpoutpos++] = res;
                currentPos += compressedNum;
                continue outer;
            }
            final int selector = 8;
            if (in[currentPos] >= 1 << bitLength[selector])
                throw new RuntimeException("Too big a number");
            out[tmpoutpos++] = in[currentPos++] | (selector << 28);
        }
        inpos.set(currentPos);
        outpos.set(tmpoutpos);
    }
    /**
     * Decompresses exactly {@code outlength} integers into {@code out}, reading
     * packed words from {@code in}. Each word's top 4 bits select the layout; the
     * {@code (val << s) >>> (32 - b)} pattern isolates one b-bit field at a time.
     *
     * @param in        packed words produced by {@link #headlessCompress}
     * @param inpos     read cursor; advanced past the consumed words
     * @param inlength  number of available input words (not bounds-checked here)
     * @param out       destination for the decoded integers
     * @param outpos    write cursor; advanced by {@code outlength}
     * @param outlength number of integers to decode
     */
    @Override
    public void headlessUncompress(int[] in, IntWrapper inpos, int inlength, int[] out, IntWrapper outpos,
            int outlength) {
        int currentPos = outpos.get();
        int tmpinpos = inpos.get();
        final int finalout = currentPos + outlength;
        // Fast path: every word can be fully expanded without overrunning `out`.
        while (currentPos < finalout - 28) {
            int val = in[tmpinpos++];
            int header = val >>> 28;
            switch (header) {
            case 0: { // number : 28, bitwidth : 1
                out[currentPos++] = (val << 4) >>> 31;
                out[currentPos++] = (val << 5) >>> 31;
                out[currentPos++] = (val << 6) >>> 31;
                out[currentPos++] = (val << 7) >>> 31;
                out[currentPos++] = (val << 8) >>> 31;
                out[currentPos++] = (val << 9) >>> 31;
                out[currentPos++] = (val << 10) >>> 31;
                out[currentPos++] = (val << 11) >>> 31;
                out[currentPos++] = (val << 12) >>> 31;
                out[currentPos++] = (val << 13) >>> 31; // 10
                out[currentPos++] = (val << 14) >>> 31;
                out[currentPos++] = (val << 15) >>> 31;
                out[currentPos++] = (val << 16) >>> 31;
                out[currentPos++] = (val << 17) >>> 31;
                out[currentPos++] = (val << 18) >>> 31;
                out[currentPos++] = (val << 19) >>> 31;
                out[currentPos++] = (val << 20) >>> 31;
                out[currentPos++] = (val << 21) >>> 31;
                out[currentPos++] = (val << 22) >>> 31;
                out[currentPos++] = (val << 23) >>> 31; // 20
                out[currentPos++] = (val << 24) >>> 31;
                out[currentPos++] = (val << 25) >>> 31;
                out[currentPos++] = (val << 26) >>> 31;
                out[currentPos++] = (val << 27) >>> 31;
                out[currentPos++] = (val << 28) >>> 31;
                out[currentPos++] = (val << 29) >>> 31;
                out[currentPos++] = (val << 30) >>> 31;
                out[currentPos++] = (val << 31) >>> 31;
                break;
            }
            case 1: { // number : 14, bitwidth : 2
                out[currentPos++] = (val << 4) >>> 30;
                out[currentPos++] = (val << 6) >>> 30;
                out[currentPos++] = (val << 8) >>> 30;
                out[currentPos++] = (val << 10) >>> 30;
                out[currentPos++] = (val << 12) >>> 30;
                out[currentPos++] = (val << 14) >>> 30;
                out[currentPos++] = (val << 16) >>> 30;
                out[currentPos++] = (val << 18) >>> 30;
                out[currentPos++] = (val << 20) >>> 30;
                out[currentPos++] = (val << 22) >>> 30; // 10
                out[currentPos++] = (val << 24) >>> 30;
                out[currentPos++] = (val << 26) >>> 30;
                out[currentPos++] = (val << 28) >>> 30;
                out[currentPos++] = (val << 30) >>> 30;
                break;
            }
            case 2: { // number : 9, bitwidth : 3
                out[currentPos++] = (val << 5) >>> 29;
                out[currentPos++] = (val << 8) >>> 29;
                out[currentPos++] = (val << 11) >>> 29;
                out[currentPos++] = (val << 14) >>> 29;
                out[currentPos++] = (val << 17) >>> 29;
                out[currentPos++] = (val << 20) >>> 29;
                out[currentPos++] = (val << 23) >>> 29;
                out[currentPos++] = (val << 26) >>> 29;
                out[currentPos++] = (val << 29) >>> 29;
                break;
            }
            case 3: { // number : 7, bitwidth : 4
                out[currentPos++] = (val << 4) >>> 28;
                out[currentPos++] = (val << 8) >>> 28;
                out[currentPos++] = (val << 12) >>> 28;
                out[currentPos++] = (val << 16) >>> 28;
                out[currentPos++] = (val << 20) >>> 28;
                out[currentPos++] = (val << 24) >>> 28;
                out[currentPos++] = (val << 28) >>> 28;
                break;
            }
            case 4: { // number : 5, bitwidth : 5
                out[currentPos++] = (val << 7) >>> 27;
                out[currentPos++] = (val << 12) >>> 27;
                out[currentPos++] = (val << 17) >>> 27;
                out[currentPos++] = (val << 22) >>> 27;
                out[currentPos++] = (val << 27) >>> 27;
                break;
            }
            case 5: { // number : 4, bitwidth : 7
                out[currentPos++] = (val << 4) >>> 25;
                out[currentPos++] = (val << 11) >>> 25;
                out[currentPos++] = (val << 18) >>> 25;
                out[currentPos++] = (val << 25) >>> 25;
                break;
            }
            case 6: { // number : 3, bitwidth : 9
                out[currentPos++] = (val << 5) >>> 23;
                out[currentPos++] = (val << 14) >>> 23;
                out[currentPos++] = (val << 23) >>> 23;
                break;
            }
            case 7: { // number : 2, bitwidth : 14
                out[currentPos++] = (val << 4) >>> 18;
                out[currentPos++] = (val << 18) >>> 18;
                break;
            }
            case 8: { // number : 1, bitwidth : 28
                out[currentPos++] = (val << 4) >>> 4;
                break;
            }
            default: {
                throw new RuntimeException("shouldn't happen: limited to 28-bit integers");
            }
            }
        }
        // Tail: the last word(s) may encode more slots than remain, so each case
        // clamps the loop count to `finalout - currentPos`.
        while (currentPos < finalout) {
            int val = in[tmpinpos++];
            int header = val >>> 28;
            switch (header) {
            case 0: { // number : 28, bitwidth : 1
                final int howmany = finalout - currentPos;
                for (int k = 0; k < howmany; ++k) {
                    out[currentPos++] = (val << (k + 4)) >>> 31;
                }
                break;
            }
            case 1: { // number : 14, bitwidth : 2
                final int howmany = finalout - currentPos < 14 ? finalout - currentPos : 14;
                for (int k = 0; k < howmany; ++k) {
                    out[currentPos++] = (val << (2 * k + 4)) >>> 30;
                }
                break;
            }
            case 2: { // number : 9, bitwidth : 3
                final int howmany = finalout - currentPos < 9 ? finalout - currentPos : 9;
                for (int k = 0; k < howmany; ++k) {
                    out[currentPos++] = (val << (3 * k + 5)) >>> 29;
                }
                break;
            }
            case 3: { // number : 7, bitwidth : 4
                final int howmany = finalout - currentPos < 7 ? finalout - currentPos : 7;
                for (int k = 0; k < howmany; ++k) {
                    out[currentPos++] = (val << (4 * k + 4)) >>> 28;
                }
                break;
            }
            case 4: { // number : 5, bitwidth : 5
                final int howmany = finalout - currentPos < 5 ? finalout - currentPos : 5;
                for (int k = 0; k < howmany; ++k) {
                    out[currentPos++] = (val << (5 * k + 7)) >>> 27;
                }
                break;
            }
            case 5: { // number : 4, bitwidth : 7
                final int howmany = finalout - currentPos < 4 ? finalout - currentPos : 4;
                for (int k = 0; k < howmany; ++k) {
                    out[currentPos++] = (val << (7 * k + 4)) >>> 25;
                }
                break;
            }
            case 6: { // number : 3, bitwidth : 9
                final int howmany = finalout - currentPos < 3 ? finalout - currentPos : 3;
                for (int k = 0; k < howmany; ++k) {
                    out[currentPos++] = (val << (9 * k + 5)) >>> 23;
                }
                break;
            }
            case 7: { // number : 2, bitwidth : 14
                final int howmany = finalout - currentPos < 2 ? finalout - currentPos : 2;
                for (int k = 0; k < howmany; ++k) {
                    out[currentPos++] = (val << (14 * k + 4)) >>> 18;
                }
                break;
            }
            case 8: { // number : 1, bitwidth : 28
                out[currentPos++] = (val << 4) >>> 4;
                break;
            }
            default: {
                throw new RuntimeException("shouldn't happen");
            }
            }
        }
        outpos.set(currentPos);
        inpos.set(tmpinpos);
    }
    /**
     * Full-format compression: writes the uncompressed length as a one-word
     * header, then delegates to {@link #headlessCompress}. A zero-length input
     * produces no output at all.
     */
    @Override
    public void compress(int[] in, IntWrapper inpos, int inlength, int[] out, IntWrapper outpos) {
        if (inlength == 0)
            return;
        out[outpos.get()] = inlength;
        outpos.increment();
        headlessCompress(in, inpos, inlength, out, outpos);
    }
    /**
     * Full-format decompression: reads the length header written by
     * {@link #compress} and delegates to {@link #headlessUncompress}.
     */
    @Override
    public void uncompress(int[] in, IntWrapper inpos, int inlength, int[] out, IntWrapper outpos) {
        if (inlength == 0)
            return;
        final int outlength = in[inpos.get()];
        inpos.increment();
        headlessUncompress(in, inpos, inlength, out, outpos, outlength);
    }
    // Bits per value for selectors 0..8; selector i packs codeNum[i] values of
    // bitLength[i] bits each into the low 28 bits of one word.
    private final static int bitLength[] = { 1, 2, 3, 4, 5, 7, 9, 14, 28 };
    // Values per word for selectors 0..8 (densest first).
    private final static int codeNum[] = { 28, 14, 9, 7, 5, 4, 3, 2, 1 };
    @Override
    public String toString() {
        return this.getClass().getSimpleName();
    }
}
| |
package org.nd4j.linalg.cpu.blas;
import com.github.fommil.netlib.BLAS;
import org.jblas.JavaBlas;
import org.jblas.NativeBlas;
import org.nd4j.linalg.api.blas.BlasBufferUtil;
import org.nd4j.linalg.api.blas.impl.BaseLevel1;
import org.nd4j.linalg.api.complex.IComplexDouble;
import org.nd4j.linalg.api.complex.IComplexFloat;
import org.nd4j.linalg.api.complex.IComplexNDArray;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.cpu.util.CpuComplex;
import org.nd4j.linalg.util.Shape;
import org.netlib.util.doubleW;
import org.netlib.util.floatW;
import static org.nd4j.linalg.api.blas.BlasBufferUtil.getBlasOffset;
import static org.nd4j.linalg.api.blas.BlasBufferUtil.setData;
/**
* @author Adam Gibson
*/
/**
 * CPU implementation of the BLAS level-1 routines, delegating real-valued
 * operations to netlib-java ({@link BLAS}) and complex-valued operations to
 * jblas ({@link NativeBlas}).
 *
 * <p>Pattern used throughout: the INDArray contents are extracted into a
 * primitive array (getFloatData/getDoubleData — presumably a copy of the
 * backing buffer; TODO confirm), handed to the BLAS routine, and written back
 * with {@code setData} for every array the routine mutates.</p>
 *
 * <p>NOTE(review): several complex methods first call
 * {@code Shape.toOffsetZero(X)}, which looks like it may return a copy for
 * offset views; writing results back into that copy would not update the
 * caller's array — confirm toOffsetZero semantics.</p>
 *
 * @author Adam Gibson
 */
public class CpuLevel1 extends BaseLevel1 {
    @Override
    protected float sdsdot(int N, float alpha, INDArray X, int incX, INDArray Y, int incY) {
        return BLAS.getInstance().sdsdot(N,alpha,getFloatData(X),getBlasOffset(X),incX,getFloatData(Y),getBlasOffset(Y),incY);
    }
    @Override
    protected double dsdot(int N, INDArray X, int incX, INDArray Y, int incY) {
        throw new UnsupportedOperationException();
    }
    @Override
    protected float sdot(int N, INDArray X, int incX, INDArray Y, int incY) {
        return BLAS.getInstance().sdot(N,getFloatData(X),getBlasOffset(X),incX,getFloatData(Y),getBlasOffset(Y),incY);
    }
    @Override
    protected double ddot(int N, INDArray X, int incX, INDArray Y, int incY) {
        return BLAS.getInstance().ddot(N, getDoubleData(X), getBlasOffset(X), incX, getDoubleData(Y), getBlasOffset(Y), incY);
    }
    @Override
    protected void cdotu_sub(int N, IComplexNDArray X, int incX, IComplexNDArray Y, int incY, IComplexNDArray dotu) {
        throw new UnsupportedOperationException();
    }
    @Override
    protected void cdotc_sub(int N, IComplexNDArray X, int incX, IComplexNDArray Y, int incY, IComplexNDArray dotc) {
        throw new UnsupportedOperationException();
    }
    @Override
    protected void zdotu_sub(int N, IComplexNDArray X, int incX, IComplexNDArray Y, int incY, IComplexNDArray dotu) {
        throw new UnsupportedOperationException();
    }
    @Override
    protected void zdotc_sub(int N, IComplexNDArray X, int incX, IComplexNDArray Y, int incY, IComplexNDArray dotc) {
        throw new UnsupportedOperationException();
    }
    @Override
    protected float snrm2(int N, INDArray X, int incX) {
        return BLAS.getInstance().snrm2(N,getFloatData(X),getBlasOffset(X),incX);
    }
    @Override
    protected float sasum(int N, INDArray X, int incX) {
        return BLAS.getInstance().sasum(N, getFloatData(X), getBlasOffset(X), incX);
    }
    @Override
    protected double dnrm2(int N, INDArray X, int incX) {
        return BLAS.getInstance().dnrm2(N, getDoubleData(X), getBlasOffset(X), incX);
    }
    @Override
    protected double dasum(int N, INDArray X, int incX) {
        return BLAS.getInstance().dasum(N, getDoubleData(X), getBlasOffset(X), incX);
    }
    @Override
    protected float scnrm2(int N, IComplexNDArray X, int incX) {
        throw new UnsupportedOperationException();
    }
    @Override
    protected float scasum(int N, IComplexNDArray X, int incX) {
        throw new UnsupportedOperationException();
    }
    @Override
    protected double dznrm2(int N, IComplexNDArray X, int incX) {
        X = (IComplexNDArray) Shape.toOffsetZero(X);
        return NativeBlas.dznrm2(N,getDoubleData(X),getBlasOffset(X),incX);
    }
    @Override
    protected double dzasum(int N, IComplexNDArray X, int incX) {
        X = (IComplexNDArray) Shape.toOffsetZero(X);
        return NativeBlas.dzasum(N,getDoubleData(X),getBlasOffset(X),incX);
    }
    @Override
    protected int isamax(int N, INDArray X, int incX) {
        return BLAS.getInstance().isamax(N, getFloatData(X), getBlasOffset(X), incX);
    }
    @Override
    protected int idamax(int N, INDArray X, int incX) {
        return BLAS.getInstance().idamax(N, getDoubleData(X), getBlasOffset(X), incX);
    }
    @Override
    protected int icamax(int N, IComplexNDArray X, int incX) {
        X = (IComplexNDArray) Shape.toOffsetZero(X);
        return NativeBlas.icamax(N,getFloatData(X),getBlasOffset(X),incX);
    }
    @Override
    protected int izamax(int N, IComplexNDArray X, int incX) {
        X = (IComplexNDArray) Shape.toOffsetZero(X);
        return NativeBlas.izamax(N,getDoubleData(X),getBlasOffset(X),incX);
    }
    @Override
    protected void sswap(int N, INDArray X, int incX, INDArray Y, int incY) {
        // Swap mutates both vectors, so both extracted arrays are written back.
        float[] yData = getFloatData(Y);
        float[] xData = getFloatData(X);
        BLAS.getInstance().sswap(N, xData, getBlasOffset(X), incX, yData, getBlasOffset(Y), incY);
        setData(xData,X);
        setData(yData,Y);
    }
    @Override
    protected void scopy(int N, INDArray X, int incX, INDArray Y, int incY) {
        float[] yData = getFloatData(Y);
        BLAS.getInstance().scopy(N, getFloatData(X), getBlasOffset(X), incX, yData, getBlasOffset(Y), incY);
        setData(yData,Y);
    }
    @Override
    protected void saxpy(int N, float alpha, INDArray X, int incX, INDArray Y, int incY) {
        float[] yData = getFloatData(Y);
        BLAS.getInstance().saxpy(N, alpha, getFloatData(X), getBlasOffset(X), incX, yData, getBlasOffset(Y), incY);
        setData(yData,Y);
    }
    @Override
    protected void dswap(int N, INDArray X, int incX, INDArray Y, int incY) {
        double[] yData = getDoubleData(Y);
        double[] xData = getDoubleData(X);
        BLAS.getInstance().dswap(N,xData,getBlasOffset(X),incX,yData,getBlasOffset(Y),incY);
        setData(xData, X);
        setData(yData,Y);
    }
    @Override
    protected void dcopy(int N, INDArray X, int incX, INDArray Y, int incY) {
        double[] yData = getDoubleData(Y);
        BLAS.getInstance().dcopy(N, getDoubleData(X), getBlasOffset(X), incX, yData, getBlasOffset(Y), incY);
        setData(yData,Y);
    }
    @Override
    protected void daxpy(int N, double alpha, INDArray X, int incX, INDArray Y, int incY) {
        double[] yData = getDoubleData(Y);
        BLAS.getInstance().daxpy(N, alpha, getDoubleData(X), getBlasOffset(X), incX, yData, getBlasOffset(Y), incY);
        setData(yData,Y);
    }
    @Override
    protected void cswap(int N, IComplexNDArray X, int incX, IComplexNDArray Y, int incY) {
        X = (IComplexNDArray) Shape.toOffsetZero(X);
        // FIX: mirror sswap/dswap — capture BOTH arrays and write BOTH back.
        // The previous code discarded X's swapped half and wrote Y's data into X
        // via setData(yData, X).
        float[] xData = getFloatData(X);
        float[] yData = getFloatData(Y);
        NativeBlas.cswap(N,xData,getBlasOffset(X),incX,yData,getBlasOffset(Y),incY);
        setData(xData,X);
        setData(yData,Y);
    }
    @Override
    protected void ccopy(int N, IComplexNDArray X, int incX, IComplexNDArray Y, int incY) {
        X = (IComplexNDArray) Shape.toOffsetZero(X);
        Y = (IComplexNDArray) Shape.toOffsetZero(Y);
        float[] yData = getFloatData(Y);
        NativeBlas.ccopy(N,getFloatData(X),getBlasOffset(X),incX,yData,getBlasOffset(Y),incY);
        setData(yData,Y);
    }
    @Override
    protected void caxpy(int N, IComplexFloat alpha, IComplexNDArray X, int incX, IComplexNDArray Y, int incY) {
        X = (IComplexNDArray) Shape.toOffsetZero(X);
        Y = (IComplexNDArray) Shape.toOffsetZero(Y);
        float[] yData = getFloatData(Y);
        NativeBlas.caxpy(N, CpuComplex.getComplexFloat(alpha),getFloatData(X),getBlasOffset(X),incX,yData,getBlasOffset(Y),incY);
        setData(yData,Y);
    }
    @Override
    protected void zswap(int N, IComplexNDArray X, int incX, IComplexNDArray Y, int incY) {
        // FIX: zswap mutates both vectors; previously X's swapped half was
        // passed as a throw-away copy and never written back.
        double[] xData = getDoubleData(X);
        double[] yData = getDoubleData(Y);
        NativeBlas.zswap(N,xData,getBlasOffset(X),incX,yData,getBlasOffset(Y),incY);
        setData(xData, X);
        setData(yData, Y);
    }
    @Override
    protected void zcopy(int N, IComplexNDArray X, int incX, IComplexNDArray Y, int incY) {
        double[] yData = getDoubleData(Y);
        NativeBlas.zcopy(N,getDoubleData(X),getBlasOffset(X),incX,yData,getBlasOffset(Y),incY);
        setData(yData,Y);
    }
    @Override
    protected void zaxpy(int N, IComplexDouble alpha, IComplexNDArray X, int incX, IComplexNDArray Y, int incY) {
        X = (IComplexNDArray) Shape.toOffsetZero(X);
        Y = (IComplexNDArray) Shape.toOffsetZero(Y);
        double[] yData = getDoubleData(Y);
        NativeBlas.zaxpy(N, CpuComplex.getComplexDouble(alpha),getDoubleData(X),getBlasOffset(X),incX,yData,getBlasOffset(Y),incY);
        setData(yData,Y);
    }
    @Override
    protected void srotg(float a, float b, float c, float s) {
        throw new UnsupportedOperationException();
    }
    @Override
    protected void srotmg(float d1, float d2, float b1, float b2, INDArray P) {
        // NOTE(review): srotmg's d1/d2/b1 outputs land in throw-away floatW
        // wrappers; only P can be returned through this primitive signature.
        float[] pData = getFloatData(P);
        BLAS.getInstance().srotmg(new floatW(d1), new floatW(d2), new floatW(b1), b2, pData, getBlasOffset(P));
        setData(pData,P);
    }
    @Override
    protected void srot(int N, INDArray X, int incX, INDArray Y, int incY, float c, float s) {
        throw new UnsupportedOperationException();
    }
    @Override
    protected void srotm(int N, INDArray X, int incX, INDArray Y, int incY, INDArray P) {
        throw new UnsupportedOperationException();
    }
    @Override
    protected void drotg(double a, double b, double c, double s) {
        throw new UnsupportedOperationException();
    }
    @Override
    protected void drotmg(double d1, double d2, double b1, double b2, INDArray P) {
        // NOTE(review): as with srotmg, the scalar outputs are discarded.
        double[] pData = getDoubleData(P);
        BLAS.getInstance().drotmg(new doubleW(d1),new doubleW(d2),new doubleW(b1),b2,pData,getBlasOffset(P));
        setData(pData,P);
    }
    @Override
    protected void drot(int N, INDArray X, int incX, INDArray Y, int incY, double c, double s) {
        // FIX: DROT rotates both vectors in place. The previous code passed
        // fresh copies to BLAS and wrote back a stale yData, discarding the
        // entire rotation.
        double[] xData = getDoubleData(X);
        double[] yData = getDoubleData(Y);
        BLAS.getInstance().drot(N,xData,getBlasOffset(X),incX,yData,getBlasOffset(Y),incY,c,s);
        setData(xData,X);
        setData(yData,Y);
    }
    @Override
    protected void drotm(int N, INDArray X, int incX, INDArray Y, int incY, INDArray P) {
        // FIX: DROTM applies the modified rotation to X and Y in place (P is the
        // input parameter array). The previous code passed fresh copies of X/Y
        // and only wrote back P, discarding the results.
        double[] xData = getDoubleData(X);
        double[] yData = getDoubleData(Y);
        double[] pData = getDoubleData(P);
        BLAS.getInstance().drotm(N,xData,getBlasOffset(X),incX,yData,getBlasOffset(Y),incY,pData,getBlasOffset(P));
        setData(xData,X);
        setData(yData,Y);
        setData(pData,P);
    }
    @Override
    protected void sscal(int N, float alpha, INDArray X, int incX) {
        float[] data = getFloatData(X);
        BLAS.getInstance().sscal(N, alpha, data, getBlasOffset(X), incX);
        setData(data,X);
    }
    @Override
    protected void dscal(int N, double alpha, INDArray X, int incX) {
        double[] data = getDoubleData(X);
        BLAS.getInstance().dscal(N, alpha, data, BlasBufferUtil.getBlasOffset(X), incX);
        setData(data,X);
    }
    @Override
    protected void cscal(int N, IComplexFloat alpha, IComplexNDArray X, int incX) {
        X = (IComplexNDArray) Shape.toOffsetZero(X);
        float[] xData = getFloatData(X);
        NativeBlas.cscal(N, CpuComplex.getComplexFloat(alpha),xData,getBlasOffset(X),incX);
        setData(xData, X);
    }
    @Override
    protected void zscal(int N, IComplexDouble alpha, IComplexNDArray X, int incX) {
        double[] xData = getDoubleData(X);
        NativeBlas.zscal(N, CpuComplex.getComplexDouble(alpha),xData,getBlasOffset(X),incX);
        setData(xData, X);
    }
    @Override
    protected void csscal(int N, float alpha, IComplexNDArray X, int incX) {
        X = (IComplexNDArray) Shape.toOffsetZero(X);
        float[] xData = getFloatData(X);
        NativeBlas.csscal(N,alpha,xData,getBlasOffset(X),incX);
        setData(xData, X);
    }
    @Override
    protected void zdscal(int N, double alpha, IComplexNDArray X, int incX) {
        X = (IComplexNDArray) Shape.toOffsetZero(X);
        double[] xData = getDoubleData(X);
        NativeBlas.zdscal(N,alpha,xData,getBlasOffset(X),incX);
        setData(xData, X);
    }
}
| |
package foam.nanos.auth;
import foam.core.X;
import foam.dao.ListSink;
import javax.naming.AuthenticationException;
import javax.security.auth.AuthPermission;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
/**
 * Exercises the cached user/group auth service end to end: builds groups with
 * permissions, registers users, then times login, permission checks (cold and
 * cached), challenged login, password update and logout.
 */
public class UserAndGroupAuthServiceTest
  extends CachedUserAndGroupAuthService
{
  protected int numUsers       = 10;
  protected int numGroups      = 5;
  protected int numPermissions = 10;

  // Contexts returned by login(); reused by the check/update/logout phases.
  protected ArrayList<X> xArray = new ArrayList<>();
  // Permissions exercised in testCheck(); replayed in testCachedCheck() to hit the cache.
  protected ArrayList<Permission> permissions = new ArrayList<>();

  @Override
  public void start() {
    System.out.println("Starting");
    super.start();
    createGroupsAndPermissions();
    addTestUsers();
    testlogin();
    testCheck();
    testCachedCheck();
    testChallengedLogin();
    testUpdatePassword();
    testLogout();
  }

  /** Prints the elapsed time since {@code startTime} (nanoTime) in milliseconds. */
  private void printDuration(long startTime) {
    long durationInMilliseconds = (System.nanoTime() - startTime) / 1000000;
    System.out.println("Duration: " + durationInMilliseconds + "ms \n");
  }

  /** Creates numGroups groups, each carrying numPermissions permissions. */
  public void createGroupsAndPermissions() {
    System.out.println("Creating " + numGroups + " groups with " + numPermissions + " permissions each");
    long startTime = System.nanoTime();
    for ( int i = 0 ; i < numGroups ; i++ ) {
      Group group = new Group();
      group.setId("" + i);
      group.setDescription("Group " + i + " users");
      Permission[] permissions = new Permission[numPermissions];
      for ( int j = 0 ; j < numPermissions ; j++ ) {
        foam.nanos.auth.Permission permission = new foam.nanos.auth.Permission();
        // Permission ids are the group index concatenated with the permission index.
        permission.setId(i + "" + j);
        permission.setDescription("Group" + i + " permissions-" + j);
        permissions[j] = permission;
      }
      group.setPermissions(permissions);
      groupDAO_.put(group);
    }
    printDuration(startTime);
  }

  /** Registers numUsers users, each assigned to a randomly chosen group. */
  public void addTestUsers() {
    System.out.println("Registering " + numUsers + " Users");
    long startTime = System.nanoTime();
    ListSink sink = (ListSink) groupDAO_.select(new ListSink());
    for ( int i = 0 ; i < numUsers ; i++ ) {
      User user = new User();
      user.setId(i);
      user.setEmail("marc" + i + "@nanopay.net");
      user.setFirstName("Marc" + i);
      user.setLastName("R" + i);
      try {
        // Stored form is "<hash>:<salt>"; the plaintext password is "marc<i>".
        String salt = UserAndGroupAuthService.generateRandomSalt();
        user.setPassword(UserAndGroupAuthService.hashPassword("marc" + i, salt) + ":" + salt);
      } catch (NoSuchAlgorithmException e) {
        System.out.println("Couldn't hash password with " + UserAndGroupAuthService.HASH_METHOD + "\nTest Failed");
      }
      int randomGroup = ThreadLocalRandom.current().nextInt(0, sink.getData().size());
      Group group = (Group) sink.getData().get(randomGroup);
      user.setGroup(group);
      userDAO_.put(user);
    }
    printDuration(startTime);
  }

  /** Logs every user in and stores the returned contexts for the later phases. */
  public void testlogin() {
    System.out.println("Login " + numUsers + " Users");
    long startTime = System.nanoTime();
    for ( int i = 0; i < numUsers; i++ ) {
      try {
        xArray.add(login(i, "marc" + i));
      } catch (AuthenticationException e) {
        e.printStackTrace();
      }
    }
    printDuration(startTime);
  }

  /** Checks one random permission per logged-in user (cold path). */
  public void testCheck() {
    System.out.println("Permissions Check for " + numUsers + " users");
    long startTime = System.nanoTime();
    ListSink sink = (ListSink) groupDAO_.select(new ListSink());
    for ( int i = 0 ; i < xArray.size() ; i++ ) {
      int randomGroup = ThreadLocalRandom.current().nextInt(0, sink.getData().size());
      Group group = (Group) sink.getData().get(randomGroup);
      int randomPermission = ThreadLocalRandom.current().nextInt(0, group.getPermissions().length);
      Permission permission = group.getPermissions()[randomPermission];
      // Remember what was checked so testCachedCheck() replays the same lookups.
      permissions.add(permission);
      AuthPermission authAdminpermission = new AuthPermission(permission.getId());
      check(xArray.get(i), authAdminpermission);
    }
    printDuration(startTime);
  }

  /** Re-checks the permissions from testCheck(); should be served from cache. */
  public void testCachedCheck() {
    System.out.println("Cached Permissions Check for " + numUsers + " users");
    long startTime = System.nanoTime();
    for ( int i = 0 ; i < xArray.size() ; i++ ) {
      AuthPermission authAdminpermission = new AuthPermission(permissions.get(i).getId());
      check(xArray.get(i), authAdminpermission);
    }
    printDuration(startTime);
  }

  /** Performs a challenge/response login for every user. */
  public void testChallengedLogin() {
    System.out.println("Challenge Login " + numUsers + " Users");
    long startTime = System.nanoTime();
    for ( int i = 0 ; i < numUsers ; i++ ) {
      try {
        challengedLogin(i, generateChallenge(i));
      } catch (AuthenticationException e) {
        e.printStackTrace();
      }
    }
    printDuration(startTime);
  }

  /**
   * Verifies that an expired challenge is rejected. NOTE: not invoked from
   * start() — it needs a 6-second sleep to let the challenge lapse.
   */
  public void testChallengedLoginWithExpiredChallenge() {
    try {
      String challenge = generateChallenge(0);
      TimeUnit.SECONDS.sleep(6);
      challengedLogin(0, challenge);
    } catch (AuthenticationException e) {
      e.printStackTrace();
    } catch (InterruptedException e) {
      // Restore the interrupt status instead of swallowing it.
      Thread.currentThread().interrupt();
      e.printStackTrace();
    }
  }

  /** Updates every user's password from "marc<i>" to "marcasdf". */
  public void testUpdatePassword() {
    System.out.println("Update Password for " + numUsers + " Users");
    long startTime = System.nanoTime();
    for ( int i = 0 ; i < xArray.size() ; i++ ) {
      try {
        updatePassword(xArray.get(i), "marc" + i, "marcasdf");
      } catch (AuthenticationException e) {
        e.printStackTrace();
      }
    }
    printDuration(startTime);
  }

  /** Logs every stored context out. */
  public void testLogout() {
    System.out.println("Logout " + numUsers + " Users");
    long startTime = System.nanoTime();
    for ( int i = 0; i < xArray.size(); i++ ) {
      logout(xArray.get(i));
    }
    printDuration(startTime);
  }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl;
import com.intellij.lang.PsiBuilderFactory;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.progress.ProgressIndicatorProvider;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.FileIndexFacade;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.NonPhysicalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileFilter;
import com.intellij.psi.*;
import com.intellij.psi.impl.file.impl.FileManager;
import com.intellij.psi.impl.file.impl.FileManagerImpl;
import com.intellij.psi.util.PsiModificationTracker;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBus;
import com.intellij.util.messages.Topic;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
public class PsiManagerImpl extends PsiManagerEx {
private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.PsiManagerImpl");
private final Project myProject;
private final FileIndexFacade myFileIndex;
private final MessageBus myMessageBus;
private final PsiModificationTracker myModificationTracker;
private final FileManagerImpl myFileManager;
private final List<PsiTreeChangePreprocessor> myTreeChangePreprocessors = ContainerUtil.createLockFreeCopyOnWriteList();
private final List<PsiTreeChangeListener> myTreeChangeListeners = ContainerUtil.createLockFreeCopyOnWriteList();
private boolean myTreeChangeEventIsFiring;
private boolean myIsDisposed;
private VirtualFileFilter myAssertOnFileLoadingFilter = VirtualFileFilter.NONE;
private final AtomicInteger myBatchFilesProcessingModeCount = new AtomicInteger(0);
public static final Topic<AnyPsiChangeListener> ANY_PSI_CHANGE_TOPIC =
Topic.create("ANY_PSI_CHANGE_TOPIC", AnyPsiChangeListener.class, Topic.BroadcastDirection.TO_PARENT);
public PsiManagerImpl(Project project,
                      FileDocumentManager fileDocumentManager,
                      PsiBuilderFactory psiBuilderFactory,
                      FileIndexFacade fileIndex,
                      MessageBus messageBus,
                      PsiModificationTracker modificationTracker) {
  myProject = project;
  myFileIndex = fileIndex;
  myMessageBus = messageBus;
  myModificationTracker = modificationTracker;

  //We need to initialize PsiBuilderFactory service so it won't initialize under PsiLock from ChameleonTransform
  @SuppressWarnings({"UnusedDeclaration", "UnnecessaryLocalVariable"}) Object used = psiBuilderFactory;

  myFileManager = new FileManagerImpl(this, fileDocumentManager, fileIndex);

  myTreeChangePreprocessors.add((PsiTreeChangePreprocessor)modificationTracker);

  // Flip the disposed flag when the project goes away.
  Disposer.register(project, () -> myIsDisposed = true);
}
@Override
public boolean isDisposed() {
  // Set to true by the Disposable registered against the project in the constructor.
  return myIsDisposed;
}
@Override
public void dropResolveCaches() {
  myFileManager.processQueue();
  // Fire both the physical (true) and non-physical (false) change notifications
  // so every cache listening on either channel is invalidated.
  beforeChange(true);
  beforeChange(false);
}
@Override
public void dropPsiCaches() {
  dropResolveCaches();
  // Unloaded-PSI property change must happen inside a write action.
  WriteAction.run(myFileManager::firePropertyChangedForUnloadedPsi);
}
@Override
public boolean isInProject(@NotNull PsiElement element) {
  // A directory container is in the project iff every directory it represents is
  // (vacuously true when it represents none — same as the original loop).
  if (element instanceof PsiDirectoryContainer) {
    return Arrays.stream(((PsiDirectoryContainer)element).getDirectories()).allMatch(this::isInProject);
  }

  PsiFile file = element.getContainingFile();
  VirtualFile virtualFile;
  if (file != null) {
    virtualFile = file.getViewProvider().getVirtualFile();
  }
  else if (element instanceof PsiFileSystemItem) {
    virtualFile = ((PsiFileSystemItem)element).getVirtualFile();
  }
  else {
    virtualFile = null;
  }

  // Physical files that live on a non-physical file system are treated as in-project.
  if (file != null && file.isPhysical() && virtualFile.getFileSystem() instanceof NonPhysicalFileSystem) return true;

  return virtualFile != null && myFileIndex.isInContent(virtualFile);
}
@Override
@TestOnly
public void setAssertOnFileLoadingFilter(@NotNull VirtualFileFilter filter, @NotNull Disposable parentDisposable) {
  // Find something to ensure there's no changed files waiting to be processed in repository indices.
  myAssertOnFileLoadingFilter = filter;
  // Reset to the pass-through filter when the owning disposable goes away.
  Disposer.register(parentDisposable, () -> myAssertOnFileLoadingFilter = VirtualFileFilter.NONE);
}
@Override
public boolean isAssertOnFileLoading(@NotNull VirtualFile file) {
  // Delegates to the test-only filter installed via setAssertOnFileLoadingFilter.
  return myAssertOnFileLoadingFilter.accept(file);
}
@Override
@NotNull
public Project getProject() {
  return myProject;
}
/** Returns the file manager responsible for PSI file/view-provider lookup. */
@Override
@NotNull
public FileManager getFileManager() {
  return myFileManager;
}
/**
 * Determines whether two PSI elements are equivalent: identity, equality, or
 * either element declaring itself equivalent to the other. Null-safe.
 */
@Override
public boolean areElementsEquivalent(PsiElement element1, PsiElement element2) {
  // Called frequently, so it doubles as a cancellation point for background daemons.
  ProgressIndicatorProvider.checkCanceled();
  if (element1 == element2) return true;
  if (element1 == null || element2 == null) return false;
  if (element1.equals(element2)) return true;
  return element1.isEquivalentTo(element2) || element2.isEquivalentTo(element1);
}
/** Finds the PSI file for the given virtual file, or null if none; checks for cancellation first. */
@Override
public PsiFile findFile(@NotNull VirtualFile file) {
  ProgressIndicatorProvider.checkCanceled();
  return myFileManager.findFile(file);
}
/** Finds the file view provider for the given virtual file, or null if none; checks for cancellation first. */
@Override
@Nullable
public FileViewProvider findViewProvider(@NotNull VirtualFile file) {
  ProgressIndicatorProvider.checkCanceled();
  return myFileManager.findViewProvider(file);
}
/** Finds the PSI directory for the given virtual file, or null if none; checks for cancellation first. */
@Override
public PsiDirectory findDirectory(@NotNull VirtualFile file) {
  ProgressIndicatorProvider.checkCanceled();
  return myFileManager.findDirectory(file);
}
/** Re-reads the given PSI file's content from disk, discarding in-memory state. */
@Override
public void reloadFromDisk(@NotNull PsiFile file) {
  myFileManager.reloadFromDisk(file);
}
/** Registers a PSI tree change listener; the caller is responsible for removing it. */
@Override
public void addPsiTreeChangeListener(@NotNull PsiTreeChangeListener listener) {
  myTreeChangeListeners.add(listener);
}
/**
 * Registers a PSI tree change listener that is removed automatically when
 * {@code parentDisposable} is disposed.
 */
@Override
public void addPsiTreeChangeListener(@NotNull final PsiTreeChangeListener listener, @NotNull Disposable parentDisposable) {
  addPsiTreeChangeListener(listener);
  // Auto-unsubscribe together with the parent disposable.
  Disposer.register(parentDisposable, () -> removePsiTreeChangeListener(listener));
}
/** Unregisters a previously added PSI tree change listener. */
@Override
public void removePsiTreeChangeListener(@NotNull PsiTreeChangeListener listener) {
  myTreeChangeListeners.remove(listener);
}
/**
 * Formats a PSI element for debug logging: its class name, or the literal
 * {@code "null"} when the element is absent.
 */
private static String logPsi(@Nullable PsiElement element) {
  // Fixed: the null branch used to return " null" with a stray leading space,
  // producing double-spaced log lines such as "child =  null".
  return element == null ? "null" : element.getClass().getName();
}
/** Announces an imminent physical PSI change, tags the event as BEFORE_CHILD_ADDITION, and dispatches it. */
@Override
public void beforeChildAddition(@NotNull PsiTreeChangeEventImpl event) {
  beforeChange(true);
  event.setCode(PsiTreeChangeEventImpl.PsiEventType.BEFORE_CHILD_ADDITION);
  if (LOG.isDebugEnabled()) {
    LOG.debug("beforeChildAddition: event = " + event);
  }
  fireEvent(event);
}
/** Announces an imminent physical PSI change, tags the event as BEFORE_CHILD_REMOVAL, and dispatches it. */
@Override
public void beforeChildRemoval(@NotNull PsiTreeChangeEventImpl event) {
  beforeChange(true);
  event.setCode(PsiTreeChangeEventImpl.PsiEventType.BEFORE_CHILD_REMOVAL);
  if (LOG.isDebugEnabled()) {
    LOG.debug("beforeChildRemoval: child = " + logPsi(event.getChild()) + ", parent = " + logPsi(event.getParent()));
  }
  fireEvent(event);
}
/** Announces an imminent physical PSI change, tags the event as BEFORE_CHILD_REPLACEMENT, and dispatches it. */
@Override
public void beforeChildReplacement(@NotNull PsiTreeChangeEventImpl event) {
  beforeChange(true);
  event.setCode(PsiTreeChangeEventImpl.PsiEventType.BEFORE_CHILD_REPLACEMENT);
  if (LOG.isDebugEnabled()) {
    LOG.debug("beforeChildReplacement: oldChild = " + logPsi(event.getOldChild()));
  }
  fireEvent(event);
}
/** Announces an imminent physical PSI change, tags the event as BEFORE_CHILDREN_CHANGE, and dispatches it. */
public void beforeChildrenChange(@NotNull PsiTreeChangeEventImpl event) {
  beforeChange(true);
  event.setCode(PsiTreeChangeEventImpl.PsiEventType.BEFORE_CHILDREN_CHANGE);
  if (LOG.isDebugEnabled()) {
    LOG.debug("beforeChildrenChange: parent = " + logPsi(event.getParent()));
  }
  fireEvent(event);
}
/** Announces an imminent physical PSI change, tags the event as BEFORE_CHILD_MOVEMENT, and dispatches it. */
public void beforeChildMovement(@NotNull PsiTreeChangeEventImpl event) {
  beforeChange(true);
  event.setCode(PsiTreeChangeEventImpl.PsiEventType.BEFORE_CHILD_MOVEMENT);
  if (LOG.isDebugEnabled()) {
    LOG.debug("beforeChildMovement: child = " + logPsi(event.getChild()) + ", oldParent = " + logPsi(event.getOldParent()) + ", newParent = " + logPsi(event.getNewParent()));
  }
  fireEvent(event);
}
/** Announces an imminent physical PSI change, tags the event as BEFORE_PROPERTY_CHANGE, and dispatches it. */
public void beforePropertyChange(@NotNull PsiTreeChangeEventImpl event) {
  beforeChange(true);
  event.setCode(PsiTreeChangeEventImpl.PsiEventType.BEFORE_PROPERTY_CHANGE);
  if (LOG.isDebugEnabled()) {
    LOG.debug("beforePropertyChange: element = " + logPsi(event.getElement()) + ", propertyName = " + event.getPropertyName() + ", oldValue = " +
              arrayToString(event.getOldValue()));
  }
  fireEvent(event);
}
/** Renders a property value for logging, deep-printing object arrays instead of their identity string. */
private static Object arrayToString(Object value) {
  if (value instanceof Object[]) {
    return Arrays.deepToString((Object[])value);
  }
  return value;
}
/** Tags the event as CHILD_ADDED, dispatches it, then announces that a physical PSI change completed. */
public void childAdded(@NotNull PsiTreeChangeEventImpl event) {
  event.setCode(PsiTreeChangeEventImpl.PsiEventType.CHILD_ADDED);
  if (LOG.isDebugEnabled()) {
    LOG.debug("childAdded: child = " + logPsi(event.getChild()) + ", parent = " + logPsi(event.getParent()));
  }
  fireEvent(event);
  afterChange(true);
}
/** Tags the event as CHILD_REMOVED, dispatches it, then announces that a physical PSI change completed. */
public void childRemoved(@NotNull PsiTreeChangeEventImpl event) {
  event.setCode(PsiTreeChangeEventImpl.PsiEventType.CHILD_REMOVED);
  if (LOG.isDebugEnabled()) {
    LOG.debug("childRemoved: child = " + logPsi(event.getChild()) + ", parent = " + logPsi(event.getParent()));
  }
  fireEvent(event);
  afterChange(true);
}
/** Tags the event as CHILD_REPLACED, dispatches it, then announces that a physical PSI change completed. */
public void childReplaced(@NotNull PsiTreeChangeEventImpl event) {
  event.setCode(PsiTreeChangeEventImpl.PsiEventType.CHILD_REPLACED);
  if (LOG.isDebugEnabled()) {
    LOG.debug("childReplaced: oldChild = " + logPsi(event.getOldChild()) + ", newChild = " + logPsi(event.getNewChild()) + ", parent = " + logPsi(event.getParent()));
  }
  fireEvent(event);
  afterChange(true);
}
/** Tags the event as CHILD_MOVED, dispatches it, then announces that a physical PSI change completed. */
public void childMoved(@NotNull PsiTreeChangeEventImpl event) {
  event.setCode(PsiTreeChangeEventImpl.PsiEventType.CHILD_MOVED);
  if (LOG.isDebugEnabled()) {
    LOG.debug("childMoved: child = " + logPsi(event.getChild()) + ", oldParent = " + logPsi(event.getOldParent()) + ", newParent = " + logPsi(event.getNewParent()));
  }
  fireEvent(event);
  afterChange(true);
}
/** Tags the event as CHILDREN_CHANGED, dispatches it, then announces that a physical PSI change completed. */
public void childrenChanged(@NotNull PsiTreeChangeEventImpl event) {
  event.setCode(PsiTreeChangeEventImpl.PsiEventType.CHILDREN_CHANGED);
  if (LOG.isDebugEnabled()) {
    LOG.debug("childrenChanged: parent = " + logPsi(event.getParent()));
  }
  fireEvent(event);
  afterChange(true);
}
/** Tags the event as PROPERTY_CHANGED, dispatches it, then announces that a physical PSI change completed. */
public void propertyChanged(@NotNull PsiTreeChangeEventImpl event) {
  event.setCode(PsiTreeChangeEventImpl.PsiEventType.PROPERTY_CHANGED);
  if (LOG.isDebugEnabled()) {
    LOG.debug(
      "propertyChanged: element = " + logPsi(event.getElement())
      + ", propertyName = " + event.getPropertyName()
      + ", oldValue = " + arrayToString(event.getOldValue())
      + ", newValue = " + arrayToString(event.getNewValue())
    );
  }
  fireEvent(event);
  afterChange(true);
}
/** Registers a preprocessor that sees tree-change events before regular listeners. */
public void addTreeChangePreprocessor(@NotNull PsiTreeChangePreprocessor preprocessor) {
  myTreeChangePreprocessors.add(preprocessor);
}
/** Unregisters a previously added tree-change preprocessor. */
public void removeTreeChangePreprocessor(@NotNull PsiTreeChangePreprocessor preprocessor) {
  myTreeChangePreprocessors.remove(preprocessor);
}
/**
 * Dispatches a PSI tree change event: first to locally registered preprocessors,
 * then to extension-point preprocessors, then to listeners via a per-event-code
 * switch. Real tree changes (anything other than property changes) are guarded
 * against reentrancy: PSI must not be modified while an event is being fired.
 */
private void fireEvent(@NotNull PsiTreeChangeEventImpl event) {
  // Property-change events are not structural tree changes and skip the reentrancy guard.
  boolean isRealTreeChange = event.getCode() != PsiTreeChangeEventImpl.PsiEventType.PROPERTY_CHANGED
                             && event.getCode() != PsiTreeChangeEventImpl.PsiEventType.BEFORE_PROPERTY_CHANGE;
  PsiFile file = event.getFile();
  // Physical (or file-less) events require the write lock.
  if (file == null || file.isPhysical()) {
    ApplicationManager.getApplication().assertWriteAccessAllowed();
  }
  if (isRealTreeChange) {
    LOG.assertTrue(!myTreeChangeEventIsFiring, "Changes to PSI are not allowed inside event processing");
    myTreeChangeEventIsFiring = true;
  }
  try {
    // 1) Locally registered preprocessors.
    for (PsiTreeChangePreprocessor preprocessor : myTreeChangePreprocessors) {
      preprocessor.treeChanged(event);
    }
    // 2) Extension-point preprocessors. When out-of-code-block tracking is disabled,
    //    PsiTreeChangePreprocessorBase instances are skipped here and handled separately below.
    boolean enableOutOfCodeBlockTracking = ((PsiModificationTrackerImpl)myModificationTracker).isEnableCodeBlockTracker();
    for (PsiTreeChangePreprocessor preprocessor : Extensions.getExtensions(PsiTreeChangePreprocessor.EP_NAME, myProject)) {
      if (!enableOutOfCodeBlockTracking && preprocessor instanceof PsiTreeChangePreprocessorBase) continue;
      preprocessor.treeChanged(event);
    }
    // 3) Second pass: give the skipped base preprocessors the out-of-code-block notification only.
    if (!enableOutOfCodeBlockTracking) {
      for (PsiTreeChangePreprocessor preprocessor : Extensions.getExtensions(PsiTreeChangePreprocessor.EP_NAME, myProject)) {
        if (!(preprocessor instanceof PsiTreeChangePreprocessorBase)) continue;
        ((PsiTreeChangePreprocessorBase)preprocessor).onOutOfCodeBlockModification(event);
      }
    }
    // 4) Regular listeners; a failing listener is logged but must not block the others.
    for (PsiTreeChangeListener listener : myTreeChangeListeners) {
      try {
        switch (event.getCode()) {
          case BEFORE_CHILD_ADDITION:
            listener.beforeChildAddition(event);
            break;
          case BEFORE_CHILD_REMOVAL:
            listener.beforeChildRemoval(event);
            break;
          case BEFORE_CHILD_REPLACEMENT:
            listener.beforeChildReplacement(event);
            break;
          case BEFORE_CHILD_MOVEMENT:
            listener.beforeChildMovement(event);
            break;
          case BEFORE_CHILDREN_CHANGE:
            listener.beforeChildrenChange(event);
            break;
          case BEFORE_PROPERTY_CHANGE:
            listener.beforePropertyChange(event);
            break;
          case CHILD_ADDED:
            listener.childAdded(event);
            break;
          case CHILD_REMOVED:
            listener.childRemoved(event);
            break;
          case CHILD_REPLACED:
            listener.childReplaced(event);
            break;
          case CHILD_MOVED:
            listener.childMoved(event);
            break;
          case CHILDREN_CHANGED:
            listener.childrenChanged(event);
            break;
          case PROPERTY_CHANGED:
            listener.propertyChanged(event);
            break;
        }
      }
      catch (Exception e) {
        LOG.error(e);
      }
    }
  }
  finally {
    if (isRealTreeChange) {
      myTreeChangeEventIsFiring = false;
    }
  }
}
/** Runs the given runnable before every physical PSI change (non-physical changes are ignored). */
@Override
public void registerRunnableToRunOnChange(@NotNull final Runnable runnable) {
  myMessageBus.connect().subscribe(ANY_PSI_CHANGE_TOPIC, new AnyPsiChangeListener.Adapter() {
    @Override
    public void beforePsiChanged(boolean isPhysical) {
      if (isPhysical) runnable.run();
    }
  });
}
/** Runs the given runnable before every PSI change, physical or not. */
@Override
public void registerRunnableToRunOnAnyChange(@NotNull final Runnable runnable) { // includes non-physical changes
  myMessageBus.connect().subscribe(ANY_PSI_CHANGE_TOPIC, new AnyPsiChangeListener.Adapter() {
    @Override
    public void beforePsiChanged(boolean isPhysical) {
      runnable.run();
    }
  });
}
/** Runs the given runnable after every PSI change, physical or not. */
@Override
public void registerRunnableToRunAfterAnyChange(@NotNull final Runnable runnable) { // includes non-physical changes
  myMessageBus.connect().subscribe(ANY_PSI_CHANGE_TOPIC, new AnyPsiChangeListener.Adapter() {
    @Override
    public void afterPsiChanged(boolean isPhysical) {
      runnable.run();
    }
  });
}
/** Synchronously notifies all bus subscribers that a PSI change is about to happen. */
@Override
public void beforeChange(boolean isPhysical) {
  myMessageBus.syncPublisher(ANY_PSI_CHANGE_TOPIC).beforePsiChanged(isPhysical);
}
/** Synchronously notifies all bus subscribers that a PSI change has completed. */
@Override
public void afterChange(boolean isPhysical) {
  myMessageBus.syncPublisher(ANY_PSI_CHANGE_TOPIC).afterPsiChanged(isPhysical);
}
/** Returns the PSI modification tracker for this project. */
@Override
@NotNull
public PsiModificationTracker getModificationTracker() {
  return myModificationTracker;
}
/** Enters batch files processing mode; nestable — each start must be balanced by a finish. */
@Override
public void startBatchFilesProcessingMode() {
  myBatchFilesProcessingModeCount.incrementAndGet();
}
/**
 * Leaves batch files processing mode; must balance a prior
 * {@link #startBatchFilesProcessingMode()} call.
 */
@Override
public void finishBatchFilesProcessingMode() {
  // Assert on the value this decrement produced rather than re-reading the counter,
  // so a concurrent start/finish on another thread cannot mask an unbalanced call.
  int count = myBatchFilesProcessingModeCount.decrementAndGet();
  LOG.assertTrue(count >= 0);
}
/** Reports whether at least one batch files processing session is active. */
@Override
public boolean isBatchFilesProcessingMode() {
  return myBatchFilesProcessingModeCount.get() > 0;
}
/** Test-only: resets the file manager and drops all PSI caches between unit tests. */
@TestOnly
public void cleanupForNextTest() {
  assert ApplicationManager.getApplication().isUnitTestMode();
  myFileManager.cleanupForNextTest();
  dropPsiCaches();
}
}
| |
/**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.forge.camel.commands.project;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.inject.Inject;
import io.fabric8.forge.camel.commands.project.helper.CamelCommandsHelper;
import io.fabric8.forge.camel.commands.project.helper.CamelProjectHelper;
import io.fabric8.forge.camel.commands.project.helper.LineNumberHelper;
import io.fabric8.forge.camel.commands.project.helper.StringHelper;
import org.apache.camel.catalog.CamelCatalog;
import org.apache.camel.catalog.DefaultCamelCatalog;
import org.apache.camel.catalog.JSonSchemaHelper;
import org.jboss.forge.addon.dependencies.Dependency;
import org.jboss.forge.addon.dependencies.DependencyResolver;
import org.jboss.forge.addon.projects.Project;
import org.jboss.forge.addon.projects.dependencies.DependencyInstaller;
import org.jboss.forge.addon.projects.facets.ResourcesFacet;
import org.jboss.forge.addon.projects.facets.WebResourcesFacet;
import org.jboss.forge.addon.resource.FileResource;
import org.jboss.forge.addon.ui.context.UIBuilder;
import org.jboss.forge.addon.ui.context.UIContext;
import org.jboss.forge.addon.ui.context.UIExecutionContext;
import org.jboss.forge.addon.ui.context.UINavigationContext;
import org.jboss.forge.addon.ui.input.InputComponent;
import org.jboss.forge.addon.ui.input.InputComponentFactory;
import org.jboss.forge.addon.ui.metadata.UICommandMetadata;
import org.jboss.forge.addon.ui.result.NavigationResult;
import org.jboss.forge.addon.ui.result.Result;
import org.jboss.forge.addon.ui.result.Results;
import org.jboss.forge.addon.ui.util.Categories;
import org.jboss.forge.addon.ui.util.Metadata;
import org.jboss.forge.addon.ui.wizard.UIWizardStep;
import org.jboss.forge.roaster.model.util.Strings;
import static io.fabric8.forge.camel.commands.project.helper.CamelCatalogHelper.endpointComponentName;
import static io.fabric8.forge.camel.commands.project.helper.CamelCatalogHelper.isDefaultValue;
import static io.fabric8.forge.camel.commands.project.helper.CamelCommandsHelper.ensureCamelArtifactIdAdded;
import static io.fabric8.forge.camel.commands.project.helper.CamelCommandsHelper.loadCamelComponentDetails;
import static io.fabric8.forge.camel.commands.project.helper.UIHelper.createUIInput;
/**
 * Wizard step that lets the user edit the options of a Camel endpoint defined in
 * an XML file, then rewrites the endpoint uri in place.
 */
public class ConfigureEditEndpointPropertiesStep extends AbstractCamelProjectCommand implements UIWizardStep {

    @Inject
    private InputComponentFactory componentFactory;

    @Inject
    private DependencyInstaller dependencyInstaller;

    @Inject
    private DependencyResolver dependencyResolver;

    /** Inputs built in {@link #initializeUI}, read back when executing the step. */
    private List<InputComponent> inputs = new ArrayList<>();

    @Override
    public UICommandMetadata getMetadata(UIContext context) {
        return Metadata.forCommand(ConfigureEditEndpointPropertiesStep.class).name(
                "Camel: Edit Endpoint XML").category(Categories.create(CATEGORY))
                .description("Configure the endpoint options to use");
    }

    /**
     * Creates one UI input per endpoint option listed in the Camel catalog JSON
     * schema of the component, pre-filled from the existing endpoint uri when editing.
     */
    @Override
    @SuppressWarnings("unchecked")
    public void initializeUI(UIBuilder builder) throws Exception {
        CamelCatalog catalog = new DefaultCamelCatalog();

        Map<Object, Object> attributeMap = builder.getUIContext().getAttributeMap();
        // Either we have an uri from an existing endpoint to edit, or only a component name
        // to create a new endpoint from.
        String camelComponentName = optionalAttributeValue(attributeMap, "componentName");
        String uri = mandatoryAttributeValue(attributeMap, "endpointUri");
        if (camelComponentName == null && uri != null) {
            camelComponentName = endpointComponentName(uri);
        }

        String json = catalog.componentJSonSchema(camelComponentName);
        if (json == null) {
            throw new IllegalArgumentException("Could not find catalog entry for component name: " + camelComponentName);
        }

        List<Map<String, String>> data = JSonSchemaHelper.parseJsonSchema("properties", json, true);
        // Current values from the uri being edited; typed empty map instead of the raw EMPTY_MAP constant.
        Map<String, String> currentValues = uri != null ? catalog.endpointProperties(uri) : Collections.<String, String>emptyMap();

        if (data != null) {
            // Guards against duplicate option names in the schema.
            Set<String> namesAdded = new HashSet<>();
            for (Map<String, String> propertyMap : data) {
                String name = propertyMap.get("name");
                String type = propertyMap.get("type");
                String javaType = propertyMap.get("javaType");
                String required = propertyMap.get("required");
                String currentValue = currentValues.get(name);
                String defaultValue = propertyMap.get("defaultValue");
                String description = propertyMap.get("description");
                String enums = propertyMap.get("enum");

                if (!Strings.isNullOrEmpty(name)) {
                    // Only expose options whose java type maps to a renderable input type.
                    Class<Object> inputClazz = CamelCommandsHelper.loadValidInputTypes(javaType, type);
                    if (inputClazz != null && namesAdded.add(name)) {
                        InputComponent input = createUIInput(componentFactory, getConverterFactory(), name, inputClazz, required, currentValue, defaultValue, enums, description);
                        if (input != null) {
                            builder.add(input);
                            inputs.add(input);
                        }
                    }
                }
            }
        }
    }

    @Override
    public NavigationResult next(UINavigationContext context) throws Exception {
        // Terminal step: no further navigation.
        return null;
    }

    @Override
    public Result execute(UIExecutionContext context) throws Exception {
        Map<Object, Object> attributeMap = context.getUIContext().getAttributeMap();
        String kind = mandatoryAttributeValue(attributeMap, "kind");
        if ("xml".equals(kind)) {
            return executeXml(context, attributeMap);
        }
        // TODO: support java later
        return Results.success();
    }

    /**
     * Rebuilds the endpoint uri from the edited option inputs and writes it back
     * into the XML file at the recorded line number.
     */
    protected Result executeXml(UIExecutionContext context, Map<Object, Object> attributeMap) throws Exception {
        String camelComponentName = optionalAttributeValue(attributeMap, "componentName");
        String endpointUrl = mandatoryAttributeValue(attributeMap, "endpointUri");
        String xml = mandatoryAttributeValue(attributeMap, "xml");
        String lineNumber = mandatoryAttributeValue(attributeMap, "lineNumber");

        Project project = getSelectedProject(context);
        ResourcesFacet facet = project.getFacet(ResourcesFacet.class);
        WebResourcesFacet webFacet = project.getFacet(WebResourcesFacet.class);

        // does the project already have camel?
        Dependency core = CamelProjectHelper.findCamelCoreDependency(project);
        if (core == null) {
            return Results.fail("The project does not include camel-core");
        }

        // lets find the camel component class
        CamelComponentDetails details = new CamelComponentDetails();
        Result result = loadCamelComponentDetails(camelComponentName, details);
        if (result != null) {
            return result;
        }
        // and make sure its dependency is added
        result = ensureCamelArtifactIdAdded(project, details, dependencyInstaller);
        if (result != null) {
            return result;
        }

        // collect all the options that were set
        Map<String, String> options = new HashMap<>();
        for (InputComponent input : inputs) {
            String key = input.getName();
            if (input.hasValue()) {
                // Null-check the raw value BEFORE calling toString() to avoid an NPE.
                Object rawValue = input.getValue();
                if (rawValue != null) {
                    String value = rawValue.toString();
                    // do not add the value if it matches the component's default value
                    if (!isDefaultValue(camelComponentName, key, value)) {
                        options.put(key, value);
                    }
                }
            } else if (input.isRequired() && input.hasDefaultValue()) {
                // required option without an explicit value: fall back to its default
                Object rawValue = input.getValue();
                if (rawValue != null) {
                    options.put(key, rawValue.toString());
                }
            }
        }

        CamelCatalog catalog = new DefaultCamelCatalog();
        // TODO: Camel 2.15.3 asEndpointUriXml
        String uri = catalog.asEndpointUri(camelComponentName, options);
        if (uri == null) {
            return Results.fail("Cannot create endpoint uri");
        }
        // escape the uri for XML, e.g. & becomes &amp;
        uri = org.apache.camel.util.StringHelper.xmlEncode(uri);

        // Resolve the XML file from regular resources first, then web resources.
        FileResource file = facet != null ? facet.getResource(xml) : null;
        if (file == null || !file.exists()) {
            file = webFacet != null ? webFacet.getWebResource(xml) : null;
        }
        if (file == null || !file.exists()) {
            return Results.fail("Cannot find XML file " + xml);
        }

        List<String> lines = LineNumberHelper.readLines(file.getResourceInputStream());

        // the list is 0-based, and line number is 1-based
        int idx = Integer.parseInt(lineNumber) - 1;
        String line = lines.get(idx);

        // replace uri with new value
        line = StringHelper.replaceAll(line, endpointUrl, uri);
        lines.set(idx, line);

        // and save the file back
        String content = LineNumberHelper.linesToString(lines);
        file.setContents(content);
        return Results.success("Update endpoint uri: " + uri + " in XML file " + xml);
    }

    /**
     * Returns the mandatory String value of the given name.
     *
     * @throws IllegalArgumentException if the value is not available in the given attribute map
     */
    public static String mandatoryAttributeValue(Map<Object, Object> attributeMap, String name) {
        // Delegate to the optional variant so both share the same blank-value handling.
        String text = optionalAttributeValue(attributeMap, name);
        if (text != null) {
            return text;
        }
        throw new IllegalArgumentException("The attribute value '" + name + "' did not get passed on from the previous wizard page");
    }

    /**
     * Returns the optional String value of the given name, or {@code null} when
     * absent or blank.
     */
    public static String optionalAttributeValue(Map<Object, Object> attributeMap, String name) {
        Object value = attributeMap.get(name);
        if (value != null) {
            String text = value.toString();
            if (!Strings.isBlank(text)) {
                return text;
            }
        }
        return null;
    }
}
| |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.webutil.timegrid;
import java.io.PrintWriter;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Locale;
import java.util.StringTokenizer;
import java.util.TreeSet;
import java.util.Vector;
import javax.servlet.jsp.JspWriter;
import org.unitime.timetable.defaults.UserProperty;
import org.unitime.timetable.form.ExamGridForm;
import org.unitime.timetable.interfaces.RoomAvailabilityInterface;
import org.unitime.timetable.interfaces.RoomAvailabilityInterface.TimeBlock;
import org.unitime.timetable.model.DepartmentalInstructor;
import org.unitime.timetable.model.Exam;
import org.unitime.timetable.model.ExamPeriod;
import org.unitime.timetable.model.ExamType;
import org.unitime.timetable.model.Location;
import org.unitime.timetable.model.PreferenceLevel;
import org.unitime.timetable.model.SubjectArea;
import org.unitime.timetable.model.dao.ExamTypeDAO;
import org.unitime.timetable.security.SessionContext;
import org.unitime.timetable.solver.exam.ExamSolverProxy;
import org.unitime.timetable.solver.exam.ui.ExamAssignmentInfo;
import org.unitime.timetable.solver.exam.ui.ExamRoomInfo;
import org.unitime.timetable.solver.exam.ui.ExamInfo.ExamInstructorInfo;
import org.unitime.timetable.util.Constants;
import org.unitime.timetable.util.Formats;
import org.unitime.timetable.util.RoomAvailability;
import org.unitime.timetable.webutil.timegrid.ExamGridTable.ExamGridModel.ExamGridCell;
/**
* @author Tomas Muller
*/
public class ExamGridTable {
public static Formats.Format<Date> sDF = Formats.getDateFormat(Formats.Pattern.DATE_EXAM_PERIOD);
public static final int sResourceRoom = 0;
public static final int sResourceInstructor = 1;
public static final int sResourceSubject = 2;
public static final String[] sResources = new String[] {"Room", "Instructor", "Subject Area"};
public static final int sBgNone = 0;
public static final int sBgStudentConfs = 1;
public static final int sBgDirectStudentConfs = 2;
public static final int sBgMoreThanTwoADayStudentConfs = 3;
public static final int sBgBackToBackStudentConfs = 4;
public static final int sBgInstructorConfs = 5;
public static final int sBgDirectInstructorConfs = 6;
public static final int sBgMoreThanTwoADayInstructorConfs = 7;
public static final int sBgBackToBackInstructorConfs = 8;
public static final int sBgPeriodPref = 9;
public static final int sBgRoomPref = 10;
public static final int sBgDistPref = 11;
public static final String[] sBackgrounds = new String[] {
"No Background",
"Student Conflicts",
"Student Direct Conflicts",
"Student >2 A Day Conlicts",
"Student Back-To-Back Conlicts",
"Instructor Conflicts",
"Instructor Direct Conflicts",
"Instructor >2 A Day Conlicts",
"Instructor Back-To-Back Conlicts",
"Period Preferences",
"Room Preferences",
"Distribution Preferences"
};
public static final int sDispModeInRowHorizontal = 0;
public static final int sDispModeInRowVertical = 1;
public static final int sDispModePerDayHorizontal = 2;
public static final int sDispModePerDayVertical = 3;
public static final int sDispModePerWeekHorizontal = 4;
public static final int sDispModePerWeekVertical = 5;
public static String[] sDispModes = new String[] {
"In Row [horizontal]",
"In Row [vertical]",
"Per Day [horizontal]",
"Per Day [vertical]",
"Per Week [horizontal]",
"Per Week [vertical]"
};
public static final int sOrderByNameAsc = 0;
public static final int sOrderByNameDesc = 1;
public static final int sOrderBySizeAsc = 2;
public static final int sOrderBySizeDesc = 3;
public static String[] sOrders = new String[] {
"name [asc]", "name [desc]", "size [asc]", "size [desc]"
};
public static String sBgColorEmpty = "rgb(255,255,255)";
public static String sBgColorRequired = "rgb(80,80,200)";
public static String sBgColorStronglyPreferred = "rgb(40,180,60)";
public static String sBgColorPreferred = "rgb(170,240,60)";
public static String sBgColorNeutral = "rgb(240,240,240)";
public static String sBgColorDiscouraged = "rgb(240,210,60)";
public static String sBgColorStronglyDiscouraged = "rgb(240,120,60)";
public static String sBgColorProhibited = "rgb(220,50,40)";
public static String sBgColorNotAvailable = "rgb(200,200,200)";
public static String sBgColorNotAvailableButAssigned = sBgColorProhibited;
Vector<ExamGridModel> iModels = new Vector<ExamGridModel>();
ExamGridForm iForm = null;
TreeSet<Integer> iDates = new TreeSet();
TreeSet<Integer> iStartsSlots = new TreeSet();
Hashtable<Integer,Hashtable<Integer,ExamPeriod>> iPeriods = new Hashtable();
    /**
     * Builds the grid: indexes all exam periods by date offset and start slot, then
     * creates one model per room, instructor, or subject area (depending on the
     * form's resource type), taking assignments from the solver when it is loaded
     * for the same exam type, otherwise from the database. Models are sorted last.
     */
    public ExamGridTable(ExamGridForm form, SessionContext context, ExamSolverProxy solver) throws Exception {
        iForm = form;
        // Index periods: record all distinct dates and start slots, and map (date, slot) -> period.
        for (Iterator i=iForm.getPeriods(iForm.getExamType().toString()).iterator();i.hasNext();) {
            ExamPeriod period = (ExamPeriod)i.next();
            iDates.add(period.getDateOffset());
            iStartsSlots.add(period.getStartSlot());
            Hashtable<Integer,ExamPeriod> periodsThisDay = iPeriods.get(period.getDateOffset());
            if (periodsThisDay==null) {
                periodsThisDay = new Hashtable<Integer,ExamPeriod>();
                iPeriods.put(period.getDateOffset(), periodsThisDay);
            }
            periodsThisDay.put(period.getStartSlot(), period);
        }
        if (iForm.getResource()==sResourceRoom) {
            // One model per matching exam location.
            Date[] bounds = ExamPeriod.getBounds(form.getSessionId(),form.getExamBeginDate(), form.getExamType());
            for (Iterator i=Location.findAllExamLocations(iForm.getSessionId(), iForm.getExamType()).iterator();i.hasNext();) {
                Location location = (Location)i.next();
                if (match(location.getLabel())) {
                    if (solver!=null && solver.getExamTypeId().equals(iForm.getExamType()))
                        iModels.add(new RoomExamGridModel(location,
                                solver.getAssignedExamsOfRoom(location.getUniqueId()),bounds));
                    else
                        iModels.add(new RoomExamGridModel(location,
                                Exam.findAssignedExamsOfLocation(location.getUniqueId(), iForm.getExamType()),bounds));
                }
            }
        } else if (iForm.getResource()==sResourceInstructor) {
            String instructorNameFormat = UserProperty.NameFormat.get(context.getUser());
            // Instructors sharing an external id are merged into a single model.
            Hashtable<String,ExamGridModel> models = new Hashtable<String,ExamGridModel> ();
            for (Iterator i=DepartmentalInstructor.findAllExamInstructors(iForm.getSessionId(), iForm.getExamType()).iterator();i.hasNext();) {
                DepartmentalInstructor instructor = (DepartmentalInstructor)i.next();
                if (match(instructor.getName(instructorNameFormat))) {
                    Collection<ExamAssignmentInfo> assignments = null;
                    if (solver!=null && solver.getExamTypeId().equals(iForm.getExamType()))
                        assignments = solver.getAssignedExamsOfInstructor(instructor.getUniqueId());
                    else
                        assignments = Exam.findAssignedExamsOfInstructor(instructor.getUniqueId(), iForm.getExamType());
                    if (instructor.getExternalUniqueId()==null) {
                        iModels.add(new ExamGridModel(
                                instructor.getUniqueId(),
                                instructor.getName(instructorNameFormat),
                                -1,
                                assignments));
                    } else {
                        ExamGridModel m = models.get(instructor.getExternalUniqueId());
                        if (m==null) {
                            m = new ExamGridModel(
                                    instructor.getUniqueId(),
                                    instructor.getName(instructorNameFormat),
                                    -1,
                                    assignments);
                            iModels.add(m);
                            models.put(instructor.getExternalUniqueId(),m);
                        } else
                            m.addAssignments(assignments);
                    }
                }
            }
        } else if (iForm.getResource()==sResourceSubject) {
            // One model per matching subject area.
            for (Iterator i=SubjectArea.getSubjectAreaList(iForm.getSessionId()).iterator();i.hasNext();) {
                SubjectArea subject = (SubjectArea)i.next();
                if (match(subject.getSubjectAreaAbbreviation())) {
                    if (solver!=null && solver.getExamTypeId().equals(iForm.getExamType()))
                        iModels.add(new ExamGridModel(
                                subject.getUniqueId(),
                                subject.getSubjectAreaAbbreviation(),
                                -1,
                                solver.getAssignedExams(subject.getUniqueId())));
                    else
                        iModels.add(new ExamGridModel(
                                subject.getUniqueId(),
                                subject.getSubjectAreaAbbreviation(),
                                -1,
                                Exam.findAssignedExams(iForm.getSessionId(),subject.getUniqueId(),iForm.getExamType())));
                }
            }
        }
        Collections.sort(iModels);
    }
public ExamPeriod getPeriod(int day, Integer time) {
if (time==null) return null;
Hashtable<Integer,ExamPeriod> periods = iPeriods.get(day);
return (periods==null?null:periods.get(time));
}
    /** Renders the grid to a JSP writer by delegating to the PrintWriter overload and flushing. */
    public void printToHtml(JspWriter jsp) {
        PrintWriter out = new PrintWriter(jsp);
        printToHtml(out);
        out.flush();
    }
    /**
     * Returns the largest per-period overflow index (assignments count minus one)
     * for the model, over periods within the given day and slot ranges (inclusive).
     */
    public int getMaxIdx(ExamGridModel model, int startDay, int endDay, int firstSlot, int lastSlot) {
        int max = 0;
        for (Iterator i=iForm.getPeriods(iForm.getExamType().toString()).iterator();i.hasNext();) {
            ExamPeriod period = (ExamPeriod)i.next();
            if (period.getDateOffset()<startDay || period.getDateOffset()>endDay) continue;
            if (period.getStartSlot()<firstSlot || period.getStartSlot()>lastSlot) continue;
            max = Math.max(max, model.getAssignments(period).size()-1);
        }
        return max;
    }
    /**
     * Returns the largest per-period overflow index (assignments count minus one)
     * for the model, over periods on the given day of week within the slot range.
     */
    public int getMaxIdx(ExamGridModel model, int dayOfWeek, int firstSlot, int lastSlot) {
        int max = 0;
        for (Iterator i=iForm.getPeriods(iForm.getExamType().toString()).iterator();i.hasNext();) {
            ExamPeriod period = (ExamPeriod)i.next();
            if (getDayOfWeek(period.getDateOffset())!=dayOfWeek) continue;
            if (period.getStartSlot()<firstSlot || period.getStartSlot()>lastSlot) continue;
            max = Math.max(max, model.getAssignments(period).size()-1);
        }
        return max;
    }
    /**
     * Returns the largest per-period overflow index (assignments count minus one)
     * for the model, over periods in the given week starting exactly at the given slot.
     */
    public int getMaxIdx(ExamGridModel model, int week, int slot) {
        int max = 0;
        for (Iterator i=iForm.getPeriods(iForm.getExamType().toString()).iterator();i.hasNext();) {
            ExamPeriod period = (ExamPeriod)i.next();
            if (getWeek(period.getDateOffset())!=week) continue;
            if (period.getStartSlot()!=slot) continue;
            max = Math.max(max, model.getAssignments(period).size()-1);
        }
        return max;
    }
    /**
     * Returns the largest overflow index (assignments count minus one) across ALL
     * models for the period at the given day/time, or 0 when no such period exists.
     */
    public int getMaxIdx(int day, int time) {
        int max = 0;
        ExamPeriod period = getPeriod(day, time);
        if (period==null) return max;
        for (ExamGridModel model : models()) {
            max = Math.max(max, model.getAssignments(period).size()-1);
        }
        return max;
    }
public String getDayName(int day) {
Calendar c = Calendar.getInstance(Locale.US);
c.setTime(iForm.getExamBeginDate());
c.add(Calendar.DAY_OF_YEAR, day);
return sDF.format(c.getTime());
}
    /** Formats a day-of-week name for the given {@link Calendar} day-of-week constant. */
    public String getDayOfWeekName(int dayOfWeek) {
        Calendar c = Calendar.getInstance(Locale.US);
        c.set(Calendar.DAY_OF_WEEK, dayOfWeek);
        return Formats.getDateFormat(Formats.Pattern.DATE_DAY_OF_WEEK).format(c.getTime());
    }
    /**
     * Builds a header label for the given 1-based week of the session: "Week N"
     * plus the Monday–Sunday date range of that week.
     */
    public String getWeekName(int week) {
        Calendar c = Calendar.getInstance(Locale.US);
        c.setTime(iForm.getSessionBeginDate());
        c.setLenient(true);
        c.add(Calendar.WEEK_OF_YEAR, week-1);
        Formats.Format<Date> df = Formats.getDateFormat(Formats.Pattern.DATE_EVENT_SHORT);
        // Rewind to Monday for the range start, then advance to Sunday for the range end.
        while (c.get(Calendar.DAY_OF_WEEK)!=Calendar.MONDAY) c.add(Calendar.DAY_OF_YEAR, -1);
        String first = df.format(c.getTime());
        while (c.get(Calendar.DAY_OF_WEEK)!=Calendar.SUNDAY) c.add(Calendar.DAY_OF_YEAR, 1);
        String end = df.format(c.getTime());
        return "Week "+week+"<br>"+first+" - "+end;
    }
    /** Converts a time slot index to a printable clock time. */
    public String getSlotName(int slot) {
        return Constants.toTime(slot*Constants.SLOT_LENGTH_MIN + Constants.FIRST_SLOT_TIME_MIN);
    }
public void printHeaderCell(PrintWriter out, String name, boolean vertical, boolean eod, boolean eol) {
    // Emits a single column-header cell; 'vertical' is accepted for signature
    // parity with printCell but does not influence the header style.
    String suffix = (eol ? "EOL" : (eod ? "EOD" : ""));
    out.println("<th nowrap width='130' height='40' class='" + "TimetableHead" + "Cell" + suffix + "'>");
    out.println(name == null ? "" : name);
    out.println("</th>");
}
public boolean isVertical() {
    // True for the three display modes that lay the grid out vertically.
    int mode = iForm.getDispMode();
    return mode == sDispModeInRowVertical
        || mode == sDispModePerDayVertical
        || mode == sDispModePerWeekVertical;
}
public void printHeader(PrintWriter out, String name) {
    // Prints one row of column header cells matching the current display
    // mode; 'name' labels the corner cell and may be null.
    // FIX: boxed Integers were compared with '==' (identity); values outside
    // the JVM's Integer cache (-128..127) — which slot and day offsets
    // routinely are — made the eod/eol flags unreliable. Value comparisons
    // now use equals().
    out.println("<tr valign='top'>");
    boolean vertical = isVertical();
    printHeaderCell(out, name, vertical, false, false);
    TreeSet<Integer> days = days(), slots = slots(), weeks = weeks(), daysOfWeek = daysOfWeek();
    if (iForm.getDispMode()==sDispModeInRowHorizontal) {
        // One column per (day, slot) pair.
        for (Integer day : days) {
            for (Integer slot : slots()) {
                boolean eod = slot.equals(slots.last());
                boolean eol = (eod && day.equals(days.last()));
                printHeaderCell(out, getDayName(day)+"<br>"+getSlotName(slot), vertical, eod, eol);
            }
        }
    } else if (iForm.getDispMode()==sDispModeInRowVertical) {
        // One column per grid model.
        for (ExamGridModel m : models()) {
            boolean eol = m.equals(models().lastElement());
            printHeaderCell(out, m.getName()+(m.getSize()>0?" ("+m.getSize()+")":""), vertical, false, eol);
        }
    } else if (iForm.getDispMode()==sDispModePerDayHorizontal) {
        // One column per slot.
        for (Integer slot : slots()) {
            boolean eol = slot.equals(slots.last());
            printHeaderCell(out, getSlotName(slot), vertical, false, eol);
        }
    } else if (iForm.getDispMode()==sDispModePerDayVertical) {
        // One column per day.
        for (Integer day : days) {
            boolean eol = day.equals(days.last());
            printHeaderCell(out, getDayName(day), vertical, false, eol);
        }
    } else if (iForm.getDispMode()==sDispModePerWeekHorizontal) {
        // One column per (week, slot) pair.
        for (Integer week : weeks) {
            for (Integer slot : slots) {
                boolean eod = slot.equals(slots.last());
                boolean eol = eod && week.equals(weeks.last());
                printHeaderCell(out, getWeekName(week)+"<br>"+getSlotName(slot), vertical, eod, eol);
            }
        }
    } else if (iForm.getDispMode()==sDispModePerWeekVertical) {
        // One column per day of week.
        for (Integer dow : daysOfWeek) {
            boolean eol = dow.equals(daysOfWeek.last());
            printHeaderCell(out, getDayOfWeekName(dow), vertical, false, eol);
        }
    }
    out.println("</tr>");
}
private void getMouseOverAndMouseOut(StringBuffer onMouseOver, StringBuffer onMouseOut, ExamGridCell cell, String bgColor, boolean changeMouse) {
    // Builds matching onmouseover/onmouseout attributes that highlight every
    // other cell of the same exam; for room/instructor grids each element id
    // is suffixed with the room/instructor id. No-op without assignment info.
    if (cell == null) return;
    ExamAssignmentInfo info = cell.getInfo();
    if (info == null) return;
    String restoreColor = (bgColor == null ? "transparent" : bgColor);
    onMouseOver.append(" onmouseover=\"");
    onMouseOut.append(" onmouseout=\"");
    if (iForm.getResource() == sResourceRoom) {
        for (ExamRoomInfo room : info.getRooms()) {
            String id = info.getExamId() + "." + room.getLocationId();
            onMouseOver.append("if (document.getElementById('" + id + "')!=null) document.getElementById('" + id + "').style.backgroundColor='rgb(223,231,242)';");
            onMouseOut.append("if (document.getElementById('" + id + "')!=null) document.getElementById('" + id + "').style.backgroundColor='" + restoreColor + "';");
        }
    } else if (iForm.getResource() == sResourceInstructor) {
        for (Enumeration e = info.getInstructors().elements(); e.hasMoreElements();) {
            String id = info.getExamId() + "." + ((ExamInstructorInfo) e.nextElement()).getId();
            onMouseOver.append("if (document.getElementById('" + id + "')!=null) document.getElementById('" + id + "').style.backgroundColor='rgb(223,231,242)';");
            onMouseOut.append("if (document.getElementById('" + id + "')!=null) document.getElementById('" + id + "').style.backgroundColor='" + restoreColor + "';");
        }
    } else {
        String id = info.getExamId().toString();
        onMouseOver.append("if (document.getElementById('" + id + "')!=null) document.getElementById('" + id + "').style.backgroundColor='rgb(223,231,242)';");
        onMouseOut.append("if (document.getElementById('" + id + "')!=null) document.getElementById('" + id + "').style.backgroundColor='" + restoreColor + "';");
    }
    if (changeMouse)
        onMouseOver.append("this.style.cursor='hand';this.style.cursor='pointer';");
    onMouseOver.append("\" ");
    onMouseOut.append("\" ");
}
public int getWeek(int day) {
// Maps a day offset (days past the exam begin date) to a session week
// number; week numbering is anchored at the session begin week, so dates
// before the session start can yield values below 1.
Calendar cal = Calendar.getInstance(Locale.US);
cal.setTime(iForm.getExamBeginDate());
cal.setLenient(true);
cal.add(Calendar.DAY_OF_YEAR, day);
int week = 1;
// Step a week at a time backwards while we are after the session begin
// date and not yet inside its calendar week.
while (cal.getTime().after(iForm.getSessionBeginDate()) && cal.get(Calendar.WEEK_OF_YEAR) != iForm.getSessionBeginWeek()) {
cal.add(Calendar.DAY_OF_YEAR, -7); week ++;
}
// Symmetrically, step forwards for dates before the session begin date.
while (cal.getTime().before(iForm.getSessionBeginDate()) && cal.get(Calendar.WEEK_OF_YEAR) != iForm.getSessionBeginWeek()) {
cal.add(Calendar.DAY_OF_WEEK, 7); week --;
}
return week;
}
public int getDayOfWeek(int day) {
    // Calendar DAY_OF_WEEK constant of the date 'day' days past the exam
    // begin date.
    Calendar cal = Calendar.getInstance(Locale.US);
    cal.setLenient(true);
    cal.setTime(iForm.getExamBeginDate());
    cal.add(Calendar.DAY_OF_YEAR, day);
    return cal.get(Calendar.DAY_OF_WEEK);
}
public int getDay(int week, int dayOfWeek) {
// Inverse of getWeek/getDayOfWeek: maps a session week number and a
// Calendar DAY_OF_WEEK constant back to a day offset relative to the
// examination begin date.
Calendar c = Calendar.getInstance(Locale.US);
c.setTime(iForm.getSessionBeginDate());
c.setLenient(true);
c.add(Calendar.WEEK_OF_YEAR, week-1);
// Shift within the week to the requested day of week.
c.add(Calendar.DAY_OF_WEEK, dayOfWeek - c.get(Calendar.DAY_OF_WEEK));
Calendar ec = Calendar.getInstance(Locale.US);
ec.setTime(iForm.getExamBeginDate());
// NOTE(review): DAY_OF_YEAR subtraction assumes both dates fall in the
// same calendar year — verify behavior around a year boundary.
return c.get(Calendar.DAY_OF_YEAR)-ec.get(Calendar.DAY_OF_YEAR);
}
public TreeSet<Integer> days() {
    // Days (offsets from the exam begin date) selected by the form's date
    // filter: all dates, a single day, or — for values above 500 — a whole
    // week encoded as 1000 + week number.
    // Improvements: raw TreeSet made generic; the repeated
    // iForm.getExamType().toString() / iForm.getDate(...) calls are hoisted.
    String examType = iForm.getExamType().toString();
    if (iForm.isAllDates(examType)) return iDates;
    TreeSet<Integer> days = new TreeSet<Integer>();
    int date = iForm.getDate(examType);
    if (date > 500) {
        // Week filter: keep every stored date falling in the encoded week.
        for (Integer day : iDates)
            if (1000 + getWeek(day) == date) days.add(day);
    } else {
        days.add(date);
    }
    return days;
}
public TreeSet<Integer> daysOfWeek() {
    // Distinct Calendar DAY_OF_WEEK values covered by the selected days.
    // Improvement: raw TreeSet made generic.
    TreeSet<Integer> daysOfWeek = new TreeSet<Integer>();
    for (Integer day : days())
        daysOfWeek.add(getDayOfWeek(day));
    return daysOfWeek;
}
public TreeSet<Integer> weeks() {
    // Distinct session week numbers covered by the selected days.
    // Improvement: raw TreeSet made generic.
    TreeSet<Integer> weeks = new TreeSet<Integer>();
    for (Integer day : days())
        weeks.add(getWeek(day));
    return weeks;
}
public TreeSet<Integer> days(int week) {
    // Subset of days() that falls into the given session week.
    // Improvement: raw TreeSet made generic.
    TreeSet<Integer> days = new TreeSet<Integer>();
    for (Integer day : days())
        if (getWeek(day) == week) days.add(day);
    return days;
}
public TreeSet<Integer> slots() {
    // Period start slots within the form's start/end time window.
    // Improvements: raw TreeSet made generic; the loop-invariant
    // getStartTime/getEndTime lookups are hoisted out of the loop.
    String examType = iForm.getExamType().toString();
    int startTime = iForm.getStartTime(examType);
    int endTime = iForm.getEndTime(examType);
    TreeSet<Integer> slots = new TreeSet<Integer>();
    for (Integer slot : iStartsSlots)
        if (slot >= startTime && slot <= endTime) slots.add(slot);
    return slots;
}
public Integer prev(int slot) {
    // Latest start slot strictly before 'slot' that is not before the form's
    // start time; null when no such slot exists.
    // Improvements: the invariant getStartTime lookup is hoisted out of the
    // loop and the Math.max box/unbox round-trip is removed.
    int startTime = iForm.getStartTime(iForm.getExamType().toString());
    Integer prev = null;
    for (Integer s : iStartsSlots) {
        if (s < startTime || s >= slot) continue;
        if (prev == null || s > prev) prev = s;
    }
    return prev;
}
public Integer next(int slot) {
    // Earliest start slot strictly after 'slot' that is not past the form's
    // end time; null when no such slot exists.
    // Improvements: the invariant getEndTime lookup is hoisted out of the
    // loop and the Math.min box/unbox round-trip is removed.
    int endTime = iForm.getEndTime(iForm.getExamType().toString());
    Integer next = null;
    for (Integer s : iStartsSlots) {
        if (s <= slot || s > endTime) continue;
        if (next == null || s < next) next = s;
    }
    return next;
}
public void printCell(PrintWriter out, ExamGridModel model, int day, int slot, int idx, int maxIdx, boolean head, boolean vertical, boolean in, boolean eod, boolean eol) {
// Prints one <td> of the grid: either an empty/unavailable cell spanning
// the remaining overlap rows, or the cell of the idx-th exam assigned to
// this model in the given period. The boolean flags only select the CSS
// class (header-like, inner, vertical layout, end of day/line).
ExamPeriod period = getPeriod(day, slot);
ExamGridCell cell = model.getAssignment(period,idx);
String style = "Timetable"+(head || (!in && !vertical) ? "Head":"")+"Cell" + (!head && in && vertical?"In":"") + (vertical?"Vertical":"") + (eol?"EOL":eod?"EOD":"");
if (cell==null) {
// Empty cell: color by availability; a nonexistent period is painted
// as not available too.
String bgColor = model.getBackground(period);
if (bgColor==null && !model.isAvailable(period)) bgColor=sBgColorNotAvailable;
if (period==null) bgColor=sBgColorNotAvailable;
// Only the first free index prints; the rowspan below covers the rest.
if (idx>0 && model.getAssignment(day, slot, idx-1)==null) return;
int rowspan = 1 + maxIdx - idx;
out.println("<td rowspan='"+rowspan+"' class='"+style+"' "+(bgColor==null?"":"style='background-color:"+bgColor+"'")+"> </td>");
} else {
String bgColor = cell.getBackground();
// Even with backgrounds disabled, flag exams in unavailable periods.
if (iForm.getBackground()==sBgNone && !sBgColorNotAvailable.equals(bgColor)) {
if (!model.isAvailable(period))
bgColor = sBgColorNotAvailableButAssigned;
}
StringBuffer onMouseOver = new StringBuffer();
StringBuffer onMouseOut = new StringBuffer();
getMouseOverAndMouseOut(onMouseOver, onMouseOut, cell, bgColor, cell.getOnClick()!=null);
out.println("<td nowrap "+(bgColor==null?"":"style='background-color:"+bgColor+"' ")+
" class='"+style+"' align='center' "+
(cell.getOnClick()==null?"":"onclick=\""+cell.getOnClick()+"\" ")+
(cell.getId()!=null?"id='"+cell.getId()+"' ":"")+
onMouseOver +
onMouseOut +
(cell.getTitle()==null?"":"title=\""+cell.getTitle()+"\" ")+
">");
out.print(cell.getName());
// Room grids show the conflict summary; other grids show the room list.
if (iForm.getResource()!=sResourceRoom)
out.print("<BR>"+cell.getRoomName());
else
out.print(cell.getShortComment()==null?"":"<BR>"+cell.getShortComment());
out.println("</td>");
}
}
public String getModelName(ExamGridModel model) {
    // Model label with its size in parentheses when the size is positive.
    String sizeSuffix = (model.getSize() > 0 ? " (" + model.getSize() + ")" : "");
    return model.getName() + sizeSuffix;
}
public void printRowHeaderCell(PrintWriter out, String name, int maxIdx, boolean vertical, boolean head, boolean in) {
    // Row header cell spanning all overlap rows (1 + maxIdx) of this grid row.
    StringBuilder style = new StringBuilder("Timetable");
    if (head || (!in && !vertical)) style.append("Head");
    style.append("Cell");
    if (!head && in && vertical) style.append("In");
    if (vertical) style.append("Vertical");
    out.println("<th nowrap width='130' height='40' rowspan='" + (1 + maxIdx) + "' class='" + style + "'>");
    out.println(name);
    out.println("</th>");
}
public void printToHtml(PrintWriter out) {
// Renders the complete examination grid as one HTML table, dispatching on
// the form's display mode: in-row (all models in one table), per-day, or
// per-week, each in a horizontal or vertical orientation.
// NOTE(review): boxed Integers are compared with '==' throughout (e.g.
// slot==slots.last()); that is an identity comparison and only holds for
// values in the JVM Integer cache (-128..127). Slot/day values can exceed
// 127, so the eod/eol flags may be wrong — consider equals().
boolean vertical = isVertical();
out.println("<table border='0' cellpadding='2' cellspacing='0'>");
TreeSet<Integer> days = days(), slots = slots(), weeks = weeks(), daysOfWeek = daysOfWeek();
int rowNumber=0;
if (iForm.getDispMode()==sDispModeInRowVertical) {
// In-row vertical: rows are (day, slot) periods, columns are models.
int globalMaxIdx = 0;
for (Integer day:days)
for (Integer slot:slots) {
globalMaxIdx = Math.max(globalMaxIdx,getMaxIdx(day, slot));
}
int week = -1;
for (Integer day:days) {
boolean head = false;
// Print a fresh header row whenever a new week starts.
if (week!=getWeek(day)) {
week = getWeek(day);
head = true;
printHeader(out, getWeekName(week));
}
for (Integer slot:slots) {
if (getPeriod(day, slot)==null) continue;
out.println("<tr valign='top'>");
int maxIdx = getMaxIdx(day, slot);
printRowHeaderCell(out, getDayName(day)+"<br>"+getSlotName(slot), maxIdx, vertical, head && slot==slots.first(), globalMaxIdx==0);
for (int idx=0;idx<=maxIdx;idx++) {
if (idx>0) out.println("</tr><tr valign='top'>");
for (ExamGridModel model : models()) {
printCell(out,
model,
day,
slot,
idx, maxIdx,
head && slot==slots.first() && idx==0, vertical, globalMaxIdx==0 || idx>0,
false, model.equals(models().lastElement()));
}
}
out.println("</tr>");
rowNumber++;
}
}
} else {
// All remaining modes print one section per model.
int tmx = 0;
for (ExamGridModel m : models())
tmx = Math.max(tmx,getMaxIdx(m, days.first(),days.last(),slots.first(),slots.last()));
for (ExamGridModel model : models()) {
if (iForm.getDispMode()==sDispModeInRowHorizontal) {
// In-row horizontal: one row per model; header repeats every 10 rows.
if (rowNumber%10==0) printHeader(out, null);
int maxIdx = getMaxIdx(model, days.first(),days.last(),slots.first(),slots.last());
out.println("<tr valign='top'>");
printRowHeaderCell(out, model.getName()+(model.getSize()>0?" ("+model.getSize()+")":""), maxIdx, vertical, (rowNumber%10==0), tmx==0);
for (int idx=0;idx<=maxIdx;idx++) {
if (idx>0) out.println("</tr><tr valign='top'>");
for (Integer day:days) {
for (Integer slot:slots) {
boolean eod = (slot==slots.last());
boolean eol = (eod && day==days.last());
printCell(out,
model,
day,
slot,
idx, maxIdx,
rowNumber%10==0 && idx==0, vertical, tmx==0 || idx>0,
eod, eol);
}
}
}
out.println("</tr>");
} else if (iForm.getDispMode()==sDispModePerDayVertical) {
// Per-day vertical: rows are slots, columns are days.
printHeader(out, getModelName(model));
int gmx = getMaxIdx(model, days.first(),days.last(),slots.first(),slots.last());
for (Integer slot:slots) {
out.println("<tr valign='top'>");
int maxIdx = getMaxIdx(model, days.first(), days.last(), slot, slot);
printRowHeaderCell(out, getSlotName(slot), maxIdx, vertical, slot==slots.first(), gmx==0);
for (int idx=0;idx<=maxIdx;idx++) {
if (idx>0) out.println("</tr><tr valign='top'>");
for (Integer day:days) {
printCell(out,
model,
day,
slot,
idx, maxIdx,
slot==slots.first() && idx==0, vertical, gmx==0 || idx>0,
false, (day==days.last()));
}
}
out.println("</tr>");
}
} else if (iForm.getDispMode()==sDispModePerDayHorizontal) {
// Per-day horizontal: rows are days, columns are slots.
printHeader(out, getModelName(model));
int gmx = getMaxIdx(model, days.first(),days.last(),slots.first(),slots.last());
for (Integer day:days) {
out.println("<tr valign='top'>");
int maxIdx = getMaxIdx(model, day, day,slots.first(),slots.last());
printRowHeaderCell(out, getDayName(day), maxIdx, vertical, day==days.first(), gmx==0);
for (int idx=0;idx<=maxIdx;idx++) {
if (idx>0) out.println("</tr><tr valign='top'>");
for (Integer slot:slots) {
printCell(out,
model,
day,
slot,
idx, maxIdx,
day==days.first() && idx==0, vertical, gmx==0 || idx>0,
false, (slot==slots.last()));
}
}
out.println("</tr>");
}
} else if (iForm.getDispMode()==sDispModePerWeekHorizontal) {
// Per-week horizontal: rows are days of week, columns are (week, slot).
printHeader(out, getModelName(model));
int gmx = getMaxIdx(model, days.first(), days.last(), slots.first(),slots.last());
for (Integer dow:daysOfWeek()) {
out.println("<tr valign='top'>");
int maxIdx = getMaxIdx(model, dow,slots.first(),slots.last());
printRowHeaderCell(out, getDayOfWeekName(dow), maxIdx, vertical, dow==daysOfWeek.first(), gmx==0);
for (int idx=0;idx<=maxIdx;idx++) {
if (idx>0) out.println("</tr><tr valign='top'>");
for (Integer week : weeks) {
for (Integer slot:slots) {
printCell(out,
model,
getDay(week,dow),
slot,
idx, maxIdx,
dow==daysOfWeek.first() && idx==0, vertical, gmx==0 || idx>0,
(slot==slots.last()), (slot==slots.last() && week==weeks.last()));
}
}
}
out.println("</tr>");
}
} else if (iForm.getDispMode()==sDispModePerWeekVertical) {
// Per-week vertical: rows are (week, slot), columns are days of week.
printHeader(out, getModelName(model));
int gmx = getMaxIdx(model, days.first(), days.last(), slots.first(),slots.last());
for (Integer week : weeks) {
for (Integer slot:slots) {
out.println("<tr valign='top'>");
int maxIdx = getMaxIdx(model, week,slot);
printRowHeaderCell(out, getWeekName(week) +"<br>"+ getSlotName(slot), maxIdx, vertical, slot==slots.first(), gmx==0);
for (int idx=0;idx<=maxIdx;idx++) {
if (idx>0) out.println("</tr><tr valign='top'>");
for (Integer dow : daysOfWeek) {
printCell(out,
model,
getDay(week,dow),
slot,
idx,
maxIdx,
slot==slots.first() && idx==0, vertical, gmx==0 || idx>0,
false, (dow==daysOfWeek.last()));
}
}
out.println("</tr>");
}
}
}
rowNumber++;
}
}
out.println("</table>");
}
private boolean match(String name) {
    // Case-insensitive name filter. The filter is a ';'-separated list of
    // alternatives; an alternative matches when all of its space/comma
    // separated tokens occur somewhere in the name. An empty filter matches.
    String filter = iForm.getFilter();
    if (filter == null || filter.trim().length() == 0) return true;
    String upperName = name.toUpperCase();
    for (StringTokenizer alternatives = new StringTokenizer(filter.toUpperCase(), ";"); alternatives.hasMoreTokens();) {
        boolean allTokensFound = true;
        for (StringTokenizer tokens = new StringTokenizer(alternatives.nextToken(), " ,"); allTokensFound && tokens.hasMoreTokens();) {
            String token = tokens.nextToken().trim();
            if (token.length() == 0) continue;
            if (upperName.indexOf(token) < 0) allTokensFound = false;
        }
        if (allTokensFound) return true;
    }
    return false;
}
public void printLegend(JspWriter jsp) {
    // JSP adapter: delegate to the PrintWriter variant and flush the wrapper.
    PrintWriter writer = new PrintWriter(jsp);
    printLegend(writer);
    writer.flush();
}
public void printLegend(PrintWriter out) {
// Prints the color legend (as table rows) matching the currently selected
// background mode, followed by the legend for free/unavailable periods.
if (iForm.getBackground()!=sBgNone) {
out.println("<tr><td colspan='2'>Assigned examinations:</td></tr>");
}
// Preference-based backgrounds: one swatch per preference level.
if (iForm.getBackground()==sBgPeriodPref) {
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sRequired)+";border:1px solid rgb(0,0,0)'> </td><td>Required period</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sStronglyPreferred)+";border:1px solid rgb(0,0,0)'> </td><td>Strongly preferred period</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sPreferred)+";border:1px solid rgb(0,0,0)'> </td><td>Preferred period</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sNeutral)+";border:1px solid rgb(0,0,0)'> </td><td>No period preference</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sDiscouraged)+";border:1px solid rgb(0,0,0)'> </td><td>Discouraged period</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sStronglyDiscouraged)+";border:1px solid rgb(0,0,0)'> </td><td>Strongly discouraged period</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sProhibited)+";border:1px solid rgb(0,0,0)'> </td><td>Prohibited period</td><td></td></tr>");
} else if (iForm.getBackground()==sBgRoomPref) {
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sRequired)+";border:1px solid rgb(0,0,0)'> </td><td>Required room</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sStronglyPreferred)+";border:1px solid rgb(0,0,0)'> </td><td>Strongly preferred room</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sPreferred)+";border:1px solid rgb(0,0,0)'> </td><td>Preferred room</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sNeutral)+";border:1px solid rgb(0,0,0)'> </td><td>No room preference</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sDiscouraged)+";border:1px solid rgb(0,0,0)'> </td><td>Discouraged room</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sStronglyDiscouraged)+";border:1px solid rgb(0,0,0)'> </td><td>Strongly discouraged room</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sProhibited)+";border:1px solid rgb(0,0,0)'> </td><td>Prohibited room</td><td></td></tr>");
} else if (iForm.getBackground()==sBgInstructorConfs) {
// Worst-conflict coloring: one swatch per severity bucket.
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sNeutral)+";border:1px solid rgb(0,0,0)'> </td><td>No instructor conflict</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sDiscouraged)+";border:1px solid rgb(0,0,0)'> </td><td>One or more instructor back-to-back conflicts</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sStronglyDiscouraged)+";border:1px solid rgb(0,0,0)'> </td><td>One or more instructor three or more exams a day conflicts</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sProhibited)+";border:1px solid rgb(0,0,0)'> </td><td>One or more instructor direct conflicts</td><td></td></tr>");
} else if (iForm.getBackground()==sBgStudentConfs) {
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sNeutral)+";border:1px solid rgb(0,0,0)'> </td><td>No student conflict</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sDiscouraged)+";border:1px solid rgb(0,0,0)'> </td><td>One or more student back-to-back conflicts</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sStronglyDiscouraged)+";border:1px solid rgb(0,0,0)'> </td><td>One or more student three or more exams a day student conflicts</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sProhibited)+";border:1px solid rgb(0,0,0)'> </td><td>One or more student direct conflicts</td><td></td></tr>");
} else if (iForm.getBackground()==sBgDirectInstructorConfs) {
// Heat-scale backgrounds: direct conflicts saturate at 6, the rest at 15.
for (int nrConflicts=0;nrConflicts<=6;nrConflicts++) {
String color = lessConflicts2color(nrConflicts);
out.println("<tr><td width=40 style='background-color:"+color+";border:1px solid rgb(0,0,0)'> </td><td>"+nrConflicts+" "+(nrConflicts==6?"or more ":"")+"instructor direct conflicts</td><td></td></tr>");
}
} else if (iForm.getBackground()==sBgMoreThanTwoADayInstructorConfs) {
for (int nrConflicts=0;nrConflicts<=15;nrConflicts++) {
String color = conflicts2color(nrConflicts);
out.println("<tr><td width=40 style='background-color:"+color+";border:1px solid rgb(0,0,0)'> </td><td>"+nrConflicts+" "+(nrConflicts==15?"or more ":"")+"instructor more than two exams a day conflicts</td><td></td></tr>");
}
} else if (iForm.getBackground()==sBgBackToBackInstructorConfs) {
for (int nrConflicts=0;nrConflicts<=15;nrConflicts++) {
String color = conflicts2color(nrConflicts);
out.println("<tr><td width=40 style='background-color:"+color+";border:1px solid rgb(0,0,0)'> </td><td>"+nrConflicts+" "+(nrConflicts==15?"or more ":"")+"instructor back to back conflicts</td><td></td></tr>");
}
} else if (iForm.getBackground()==sBgDirectStudentConfs) {
for (int nrConflicts=0;nrConflicts<=6;nrConflicts++) {
String color = lessConflicts2color(nrConflicts);
out.println("<tr><td width=40 style='background-color:"+color+";border:1px solid rgb(0,0,0)'> </td><td>"+nrConflicts+" "+(nrConflicts==6?"or more ":"")+"student direct conflicts</td><td></td></tr>");
}
} else if (iForm.getBackground()==sBgMoreThanTwoADayStudentConfs) {
for (int nrConflicts=0;nrConflicts<=15;nrConflicts++) {
String color = conflicts2color(nrConflicts);
out.println("<tr><td width=40 style='background-color:"+color+";border:1px solid rgb(0,0,0)'> </td><td>"+nrConflicts+" "+(nrConflicts==15?"or more ":"")+"student more than two exams a day conflicts</td><td></td></tr>");
}
} else if (iForm.getBackground()==sBgBackToBackStudentConfs) {
for (int nrConflicts=0;nrConflicts<=15;nrConflicts++) {
String color = conflicts2color(nrConflicts);
out.println("<tr><td width=40 style='background-color:"+color+";border:1px solid rgb(0,0,0)'> </td><td>"+nrConflicts+" "+(nrConflicts==15?"or more ":"")+"student back to back conflicts</td><td></td></tr>");
}
} else if (iForm.getBackground()==sBgDistPref) {
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sNeutral)+";border:1px solid rgb(0,0,0)'> </td><td>No violated constraint<i>(distance=0)</i></td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sDiscouraged)+";border:1px solid rgb(0,0,0)'> </td><td>Discouraged/preferred constraint violated</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sStronglyDiscouraged)+";border:1px solid rgb(0,0,0)'> </td><td>Strongly discouraged/preferred constraint violated</i></td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sProhibited)+";border:1px solid rgb(0,0,0)'> </td><td>Required/prohibited constraint violated</i></td><td></td></tr>");
}
// Legend for free times / unassigned periods.
out.println("<tr><td colspan='2'>Free times:</td></tr>");
out.println("<tr><td width=40 style='background-color:"+sBgColorNotAvailable+";border:1px solid rgb(0,0,0)'> </td><td>Period not available</td><td></td></tr>");
if (iForm.getBgPreferences() && iForm.getBackground()==sBgPeriodPref) {
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sStronglyPreferred)+";border:1px solid rgb(0,0,0)'> </td><td>Strongly preferred period</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sPreferred)+";border:1px solid rgb(0,0,0)'> </td><td>Preferred period</td><td></td></tr>");
}
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sNeutral)+";border:1px solid rgb(0,0,0)'> </td><td>No period preference</td><td></td></tr>");
if (iForm.getBgPreferences() && iForm.getBackground()==sBgPeriodPref) {
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sDiscouraged)+";border:1px solid rgb(0,0,0)'> </td><td>Discouraged period</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sStronglyDiscouraged)+";border:1px solid rgb(0,0,0)'> </td><td>Strongly discouraged period</td><td></td></tr>");
out.println("<tr><td width=40 style='background-color:"+pref2color(PreferenceLevel.sProhibited)+";border:1px solid rgb(0,0,0)'> </td><td>Prohibited period</td><td></td></tr>");
}
}
public static String pref2color(String pref) {
    // Null-safe prolog preference code -> background color translation.
    return (pref == null ? null : PreferenceLevel.prolog2bgColor(pref));
}
public static String pref2color(int pref) {
    // Integer preference -> prolog code -> background color.
    String prolog = PreferenceLevel.int2prolog(pref);
    return PreferenceLevel.prolog2bgColor(prolog);
}
public static String conflicts2color(int nrConflicts) {
    // Heat color for a conflict count: light gray at 0, ramping through
    // yellow and orange to dark red, saturating at 15 conflicts.
    int n = Math.min(nrConflicts, 15);
    if (n == 0) return "rgb(240,240,240)";
    if (n < 5)
        return "rgb(240," + (240 - (30 * n / 5)) + "," + (240 - (180 * n / 5)) + ")";
    if (n < 10)
        return "rgb(240," + (210 - (90 * (n - 5) / 5)) + ",60)";
    return "rgb(" + (240 - (20 * (n - 10) / 5)) + "," + (120 - (70 * (n - 10) / 5)) + "," + (60 - (20 * (n - 10) / 5)) + ")";
}
public static String lessConflicts2color(int nrConflicts) {
    // Like conflicts2color, but on a compressed scale for "serious" conflict
    // counts that saturates at 6.
    int n = Math.min(nrConflicts, 6);
    if (n == 0) return "rgb(240,240,240)";
    if (n < 2)
        return "rgb(240," + (240 - (30 * n / 2)) + "," + (240 - (180 * n / 2)) + ")";
    if (n < 4)
        return "rgb(240," + (210 - (90 * (n - 2) / 2)) + ",60)";
    return "rgb(" + (240 - (20 * (n - 4) / 2)) + "," + (120 - (70 * (n - 4) / 2)) + "," + (60 - (20 * (n - 4) / 2)) + ")";
}
public class ExamGridModel implements Comparable<ExamGridModel>{
    private Long iId = null;
    private String iName = null;
    private int iSize = 0;
    private int iNrAssignments = 0;
    // Assigned exams keyed by the period they occupy.
    private Hashtable<ExamPeriod, Vector<ExamAssignmentInfo>> iAssignments = new Hashtable<ExamPeriod, Vector<ExamAssignmentInfo>>();
    ExamGridModel(Long id, String name, int size, Collection<ExamAssignmentInfo> assignments) {
        iId = id;
        iName = name;
        iSize = size;
        index(assignments);
    }
    // Indexes every assignment under its period; shared by the constructor
    // and addAssignments (kept private so the constructor never calls an
    // overridable method).
    private void index(Collection<ExamAssignmentInfo> assignments) {
        for (ExamAssignmentInfo exam : assignments) {
            Vector<ExamAssignmentInfo> a = iAssignments.get(exam.getPeriod());
            if (a == null) {
                a = new Vector<ExamAssignmentInfo>();
                iAssignments.put(exam.getPeriod(), a);
            }
            a.add(exam);
            iNrAssignments++;
        }
    }
    public void addAssignments(Collection<ExamAssignmentInfo> assignments) {
        // BUG FIX: this method previously looked the vector up with
        // iAssignments.get(exam.getPeriodId()), but the table is keyed by
        // ExamPeriod — the lookup always missed, so each call replaced a
        // period's vector instead of appending to it. It now keys by
        // exam.getPeriod(), matching the constructor.
        index(assignments);
    }
    public Long getId() {
        return iId;
    }
    // Declared size (e.g. room capacity); a negative size means "unknown"
    // and falls back to the number of assignments.
    public int getSize() {
        return (iSize < 0 ? iNrAssignments : iSize);
    }
    public String getName() {
        return iName;
    }
    // Never returns null: an empty vector stands for "no exams in period".
    public Vector<ExamAssignmentInfo> getAssignments(ExamPeriod period) {
        if (period == null) return new Vector<ExamAssignmentInfo>();
        Vector<ExamAssignmentInfo> ret = iAssignments.get(period);
        return (ret == null ? new Vector<ExamAssignmentInfo>() : ret);
    }
    public ExamGridCell getAssignment(int day, int slot, int idx) {
        return getAssignment(getPeriod(day, slot), idx);
    }
    // idx selects among overlapping exams within one period; null when the
    // period is unknown or idx is out of range.
    public ExamGridCell getAssignment(ExamPeriod period, int idx) {
        if (period == null) return null;
        Vector<ExamAssignmentInfo> assignments = iAssignments.get(period);
        if (assignments == null || assignments.size() <= idx) return null;
        ExamAssignmentInfo info = assignments.elementAt(idx);
        return (info == null ? null : new ExamGridCell(info));
    }
    // A period is available unless it is missing or prohibited.
    public boolean isAvailable(ExamPeriod period) {
        return period != null && !PreferenceLevel.sProhibited.equals(period.getPrefLevel().getPrefProlog());
    }
    // Background color of a free cell: the period preference color when
    // preference backgrounds are enabled, otherwise null (transparent).
    public String getBackground(ExamPeriod period) {
        if (period == null) return null;
        if (iForm.getBgPreferences() && iForm.getBackground() == sBgPeriodPref) {
            if (period.getPrefLevel() != null && !PreferenceLevel.sNeutral.equals(period.getPrefLevel().getPrefProlog()))
                return pref2color(period.getPrefLevel().getPrefProlog());
        }
        return null;
    }
    // Ordering follows the form's sort selection; falls back to id order.
    public int compareTo(ExamGridModel model) {
        switch (iForm.getOrder()) {
            case sOrderByNameAsc :
                return getName().compareTo(model.getName());
            case sOrderByNameDesc :
                return -getName().compareTo(model.getName());
            case sOrderBySizeAsc :
                return Double.compare(getSize(), model.getSize());
            case sOrderBySizeDesc :
                return -Double.compare(getSize(), model.getSize());
        }
        return getId().compareTo(model.getId());
    }
    // One rendered grid cell backed by an exam assignment.
    public class ExamGridCell {
        private ExamAssignmentInfo iInfo = null;
        public ExamGridCell() {}
        public ExamGridCell(ExamAssignmentInfo info) {
            iInfo = info;
        }
        private ExamAssignmentInfo getInfo() {
            return iInfo;
        }
        // Background color selected by the form's background mode: preference
        // colors or conflict-count heat colors.
        public String getBackground() {
            switch (iForm.getBackground()) {
                case sBgPeriodPref :
                    return pref2color(getInfo().getPeriodPref());
                case sBgRoomPref :
                    // On the room grid, color by this room's own preference.
                    if (iForm.getResource() == sResourceRoom)
                        return pref2color(getInfo().getRoomPref(ExamGridModel.this.getId()));
                    else
                        return pref2color(getInfo().getRoomPref());
                case sBgDistPref :
                    return pref2color(getInfo().getDistributionPref());
                case sBgStudentConfs :
                    // Worst student conflict wins: direct > two-a-day > back-to-back.
                    if (getInfo().getNrDirectConflicts() > 0)
                        return pref2color(PreferenceLevel.sProhibited);
                    if (getInfo().getNrMoreThanTwoConflicts() > 0)
                        return pref2color(PreferenceLevel.sStronglyDiscouraged);
                    if (getInfo().getNrBackToBackConflicts() > 0)
                        return pref2color(PreferenceLevel.sDiscouraged);
                    return pref2color(PreferenceLevel.sNeutral);
                case sBgDirectStudentConfs :
                    return lessConflicts2color(getInfo().getNrDirectConflicts());
                case sBgMoreThanTwoADayStudentConfs :
                    return conflicts2color(getInfo().getNrMoreThanTwoConflicts());
                case sBgBackToBackStudentConfs :
                    return conflicts2color(getInfo().getNrBackToBackConflicts());
                case sBgInstructorConfs :
                    // Worst instructor conflict wins, same ordering as students.
                    if (getInfo().getNrInstructorDirectConflicts() > 0)
                        return pref2color(PreferenceLevel.sProhibited);
                    if (getInfo().getNrInstructorMoreThanTwoConflicts() > 0)
                        return pref2color(PreferenceLevel.sStronglyDiscouraged);
                    if (getInfo().getNrInstructorBackToBackConflicts() > 0)
                        return pref2color(PreferenceLevel.sDiscouraged);
                    return pref2color(PreferenceLevel.sNeutral);
                case sBgDirectInstructorConfs :
                    return lessConflicts2color(getInfo().getNrInstructorDirectConflicts());
                case sBgMoreThanTwoADayInstructorConfs :
                    return conflicts2color(getInfo().getNrInstructorMoreThanTwoConflicts());
                case sBgBackToBackInstructorConfs :
                    return conflicts2color(getInfo().getNrInstructorBackToBackConflicts());
            }
            return null;
        }
        // Opens the exam detail dialog when the cell is clicked.
        public String getOnClick() {
            return "showGwtDialog('Examination Assignment', 'examInfo.do?examId="+getInfo().getExamId()+"','900','90%');";
        }
        // DOM id of the cell; room/instructor grids suffix the model id so
        // the same exam can appear in several cells.
        public String getId() {
            String id = getInfo().getExamId().toString();
            if (iForm.getResource() == sResourceRoom || iForm.getResource() == sResourceInstructor)
                id += "." + ExamGridModel.this.getId();
            return id;
        }
        public String getTitle() {
            return getInfo().toString();
        }
        public String getName() {
            return (iForm.getShowSections() ? getInfo().getSectionName("<br>") : getInfo().getExamName());
        }
        public String getRoomName() {
            return getInfo().getRoomsName(",");
        }
        // Conflict summary "direct, >2/day, back-to-back"; non-zero counts
        // are colored by severity, zeros are gray.
        public String getShortComment() {
            int dc = getInfo().getNrDirectConflicts();
            int m2d = getInfo().getNrMoreThanTwoConflicts();
            int btb = getInfo().getNrBackToBackConflicts();
            return
                "<font color='"+(dc>0?PreferenceLevel.prolog2color("P"):"gray")+"'>"+dc+"</font>, "+
                "<font color='"+(m2d>0?PreferenceLevel.prolog2color("2"):"gray")+"'>"+m2d+"</font>, "+
                "<font color='"+(btb>0?PreferenceLevel.prolog2color("1"):"gray")+"'>"+btb+"</font>";
        }
        public String getShortCommentNoColors() {
            int dc = getInfo().getNrDirectConflicts();
            int m2d = getInfo().getNrMoreThanTwoConflicts();
            int btb = getInfo().getNrBackToBackConflicts();
            return dc + ", " + m2d + ", " + btb;
        }
    }
    // A cell backed by a room-availability time block (event occupying the
    // room) rather than an exam assignment.
    public class BlockGridCell extends ExamGridCell {
        private TimeBlock iBlock = null;
        public BlockGridCell(TimeBlock block) {
            iBlock = block;
        }
        public String getBackground() {
            return sBgColorNotAvailable;
        }
        public String getOnClick() {
            return null;
        }
        public String getId() {
            return null;
        }
        public String getTitle() {
            return iBlock.getEventName() + " (" + iBlock.getEventType() + ")";
        }
        public String getName() {
            return iBlock.getEventName();
        }
        public String getRoomName() {
            return iBlock.getEventType();
        }
        public String getShortComment() {
            return "";
        }
        public String getShortCommentNoColors() {
            return "";
        }
    }
}
public class RoomExamGridModel extends ExamGridModel {
    // Examination period preferences of this room; presumably keyed by
    // ExamPeriod with PreferenceLevel values (see getPreference) -- TODO confirm.
    private Hashtable iExamPrefs = new Hashtable();
    // Time blocks during which the room is taken by other events; null when no
    // room availability service is configured.
    private Collection<TimeBlock> iUnavailabilities = null;
    RoomExamGridModel(Location location, Collection<ExamAssignmentInfo> assignments, Date[] bounds) {
        super(location.getUniqueId(), location.getLabel(), location.getCapacity(), assignments);
        iExamPrefs = location.getExamPreferences(iForm.getExamType());
        if (RoomAvailability.getInstance()!=null) {
            // Fetch external unavailabilities for the displayed date range,
            // using the final/midterm availability type matching the exam type.
            iUnavailabilities = RoomAvailability.getInstance().getRoomAvailability(
                    location.getUniqueId(),
                    bounds[0], bounds[1],
                    (ExamTypeDAO.getInstance().get(iForm.getExamType()).getType()==ExamType.sExamTypeFinal?RoomAvailabilityInterface.sFinalExamType:RoomAvailabilityInterface.sMidtermExamType));
        }
    }
    /** First unavailability block overlapping the given period, or null. */
    public TimeBlock getBlock(ExamPeriod period) {
        if (period==null || iUnavailabilities==null || iUnavailabilities.isEmpty()) return null;
        for (TimeBlock block : iUnavailabilities)
            if (period.overlap(block)) return block;
        return null;
    }
    /** Room's examination preference for the period, or null when none is set. */
    public PreferenceLevel getPreference(ExamPeriod period) {
        return (iExamPrefs==null?null:(PreferenceLevel)iExamPrefs.get(period));
    }
    /**
     * A period is available when the base model allows it, no event blocks the
     * room, and the room preference is not prohibited.
     */
    public boolean isAvailable(ExamPeriod period) {
        if (!super.isAvailable(period)) return false;
        if (getBlock(period)!=null) return false;
        PreferenceLevel pref = getPreference(period);
        return (pref==null || !PreferenceLevel.sProhibited.equals(pref.getPrefProlog()));
    }
    /**
     * Background color for period-preference mode: the room-level preference
     * wins over the period's own preference; neutral preferences yield null.
     */
    public String getBackground(ExamPeriod period) {
        if (period==null) return null;
        if (iForm.getBgPreferences() && iForm.getBackground()==sBgPeriodPref) {
            PreferenceLevel pref = getPreference(period);
            if (pref!=null && !PreferenceLevel.sNeutral.equals(pref.getPrefProlog()))
                return pref2color(pref.getPrefProlog());
            if (period.getPrefLevel()!=null && !PreferenceLevel.sNeutral.equals(period.getPrefLevel().getPrefProlog()))
                return pref2color(period.getPrefLevel().getPrefProlog());
        }
        return null;
    }
    /**
     * Exam assignment cell at the given index; the unavailability block (if
     * any) is exposed as one extra cell appended after all exam cells.
     */
    public ExamGridCell getAssignment(ExamPeriod period, int idx) {
        ExamGridCell cell = super.getAssignment(period, idx);
        if (cell!=null) return cell;
        if (idx==getAssignments(period).size()) {
            TimeBlock block = getBlock(period);
            if (block!=null) return new BlockGridCell(block);
        }
        return null;
    }
}
public Vector<ExamGridModel> models() {
    // All grid models (rows) displayed by this table.
    return iModels;
}
public ExamGridForm getForm() {
    // Form backing this grid (user-selected filters and display options).
    return iForm;
}
}
| |
package aima.search.nqueens;
import java.util.ArrayList;
import aima.basic.XYLocation;
public class NQueensBoard {
    /**
     * Board representation for the n-queens problem. X increases left to right
     * and Y increases top to bottom, both zero based. A cell holds 1 when a
     * queen occupies it and 0 otherwise.
     */
    int[][] board;

    int size;

    /**
     * Creates an empty n x n board.
     *
     * @param n the board dimension (number of rows and columns)
     */
    public NQueensBoard(int n) {
        size = n;
        board = new int[size][size];
        // new int[][] is already zero-filled; clear() just makes the intent explicit
        clear();
    }

    /** Places a queen at the location unless one is already there. */
    public void addQueenAt(XYLocation l) {
        if (!(queenExistsAt(l)))
            board[l.getXCoOrdinate()][l.getYCoOrdinate()] = 1;
    }

    /** Removes the queen at the location; a no-op when the square is empty. */
    public void removeQueenFrom(XYLocation l) {
        if (board[l.getXCoOrdinate()][l.getYCoOrdinate()] == 1) {
            board[l.getXCoOrdinate()][l.getYCoOrdinate()] = 0;
        }
    }

    private boolean queenExistsAt(int x, int y) {
        return (board[x][y] == 1);
    }

    public boolean queenExistsAt(XYLocation l) {
        return (queenExistsAt(l.getXCoOrdinate(), l.getYCoOrdinate()));
    }

    /** Moves a queen; only acts when the source is occupied and the target free. */
    public void moveQueen(XYLocation from, XYLocation to) {
        if ((queenExistsAt(from)) && (!(queenExistsAt(to)))) {
            removeQueenFrom(from);
            addQueenAt(to);
        }
    }

    /** Removes every queen from the board. */
    public void clear() {
        for (int i = 0; i < size; i++) {
            for (int j = 0; j < size; j++) {
                board[i][j] = 0;
            }
        }
    }

    /**
     * Clears the board and places a queen at every location in the list.
     * Signature kept raw for backward compatibility with existing callers.
     */
    public void setBoard(ArrayList al) {
        clear();
        for (int i = 0; i < al.size(); i++) {
            addQueenAt((XYLocation) al.get(i));
        }
    }

    public int getNumberOfQueensOnBoard() {
        int count = 0;
        for (int i = 0; i < size; i++) {
            for (int j = 0; j < size; j++) {
                if (board[i][j] == 1) {
                    count++;
                }
            }
        }
        return count;
    }

    /** Queen locations in row-major (x, then y) order. */
    public ArrayList getQueenPositions() {
        ArrayList result = new ArrayList();
        for (int i = 0; i < size; i++) {
            for (int j = 0; j < size; j++) {
                if (queenExistsAt(i, j)) {
                    result.add(new XYLocation(i, j));
                }
            }
        }
        return result;
    }

    private boolean isSquareHorizontallyAttacked(int x, int y) {
        return numberOfHorizontalAttacksOn(x, y) > 0;
    }

    private boolean isSquareVerticallyAttacked(int x, int y) {
        return numberOfVerticalAttacksOn(x, y) > 0;
    }

    private boolean isSquareDiagonallyAttacked(int x, int y) {
        return numberOfDiagonalAttacksOn(x, y) > 0;
    }

    /** True when any queen (other than one on the square itself) attacks the square. */
    public boolean isSquareUnderAttack(XYLocation l) {
        int x = l.getXCoOrdinate();
        int y = l.getYCoOrdinate();
        return (isSquareHorizontallyAttacked(x, y)
                || isSquareVerticallyAttacked(x, y) || isSquareDiagonallyAttacked(
                x, y));
    }

    public int getSize() {
        return size;
    }

    public void print() {
        System.out.println(getBoardPic());
    }

    /** Spaced ASCII picture of the board (" Q " for queens, " - " for empty). */
    public String getBoardPic() {
        StringBuilder buffer = new StringBuilder();
        for (int i = 0; (i < size); i++) {
            for (int j = 0; (j < size); j++) {
                if (queenExistsAt(i, j)) {
                    buffer.append(" Q ");
                } else {
                    buffer.append(" - ");
                }
            }
            buffer.append("\n");
        }
        return buffer.toString();
    }

    /** Total attacks on the square along row, column and both diagonals. */
    public int getNumberOfAttacksOn(XYLocation l) {
        int x = l.getXCoOrdinate();
        int y = l.getYCoOrdinate();
        return numberOfHorizontalAttacksOn(x, y)
                + numberOfVerticalAttacksOn(x, y)
                + numberOfDiagonalAttacksOn(x, y);
    }

    private int numberOfHorizontalAttacksOn(int x, int y) {
        int retVal = 0;
        for (int i = 0; i < size; i++) {
            // a queen on (x, y) itself does not attack its own square
            if ((queenExistsAt(i, y))) {
                if (i != x)
                    retVal++;
            }
        }
        return retVal;
    }

    private int numberOfVerticalAttacksOn(int x, int y) {
        int retVal = 0;
        for (int j = 0; j < size; j++) {
            if ((queenExistsAt(x, j))) {
                if (j != y)
                    retVal++;
            }
        }
        return retVal;
    }

    private int numberOfDiagonalAttacksOn(int x, int y) {
        int retVal = 0;
        int i;
        int j;
        // forward up diagonal
        for (i = (x + 1), j = (y - 1); (i < size && (j > -1)); i++, j--) {
            if (queenExistsAt(i, j)) {
                retVal++;
            }
        }
        // forward down diagonal
        for (i = (x + 1), j = (y + 1); ((i < size) && (j < size)); i++, j++) {
            if (queenExistsAt(i, j)) {
                retVal++;
            }
        }
        // backward up diagonal
        for (i = (x - 1), j = (y - 1); ((i > -1) && (j > -1)); i--, j--) {
            if (queenExistsAt(i, j)) {
                retVal++;
            }
        }
        // backward down diagonal
        for (i = (x - 1), j = (y + 1); ((i > -1) && (j < size)); i--, j++) {
            if (queenExistsAt(i, j)) {
                retVal++;
            }
        }
        return retVal;
    }

    /**
     * Hash derived from the queen placements, consistent with equals():
     * boards with identical queen positions produce identical hashes.
     * (Previously this returned a constant 0.)
     */
    public int hashCode() {
        int result = 17;
        for (int i = 0; i < size; i++) {
            for (int j = 0; j < size; j++) {
                if (queenExistsAt(i, j)) {
                    result = 31 * result + i;
                    result = 31 * result + j;
                }
            }
        }
        return result;
    }

    /**
     * Two boards are equal when they hold queens on exactly the same squares.
     * Fixes the previous implementation, which threw on null or non-board
     * arguments and compared asymmetrically (a board with extra queens still
     * compared equal because only a one-way subset check was made).
     */
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof NQueensBoard)) {
            return false;
        }
        NQueensBoard aBoard = (NQueensBoard) o;
        // Equal queen counts plus the subset check below imply set equality.
        if (aBoard.getNumberOfQueensOnBoard() != getNumberOfQueensOnBoard()) {
            return false;
        }
        ArrayList locs = getQueenPositions();
        for (int i = 0; i < locs.size(); i++) {
            if (!(aBoard.queenExistsAt((XYLocation) locs.get(i)))) {
                return false;
            }
        }
        return true;
    }

    /** Compact ASCII picture of the board ('Q' for queens, '-' for empty). */
    public String toString() {
        StringBuilder buf = new StringBuilder();
        for (int i = 0; i < size; i++) { // rows
            for (int j = 0; j < size; j++) { // columns
                if (queenExistsAt(i, j)) {
                    buf.append('Q');
                } else {
                    buf.append('-');
                }
            }
            buf.append("\n");
        }
        return buf.toString();
    }
}
| |
/*
* Copyright 2015, The Querydsl Team (http://www.querydsl.com/team)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.querydsl.core;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import com.google.common.collect.ImmutableList;
import com.querydsl.core.types.CollectionExpression;
import com.querydsl.core.types.Constant;
import com.querydsl.core.types.Expression;
import com.querydsl.core.types.MapExpression;
import com.querydsl.core.types.dsl.*;
/**
* @author tiwe
*
*/
/**
 * Test-support factory producing projection expressions for the various
 * expression kinds, filtered by the querydsl {@link Module} and {@link Target}
 * under test.
 *
 * @author tiwe
 */
public class ProjectionsFactory {

    private final Module module;

    private final Target target;

    public ProjectionsFactory(Module module, Target target) {
        this.module = module;
        this.target = target;
    }

    /** Projections for array expressions; RDFBEAN does not support size(). */
    public <A> Collection<Expression<?>> array(ArrayExpression<A[], A> expr, ArrayExpression<A[], A> other, A knownElement) {
        HashSet<Expression<?>> rv = new HashSet<Expression<?>>();
        if (!module.equals(Module.RDFBEAN)) {
            rv.add(expr.size());
        }
        return ImmutableList.copyOf(rv);
    }

    /** Projections for collection expressions; RDFBEAN does not support size(). */
    public <A> Collection<Expression<?>> collection(CollectionExpressionBase<?,A> expr, CollectionExpression<?,A> other, A knownElement) {
        HashSet<Expression<?>> rv = new HashSet<Expression<?>>();
        if (!module.equals(Module.RDFBEAN)) {
            rv.add(expr.size());
        }
        return ImmutableList.copyOf(rv);
    }

    /** Date part extractions, plus min/max aggregates where the module supports them. */
    @SuppressWarnings("unchecked")
    public <A extends Comparable> Collection<Expression<?>> date(DateExpression<A> expr, DateExpression<A> other, A knownValue) {
        HashSet<Expression<?>> rv = new HashSet<Expression<?>>();
        rv.add(expr.dayOfMonth());
        rv.add(expr.month());
        rv.add(expr.year());
        rv.add(expr.yearMonth());
        if (module != Module.COLLECTIONS && module != Module.RDFBEAN) {
            rv.add(expr.min());
            rv.add(expr.max());
        }
        return ImmutableList.copyOf(rv);
    }

    /** Date and time part extractions, plus min/max aggregates where supported. */
    @SuppressWarnings("unchecked")
    public <A extends Comparable> Collection<Expression<?>> dateTime(DateTimeExpression<A> expr, DateTimeExpression<A> other, A knownValue) {
        HashSet<Expression<?>> rv = new HashSet<Expression<?>>();
        rv.add(expr.dayOfMonth());
        rv.add(expr.month());
        rv.add(expr.year());
        rv.add(expr.yearMonth());
        rv.add(expr.hour());
        rv.add(expr.minute());
        rv.add(expr.second());
        if (module != Module.COLLECTIONS && module != Module.RDFBEAN) {
            rv.add(expr.min());
            rv.add(expr.max());
        }
        return ImmutableList.copyOf(rv);
    }

    /** Indexed access projections for list paths; RDFBEAN does not support size(). */
    public <A,Q extends SimpleExpression<A>> Collection<Expression<?>> list(ListPath<A,Q> expr, ListExpression<A,Q> other, A knownElement) {
        HashSet<Expression<?>> rv = new HashSet<Expression<?>>();
        rv.add(expr.get(0));
        if (!module.equals(Module.RDFBEAN)) {
            rv.add(expr.size());
        }
        return ImmutableList.copyOf(rv);
    }

    /** Keyed access projections for map expressions; RDFBEAN does not support size(). */
    public <K,V> Collection<Expression<?>> map(MapExpressionBase<K,V,?> expr, MapExpression<K,V> other, K knownKey, V knownValue) {
        HashSet<Expression<?>> rv = new HashSet<Expression<?>>();
        rv.add(expr.get(knownKey));
        if (!module.equals(Module.RDFBEAN)) {
            rv.add(expr.size());
        }
        return ImmutableList.copyOf(rv);
    }

    /**
     * Numeric projections of expr against both the given expression and a
     * constant built from the known value.
     */
    public <A extends Number & Comparable<A>> Collection<NumberExpression<?>> numeric(NumberExpression<A> expr, NumberExpression<A> other, A knownValue, boolean forFilter) {
        HashSet<NumberExpression<?>> rv = new HashSet<NumberExpression<?>>();
        rv.addAll(numeric(expr, other, forFilter));
        rv.addAll(numeric(expr, NumberConstant.create(knownValue), forFilter));
        return ImmutableList.copyOf(rv);
    }

    @SuppressWarnings("unchecked")
    private <A extends Number & Comparable<A>> Collection<NumberExpression<?>> numeric(NumberExpression<A> expr, NumberExpression<?> other, boolean forFilter) {
        HashSet<NumberExpression<?>> rv = new HashSet<NumberExpression<?>>();
        rv.add(expr.abs());
        rv.add(expr.add(other));
        rv.add(expr.divide(other));
        // HSQLDB is excluded from negation -- presumably a known target
        // limitation; verify against the target's known issues.
        if (target != Target.HSQLDB) {
            rv.add(expr.negate());
        }
        rv.add(expr.multiply(other));
        rv.add(expr.sqrt());
        rv.add(expr.subtract(other));
        // Aggregates only make sense outside filters and for backends with
        // aggregate support.
        if (!forFilter && module != Module.COLLECTIONS && module != Module.RDFBEAN) {
            rv.add(expr.min());
            rv.add(expr.max());
            rv.add(expr.avg());
            rv.add(expr.count());
            rv.add(expr.countDistinct());
        }
        if (!(other instanceof Constant<?> || module == Module.JDO || module == Module.RDFBEAN)) {
            CaseBuilder cases = new CaseBuilder();
            rv.add(NumberConstant.create(1).add(cases
                .when(expr.gt(10)).then(expr)
                .when(expr.between(0, 10)).then((NumberExpression<A>) other)
                .otherwise((NumberExpression<A>) other)));
            rv.add(expr
                .when((NumberExpression<A>) other).then(expr)
                .otherwise((NumberExpression<A>) other));
        }
        return ImmutableList.copyOf(rv);
    }

    /** Numeric cast projections; skipped entirely on MySQL. */
    public <A extends Number & Comparable<A>> Collection<NumberExpression<?>> numericCasts(NumberExpression<A> expr, NumberExpression<A> other, A knownValue) {
        if (!target.equals(Target.MYSQL)) {
            HashSet<NumberExpression<?>> rv = new HashSet<NumberExpression<?>>();
            rv.add(expr.byteValue());
            rv.add(expr.doubleValue());
            rv.add(expr.floatValue());
            rv.add(expr.intValue());
            rv.add(expr.longValue());
            rv.add(expr.shortValue());
            return ImmutableList.copyOf(rv);
        } else {
            return Collections.emptySet();
        }
    }

    /**
     * String projections of expr against both the given expression and a
     * constant built from the known value.
     */
    public Collection<SimpleExpression<String>> string(StringExpression expr, StringExpression other, String knownValue) {
        HashSet<SimpleExpression<String>> rv = new HashSet<SimpleExpression<String>>();
        rv.addAll(stringProjections(expr, other));
        rv.addAll(stringProjections(expr, StringConstant.create(knownValue)));
        // Return an immutable copy, consistent with every other factory method
        // (previously the mutable HashSet itself was exposed).
        return ImmutableList.copyOf(rv);
    }

    @SuppressWarnings("unchecked")
    public Collection<SimpleExpression<String>> stringProjections(StringExpression expr, StringExpression other) {
        HashSet<SimpleExpression<String>> rv = new HashSet<SimpleExpression<String>>();
        rv.add(expr.append("Hello"));
        rv.add(expr.append(other));
        rv.add(expr.concat(other));
        rv.add(expr.concat("Hello"));
        rv.add(expr.lower());
        rv.add(expr.prepend("Hello"));
        rv.add(expr.prepend(other));
        rv.add(expr.stringValue());
        rv.add(expr.substring(1));
        rv.add(expr.substring(0, 1));
        if (!(other instanceof Constant<?> || module == Module.JDO || module == Module.RDFBEAN)) {
            CaseBuilder cases = new CaseBuilder();
            rv.add(cases.when(expr.eq("A")).then(other)
                        .when(expr.eq("B")).then(expr)
                        .otherwise(other));
            rv.add(expr.when("A").then(other)
                       .when("B").then(expr)
                       .otherwise(other));
        }
        rv.add(expr.trim());
        rv.add(expr.upper());
        if (module != Module.JDO) {
            rv.add(expr.nullif("xxx"));
        }
        return ImmutableList.copyOf(rv);
    }

    /** Time part extraction projections. */
    @SuppressWarnings("unchecked")
    public <A extends Comparable> Collection<Expression<?>> time(TimeExpression<A> expr, TimeExpression<A> other, A knownValue) {
        HashSet<Expression<?>> rv = new HashSet<Expression<?>>();
        rv.add(expr.hour());
        rv.add(expr.minute());
        rv.add(expr.second());
        return ImmutableList.copyOf(rv);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.cassandra.Util;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.db.rows.Row;
import org.apache.cassandra.db.context.CounterContext;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.exceptions.WriteTimeoutException;
import org.apache.cassandra.schema.KeyspaceParams;
import org.apache.cassandra.utils.ByteBufferUtil;
import static org.junit.Assert.assertEquals;
/**
 * Tests for {@link CounterMutation}: single-cell and multi-cell increments,
 * batched counter mutations across column families, counter-cache population,
 * and interaction between counter updates and deletions/tombstones.
 */
public class CounterMutationTest
{
    private static final String KEYSPACE1 = "CounterMutationTest";
    private static final String CF1 = "Counter1";
    private static final String CF2 = "Counter2";

    /** Creates the test keyspace with two counter column families. */
    @BeforeClass
    public static void defineSchema() throws ConfigurationException
    {
        SchemaLoader.prepareServer();
        SchemaLoader.createKeyspace(KEYSPACE1,
                                    KeyspaceParams.simple(1),
                                    SchemaLoader.counterCFMD(KEYSPACE1, CF1),
                                    SchemaLoader.counterCFMD(KEYSPACE1, CF2));
    }

    @Test
    public void testSingleCell() throws WriteTimeoutException
    {
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE1).getColumnFamilyStore(CF1);
        cfs.truncateBlocking();
        // (the unused local ColumnDefinition previously fetched here was
        // removed; addAndCheck() resolves the column itself)
        // Do the initial update (+1)
        addAndCheck(cfs, 1, 1);
        // Make another increment (+2)
        addAndCheck(cfs, 2, 3);
        // Decrement to 0 (-3)
        addAndCheck(cfs, -3, 0);
    }

    /** Applies a counter delta to "val" and asserts the resulting total. */
    private void addAndCheck(ColumnFamilyStore cfs, long toAdd, long expected)
    {
        ColumnDefinition cDef = cfs.metadata.getColumnDefinition(ByteBufferUtil.bytes("val"));
        Mutation m = new RowUpdateBuilder(cfs.metadata, 5, "key1").clustering("cc").add("val", toAdd).build();
        new CounterMutation(m, ConsistencyLevel.ONE).apply();
        Row row = Util.getOnlyRow(Util.cmd(cfs).includeRow("cc").columns("val").build());
        assertEquals(expected, CounterContext.instance().total(row.getCell(cDef).value()));
    }

    @Test
    public void testTwoCells() throws WriteTimeoutException
    {
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE1).getColumnFamilyStore(CF1);
        cfs.truncateBlocking();
        // Do the initial update (+1, -1)
        addTwoAndCheck(cfs, 1L, 1L, -1L, -1L);
        // Make another increment (+2, -2)
        addTwoAndCheck(cfs, 2L, 3L, -2L, -3L);
        // Decrement to 0 (-3, +3)
        addTwoAndCheck(cfs, -3L, 0L, 3L, 0L);
    }

    /** Applies deltas to "val" and "val2" in one mutation and asserts both totals. */
    private void addTwoAndCheck(ColumnFamilyStore cfs, long addOne, long expectedOne, long addTwo, long expectedTwo)
    {
        ColumnDefinition cDefOne = cfs.metadata.getColumnDefinition(ByteBufferUtil.bytes("val"));
        ColumnDefinition cDefTwo = cfs.metadata.getColumnDefinition(ByteBufferUtil.bytes("val2"));
        Mutation m = new RowUpdateBuilder(cfs.metadata, 5, "key1")
            .clustering("cc")
            .add("val", addOne)
            .add("val2", addTwo)
            .build();
        new CounterMutation(m, ConsistencyLevel.ONE).apply();
        Row row = Util.getOnlyRow(Util.cmd(cfs).includeRow("cc").columns("val", "val2").build());
        assertEquals(expectedOne, CounterContext.instance().total(row.getCell(cDefOne).value()));
        assertEquals(expectedTwo, CounterContext.instance().total(row.getCell(cDefTwo).value()));
    }

    @Test
    public void testBatch() throws WriteTimeoutException
    {
        ColumnFamilyStore cfsOne = Keyspace.open(KEYSPACE1).getColumnFamilyStore(CF1);
        ColumnFamilyStore cfsTwo = Keyspace.open(KEYSPACE1).getColumnFamilyStore(CF2);
        cfsOne.truncateBlocking();
        cfsTwo.truncateBlocking();
        // Do the update (+1, -1), (+2, -2)
        Mutation batch = new Mutation(KEYSPACE1, Util.dk("key1"));
        batch.add(new RowUpdateBuilder(cfsOne.metadata, 5, "key1")
            .clustering("cc")
            .add("val", 1L)
            .add("val2", -1L)
            .build().get(cfsOne.metadata));
        batch.add(new RowUpdateBuilder(cfsTwo.metadata, 5, "key1")
            .clustering("cc")
            .add("val", 2L)
            .add("val2", -2L)
            .build().get(cfsTwo.metadata));
        new CounterMutation(batch, ConsistencyLevel.ONE).apply();
        // Verify totals in the first column family
        ColumnDefinition c1cfs1 = cfsOne.metadata.getColumnDefinition(ByteBufferUtil.bytes("val"));
        ColumnDefinition c2cfs1 = cfsOne.metadata.getColumnDefinition(ByteBufferUtil.bytes("val2"));
        Row row = Util.getOnlyRow(Util.cmd(cfsOne).includeRow("cc").columns("val", "val2").build());
        assertEquals(1L, CounterContext.instance().total(row.getCell(c1cfs1).value()));
        assertEquals(-1L, CounterContext.instance().total(row.getCell(c2cfs1).value()));
        // Verify totals in the second column family
        ColumnDefinition c1cfs2 = cfsTwo.metadata.getColumnDefinition(ByteBufferUtil.bytes("val"));
        ColumnDefinition c2cfs2 = cfsTwo.metadata.getColumnDefinition(ByteBufferUtil.bytes("val2"));
        row = Util.getOnlyRow(Util.cmd(cfsTwo).includeRow("cc").columns("val", "val2").build());
        assertEquals(2L, CounterContext.instance().total(row.getCell(c1cfs2).value()));
        assertEquals(-2L, CounterContext.instance().total(row.getCell(c2cfs2).value()));
        // Check the caches, separately
        CBuilder cb = CBuilder.create(cfsOne.metadata.comparator);
        cb.add("cc");
        assertEquals(ClockAndCount.create(1L, 1L), cfsOne.getCachedCounter(Util.dk("key1").getKey(), cb.build(), c1cfs1, null));
        assertEquals(ClockAndCount.create(1L, -1L), cfsOne.getCachedCounter(Util.dk("key1").getKey(), cb.build(), c2cfs1, null));
        assertEquals(ClockAndCount.create(1L, 2L), cfsTwo.getCachedCounter(Util.dk("key1").getKey(), cb.build(), c1cfs2, null));
        assertEquals(ClockAndCount.create(1L, -2L), cfsTwo.getCachedCounter(Util.dk("key1").getKey(), cb.build(), c2cfs2, null));
    }

    @Test
    public void testDeletes() throws WriteTimeoutException
    {
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE1).getColumnFamilyStore(CF1);
        cfs.truncateBlocking();
        ColumnDefinition cOne = cfs.metadata.getColumnDefinition(ByteBufferUtil.bytes("val"));
        ColumnDefinition cTwo = cfs.metadata.getColumnDefinition(ByteBufferUtil.bytes("val2"));
        // Do the initial update (+1, -1)
        new CounterMutation(
            new RowUpdateBuilder(cfs.metadata, 5, "key1")
                .clustering("cc")
                .add("val", 1L)
                .add("val2", -1L)
                .build(),
            ConsistencyLevel.ONE).apply();
        Row row = Util.getOnlyRow(Util.cmd(cfs).includeRow("cc").columns("val", "val2").build());
        assertEquals(1L, CounterContext.instance().total(row.getCell(cOne).value()));
        assertEquals(-1L, CounterContext.instance().total(row.getCell(cTwo).value()));
        // Remove the first counter, increment the second counter
        new CounterMutation(
            new RowUpdateBuilder(cfs.metadata, 5, "key1")
                .clustering("cc")
                .delete(cOne)
                .add("val2", -5L)
                .build(),
            ConsistencyLevel.ONE).apply();
        row = Util.getOnlyRow(Util.cmd(cfs).includeRow("cc").columns("val", "val2").build());
        assertEquals(null, row.getCell(cOne));
        assertEquals(-6L, CounterContext.instance().total(row.getCell(cTwo).value()));
        // Increment the first counter, make sure it's still shadowed by the tombstone
        new CounterMutation(
            new RowUpdateBuilder(cfs.metadata, 5, "key1")
                .clustering("cc")
                .add("val", 1L)
                .build(),
            ConsistencyLevel.ONE).apply();
        row = Util.getOnlyRow(Util.cmd(cfs).includeRow("cc").columns("val", "val2").build());
        assertEquals(null, row.getCell(cOne));
        // Get rid of the complete partition
        RowUpdateBuilder.deleteRow(cfs.metadata, 6, "key1", "cc").applyUnsafe();
        Util.assertEmpty(Util.cmd(cfs).includeRow("cc").columns("val", "val2").build());
        // Increment both counters, ensure that both stay dead
        new CounterMutation(
            new RowUpdateBuilder(cfs.metadata, 6, "key1")
                .clustering("cc")
                .add("val", 1L)
                .add("val2", 1L)
                .build(),
            ConsistencyLevel.ONE).apply();
        Util.assertEmpty(Util.cmd(cfs).includeRow("cc").columns("val", "val2").build());
    }
}
| |
/**
* Copyright 2006-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.generator.codegen.mybatis3;
import java.util.ArrayList;
import java.util.List;
import org.mybatis.generator.api.GeneratedJavaFile;
import org.mybatis.generator.api.GeneratedXmlFile;
import org.mybatis.generator.api.IntrospectedTable;
import org.mybatis.generator.api.ProgressCallback;
import org.mybatis.generator.api.dom.java.CompilationUnit;
import org.mybatis.generator.api.dom.xml.Document;
import org.mybatis.generator.codegen.AbstractGenerator;
import org.mybatis.generator.codegen.AbstractJavaClientGenerator;
import org.mybatis.generator.codegen.AbstractJavaGenerator;
import org.mybatis.generator.codegen.AbstractXmlGenerator;
import org.mybatis.generator.codegen.mybatis3.javamapper.AnnotatedClientGenerator;
import org.mybatis.generator.codegen.mybatis3.javamapper.JavaMapperGenerator;
import org.mybatis.generator.codegen.mybatis3.javamapper.MixedClientGenerator;
import org.mybatis.generator.codegen.mybatis3.model.BaseRecordGenerator;
import org.mybatis.generator.codegen.mybatis3.model.ExampleGenerator;
import org.mybatis.generator.codegen.mybatis3.model.PrimaryKeyGenerator;
import org.mybatis.generator.codegen.mybatis3.model.RecordWithBLOBsGenerator;
import org.mybatis.generator.codegen.mybatis3.xmlmapper.XMLMapperGenerator;
import org.mybatis.generator.config.PropertyRegistry;
import org.mybatis.generator.internal.ObjectFactory;
/**
* The Class IntrospectedTableMyBatis3Impl.
*
* @author Jeff Butler
*/
public class IntrospectedTableMyBatis3Impl extends IntrospectedTable {

    /** The java model generators. */
    protected List<AbstractJavaGenerator> javaModelGenerators;

    /** The client generators. */
    protected List<AbstractJavaGenerator> clientGenerators;

    /** The xml mapper generator. */
    protected AbstractXmlGenerator xmlMapperGenerator;

    /**
     * Instantiates a new introspected table my batis3 impl.
     */
    public IntrospectedTableMyBatis3Impl() {
        super(TargetRuntime.MYBATIS3);
        javaModelGenerators = new ArrayList<AbstractJavaGenerator>();
        clientGenerators = new ArrayList<AbstractJavaGenerator>();
    }

    /* (non-Javadoc)
     * @see org.mybatis.generator.api.IntrospectedTable#calculateGenerators(java.util.List, org.mybatis.generator.api.ProgressCallback)
     */
    @Override
    public void calculateGenerators(List<String> warnings,
            ProgressCallback progressCallback) {
        calculateJavaModelGenerators(warnings, progressCallback);

        AbstractJavaClientGenerator javaClientGenerator =
            calculateClientGenerators(warnings, progressCallback);

        calculateXmlMapperGenerator(javaClientGenerator, warnings, progressCallback);
    }

    /**
     * Calculate xml mapper generator. When a client generator exists it picks
     * the matching XML generator; otherwise an XMLMapperGenerator is used if a
     * SQL map configuration is present.
     *
     * @param javaClientGenerator
     *            the java client generator (may be null)
     * @param warnings
     *            the warnings
     * @param progressCallback
     *            the progress callback
     */
    protected void calculateXmlMapperGenerator(AbstractJavaClientGenerator javaClientGenerator,
            List<String> warnings,
            ProgressCallback progressCallback) {
        if (javaClientGenerator == null) {
            if (context.getSqlMapGeneratorConfiguration() != null) {
                xmlMapperGenerator = new XMLMapperGenerator();
            }
        } else {
            xmlMapperGenerator = javaClientGenerator.getMatchedXMLGenerator();
        }

        initializeAbstractGenerator(xmlMapperGenerator, warnings,
                progressCallback);
    }

    /**
     * Calculate client generators.
     *
     * @param warnings
     *            the warnings
     * @param progressCallback
     *            the progress callback
     * @return the java client generator that was created and registered, or
     *         {@code null} if the rules do not call for a client or none is
     *         configured
     */
    protected AbstractJavaClientGenerator calculateClientGenerators(List<String> warnings,
            ProgressCallback progressCallback) {
        if (!rules.generateJavaClient()) {
            return null;
        }

        AbstractJavaClientGenerator javaGenerator = createJavaClientGenerator();
        if (javaGenerator == null) {
            return null;
        }

        initializeAbstractGenerator(javaGenerator, warnings, progressCallback);
        clientGenerators.add(javaGenerator);

        return javaGenerator;
    }

    /**
     * Creates the java client generator configured for this context, either a
     * well-known type keyword or a user supplied class name.
     *
     * @return the abstract java client generator, or {@code null} when no
     *         client generator configuration exists
     */
    protected AbstractJavaClientGenerator createJavaClientGenerator() {
        if (context.getJavaClientGeneratorConfiguration() == null) {
            return null;
        }

        String type = context.getJavaClientGeneratorConfiguration()
                .getConfigurationType();

        AbstractJavaClientGenerator javaGenerator;
        // "XMLMAPPER" and "MAPPER" are synonyms for the XML based java mapper
        if ("XMLMAPPER".equalsIgnoreCase(type) //$NON-NLS-1$
                || "MAPPER".equalsIgnoreCase(type)) { //$NON-NLS-1$
            javaGenerator = new JavaMapperGenerator();
        } else if ("MIXEDMAPPER".equalsIgnoreCase(type)) { //$NON-NLS-1$
            javaGenerator = new MixedClientGenerator();
        } else if ("ANNOTATEDMAPPER".equalsIgnoreCase(type)) { //$NON-NLS-1$
            javaGenerator = new AnnotatedClientGenerator();
        } else {
            // any other value is treated as a fully qualified class name
            javaGenerator = (AbstractJavaClientGenerator) ObjectFactory
                    .createInternalObject(type);
        }

        return javaGenerator;
    }

    /**
     * Calculate java model generators (example, primary key, base record and
     * record-with-BLOBs classes, as required by the rules).
     *
     * @param warnings
     *            the warnings
     * @param progressCallback
     *            the progress callback
     */
    protected void calculateJavaModelGenerators(List<String> warnings,
            ProgressCallback progressCallback) {
        if (getRules().generateExampleClass()) {
            AbstractJavaGenerator javaGenerator = new ExampleGenerator();
            initializeAbstractGenerator(javaGenerator, warnings,
                    progressCallback);
            javaModelGenerators.add(javaGenerator);
        }

        if (getRules().generatePrimaryKeyClass()) {
            AbstractJavaGenerator javaGenerator = new PrimaryKeyGenerator();
            initializeAbstractGenerator(javaGenerator, warnings,
                    progressCallback);
            javaModelGenerators.add(javaGenerator);
        }

        if (getRules().generateBaseRecordClass()) {
            AbstractJavaGenerator javaGenerator = new BaseRecordGenerator();
            initializeAbstractGenerator(javaGenerator, warnings,
                    progressCallback);
            javaModelGenerators.add(javaGenerator);
        }

        if (getRules().generateRecordWithBLOBsClass()) {
            AbstractJavaGenerator javaGenerator = new RecordWithBLOBsGenerator();
            initializeAbstractGenerator(javaGenerator, warnings,
                    progressCallback);
            javaModelGenerators.add(javaGenerator);
        }
    }

    /**
     * Initialize abstract generator with this table's context and callbacks.
     * Safe to call with {@code null} (no-op).
     *
     * @param abstractGenerator
     *            the abstract generator
     * @param warnings
     *            the warnings
     * @param progressCallback
     *            the progress callback
     */
    protected void initializeAbstractGenerator(
            AbstractGenerator abstractGenerator, List<String> warnings,
            ProgressCallback progressCallback) {
        if (abstractGenerator == null) {
            return;
        }

        abstractGenerator.setContext(context);
        abstractGenerator.setIntrospectedTable(this);
        abstractGenerator.setProgressCallback(progressCallback);
        abstractGenerator.setWarnings(warnings);
    }

    /* (non-Javadoc)
     * @see org.mybatis.generator.api.IntrospectedTable#getGeneratedJavaFiles()
     */
    @Override
    public List<GeneratedJavaFile> getGeneratedJavaFiles() {
        List<GeneratedJavaFile> answer = new ArrayList<GeneratedJavaFile>();

        // model classes go to the model generator's target project
        for (AbstractJavaGenerator javaGenerator : javaModelGenerators) {
            List<CompilationUnit> compilationUnits = javaGenerator
                    .getCompilationUnits();
            for (CompilationUnit compilationUnit : compilationUnits) {
                GeneratedJavaFile gjf = new GeneratedJavaFile(compilationUnit,
                        context.getJavaModelGeneratorConfiguration()
                                .getTargetProject(),
                        context.getProperty(PropertyRegistry.CONTEXT_JAVA_FILE_ENCODING),
                        context.getJavaFormatter());
                answer.add(gjf);
            }
        }

        // client classes go to the client generator's target project
        for (AbstractJavaGenerator javaGenerator : clientGenerators) {
            List<CompilationUnit> compilationUnits = javaGenerator
                    .getCompilationUnits();
            for (CompilationUnit compilationUnit : compilationUnits) {
                GeneratedJavaFile gjf = new GeneratedJavaFile(compilationUnit,
                        context.getJavaClientGeneratorConfiguration()
                                .getTargetProject(),
                        context.getProperty(PropertyRegistry.CONTEXT_JAVA_FILE_ENCODING),
                        context.getJavaFormatter());
                answer.add(gjf);
            }
        }

        return answer;
    }

    /* (non-Javadoc)
     * @see org.mybatis.generator.api.IntrospectedTable#getGeneratedXmlFiles()
     */
    @Override
    public List<GeneratedXmlFile> getGeneratedXmlFiles() {
        List<GeneratedXmlFile> answer = new ArrayList<GeneratedXmlFile>();

        if (xmlMapperGenerator != null) {
            Document document = xmlMapperGenerator.getDocument();
            GeneratedXmlFile gxf = new GeneratedXmlFile(document,
                    getMyBatis3XmlMapperFileName(), getMyBatis3XmlMapperPackage(),
                    context.getSqlMapGeneratorConfiguration().getTargetProject(),
                    true, context.getXmlFormatter());
            // plugins may veto the generated file
            if (context.getPlugins().sqlMapGenerated(gxf, this)) {
                answer.add(gxf);
            }
        }

        return answer;
    }

    /* (non-Javadoc)
     * @see org.mybatis.generator.api.IntrospectedTable#getGenerationSteps()
     */
    @Override
    public int getGenerationSteps() {
        return javaModelGenerators.size() + clientGenerators.size() +
                (xmlMapperGenerator == null ? 0 : 1);
    }

    /* (non-Javadoc)
     * @see org.mybatis.generator.api.IntrospectedTable#isJava5Targeted()
     */
    @Override
    public boolean isJava5Targeted() {
        return true;
    }

    /* (non-Javadoc)
     * @see org.mybatis.generator.api.IntrospectedTable#requiresXMLGenerator()
     */
    @Override
    public boolean requiresXMLGenerator() {
        AbstractJavaClientGenerator javaClientGenerator =
            createJavaClientGenerator();
        // no client generator configured -> no XML generator either
        return javaClientGenerator != null
                && javaClientGenerator.requiresXMLGenerator();
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.ingest;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.ingest.AbstractProcessor;
import org.elasticsearch.ingest.CompoundProcessor;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.IngestMetadata;
import org.elasticsearch.ingest.IngestStats;
import org.elasticsearch.ingest.Pipeline;
import org.elasticsearch.ingest.PipelineConfiguration;
import org.elasticsearch.ingest.PipelineExecutionService;
import org.elasticsearch.ingest.PipelineStore;
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.testframework.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.Before;
import org.mockito.ArgumentMatcher;
import org.mockito.invocation.InvocationOnMock;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ExecutorService;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.sameInstance;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code PipelineExecutionService}: pipeline lookup failures,
 * successful and failing processor execution (including on_failure chains),
 * bulk request handling, metadata propagation and ingest statistics.
 *
 * The pipeline store and thread pool are mocked; a direct executor is used so
 * that {@code executeBulkRequest} runs synchronously within each test.
 */
public class PipelineExecutionServiceTests extends ESTestCase {
    // Randomized pipeline version (possibly null) shared by the test pipelines.
    private final Integer version = randomBoolean() ? randomInt() : null;
    private PipelineStore store;
    private PipelineExecutionService executionService;
    @Before
    public void setup() {
        store = mock(PipelineStore.class);
        ThreadPool threadPool = mock(ThreadPool.class);
        // Direct executor: tasks run on the calling thread, keeping the tests synchronous.
        final ExecutorService executorService = EsExecutors.newDirectExecutorService();
        when(threadPool.executor(anyString())).thenReturn(executorService);
        executionService = new PipelineExecutionService(store, threadPool);
    }
    // An unknown pipeline id must surface as an IllegalArgumentException on the
    // per-item failure handler, while the completion handler still fires.
    public void testExecuteIndexPipelineDoesNotExist() {
        final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
        final SetOnce<Boolean> failure = new SetOnce<>();
        final BiConsumer<IndexRequest, Exception> failureHandler = (request, e) -> {
            failure.set(true);
            assertThat(request, sameInstance(indexRequest));
            assertThat(e, instanceOf(IllegalArgumentException.class));
            assertThat(e.getMessage(), equalTo("pipeline with id [_id] does not exist"));
        };
        @SuppressWarnings("unchecked")
        final Consumer<Exception> completionHandler = mock(Consumer.class);
        executionService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler);
        assertTrue(failure.get());
        verify(completionHandler, times(1)).accept(null);
    }
    // A processor that throws during execution is reported to the failure
    // handler with the original exception preserved in the cause chain.
    public void testExecuteIndexPipelineExistsButFailedParsing() {
        when(store.get("_id")).thenReturn(new Pipeline("_id", "stub", null,
            new CompoundProcessor(new AbstractProcessor("mock") {
                @Override
                public void execute(IngestDocument ingestDocument) {
                    throw new IllegalStateException("error");
                }
                @Override
                public String getType() {
                    return null;
                }
            })));
        final SetOnce<Boolean> failure = new SetOnce<>();
        final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
        final BiConsumer<IndexRequest, Exception> failureHandler = (request, e) -> {
            assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
            assertThat(e.getCause().getCause(), instanceOf(IllegalStateException.class));
            assertThat(e.getCause().getCause().getMessage(), equalTo("error"));
            failure.set(true);
        };
        @SuppressWarnings("unchecked")
        final Consumer<Exception> completionHandler = mock(Consumer.class);
        executionService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler);
        assertTrue(failure.get());
        verify(completionHandler, times(1)).accept(null);
    }
    // In a bulk with one known and one unknown pipeline, only the request with
    // the unknown pipeline is routed to the failure handler.
    public void testExecuteBulkPipelineDoesNotExist() {
        CompoundProcessor processor = mock(CompoundProcessor.class);
        when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", version, processor));
        BulkRequest bulkRequest = new BulkRequest();
        IndexRequest indexRequest1 = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
        bulkRequest.add(indexRequest1);
        IndexRequest indexRequest2 =
            new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("does_not_exist");
        bulkRequest.add(indexRequest2);
        @SuppressWarnings("unchecked")
        BiConsumer<IndexRequest, Exception> failureHandler = mock(BiConsumer.class);
        @SuppressWarnings("unchecked")
        Consumer<Exception> completionHandler = mock(Consumer.class);
        executionService.executeBulkRequest(bulkRequest.requests(), failureHandler, completionHandler);
        verify(failureHandler, times(1)).accept(
            argThat(item -> item == indexRequest2),
            argThat((ArgumentMatcher<IllegalArgumentException>) iae ->
                "pipeline with id [does_not_exist] does not exist".equals(iae.getMessage()))
        );
        verify(completionHandler, times(1)).accept(null);
    }
    // Happy path: no per-item failures, completion handler called once.
    public void testExecuteSuccess() {
        final CompoundProcessor processor = mock(CompoundProcessor.class);
        when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", version, processor));
        final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
        @SuppressWarnings("unchecked")
        final BiConsumer<IndexRequest, Exception> failureHandler = mock(BiConsumer.class);
        @SuppressWarnings("unchecked")
        final Consumer<Exception> completionHandler = mock(Consumer.class);
        executionService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler);
        verify(failureHandler, never()).accept(any(), any());
        verify(completionHandler, times(1)).accept(null);
    }
    // A pipeline whose compound processor has no processors must not execute
    // anything, yet still complete successfully.
    public void testExecuteEmptyPipeline() throws Exception {
        final CompoundProcessor processor = mock(CompoundProcessor.class);
        when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", version, processor));
        when(processor.getProcessors()).thenReturn(Collections.emptyList());
        final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
        @SuppressWarnings("unchecked")
        final BiConsumer<IndexRequest, Exception> failureHandler = mock(BiConsumer.class);
        @SuppressWarnings("unchecked")
        final Consumer<Exception> completionHandler = mock(Consumer.class);
        executionService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler);
        verify(processor, never()).execute(any());
        verify(failureHandler, never()).accept(any(), any());
        verify(completionHandler, times(1)).accept(null);
    }
    // Every metadata field a processor writes on the IngestDocument must be
    // copied back onto the IndexRequest after execution.
    public void testExecutePropagateAllMetaDataUpdates() throws Exception {
        final CompoundProcessor processor = mock(CompoundProcessor.class);
        when(processor.getProcessors()).thenReturn(Collections.singletonList(mock(Processor.class)));
        final long newVersion = randomLong();
        final String versionType = randomFrom("internal", "external", "external_gt", "external_gte");
        doAnswer((InvocationOnMock invocationOnMock) -> {
            IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0];
            for (IngestDocument.MetaData metaData : IngestDocument.MetaData.values()) {
                if (metaData == IngestDocument.MetaData.VERSION) {
                    ingestDocument.setFieldValue(metaData.getFieldName(), newVersion);
                } else if (metaData == IngestDocument.MetaData.VERSION_TYPE) {
                    ingestDocument.setFieldValue(metaData.getFieldName(), versionType);
                } else {
                    // e.g. _index -> "update_index", _id -> "update_id", ...
                    ingestDocument.setFieldValue(metaData.getFieldName(), "update" + metaData.getFieldName());
                }
            }
            return null;
        }).when(processor).execute(any());
        when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", version, processor));
        final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
        @SuppressWarnings("unchecked")
        final BiConsumer<IndexRequest, Exception> failureHandler = mock(BiConsumer.class);
        @SuppressWarnings("unchecked")
        final Consumer<Exception> completionHandler = mock(Consumer.class);
        executionService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler);
        verify(processor).execute(any());
        verify(failureHandler, never()).accept(any(), any());
        verify(completionHandler, times(1)).accept(null);
        assertThat(indexRequest.index(), equalTo("update_index"));
        assertThat(indexRequest.type(), equalTo("update_type"));
        assertThat(indexRequest.id(), equalTo("update_id"));
        assertThat(indexRequest.routing(), equalTo("update_routing"));
        assertThat(indexRequest.parent(), equalTo("update_parent"));
        assertThat(indexRequest.version(), equalTo(newVersion));
        assertThat(indexRequest.versionType(), equalTo(VersionType.fromString(versionType)));
    }
    // Without an on_failure chain, a RuntimeException from the processor goes
    // straight to the per-item failure handler.
    public void testExecuteFailure() throws Exception {
        final CompoundProcessor processor = mock(CompoundProcessor.class);
        when(processor.getProcessors()).thenReturn(Collections.singletonList(mock(Processor.class)));
        when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", version, processor));
        final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
        doThrow(new RuntimeException())
            .when(processor)
            .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Collections.emptyMap()));
        @SuppressWarnings("unchecked")
        final BiConsumer<IndexRequest, Exception> failureHandler = mock(BiConsumer.class);
        @SuppressWarnings("unchecked")
        final Consumer<Exception> completionHandler = mock(Consumer.class);
        executionService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler);
        verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Collections.emptyMap()));
        verify(failureHandler, times(1)).accept(eq(indexRequest), any(RuntimeException.class));
        verify(completionHandler, times(1)).accept(null);
    }
    // A processor failure that is absorbed by a working on_failure processor
    // must NOT be reported as a per-item failure.
    public void testExecuteSuccessWithOnFailure() throws Exception {
        final Processor processor = mock(Processor.class);
        when(processor.getType()).thenReturn("mock_processor_type");
        when(processor.getTag()).thenReturn("mock_processor_tag");
        final Processor onFailureProcessor = mock(Processor.class);
        final CompoundProcessor compoundProcessor = new CompoundProcessor(
            false, Collections.singletonList(processor), Collections.singletonList(new CompoundProcessor(onFailureProcessor)));
        when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", version, compoundProcessor));
        final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
        doThrow(new RuntimeException()).when(processor).execute(eqIndexTypeId(Collections.emptyMap()));
        @SuppressWarnings("unchecked")
        final BiConsumer<IndexRequest, Exception> failureHandler = mock(BiConsumer.class);
        @SuppressWarnings("unchecked")
        final Consumer<Exception> completionHandler = mock(Consumer.class);
        executionService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler);
        verify(failureHandler, never()).accept(eq(indexRequest), any(ElasticsearchException.class));
        verify(completionHandler, times(1)).accept(null);
    }
    // If the on_failure processor itself fails, the failure is reported.
    public void testExecuteFailureWithOnFailure() throws Exception {
        final Processor processor = mock(Processor.class);
        final Processor onFailureProcessor = mock(Processor.class);
        final CompoundProcessor compoundProcessor = new CompoundProcessor(
            false, Collections.singletonList(processor), Collections.singletonList(new CompoundProcessor(onFailureProcessor)));
        when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", version, compoundProcessor));
        final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
        doThrow(new RuntimeException())
            .when(processor)
            .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Collections.emptyMap()));
        doThrow(new RuntimeException())
            .when(onFailureProcessor)
            .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Collections.emptyMap()));
        @SuppressWarnings("unchecked")
        final BiConsumer<IndexRequest, Exception> failureHandler = mock(BiConsumer.class);
        @SuppressWarnings("unchecked")
        final Consumer<Exception> completionHandler = mock(Consumer.class);
        executionService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler);
        verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Collections.emptyMap()));
        verify(failureHandler, times(1)).accept(eq(indexRequest), any(RuntimeException.class));
        verify(completionHandler, times(1)).accept(null);
    }
    // Failures propagate through nested on_failure chains when every level throws.
    public void testExecuteFailureWithNestedOnFailure() throws Exception {
        final Processor processor = mock(Processor.class);
        final Processor onFailureProcessor = mock(Processor.class);
        final Processor onFailureOnFailureProcessor = mock(Processor.class);
        final List<Processor> processors = Collections.singletonList(onFailureProcessor);
        final List<Processor> onFailureProcessors = Collections.singletonList(onFailureOnFailureProcessor);
        final CompoundProcessor compoundProcessor = new CompoundProcessor(
            false,
            Collections.singletonList(processor),
            Collections.singletonList(new CompoundProcessor(false, processors, onFailureProcessors)));
        when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", version, compoundProcessor));
        final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
        doThrow(new RuntimeException())
            .when(onFailureOnFailureProcessor)
            .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Collections.emptyMap()));
        doThrow(new RuntimeException())
            .when(onFailureProcessor)
            .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Collections.emptyMap()));
        doThrow(new RuntimeException())
            .when(processor)
            .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Collections.emptyMap()));
        @SuppressWarnings("unchecked")
        final BiConsumer<IndexRequest, Exception> failureHandler = mock(BiConsumer.class);
        @SuppressWarnings("unchecked")
        final Consumer<Exception> completionHandler = mock(Consumer.class);
        executionService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler);
        verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Collections.emptyMap()));
        verify(failureHandler, times(1)).accept(eq(indexRequest), any(RuntimeException.class));
        verify(completionHandler, times(1)).accept(null);
    }
    // A mixed bulk (delete/update/index): only index requests with a pipeline
    // run through ingest, and each failing one is reported with the same error.
    public void testBulkRequestExecutionWithFailures() throws Exception {
        BulkRequest bulkRequest = new BulkRequest();
        String pipelineId = "_id";
        int numRequest = scaledRandomIntBetween(8, 64);
        int numIndexRequests = 0;
        for (int i = 0; i < numRequest; i++) {
            DocWriteRequest request;
            if (randomBoolean()) {
                if (randomBoolean()) {
                    request = new DeleteRequest("_index", "_type", "_id");
                } else {
                    request = new UpdateRequest("_index", "_type", "_id");
                }
            } else {
                IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline(pipelineId);
                indexRequest.source(Requests.INDEX_CONTENT_TYPE, "field1", "value1");
                request = indexRequest;
                numIndexRequests++;
            }
            bulkRequest.add(request);
        }
        CompoundProcessor processor = mock(CompoundProcessor.class);
        when(processor.getProcessors()).thenReturn(Collections.singletonList(mock(Processor.class)));
        Exception error = new RuntimeException();
        doThrow(error).when(processor).execute(any());
        when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, version, processor));
        @SuppressWarnings("unchecked")
        BiConsumer<IndexRequest, Exception> requestItemErrorHandler = mock(BiConsumer.class);
        @SuppressWarnings("unchecked")
        Consumer<Exception> completionHandler = mock(Consumer.class);
        executionService.executeBulkRequest(bulkRequest.requests(), requestItemErrorHandler, completionHandler);
        verify(requestItemErrorHandler, times(numIndexRequests)).accept(any(IndexRequest.class), eq(error));
        verify(completionHandler, times(1)).accept(null);
    }
    // A bulk of successful index requests produces no per-item errors.
    public void testBulkRequestExecution() {
        BulkRequest bulkRequest = new BulkRequest();
        String pipelineId = "_id";
        int numRequest = scaledRandomIntBetween(8, 64);
        for (int i = 0; i < numRequest; i++) {
            IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline(pipelineId);
            indexRequest.source(Requests.INDEX_CONTENT_TYPE, "field1", "value1");
            bulkRequest.add(indexRequest);
        }
        when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, version, new CompoundProcessor()));
        @SuppressWarnings("unchecked")
        BiConsumer<IndexRequest, Exception> requestItemErrorHandler = mock(BiConsumer.class);
        @SuppressWarnings("unchecked")
        Consumer<Exception> completionHandler = mock(Consumer.class);
        executionService.executeBulkRequest(bulkRequest.requests(), requestItemErrorHandler, completionHandler);
        verify(requestItemErrorHandler, never()).accept(any(), any());
        verify(completionHandler, times(1)).accept(null);
    }
    // Ingest stats start at zero and count per-pipeline and total executions.
    public void testStats() {
        final IngestStats initialStats = executionService.stats();
        assertThat(initialStats.getStatsPerPipeline().size(), equalTo(0));
        assertThat(initialStats.getTotalStats().getIngestCount(), equalTo(0L));
        assertThat(initialStats.getTotalStats().getIngestCurrent(), equalTo(0L));
        assertThat(initialStats.getTotalStats().getIngestFailedCount(), equalTo(0L));
        assertThat(initialStats.getTotalStats().getIngestTimeInMillis(), equalTo(0L));
        when(store.get("_id1")).thenReturn(new Pipeline("_id1", null, version, new CompoundProcessor(mock(Processor.class))));
        when(store.get("_id2")).thenReturn(new Pipeline("_id2", null, null, new CompoundProcessor(mock(Processor.class))));
        final Map<String, PipelineConfiguration> configurationMap = new HashMap<>();
        configurationMap.put("_id1", new PipelineConfiguration("_id1", new BytesArray("{}"), XContentType.JSON));
        configurationMap.put("_id2", new PipelineConfiguration("_id2", new BytesArray("{}"), XContentType.JSON));
        executionService.updatePipelineStats(new IngestMetadata(configurationMap));
        @SuppressWarnings("unchecked")
        final BiConsumer<IndexRequest, Exception> failureHandler = mock(BiConsumer.class);
        @SuppressWarnings("unchecked")
        final Consumer<Exception> completionHandler = mock(Consumer.class);
        final IndexRequest indexRequest = new IndexRequest("_index");
        indexRequest.setPipeline("_id1");
        executionService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler);
        final IngestStats afterFirstRequestStats = executionService.stats();
        assertThat(afterFirstRequestStats.getStatsPerPipeline().size(), equalTo(2));
        assertThat(afterFirstRequestStats.getStatsPerPipeline().get("_id1").getIngestCount(), equalTo(1L));
        assertThat(afterFirstRequestStats.getStatsPerPipeline().get("_id2").getIngestCount(), equalTo(0L));
        assertThat(afterFirstRequestStats.getTotalStats().getIngestCount(), equalTo(1L));
        indexRequest.setPipeline("_id2");
        executionService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler);
        final IngestStats afterSecondRequestStats = executionService.stats();
        assertThat(afterSecondRequestStats.getStatsPerPipeline().size(), equalTo(2));
        assertThat(afterSecondRequestStats.getStatsPerPipeline().get("_id1").getIngestCount(), equalTo(1L));
        assertThat(afterSecondRequestStats.getStatsPerPipeline().get("_id2").getIngestCount(), equalTo(1L));
        assertThat(afterSecondRequestStats.getTotalStats().getIngestCount(), equalTo(2L));
    }
    // Regression test: removing a pipeline must also drop its stats entry.
    // issue: https://github.com/elastic/elasticsearch/issues/18126
    public void testUpdatingStatsWhenRemovingPipelineWorks() {
        Map<String, PipelineConfiguration> configurationMap = new HashMap<>();
        configurationMap.put("_id1", new PipelineConfiguration("_id1", new BytesArray("{}"), XContentType.JSON));
        configurationMap.put("_id2", new PipelineConfiguration("_id2", new BytesArray("{}"), XContentType.JSON));
        executionService.updatePipelineStats(new IngestMetadata(configurationMap));
        assertThat(executionService.stats().getStatsPerPipeline(), hasKey("_id1"));
        assertThat(executionService.stats().getStatsPerPipeline(), hasKey("_id2"));
        configurationMap = new HashMap<>();
        configurationMap.put("_id3", new PipelineConfiguration("_id3", new BytesArray("{}"), XContentType.JSON));
        executionService.updatePipelineStats(new IngestMetadata(configurationMap));
        assertThat(executionService.stats().getStatsPerPipeline(), not(hasKey("_id1")));
        assertThat(executionService.stats().getStatsPerPipeline(), not(hasKey("_id2")));
    }
    /**
     * Matcher for an IngestDocument with the fixed _index/_type/_id used by
     * these tests and the given source (version/versionType left unset).
     */
    private IngestDocument eqIndexTypeId(final Map<String, Object> source) {
        return argThat(new IngestDocumentMatcher("_index", "_type", "_id", source));
    }
    /**
     * Matcher for an IngestDocument with the fixed _index/_type/_id used by
     * these tests plus the given version, version type and source.
     */
    private IngestDocument eqIndexTypeId(final Long version, final VersionType versionType, final Map<String, Object> source) {
        return argThat(new IngestDocumentMatcher("_index", "_type", "_id", version, versionType, source));
    }
    /**
     * Compares IngestDocuments by source and metadata only, because the
     * auto-generated ingest timestamp differs between instances.
     */
    private class IngestDocumentMatcher implements ArgumentMatcher<IngestDocument> {
        private final IngestDocument ingestDocument;
        IngestDocumentMatcher(String index, String type, String id, Map<String, Object> source) {
            this.ingestDocument = new IngestDocument(index, type, id, null, null, null, null, source);
        }
        IngestDocumentMatcher(String index, String type, String id, Long version, VersionType versionType, Map<String, Object> source) {
            this.ingestDocument = new IngestDocument(index, type, id, null, null, version, versionType, source);
        }
        @Override
        public boolean matches(IngestDocument otherIngestDocument) {
            //ingest metadata will not be the same (timestamp differs every time)
            return Objects.equals(ingestDocument.getSourceAndMetadata(), otherIngestDocument.getSourceAndMetadata());
        }
    }
}
| |
package com.mxgraph.reader;
import java.util.Hashtable;
import java.util.Map;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import com.mxgraph.canvas.mxICanvas2D;
/**
*
public static void main(String[] args)
{
try
{
String filename = Test.class.getResource(
"/com/mxgraph/online/exported.xml").getPath();
String xml = mxUtils.readFile(filename);
System.out.println("xml=" + xml);
Document doc = mxUtils.parseXml(xml);
Element root = doc.getDocumentElement();
int width = Integer.parseInt(root.getAttribute("width"));
int height = Integer.parseInt(root.getAttribute("height"));
System.out.println("width=" + width + " height=" + height);
BufferedImage img = mxUtils.createBufferedImage(width, height,
Color.WHITE);
Graphics2D g2 = img.createGraphics();
mxUtils.setAntiAlias(g2, true, true);
mxDomOutputParser reader = new mxDomOutputParser(
new mxGraphicsExportCanvas(g2));
reader.read((Element) root.getFirstChild().getNextSibling());
ImageIO.write(img, "PNG", new File(
"C:\\Users\\Gaudenz\\Desktop\\test.png"));
}
catch (Exception e)
{
e.printStackTrace();
}
}
// -------------
Document doc = mxUtils.parseXml(xml);
Element root = doc.getDocumentElement();
mxDomOutputParser reader = new mxDomOutputParser(canvas);
reader.read(root.getFirstChild());
*/
public class mxDomOutputParser
{
/**
*
*/
protected mxICanvas2D canvas;
/**
*
*/
protected transient Map<String, IElementHandler> handlers = new Hashtable<String, IElementHandler>();
/**
*
*/
public mxDomOutputParser(mxICanvas2D canvas)
{
this.canvas = canvas;
initHandlers();
}
/**
*
*/
public void read(Node node)
{
while (node != null)
{
if (node instanceof Element)
{
Element elt = (Element) node;
IElementHandler handler = handlers.get(elt.getNodeName());
if (handler != null)
{
handler.parseElement(elt);
}
}
node = node.getNextSibling();
}
}
/**
*
*/
protected void initHandlers()
{
handlers.put("save", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.save();
}
});
handlers.put("restore", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.restore();
}
});
handlers.put("scale", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.scale(Double.parseDouble(elt.getAttribute("scale")));
}
});
handlers.put("translate", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.translate(Double.parseDouble(elt.getAttribute("dx")),
Double.parseDouble(elt.getAttribute("dy")));
}
});
handlers.put("rotate", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.rotate(Double.parseDouble(elt.getAttribute("theta")),
elt.getAttribute("flipH").equals("1"), elt
.getAttribute("flipV").equals("1"), Double
.parseDouble(elt.getAttribute("cx")), Double
.parseDouble(elt.getAttribute("cy")));
}
});
handlers.put("strokewidth", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setStrokeWidth(Double.parseDouble(elt
.getAttribute("width")));
}
});
handlers.put("strokecolor", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setStrokeColor(elt.getAttribute("color"));
}
});
handlers.put("dashed", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setDashed(elt.getAttribute("dashed").equals("1"));
}
});
handlers.put("dashpattern", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setDashPattern(elt.getAttribute("pattern"));
}
});
handlers.put("linecap", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setLineCap(elt.getAttribute("cap"));
}
});
handlers.put("linejoin", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setLineJoin(elt.getAttribute("join"));
}
});
handlers.put("miterlimit", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setMiterLimit(Double.parseDouble(elt
.getAttribute("limit")));
}
});
handlers.put("fontsize", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setFontSize(Double.parseDouble(elt.getAttribute("size")));
}
});
handlers.put("fontcolor", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setFontColor(elt.getAttribute("color"));
}
});
handlers.put("fontbackgroundcolor", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setFontBackgroundColor(elt.getAttribute("color"));
}
});
handlers.put("fontbordercolor", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setFontBorderColor(elt.getAttribute("color"));
}
});
handlers.put("fontfamily", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setFontFamily(elt.getAttribute("family"));
}
});
handlers.put("fontstyle", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setFontStyle(Integer.parseInt(elt.getAttribute("style")));
}
});
handlers.put("alpha", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setAlpha(Double.parseDouble(elt.getAttribute("alpha")));
}
});
handlers.put("fillcolor", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setFillColor(elt.getAttribute("color"));
}
});
handlers.put("shadowcolor", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setShadowColor(elt.getAttribute("color"));
}
});
handlers.put("shadowalpha", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setShadowAlpha(Double.parseDouble(elt.getAttribute("alpha")));
}
});
handlers.put("shadowoffset", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setShadowOffset(Double.parseDouble(elt.getAttribute("dx")),
Double.parseDouble(elt.getAttribute("dy")));
}
});
handlers.put("shadow", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setShadow(elt.getAttribute("enabled").equals("1"));
}
});
handlers.put("gradient", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.setGradient(elt.getAttribute("c1"),
elt.getAttribute("c2"),
Double.parseDouble(elt.getAttribute("x")),
Double.parseDouble(elt.getAttribute("y")),
Double.parseDouble(elt.getAttribute("w")),
Double.parseDouble(elt.getAttribute("h")),
elt.getAttribute("direction"),
Double.parseDouble(getValue(elt, "alpha1", "1")),
Double.parseDouble(getValue(elt, "alpha2", "1")));
}
});
handlers.put("rect", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.rect(Double.parseDouble(elt.getAttribute("x")),
Double.parseDouble(elt.getAttribute("y")),
Double.parseDouble(elt.getAttribute("w")),
Double.parseDouble(elt.getAttribute("h")));
}
});
handlers.put("roundrect", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.roundrect(Double.parseDouble(elt.getAttribute("x")),
Double.parseDouble(elt.getAttribute("y")),
Double.parseDouble(elt.getAttribute("w")),
Double.parseDouble(elt.getAttribute("h")),
Double.parseDouble(elt.getAttribute("dx")),
Double.parseDouble(elt.getAttribute("dy")));
}
});
handlers.put("ellipse", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.ellipse(Double.parseDouble(elt.getAttribute("x")),
Double.parseDouble(elt.getAttribute("y")),
Double.parseDouble(elt.getAttribute("w")),
Double.parseDouble(elt.getAttribute("h")));
}
});
handlers.put("image", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.image(Double.parseDouble(elt.getAttribute("x")), Double
.parseDouble(elt.getAttribute("y")), Double
.parseDouble(elt.getAttribute("w")), Double
.parseDouble(elt.getAttribute("h")), elt
.getAttribute("src"), elt.getAttribute("aspect")
.equals("1"), elt.getAttribute("flipH").equals("1"),
elt.getAttribute("flipV").equals("1"));
}
});
handlers.put("text", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.text(Double.parseDouble(elt.getAttribute("x")),
Double.parseDouble(elt.getAttribute("y")),
Double.parseDouble(elt.getAttribute("w")),
Double.parseDouble(elt.getAttribute("h")),
elt.getAttribute("str"),
elt.getAttribute("align"),
elt.getAttribute("valign"),
getValue(elt, "wrap", "").equals("1"),
elt.getAttribute("format"),
elt.getAttribute("overflow"),
getValue(elt, "clip", "").equals("1"),
Double.parseDouble(getValue(elt, "rotation", "0")),
elt.getAttribute("dir"));
}
});
handlers.put("begin", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.begin();
}
});
handlers.put("move", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.moveTo(Double.parseDouble(elt.getAttribute("x")),
Double.parseDouble(elt.getAttribute("y")));
}
});
handlers.put("line", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.lineTo(Double.parseDouble(elt.getAttribute("x")),
Double.parseDouble(elt.getAttribute("y")));
}
});
handlers.put("quad", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.quadTo(Double.parseDouble(elt.getAttribute("x1")),
Double.parseDouble(elt.getAttribute("y1")),
Double.parseDouble(elt.getAttribute("x2")),
Double.parseDouble(elt.getAttribute("y2")));
}
});
handlers.put("curve", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.curveTo(Double.parseDouble(elt.getAttribute("x1")),
Double.parseDouble(elt.getAttribute("y1")),
Double.parseDouble(elt.getAttribute("x2")),
Double.parseDouble(elt.getAttribute("y2")),
Double.parseDouble(elt.getAttribute("x3")),
Double.parseDouble(elt.getAttribute("y3")));
}
});
handlers.put("close", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.close();
}
});
handlers.put("stroke", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.stroke();
}
});
handlers.put("fill", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.fill();
}
});
handlers.put("fillstroke", new IElementHandler()
{
public void parseElement(Element elt)
{
canvas.fillAndStroke();
}
});
}
/**
 * Returns the value of the given attribute on the element, falling back to
 * the supplied default when the attribute is not set.
 *
 * @param elt the element to read the attribute from
 * @param name the attribute name
 * @param defaultValue the value to return when the attribute is absent
 * @return the attribute value, or <code>defaultValue</code> if unset
 */
protected String getValue(Element elt, String name, String defaultValue)
{
    String value = elt.getAttribute(name);

    // DOM's Element.getAttribute() returns an empty string - never null -
    // for a missing attribute, so a null-only check can never fall back to
    // the default. Treat an empty value as "not set" as well; otherwise
    // callers such as getValue(elt, "rotation", "0") would pass "" to
    // Double.parseDouble() and throw NumberFormatException.
    if (value == null || value.length() == 0)
    {
        value = defaultValue;
    }

    return value;
}
/**
 * Callback interface implemented by the per-element parse handlers that are
 * registered in the handlers map; one handler exists per drawing directive
 * element (text, begin, move, line, quad, curve, close, stroke, fill, ...).
 */
protected interface IElementHandler
{
    /**
     * Parses the given XML element and replays its drawing directive onto the
     * canvas.
     */
    void parseElement(Element elt);
}
}
| |
package com.krishagni.catissueplus.core.biospecimen.events;
import java.math.BigDecimal;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.collections.CollectionUtils;
import com.krishagni.catissueplus.core.common.AttributeModifiedSupport;
import com.krishagni.catissueplus.core.common.ListenAttributeChanges;
import com.krishagni.catissueplus.core.common.events.NameValuePair;
import com.krishagni.catissueplus.core.common.events.UserSummary;
import com.krishagni.catissueplus.core.de.events.ExtensionDetail;
/**
 * Flat data-transfer object that carries participant registration, visit and
 * specimen attributes side by side, as used for master-specimen processing.
 * The {@code @ListenAttributeChanges} annotation, together with the
 * {@link AttributeModifiedSupport} base class, tracks which attributes were
 * explicitly set on an instance.
 */
@ListenAttributeChanges
public class MasterSpecimenDetail extends AttributeModifiedSupport {
    // -- participant / registration attributes --
    private String cpShortTitle;
    private String ppid;
    private Date registrationDate;
    private String regSite;
    private String externalSubjectId;
    private String firstName;
    private String lastName;
    private String middleName;
    private String emailAddress;
    private Date birthDate;
    private Date deathDate;
    private String gender;
    private Set<String> races;
    private String vitalStatus;
    private List<PmiDetail> pmis;
    private String sexGenotype;
    private Set<String> ethnicities;
    private String uid;
    private String empi;

    // -- visit attributes --
    private Long visitId;
    private String visitName;
    private String eventLabel;
    private Date visitDate;
    private String collectionSite;
    private String status;
    private Set<String> clinicalDiagnoses;
    private String clinicalStatus;
    private String surgicalPathologyNumber;
    private String visitComments;

    // -- specimen attributes --
    private String reqCode;
    private String label;
    private String barcode;
    private String imageId;
    private String specimenClass;
    private String type;
    private String lineage;
    private String parentLabel;
    private String anatomicSite;
    private String laterality;
    private String pathology;
    private BigDecimal initialQty;
    private BigDecimal concentration;
    private Integer freezeThawCycles;
    private Date createdOn;
    private UserSummary createdBy;
    private String comments;
    private String collectionStatus;
    private String container;
    private String positionX;
    private String positionY;
    private int position;

    // -- collection / receive event attributes --
    private Date collectionDate;
    private String collectionProcedure;
    private String collectionContainer;
    private String collector;
    private Date receivedDate;
    private String receivedQuality;
    private String receiver;

    private List<NameValuePair> externalIds;
    private ExtensionDetail extensionDetail;

    public String getCpShortTitle() {
        return cpShortTitle;
    }

    public void setCpShortTitle(String cpShortTitle) {
        this.cpShortTitle = cpShortTitle;
    }

    public String getPpid() {
        return ppid;
    }

    public void setPpid(String ppid) {
        this.ppid = ppid;
    }

    /**
     * Returns the registration date, falling back to the visit date and then
     * to the collection date when the more specific dates are absent.
     * May return null when none of the three dates is set.
     */
    public Date getRegistrationDate() {
        if (registrationDate != null) {
            return registrationDate;
        } else if (visitDate != null) {
            return visitDate;
        } else {
            return collectionDate;
        }
    }

    public void setRegistrationDate(Date registrationDate) {
        this.registrationDate = registrationDate;
    }

    public String getRegSite() {
        return regSite;
    }

    public void setRegSite(String regSite) {
        this.regSite = regSite;
    }

    public String getExternalSubjectId() {
        return externalSubjectId;
    }

    public void setExternalSubjectId(String externalSubjectId) {
        this.externalSubjectId = externalSubjectId;
    }

    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    public String getLastName() {
        return lastName;
    }

    public void setLastName(String lastName) {
        this.lastName = lastName;
    }

    public String getMiddleName() {
        return middleName;
    }

    public void setMiddleName(String middleName) {
        this.middleName = middleName;
    }

    public String getEmailAddress() {
        return emailAddress;
    }

    public void setEmailAddress(String emailAddress) {
        this.emailAddress = emailAddress;
    }

    public Date getBirthDate() {
        return birthDate;
    }

    public void setBirthDate(Date birthDate) {
        this.birthDate = birthDate;
    }

    public Date getDeathDate() {
        return deathDate;
    }

    public void setDeathDate(Date deathDate) {
        this.deathDate = deathDate;
    }

    public String getGender() {
        return gender;
    }

    public void setGender(String gender) {
        this.gender = gender;
    }

    public Set<String> getRaces() {
        return races;
    }

    public void setRaces(Set<String> races) {
        this.races = races;
    }

    public String getVitalStatus() {
        return vitalStatus;
    }

    public void setVitalStatus(String vitalStatus) {
        this.vitalStatus = vitalStatus;
    }

    public List<PmiDetail> getPmis() {
        return pmis;
    }

    public void setPmis(List<PmiDetail> pmis) {
        this.pmis = pmis;
    }

    public String getSexGenotype() {
        return sexGenotype;
    }

    public void setSexGenotype(String sexGenotype) {
        this.sexGenotype = sexGenotype;
    }

    /**
     * Singular convenience accessor: returns an arbitrary element of the
     * ethnicities set (iteration order dependent), or null when empty/unset.
     */
    public String getEthnicity() {
        return CollectionUtils.isNotEmpty(ethnicities) ? ethnicities.iterator().next() : null;
    }

    /**
     * Singular convenience mutator: adds the given value to the ethnicities
     * set, lazily creating the set on first use. Note this accumulates rather
     * than replaces.
     */
    public void setEthnicity(String ethnicity) {
        if (ethnicities == null) {
            ethnicities = new HashSet<>();
        }

        ethnicities.add(ethnicity);
    }

    public Set<String> getEthnicities() {
        return ethnicities;
    }

    public void setEthnicities(Set<String> ethnicities) {
        this.ethnicities = ethnicities;
    }

    public String getUid() {
        return uid;
    }

    public void setUid(String uid) {
        this.uid = uid;
    }

    public String getEmpi() {
        return empi;
    }

    public void setEmpi(String empi) {
        this.empi = empi;
    }

    public Long getVisitId() {
        return visitId;
    }

    public void setVisitId(Long visitId) {
        this.visitId = visitId;
    }

    public String getVisitName() {
        return visitName;
    }

    public void setVisitName(String visitName) {
        this.visitName = visitName;
    }

    public String getEventLabel() {
        return eventLabel;
    }

    public void setEventLabel(String eventLabel) {
        this.eventLabel = eventLabel;
    }

    /**
     * Returns the visit date, falling back to the collection date when no
     * explicit visit date is set. May return null when neither is set.
     */
    public Date getVisitDate() {
        if (visitDate != null) {
            return visitDate;
        } else {
            return collectionDate;
        }
    }

    public void setVisitDate(Date visitDate) {
        this.visitDate = visitDate;
    }

    public String getCollectionSite() {
        return collectionSite;
    }

    public void setCollectionSite(String collectionSite) {
        this.collectionSite = collectionSite;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public Set<String> getClinicalDiagnoses() {
        return clinicalDiagnoses;
    }

    public void setClinicalDiagnoses(Set<String> clinicalDiagnoses) {
        this.clinicalDiagnoses = clinicalDiagnoses;
    }

    public String getClinicalStatus() {
        return clinicalStatus;
    }

    public void setClinicalStatus(String clinicalStatus) {
        this.clinicalStatus = clinicalStatus;
    }

    public String getSurgicalPathologyNumber() {
        return surgicalPathologyNumber;
    }

    public void setSurgicalPathologyNumber(String surgicalPathologyNumber) {
        this.surgicalPathologyNumber = surgicalPathologyNumber;
    }

    public String getVisitComments() {
        return visitComments;
    }

    public void setVisitComments(String visitComments) {
        this.visitComments = visitComments;
    }

    public String getReqCode() {
        return reqCode;
    }

    public void setReqCode(String reqCode) {
        this.reqCode = reqCode;
    }

    public String getLabel() {
        return label;
    }

    public void setLabel(String label) {
        this.label = label;
    }

    public String getBarcode() {
        return barcode;
    }

    public void setBarcode(String barcode) {
        this.barcode = barcode;
    }

    public String getImageId() {
        return imageId;
    }

    public void setImageId(String imageId) {
        this.imageId = imageId;
    }

    public String getSpecimenClass() {
        return specimenClass;
    }

    public void setSpecimenClass(String specimenClass) {
        this.specimenClass = specimenClass;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getLineage() {
        return lineage;
    }

    public void setLineage(String lineage) {
        this.lineage = lineage;
    }

    public String getParentLabel() {
        return parentLabel;
    }

    public void setParentLabel(String parentLabel) {
        this.parentLabel = parentLabel;
    }

    public String getAnatomicSite() {
        return anatomicSite;
    }

    public void setAnatomicSite(String anatomicSite) {
        this.anatomicSite = anatomicSite;
    }

    public String getLaterality() {
        return laterality;
    }

    public void setLaterality(String laterality) {
        this.laterality = laterality;
    }

    public String getPathology() {
        return pathology;
    }

    public void setPathology(String pathology) {
        this.pathology = pathology;
    }

    public BigDecimal getInitialQty() {
        return initialQty;
    }

    public void setInitialQty(BigDecimal initialQty) {
        this.initialQty = initialQty;
    }

    public BigDecimal getConcentration() {
        return concentration;
    }

    public void setConcentration(BigDecimal concentration) {
        this.concentration = concentration;
    }

    public Integer getFreezeThawCycles() {
        return freezeThawCycles;
    }

    public void setFreezeThawCycles(Integer freezeThawCycles) {
        this.freezeThawCycles = freezeThawCycles;
    }

    public Date getCreatedOn() {
        return createdOn;
    }

    public void setCreatedOn(Date createdOn) {
        this.createdOn = createdOn;
    }

    public UserSummary getCreatedBy() {
        return createdBy;
    }

    public void setCreatedBy(UserSummary createdBy) {
        this.createdBy = createdBy;
    }

    public String getComments() {
        return comments;
    }

    public void setComments(String comments) {
        this.comments = comments;
    }

    public String getCollectionStatus() {
        return collectionStatus;
    }

    public void setCollectionStatus(String collectionStatus) {
        this.collectionStatus = collectionStatus;
    }

    public String getContainer() {
        return container;
    }

    public void setContainer(String container) {
        this.container = container;
    }

    public String getPositionX() {
        return positionX;
    }

    public void setPositionX(String positionX) {
        this.positionX = positionX;
    }

    public String getPositionY() {
        return positionY;
    }

    public void setPositionY(String positionY) {
        this.positionY = positionY;
    }

    public int getPosition() {
        return position;
    }

    public void setPosition(int position) {
        this.position = position;
    }

    public Date getCollectionDate() {
        return collectionDate;
    }

    public void setCollectionDate(Date collectionDate) {
        this.collectionDate = collectionDate;
    }

    public String getCollectionProcedure() {
        return collectionProcedure;
    }

    public void setCollectionProcedure(String collectionProcedure) {
        this.collectionProcedure = collectionProcedure;
    }

    public String getCollectionContainer() {
        return collectionContainer;
    }

    public void setCollectionContainer(String collectionContainer) {
        this.collectionContainer = collectionContainer;
    }

    public String getCollector() {
        return collector;
    }

    public void setCollector(String collector) {
        this.collector = collector;
    }

    public Date getReceivedDate() {
        return receivedDate;
    }

    public void setReceivedDate(Date receivedDate) {
        this.receivedDate = receivedDate;
    }

    public String getReceivedQuality() {
        return receivedQuality;
    }

    public void setReceivedQuality(String receivedQuality) {
        this.receivedQuality = receivedQuality;
    }

    public String getReceiver() {
        return receiver;
    }

    public void setReceiver(String receiver) {
        this.receiver = receiver;
    }

    public List<NameValuePair> getExternalIds() {
        return externalIds;
    }

    public void setExternalIds(List<NameValuePair> externalIds) {
        this.externalIds = externalIds;
    }

    public ExtensionDetail getExtensionDetail() {
        return extensionDetail;
    }

    public void setExtensionDetail(ExtensionDetail extensionDetail) {
        this.extensionDetail = extensionDetail;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.mongodb.gridfs;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.Mongo;
import com.mongodb.MongoClient;
import com.mongodb.ReadPreference;
import com.mongodb.WriteConcern;
import com.mongodb.gridfs.GridFS;
import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.support.CamelContextHelper;
import org.apache.camel.support.DefaultEndpoint;
/**
 * Component for working with MongoDB GridFS.
 */
@UriEndpoint(firstVersion = "2.18.0", scheme = "mongodb-gridfs", title = "MongoDB GridFS", syntax = "mongodb-gridfs:connectionBean", label = "database,nosql")
public class GridFsEndpoint extends DefaultEndpoint {

    public static final String GRIDFS_OPERATION = "gridfs.operation";
    public static final String GRIDFS_METADATA = "gridfs.metadata";
    public static final String GRIDFS_CHUNKSIZE = "gridfs.chunksize";
    public static final String GRIDFS_FILE_ID_PRODUCED = "gridfs.fileid";

    @UriPath @Metadata(required = true)
    private String connectionBean;
    @UriParam @Metadata(required = true)
    private String database;
    @UriParam(defaultValue = GridFS.DEFAULT_BUCKET)
    private String bucket;
    @UriParam(enums = "ACKNOWLEDGED,W1,W2,W3,UNACKNOWLEDGED,JOURNALED,MAJORITY,SAFE")
    private WriteConcern writeConcern;
    @UriParam
    private WriteConcern writeConcernRef;
    @UriParam
    private ReadPreference readPreference;
    @UriParam(label = "producer")
    private String operation;
    @UriParam(label = "consumer")
    private String query;
    @UriParam(label = "consumer", defaultValue = "1000")
    private long initialDelay = 1000;
    @UriParam(label = "consumer", defaultValue = "500")
    private long delay = 500;
    @UriParam(label = "consumer", defaultValue = "TimeStamp")
    private QueryStrategy queryStrategy = QueryStrategy.TimeStamp;
    @UriParam(label = "consumer", defaultValue = "camel-timestamps")
    private String persistentTSCollection = "camel-timestamps";
    @UriParam(label = "consumer", defaultValue = "camel-timestamp")
    private String persistentTSObject = "camel-timestamp";
    @UriParam(label = "consumer", defaultValue = "camel-processed")
    private String fileAttributeName = "camel-processed";

    private Mongo mongoConnection;
    private DB db;
    private GridFS gridFs;
    private DBCollection filesCollection;

    public GridFsEndpoint(String uri, GridFsComponent component) {
        super(uri, component);
    }

    @Override
    public Producer createProducer() throws Exception {
        initializeConnection();
        return new GridFsProducer(this);
    }

    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        initializeConnection();
        return new GridFsConsumer(this, processor);
    }

    @Override
    public boolean isSingleton() {
        return true;
    }

    /**
     * Resolves the database and GridFS bucket on the already-established Mongo
     * connection (set up in {@link #doStart()}).
     *
     * @throws IllegalStateException if no database is configured or it cannot be resolved
     */
    public void initializeConnection() throws Exception {
        log.info("Initialize GridFS endpoint: {}", this);
        if (database == null) {
            throw new IllegalStateException("Missing required endpoint configuration: database");
        }
        db = mongoConnection.getDB(database);
        if (db == null) {
            throw new IllegalStateException("Could not initialize GridFsComponent. Database " + database + " does not exist.");
        }
        // The anonymous subclass + instance initializer captures the files
        // collection, which is only reachable from within a GridFS subclass
        // (getFilesCollection() is protected in the driver's GridFS class).
        gridFs = new GridFS(db, bucket == null ? GridFS.DEFAULT_BUCKET : bucket) {
            {
                filesCollection = getFilesCollection();
            }
        };
    }

    @Override
    protected void doStart() throws Exception {
        if (writeConcern != null && writeConcernRef != null) {
            String msg = "Cannot set both writeConcern and writeConcernRef at the same time. Respective values: " + writeConcern
                + ", " + writeConcernRef + ". Aborting initialization.";
            throw new IllegalArgumentException(msg);
        }
        mongoConnection = CamelContextHelper.mandatoryLookup(getCamelContext(), connectionBean, MongoClient.class);
        log.debug("Resolved the connection with the name {} as {}", connectionBean, mongoConnection);
        setWriteReadOptionsOnConnection();
        super.doStart();
    }

    @Override
    protected void doStop() throws Exception {
        super.doStop();
        if (mongoConnection != null) {
            log.debug("Closing connection");
            mongoConnection.close();
        }
    }

    /** Applies the configured WriteConcern (direct value wins over the bean ref) and ReadPreference. */
    private void setWriteReadOptionsOnConnection() {
        // Set the WriteConcern
        if (writeConcern != null) {
            mongoConnection.setWriteConcern(writeConcern);
        } else if (writeConcernRef != null) {
            mongoConnection.setWriteConcern(writeConcernRef);
        }
        // Set the ReadPreference
        if (readPreference != null) {
            mongoConnection.setReadPreference(readPreference);
        }
    }

    // ======= Getters and setters ===============================================

    public String getConnectionBean() {
        return connectionBean;
    }

    /**
     * Name of {@link com.mongodb.Mongo} to use.
     */
    public void setConnectionBean(String connectionBean) {
        this.connectionBean = connectionBean;
    }

    public Mongo getMongoConnection() {
        return mongoConnection;
    }

    /**
     * Sets the Mongo instance that represents the backing connection
     *
     * @param mongoConnection the connection to the database
     */
    public void setMongoConnection(Mongo mongoConnection) {
        this.mongoConnection = mongoConnection;
    }

    public DB getDB() {
        return db;
    }

    public String getDatabase() {
        return database;
    }

    /**
     * Sets the name of the MongoDB database to target
     *
     * @param database name of the MongoDB database
     */
    public void setDatabase(String database) {
        this.database = database;
    }

    public String getBucket() {
        return bucket;
    }

    /**
     * Sets the name of the GridFS bucket within the database. Default is "fs".
     *
     * @param bucket name of the GridFS bucket
     */
    public void setBucket(String bucket) {
        this.bucket = bucket;
    }

    public String getQuery() {
        return query;
    }

    /**
     * Additional query parameters (in JSON) that are used to configure the query used for finding
     * files in the GridFsConsumer
     * @param query
     */
    public void setQuery(String query) {
        this.query = query;
    }

    public long getDelay() {
        return delay;
    }

    /**
     * Sets the delay between polls within the Consumer. Default is 500ms
     * @param delay
     */
    public void setDelay(long delay) {
        this.delay = delay;
    }

    public long getInitialDelay() {
        return initialDelay;
    }

    /**
     * Sets the initialDelay before the consumer will start polling. Default is 1000ms
     * @param initialDelay
     */
    public void setInitialDelay(long initialDelay) {
        // Bug fix: this previously assigned the unrelated 'delay' field's
        // value, silently ignoring the configured initial delay.
        this.initialDelay = initialDelay;
    }

    /**
     * Sets the QueryStrategy that is used for polling for new files. Default is Timestamp
     * @see QueryStrategy
     * @param s
     */
    public void setQueryStrategy(String s) {
        queryStrategy = QueryStrategy.valueOf(s);
    }

    public QueryStrategy getQueryStrategy() {
        return queryStrategy;
    }

    /**
     * If the QueryType uses a persistent timestamp, this sets the name of the collection within
     * the DB to store the timestamp.
     * @param s
     */
    public void setPersistentTSCollection(String s) {
        persistentTSCollection = s;
    }

    public String getPersistentTSCollection() {
        return persistentTSCollection;
    }

    /**
     * If the QueryType uses a persistent timestamp, this is the ID of the object in the collection
     * to store the timestamp.
     * @param id
     */
    public void setPersistentTSObject(String id) {
        persistentTSObject = id;
    }

    public String getPersistentTSObject() {
        return persistentTSObject;
    }

    /**
     * If the QueryType uses a FileAttribute, this sets the name of the attribute that is used. Default is "camel-processed".
     * @param f
     */
    public void setFileAttributeName(String f) {
        fileAttributeName = f;
    }

    public String getFileAttributeName() {
        return fileAttributeName;
    }

    /**
     * Set the {@link WriteConcern} for write operations on MongoDB using the standard ones.
     * Resolved from the fields of the WriteConcern class by calling the {@link WriteConcern#valueOf(String)} method.
     *
     * @param writeConcern the standard name of the WriteConcern
     * @see <a href="http://api.mongodb.org/java/current/com/mongodb/WriteConcern.html#valueOf(java.lang.String)">possible options</a>
     */
    public void setWriteConcern(String writeConcern) {
        this.writeConcern = WriteConcern.valueOf(writeConcern);
    }

    public WriteConcern getWriteConcern() {
        return writeConcern;
    }

    /**
     * Set the {@link WriteConcern} for write operations on MongoDB, passing in the bean ref to a custom WriteConcern which exists in the Registry.
     * You can also use standard WriteConcerns by passing in their key. See the {@link #setWriteConcern(String) setWriteConcern} method.
     *
     * @param writeConcernRef the name of the bean in the registry that represents the WriteConcern to use
     */
    public void setWriteConcernRef(String writeConcernRef) {
        WriteConcern wc = this.getCamelContext().getRegistry().lookupByNameAndType(writeConcernRef, WriteConcern.class);
        if (wc == null) {
            String msg = "Camel MongoDB component could not find the WriteConcern in the Registry. Verify that the "
                + "provided bean name (" + writeConcernRef + ") is correct. Aborting initialization.";
            throw new IllegalArgumentException(msg);
        }
        this.writeConcernRef = wc;
    }

    public WriteConcern getWriteConcernRef() {
        return writeConcernRef;
    }

    /**
     * Sets a MongoDB {@link ReadPreference} on the Mongo connection. Read preferences set directly on the connection will be
     * overridden by this setting.
     * <p/>
     * The {@link com.mongodb.ReadPreference#valueOf(String)} utility method is used to resolve the passed {@code readPreference}
     * value. Some examples for the possible values are {@code nearest}, {@code primary} or {@code secondary} etc.
     *
     * @param readPreference the name of the read preference to set
     */
    public void setReadPreference(String readPreference) {
        this.readPreference = ReadPreference.valueOf(readPreference);
    }

    public ReadPreference getReadPreference() {
        return readPreference;
    }

    /**
     * Sets the operation this endpoint will execute against GridRS.
     */
    public void setOperation(String operation) {
        this.operation = operation;
    }

    public String getOperation() {
        return operation;
    }

    public GridFS getGridFs() {
        return gridFs;
    }

    public void setGridFs(GridFS gridFs) {
        this.gridFs = gridFs;
    }

    public DBCollection getFilesCollection() {
        return filesCollection;
    }
}
| |
package com.something.liberty.messaging;
import android.app.ActivityManager;
import android.app.Service;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.os.Handler;
import android.os.IBinder;
import android.os.PowerManager;
import android.util.Log;
import android.widget.Toast;
import com.getpebble.android.kit.PebbleKit;
import com.getpebble.android.kit.util.PebbleDictionary;
import com.something.liberty.UserUtils;
import org.eclipse.paho.client.mqttv3.MqttMessage;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.UUID;
/**
 * Foreground-less Android service that keeps the MQTT subscriptions for the
 * game alive, forwards incoming game messages as ordered broadcasts, and
 * bridges Pebble watch "attack" taps to outgoing attack messages.
 */
public class GameMessagingService extends Service implements MessagingUtils.NewGameMessageHandler, MessagingUtils.ConnectionLostHandler
{
    public static UUID PEBBLE_APP_UUID = UUID.fromString("8e994e98-0427-4a18-8103-f6b7e5489782");

    private static final int PEBBLE_KEY_ATTACK = 1;
    private static final String PEBBLE_MESSAGE_TYPE_ATTACK = "ATTACK";
    private static final String MQTT_TOPIC_SPLATTED = "something/killed/";
    private static final String MQTT_TOPIC_ATTACK_RESPONSE = "something/attResponse/";
    private static final String MQTT_TOPIC_NEWS = "something/news/";
    private static final String MQTT_TOPIC_OUTGUNNER = "something/outgunner/";

    /**
     * Starts this service unless an instance is already running.
     */
    public static void ensureServiceStarted(Context context)
    {
        // check if service already running
        ActivityManager manager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
        for (ActivityManager.RunningServiceInfo service : manager.getRunningServices(Integer.MAX_VALUE))
        {
            if (GameMessagingService.class.getName().equals(service.service.getClassName()))
            {
                return;
            }
        }
        Intent intent = new Intent(context, GameMessagingService.class);
        context.startService(intent);
    }

    public static void stopService(Context context)
    {
        Intent intent = new Intent(context, GameMessagingService.class);
        context.stopService(intent);
    }

    private Handler uiThreadHandler = null;
    private PowerManager.WakeLock wakeLock = null;
    private BroadcastReceiver pebbleKitReceiver = null;

    @Override
    public void onCreate()
    {
        super.onCreate();
        PowerManager powerManager = (PowerManager) getSystemService(Service.POWER_SERVICE);
        // NOTE(review): acquired without a timeout and held for the service's
        // whole lifetime - consider acquire(timeout) to limit battery impact.
        wakeLock = powerManager.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "SomethingLibertyWakeLock");
        wakeLock.acquire();
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId)
    {
        Log.i("SomethingLiberty", "GameMessagingService Started");
        uiThreadHandler = new Handler();

        // Register the Pebble data receiver only once; onStartCommand() can be
        // invoked multiple times (e.g. repeated startService calls or a
        // START_STICKY restart), and re-registering would leak the old receiver.
        if (pebbleKitReceiver == null)
        {
            pebbleKitReceiver = PebbleKit.registerReceivedDataHandler(this, new PebbleKit.PebbleDataReceiver(PEBBLE_APP_UUID)
            {
                @Override
                public void receiveData(Context context, int transactionId, PebbleDictionary data)
                {
                    String messageType = data.getString(PEBBLE_KEY_ATTACK);
                    if (PEBBLE_MESSAGE_TYPE_ATTACK.equals(messageType))
                    {
                        SendMessage.sendAttackMessage(context);
                        PebbleKit.sendAckToPebble(context, transactionId);
                    }
                }
            });
        }

        final MessagingUtils.ConnectionLostHandler thisConnectionLostHandler = this;
        final MessagingUtils.NewGameMessageHandler thisMessageHandler = this;
        final String username = UserUtils.getUsername(this);
        new Thread(new Runnable()
        {
            @Override
            public void run()
            {
                MessagingUtils messagingUtils = MessagingUtils.getMessagingUtils(UserUtils.getUsername(GameMessagingService.this));
                messagingUtils.subscribeToTopic(MQTT_TOPIC_SPLATTED + username, thisMessageHandler);
                messagingUtils.subscribeToTopic(MQTT_TOPIC_ATTACK_RESPONSE + username, thisMessageHandler);
                messagingUtils.subscribeToTopic(MQTT_TOPIC_NEWS + username, thisMessageHandler);
                messagingUtils.subscribeToTopic(MQTT_TOPIC_OUTGUNNER + username, thisMessageHandler);
                messagingUtils.setConnectionLostHandler(thisConnectionLostHandler);
            }
        }).start();     // Bug fix: was run(), which executed the network
                        // subscription work synchronously on the calling
                        // (main) thread instead of on the new thread.

        return START_STICKY;
    }

    @Override
    public IBinder onBind(Intent intent)
    {
        // Started (not bound) service.
        return null;
    }

    /**
     * Dispatches an incoming MQTT message to the matching topic handler on the
     * handler created in onStartCommand().
     */
    @Override
    public void onNewGameMessage(final String topic, final MqttMessage message)
    {
        uiThreadHandler.post(new Runnable()
        {
            @Override
            public void run()
            {
                String messageString = new String(message.getPayload());
                Log.i("SomethingLiberty", "GameMessagingService : received : " + messageString);
                if (topic.contains(MQTT_TOPIC_SPLATTED))
                {
                    handleSplattedMessage(message);
                }
                else if (topic.contains(MQTT_TOPIC_ATTACK_RESPONSE))
                {
                    handleAttackResponseMessage(message);
                }
                else if (topic.contains(MQTT_TOPIC_NEWS))
                {
                    handleNewsMessage(message);
                }
                else if (topic.contains(MQTT_TOPIC_OUTGUNNER))
                {
                    handleOutgunnerMessage(message);
                }
            }
        });
    }

    @Override
    public void onConnectionLost(final Throwable cause)
    {
        final Service thisService = this;
        uiThreadHandler.post(new Runnable()
        {
            @Override
            public void run()
            {
                Log.i("SomethingLiberty", "GameMessagingService : Client disconnected : " + cause.getMessage());
                Toast.makeText(thisService, "Lost MQTT connection", Toast.LENGTH_SHORT).show();
            }
        });
    }

    /** Parses a "splatted" JSON payload and rebroadcasts its message text. */
    private void handleSplattedMessage(MqttMessage splattedMessage)
    {
        String payloadString = new String(splattedMessage.getPayload());
        String messageToDisplay = null;
        try
        {
            JSONObject payloadObject = new JSONObject(payloadString);
            messageToDisplay = payloadObject.getString(GameMessageReceiver.EXTRA_MESSAGE);
        }
        catch (JSONException e)
        {
            e.printStackTrace();
            Log.e("SomethingLiberty", "Failed to parse splatted message");
            return;
        }
        Intent broadcastMessageIntent = new Intent();
        broadcastMessageIntent.setAction(GameMessageReceiver.ACTION_HANDLE_SPLATTED_MESSAGE);
        broadcastMessageIntent.putExtra(GameMessageReceiver.EXTRA_MESSAGE, messageToDisplay);
        sendOrderedBroadcast(broadcastMessageIntent, null);
    }

    /** Parses an attack-response JSON payload and rebroadcasts its result and message. */
    private void handleAttackResponseMessage(MqttMessage attackResponseMessage)
    {
        String payloadString = new String(attackResponseMessage.getPayload());
        String responseResult = null;
        String messageToDisplay = null;
        try
        {
            JSONObject payloadObject = new JSONObject(payloadString);
            responseResult = payloadObject.getString(GameMessageReceiver.EXTRA_RESPONSE_TYPE);
            messageToDisplay = payloadObject.getString(GameMessageReceiver.EXTRA_ATTACKER_MESSAGE);
        }
        catch (JSONException e)
        {
            e.printStackTrace();
            Log.e("SomethingLiberty", "Failed to parse attack response message");
            return;
        }
        Intent broadcastMessageIntent = new Intent();
        broadcastMessageIntent.setAction(GameMessageReceiver.ACTION_HANDLE_ATTACK_RESPONSE_MESSAGE);
        broadcastMessageIntent.putExtra(GameMessageReceiver.EXTRA_RESPONSE_TYPE, responseResult);
        broadcastMessageIntent.putExtra(GameMessageReceiver.EXTRA_ATTACKER_MESSAGE, messageToDisplay);
        sendOrderedBroadcast(broadcastMessageIntent, null);
    }

    /** Rebroadcasts the raw news payload as JSON; parsing is left to receivers. */
    private void handleNewsMessage(MqttMessage newsMessage)
    {
        Intent broadcastMessageIntent = new Intent();
        broadcastMessageIntent.setAction(GameMessageReceiver.ACTION_HANDLE_NEWS_MESSAGE);
        broadcastMessageIntent.putExtra(GameMessageReceiver.EXTRA_NEWS_JSON, new String(newsMessage.getPayload()));
        sendOrderedBroadcast(broadcastMessageIntent, null);
    }

    /** Parses an "outgunner" JSON payload and rebroadcasts its message text. */
    private void handleOutgunnerMessage(MqttMessage outgunnerMessage)
    {
        String messageString = null;
        try
        {
            JSONObject payloadObject = new JSONObject(new String(outgunnerMessage.getPayload()));
            messageString = payloadObject.getString(GameMessageReceiver.EXTRA_MESSAGE);
        }
        catch (JSONException e)
        {
            e.printStackTrace();
            Log.e("SomethingLiberty", "Failed to parse outgunner message");
            return;
        }
        Intent broadcastMessageIntent = new Intent();
        broadcastMessageIntent.putExtra(GameMessageReceiver.EXTRA_MESSAGE, messageString);
        broadcastMessageIntent.setAction(GameMessageReceiver.ACTION_HANDLE_OUTGUNNER_MESSAGE);
        sendOrderedBroadcast(broadcastMessageIntent, null);
    }

    @Override
    public void onDestroy()
    {
        super.onDestroy();
        if (pebbleKitReceiver != null)
        {
            unregisterReceiver(pebbleKitReceiver);
        }
        wakeLock.release();
    }
}
| |
package com.gemstone.gemfire.cache.query.functional;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Collection;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.RegionShortcut;
import com.gemstone.gemfire.cache.query.CacheUtils;
import com.gemstone.gemfire.cache.query.Index;
import com.gemstone.gemfire.cache.query.IndexExistsException;
import com.gemstone.gemfire.cache.query.MultiIndexCreationException;
import com.gemstone.gemfire.cache.query.QueryService;
import com.gemstone.gemfire.cache.query.SelectResults;
import com.gemstone.gemfire.cache.query.data.Portfolio;
import com.gemstone.gemfire.cache.query.internal.QueryObserver;
import com.gemstone.gemfire.cache.query.internal.QueryObserverAdapter;
import com.gemstone.gemfire.cache.query.internal.QueryObserverHolder;
import com.gemstone.gemfire.cache.query.internal.index.CompactRangeIndex;
import com.gemstone.gemfire.cache.query.internal.index.HashIndex;
import com.gemstone.gemfire.cache.query.internal.index.PrimaryKeyIndex;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
@Category(IntegrationTest.class)
public class MultiIndexCreationJUnitTest {
private static String regionName = "portfolios";
private String prRegionName = "prPortfolios";
private String overflowRegionName = "overflowPortfolios";
/**
 * Starts the cache and creates the test region before each test.
 */
@Before
public void setUp() throws java.lang.Exception {
    CacheUtils.startCache();
    // The region is re-fetched via CacheUtils.getRegion(regionName) inside the
    // tests, so the (previously unused, raw-typed) local variable is dropped.
    CacheUtils.createRegion(regionName, Portfolio.class);
}
/**
 * Closes the cache after each test so the next test starts from a clean slate.
 */
@After
public void tearDown() throws java.lang.Exception {
    CacheUtils.closeCache();
}
/**
 * Verifies that two indexes defined via defineIndex() are both materialized by
 * a single createDefinedIndexes() call, have the expected statistics, and are
 * actually consulted when matching queries run.
 */
@Test
public void testBasicMultiIndexCreation() throws Exception {
    Region region = CacheUtils.getRegion(regionName);
    for (int key = 0; key < 10; key++) {
        region.put("" + key, new Portfolio(key));
    }

    QueryService queryService = CacheUtils.getQueryService();
    queryService.defineIndex("statusIndex", "status", region.getFullPath());
    queryService.defineIndex("IDIndex", "ID", region.getFullPath());
    List<Index> created = queryService.createDefinedIndexes();
    assertEquals("Only 2 indexes should have been created. ", 2, created.size());

    Index statusIndex = queryService.getIndex(region, "statusIndex");
    assertEquals(2, statusIndex.getStatistics().getNumberOfKeys());
    assertEquals(10, statusIndex.getStatistics().getNumberOfValues());

    Index idIndex = queryService.getIndex(region, "IDIndex");
    assertEquals(10, idIndex.getStatistics().getNumberOfKeys());
    assertEquals(10, idIndex.getStatistics().getNumberOfValues());

    // Observer that fails the test at query end unless an index lookup occurred.
    QueryObserver previous = QueryObserverHolder
        .setInstance(new QueryObserverAdapter() {
            private boolean indexCalled = false;

            public void afterIndexLookup(Collection results) {
                indexCalled = true;
            }

            public void endQuery() {
                assertTrue(indexCalled);
            }
        });

    String[] queries = {
        "select * from " + region.getFullPath() + " where status = 'active'",
        "select * from " + region.getFullPath() + " where ID > 4"
    };
    for (String queryString : queries) {
        SelectResults results = (SelectResults) queryService.newQuery(queryString).execute();
        assertEquals(5, results.size());
    }

    QueryObserverHolder.setInstance(previous);
}
@Test
public void testBasicMultiIndexCreationDifferentTypes() throws Exception {
Region r = CacheUtils.getRegion(regionName);
for (int i = 0; i < 10; i++) {
r.put("" + i, new Portfolio(i));
}
QueryService qs = CacheUtils.getQueryService();
qs.defineIndex("statusIndex", "status", r.getFullPath());
qs.defineHashIndex("IDIndex", "ID", r.getFullPath());
qs.defineKeyIndex("keyIDIndex", "ID", r.getFullPath());
List<Index> indexes = qs.createDefinedIndexes();
assertEquals("Only 3 indexes should have been created. ", 3, indexes.size());
Index ind = qs.getIndex(r, "statusIndex");
assertTrue(ind instanceof CompactRangeIndex);
assertEquals(2, ind.getStatistics().getNumberOfKeys());
assertEquals(10, ind.getStatistics().getNumberOfValues());
ind = qs.getIndex(r, "IDIndex");
assertTrue(ind instanceof HashIndex);
assertEquals(10, ind.getStatistics().getNumberOfValues());
ind = qs.getIndex(r, "keyIDIndex");
assertTrue(ind instanceof PrimaryKeyIndex);
assertEquals(10, ind.getStatistics().getNumberOfKeys());
assertEquals(10, ind.getStatistics().getNumberOfValues());
QueryObserver old = QueryObserverHolder
.setInstance(new QueryObserverAdapter() {
private boolean indexCalled = false;
public void afterIndexLookup(Collection results) {
indexCalled = true;
}
public void endQuery() {
assertTrue(indexCalled);
}
});
String[] queries = {
"select * from " + r.getFullPath() + " where status = 'active'",
"select * from " + r.getFullPath() + " where ID > 4"
};
for(int i = 0 ; i < queries.length; i++) {
SelectResults sr = (SelectResults) qs.newQuery(queries[i]).execute();
assertEquals(5, sr.size());
}
QueryObserverHolder.setInstance(old);
}
@Test
public void testMultiIndexCreationOnlyDefine() throws Exception {
Region r = CacheUtils.getRegion(regionName);
for (int i = 0; i < 10; i++) {
r.put("" + i, new Portfolio(i));
}
QueryService qs = CacheUtils.getQueryService();
qs.defineIndex("statusIndex", "status", r.getFullPath());
qs.defineIndex("IDIndex", "ID", r.getFullPath());
Index ind = qs.getIndex(r, "statusIndex");
assertNull("Index should not have been created",ind);
ind = qs.getIndex(r, "IDIndex");
assertNull("Index should not have been created",ind);
QueryObserver old = QueryObserverHolder
.setInstance(new QueryObserverAdapter() {
private boolean indexCalled = false;
public void afterIndexLookup(Collection results) {
indexCalled = true;
}
public void endQuery() {
assertFalse(indexCalled);
}
});
String[] queries = {
"select * from " + r.getFullPath() + " where status = 'active'",
"select * from " + r.getFullPath() + " where ID > 4"
};
for(int i = 0 ; i < queries.length; i++) {
SelectResults sr = (SelectResults) qs.newQuery(queries[i]).execute();
assertEquals(5, sr.size());
}
QueryObserverHolder.setInstance(old);
}
@Test
public void testMultiIndexCreationOnFailure() throws Exception {
Region r = CacheUtils.getRegion(regionName);
for (int i = 0; i < 10; i++) {
r.put("" + i, new Portfolio(i));
}
QueryService qs = CacheUtils.getQueryService();
qs.defineIndex("IDIndex1", "ID", r.getFullPath());
qs.defineIndex("IDIndex2", "ID", r.getFullPath());
List<Index> indexes = null;
try {
indexes = qs.createDefinedIndexes();
fail("Exception should have been thrown");
} catch(MultiIndexCreationException me) {
assertTrue("IndexExistsException should have been thrown ", me.getExceptionsMap().values().iterator().next() instanceof IndexExistsException);
}
assertNull("Index should not have been returned",indexes);
assertEquals("1 index should have been created.", 1, qs.getIndexes().size());
Index ind = qs.getIndexes().iterator().next();
assertNotNull("Index should not be null.", ind);
assertEquals(10, ind.getStatistics().getNumberOfKeys());
assertEquals(10, ind.getStatistics().getNumberOfValues());
}
@Test
public void testIndexCreationOnMultipleRegions() throws Exception {
Region pr = CacheUtils.getCache().createRegionFactory(RegionShortcut.PARTITION).create(prRegionName);
for (int i = 0; i < 10; i++) {
pr.put("" + i, new Portfolio(i));
}
Region overflow = CacheUtils.getCache().createRegionFactory(RegionShortcut.REPLICATE_OVERFLOW).create(overflowRegionName);
for (int i = 0; i < 10; i++) {
overflow.put("" + i, new Portfolio(i));
}
Region r = CacheUtils.getRegion(regionName);
for (int i = 0; i < 10; i++) {
r.put("" + i, new Portfolio(i));
}
QueryService qs = CacheUtils.getQueryService();
qs.defineIndex("IDIndex", "ID", pr.getFullPath());
qs.defineIndex("secIDIndex", "pos.secId", r.getFullPath() + " p, p.positions.values pos ");
qs.defineIndex("statusIndex", "status", overflow.getFullPath() );
List<Index> indexes = qs.createDefinedIndexes();
assertEquals("Only 3 indexes should have been created. ", 3, indexes.size());
Index ind = qs.getIndex(overflow, "statusIndex");
assertEquals(2, ind.getStatistics().getNumberOfKeys());
assertEquals(10, ind.getStatistics().getNumberOfValues());
ind = qs.getIndex(pr, "IDIndex");
assertEquals(10, ind.getStatistics().getNumberOfKeys());
assertEquals(10, ind.getStatistics().getNumberOfValues());
ind = qs.getIndex(r, "secIDIndex");
assertEquals(12, ind.getStatistics().getNumberOfKeys());
assertEquals(20, ind.getStatistics().getNumberOfValues());
QueryObserver old = QueryObserverHolder
.setInstance(new QueryObserverAdapter() {
private boolean indexCalled = false;
public void afterIndexLookup(Collection results) {
indexCalled = true;
}
public void endQuery() {
assertTrue(indexCalled);
}
});
String[] queries = {
"select * from " + overflow.getFullPath() + " where status = 'active'",
"select * from " + pr.getFullPath() + " where ID > 4",
"select * from " + r.getFullPath() + " p, p.positions.values pos where pos.secId != NULL"
};
for(int i = 0 ; i < queries.length; i++) {
SelectResults sr = (SelectResults) qs.newQuery(queries[i]).execute();
if(i == 2) {
assertEquals("Incorrect results for query: " + queries[i], 20, sr.size());
} else {
assertEquals("Incorrect results for query: " + queries[i], 5, sr.size());
}
}
QueryObserverHolder.setInstance(old);
}
@Test
public void testIndexCreationOnMultipleRegionsBeforePuts() throws Exception {
Region pr = CacheUtils.getCache().createRegionFactory(RegionShortcut.PARTITION).create(prRegionName);
Region overflow = CacheUtils.getCache().createRegionFactory(RegionShortcut.REPLICATE_OVERFLOW).create(overflowRegionName);
Region r = CacheUtils.getRegion(regionName);
QueryService qs = CacheUtils.getQueryService();
qs.defineIndex("IDIndex", "ID", pr.getFullPath());
qs.defineIndex("secIDIndex", "pos.secId", r.getFullPath() + " p, p.positions.values pos ");
qs.defineIndex("statusIndex", "status", overflow.getFullPath());
List<Index> indexes = qs.createDefinedIndexes();
for (int i = 0; i < 10; i++) {
r.put("" + i, new Portfolio(i));
}
for (int i = 0; i < 10; i++) {
pr.put("" + i, new Portfolio(i));
}
for (int i = 0; i < 10; i++) {
overflow.put("" + i, new Portfolio(i));
}
assertEquals("Only 3 indexes should have been created. ", 3, indexes.size());
Index ind = qs.getIndex(overflow, "statusIndex");
assertEquals(2, ind.getStatistics().getNumberOfKeys());
assertEquals(10, ind.getStatistics().getNumberOfValues());
ind = qs.getIndex(pr, "IDIndex");
assertEquals(10, ind.getStatistics().getNumberOfKeys());
assertEquals(10, ind.getStatistics().getNumberOfValues());
ind = qs.getIndex(r, "secIDIndex");
assertEquals(12, ind.getStatistics().getNumberOfKeys());
assertEquals(20, ind.getStatistics().getNumberOfValues());
QueryObserver old = QueryObserverHolder
.setInstance(new QueryObserverAdapter() {
private boolean indexCalled = false;
public void afterIndexLookup(Collection results) {
indexCalled = true;
}
public void endQuery() {
assertTrue(indexCalled);
}
});
String[] queries = {
"select * from " + overflow.getFullPath() + " where status = 'active'",
"select * from " + pr.getFullPath() + " where ID > 4",
"select * from " + r.getFullPath() + " p, p.positions.values pos where pos.secId != NULL"
};
for(int i = 0 ; i < queries.length; i++) {
SelectResults sr = (SelectResults) qs.newQuery(queries[i]).execute();
if(i == 2) {
assertEquals("Incorrect results for query: " + queries[i], 20, sr.size());
} else {
assertEquals("Incorrect results for query: " + queries[i], 5, sr.size());
}
}
QueryObserverHolder.setInstance(old);
}
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.feature.vt.gui.plugin;
import java.net.URL;
import java.util.List;
import java.util.Set;
import javax.swing.*;
import docking.action.DockingActionIf;
import docking.help.Help;
import docking.help.HelpService;
import docking.tool.ToolConstants;
import docking.wizard.WizardManager;
import ghidra.GhidraOptions;
import ghidra.app.plugin.core.codebrowser.CodeBrowserPlugin;
import ghidra.app.plugin.core.colorizer.ColorizingService;
import ghidra.app.util.viewer.options.ListingDisplayOptionsEditor;
import ghidra.feature.vt.api.db.VTSessionDB;
import ghidra.feature.vt.api.main.VTSession;
import ghidra.feature.vt.gui.actions.*;
import ghidra.feature.vt.gui.provider.functionassociation.VTFunctionAssociationProvider;
import ghidra.feature.vt.gui.provider.impliedmatches.*;
import ghidra.feature.vt.gui.provider.markuptable.VTMarkupItemsTableProvider;
import ghidra.feature.vt.gui.provider.matchtable.VTMatchTableProvider;
import ghidra.feature.vt.gui.wizard.VTNewSessionWizardManager;
import ghidra.framework.model.*;
import ghidra.framework.options.Options;
import ghidra.framework.options.SaveState;
import ghidra.framework.plugintool.*;
import ghidra.framework.plugintool.util.PluginStatus;
import ghidra.framework.preferences.Preferences;
import ghidra.program.model.address.AddressSetView;
import ghidra.program.model.listing.Program;
import ghidra.program.util.ProgramLocation;
import ghidra.util.*;
import resources.MultiIcon;
import resources.ResourceManager;
import resources.icons.*;
/**
 * Main plugin for the Version Tracking feature. Creates the controller, the
 * match/markup/implied-match/function-association table providers, and the
 * sub-tool manager, and wires tool-level actions and persisted state.
 * <p>
 * NOTE(review): registered HIDDEN, so it is not user-selectable; it is assumed
 * to be added programmatically to the VT tool — confirm against tool setup code.
 */
//@formatter:off
@PluginInfo(
	status = PluginStatus.HIDDEN,
	packageName = VersionTrackingPluginPackage.NAME,
	category = "Version Tracking",
	shortDescription = "Version Tracking",
	description = "This plugin provides the Version Tracking Feature.",
	servicesProvided = { VTController.class }
)
//@formatter:on
public class VTPlugin extends Plugin {
	public static final String WINDOW_GROUP = "VTResults";
	public static final String HELP_TOPIC_NAME = "VersionTrackingPlugin";
	private static final String SHOW_HELP_PREFERENCE = "VersionTrackingShowHelp";
	// Set once in the constructor to this plugin's name; read by other VT classes.
	public static String OWNER;
	// menu stuffs
	public static final String MATCH_POPUP_MENU_NAME = "Version Tracking Match";
	public static final String MARKUP_POPUP_MENU_NAME = "Version Tracking Markup";
	public static final String VT_MAIN_MENU_GROUP = "AAA_VT_Main";
	public static final String ADDRESS_EDIT_MENU_GROUP = "A_VT_X_AddressEdit";
	public static final String APPLY_EDIT_MENU_GROUP = "A_VT_Apply_Edit";
	public static final String EDIT_MENU_GROUP = "A_VT_Edit_1";
	public static final String TAG_MENU_GROUP = "A_VT_Edit_2";
	public static final String UNEDIT_MENU_GROUP = "A_VT_UnEdit";
	public static final String VT_SETTINGS_MENU_GROUP = "ZZ_VT_SETTINGS";
	public static final Icon UNFILTERED_ICON =
		ResourceManager.loadImage("images/lightbulb_off.png");
	public static final Icon FILTERED_ICON = ResourceManager.loadImage("images/lightbulb.png");
	public static final Icon REPLACED_ICON = ResourceManager.loadImage("images/sync_enabled.png");
	// Composite icon: a small undo arrow overlaid with a scaled cancel badge.
	public static final Icon UNIGNORED_ICON = new IconWrapper() {
		@Override
		protected Icon createIcon() {
			MultiIcon icon = new MultiIcon(new EmptyIcon(16, 16));
			ImageIcon cancelIcon = ResourceManager.loadImage("images/dialog-cancel.png");
			ScaledImageIconWrapper scaledCancelIcon =
				new ScaledImageIconWrapper(cancelIcon, 13, 13);
			TranslateIcon translatedCancelIcon = new TranslateIcon(scaledCancelIcon, 3, 4);
			ImageIcon undoIcon = ResourceManager.loadImage("images/undo.png");
			TranslateIcon translatedUndoIcon = new TranslateIcon(undoIcon, 0, -4);
			icon.addIcon(translatedUndoIcon);
			icon.addIcon(translatedCancelIcon);
			return icon;
		}
	};
	private VTController controller;
	// common resources
	// destination-side resources
	private VTMatchTableProvider matchesProvider;
	private VTMarkupItemsTableProvider markupProvider;
	private VTSubToolManager toolManager;
	private VTImpliedMatchesTableProvider impliedMatchesTable;
	private VTFunctionAssociationProvider functionAssociationProvider;

	/**
	 * Builds the controller, providers, and sub-tool manager, registers actions and
	 * the VTController service, and strips tool actions that do not apply to the
	 * (unconfigurable) VT tool.
	 */
	public VTPlugin(PluginTool tool) {
		super(tool);
		OWNER = getName();
		// Controller must exist before the providers, which all take it.
		controller = new VTControllerImpl(this);
		matchesProvider = new VTMatchTableProvider(controller);
		markupProvider = new VTMarkupItemsTableProvider(controller);
		impliedMatchesTable = new VTImpliedMatchesTableProvider(controller);
		functionAssociationProvider = new VTFunctionAssociationProvider(controller);
		toolManager = new VTSubToolManager(this);
		createActions();
		registerServiceProvided(VTController.class, controller);
		tool.setUnconfigurable();
		// The VT tool layout is fixed; remove save/export actions that would let
		// the user persist it as a generic tool.
		DockingActionIf saveAs = getToolAction("Save Tool As");
		tool.removeAction(saveAs);
		DockingActionIf export = getToolAction("Export Tool");
		tool.removeAction(export);
		// These hooks register themselves with the controller; the instances are
		// intentionally not retained here.
		new MatchStatusUpdaterAssociationHook(controller);
		new ImpliedMatchAssociationHook(controller);
		initializeOptions();
	}

	/**
	 * Finds a tool-owned action by name.
	 * @throws IllegalArgumentException if no such action exists
	 */
	private DockingActionIf getToolAction(String actionName) {
		Set<DockingActionIf> actions = tool.getDockingActionsByOwnerName(ToolConstants.TOOL_OWNER);
		for (DockingActionIf action : actions) {
			if (action.getName().equals(actionName)) {
				return action;
			}
		}
		throw new IllegalArgumentException("Unable to find Tool action '" + actionName + "'");
	}

	// Registers the browser-display options editor and its help location.
	private void initializeOptions() {
		Options options = tool.getOptions(GhidraOptions.CATEGORY_BROWSER_DISPLAY);
		options.registerOptionsEditor(new ListingDisplayOptionsEditor(options));
		options.setOptionsHelpLocation(new HelpLocation(CodeBrowserPlugin.class.getSimpleName(),
			GhidraOptions.CATEGORY_BROWSER_DISPLAY));
	}

	@Override
	protected void init() {
		maybeShowHelp();
	}

	// Shows the VT workflow help page exactly once per installation (guarded by a
	// persisted preference); suppressed entirely in dev/test mode.
	private void maybeShowHelp() {
		if (SystemUtilities.isInDevelopmentMode() || SystemUtilities.isInTestingMode()) {
			return; // don't show help for dev mode
		}
		HelpService help = Help.getHelpService();
		// if this is the first time Ghidra is being run, pop up
		// the What's New help page
		String preference = Preferences.getProperty(SHOW_HELP_PREFERENCE);
		if (preference != null) {
			return;
		}
		Preferences.setProperty(SHOW_HELP_PREFERENCE, "No");
		Preferences.store();
		URL url = ResourceManager.getResource("help/topics/VersionTrackingPlugin/VT_Workflow.html");
		if (url == null) {
			Msg.showError(this, null, "Help Not Found",
				"Unable to find the Version Tracking workflow help");
			return;
		}
		help.showHelp(url);
	}

	// Installs all session-level and tool-level VT actions.
	private void createActions() {
		tool.addAction(new CreateVersionTrackingSessionAction(controller));
		tool.addAction(new OpenVersionTrackingSessionAction(controller));
		tool.addAction(new AddToVersionTrackingSessionAction(controller));
		tool.addAction(new CloseVersionTrackingSessionAction(controller));
		tool.addAction(new SaveVersionTrackingSessionAction(controller));
		tool.addAction(new UndoAction(controller));
		tool.addAction(new RedoAction(controller));
		tool.addAction(new ResetToolAction(controller, toolManager));
		tool.addAction(new HelpAction());
		tool.addAction(new AutoVersionTrackingAction(controller));
	}

	@Override
	protected void close() {
		// Discard unsaved session changes and hide all VT providers before closing.
		controller.closeCurrentSessionIgnoringChanges();
		matchesProvider.setVisible(false);
		markupProvider.setVisible(false);
		impliedMatchesTable.setVisible(false);
		functionAssociationProvider.setVisible(false);
		super.close();
	}

	@Override
	protected void dispose() {
		controller.dispose();
		super.dispose();
	}

	@Override
	public Class<?>[] getSupportedDataTypes() {
		return new Class[] { VTSession.class, Program.class };
	}

	/**
	 * Accepts either an existing VT session file (opened directly) or up to two
	 * Program files (used to seed the new-session wizard).
	 * @return true if the data was consumed
	 */
	@Override
	public boolean acceptData(DomainFile[] data) {
		if (data == null || data.length == 0) {
			return false;
		}
		// A session file wins over programs: open it and stop.
		for (DomainFile domainFile : data) {
			if (domainFile != null &&
				VTSession.class.isAssignableFrom(domainFile.getDomainObjectClass())) {
				openVersionTrackingSession(domainFile);
				return true;
			}
		}
		// Otherwise pick the first two program files as wizard source/destination.
		DomainFile programFile1 = null;
		DomainFile programFile2 = null;
		for (DomainFile domainFile : data) {
			if (domainFile != null &&
				Program.class.isAssignableFrom(domainFile.getDomainObjectClass())) {
				if (programFile1 == null) {
					programFile1 = domainFile;
				}
				else if (programFile2 == null) {
					programFile2 = domainFile;
				}
			}
		}
		if (programFile1 != null) {
			if (!controller.closeVersionTrackingSession()) {
				return false; // user cancelled during save dialog
			}
			VTNewSessionWizardManager vtWizardManager =
				new VTNewSessionWizardManager(controller, programFile1, programFile2);
			WizardManager wizardManager =
				new WizardManager("Version Tracking Wizard", true, vtWizardManager);
			wizardManager.showWizard(tool.getToolFrame());
			return true;
		}
		return false;
	}

	private void openVersionTrackingSession(DomainFile domainFile) {
		controller.openVersionTrackingSession(domainFile);
	}

	@Override
	public void readConfigState(SaveState saveState) {
		controller.readConfigState(saveState);
		matchesProvider.readConfigState(saveState);
		markupProvider.readConfigState(saveState);
		impliedMatchesTable.readConfigState(saveState);
		functionAssociationProvider.readConfigState(saveState);
	}

	@Override
	public void writeConfigState(SaveState saveState) {
		controller.writeConfigState(saveState);
		matchesProvider.writeConfigState(saveState);
		markupProvider.writeConfigState(saveState);
		impliedMatchesTable.writeConfigState(saveState);
		functionAssociationProvider.writeConfigState(saveState);
	}

	/**
	 * Restores the previously open session, located via the project name/location
	 * and pathname persisted by {@link #writeDataState(SaveState)}.
	 */
	@Override
	public void readDataState(SaveState saveState) {
		String pathname = saveState.getString("PATHNAME", null);
		String location = saveState.getString("PROJECT_LOCATION", null);
		String projectName = saveState.getString("PROJECT_NAME", null);
		if (location == null || projectName == null) {
			return;
		}
		ProjectLocator url = new ProjectLocator(location, projectName);
		ProjectData projectData = tool.getProject().getProjectData(url);
		if (projectData == null) {
			Msg.showError(this, tool.getToolFrame(), "File Not Found", "Could not find " + url);
			return;
		}
		DomainFile domainFile = projectData.getFile(pathname);
		controller.openVersionTrackingSession(domainFile);
	}

	// Persists enough information (project locator + file path) to re-open the
	// current session in readDataState(); writes nothing when no session is open.
	@Override
	public void writeDataState(SaveState saveState) {
		VTSessionDB session = (VTSessionDB) controller.getSession();
		if (session == null) {
			return;
		}
		DomainFile domainFile = session.getDomainFile();
		String projectLocation = null;
		String projectName = null;
		String path = null;
		ProjectLocator url = domainFile.getProjectLocator();
		if (url != null) {
			projectLocation = url.getLocation();
			projectName = url.getName();
			path = domainFile.getPathname();
		}
		saveState.putString("PROJECT_LOCATION", projectLocation);
		saveState.putString("PROJECT_NAME", projectName);
		saveState.putString("PATHNAME", path);
	}

	@Override
	protected boolean saveData() {
		return controller.checkForUnSavedChanges();
	}

	// Refuses to close while either sub-tool is still executing a background command.
	@Override
	protected boolean canClose() {
		PluginTool sourceTool = toolManager.getSourceTool();
		PluginTool destinationTool = toolManager.getDestinationTool();
		if (toolManager.isToolExecutingCommand(sourceTool)) {
			showBusyToolMessage(sourceTool);
			return false;
		}
		else if (toolManager.isToolExecutingCommand(destinationTool)) {
			showBusyToolMessage(destinationTool);
			return false;
		}
		return true;
	}

	public AddressSetView getSelectionInSourceTool() {
		return toolManager.getSelectionInSourceTool();
	}

	public AddressSetView getSelectionInDestinationTool() {
		return toolManager.getSelectionInDestinationTool();
	}

	public void setSelectionInDestinationTool(AddressSetView destinationSet) {
		toolManager.setSelectionInDestinationTool(destinationSet);
	}

	public void setSelectionInSourceTool(AddressSetView sourceSet) {
		toolManager.setSelectionInSourceTool(sourceSet);
	}

	public List<DomainFile> getChangedProgramsInSourceTool() {
		return toolManager.getChangedProgramsInSourceTool();
	}

	public List<DomainFile> getChangedProgramsInDestinationTool() {
		return toolManager.getChangedProgramsInDestinationTool();
	}

	public void gotoSourceLocation(ProgramLocation location) {
		toolManager.gotoSourceLocation(location);
	}

	public void gotoDestinationLocation(ProgramLocation location) {
		toolManager.gotoDestinationLocation(location);
	}

	// Triggers re-evaluation of action enablement (e.g. undo/redo availability).
	public void updateUndoActions() {
		tool.contextChanged(null);
	}

	public VTController getController() {
		return controller;
	}

	public VTSubToolManager getToolManager() {
		return toolManager;
	}

	public VTMatchTableProvider getMatchesProvider() {
		return matchesProvider;
	}

	public ColorizingService getSourceColorizingService() {
		return toolManager.getSourceColorizingService();
	}

	public ColorizingService getDestinationColorizingService() {
		return toolManager.getDestinationColorizingService();
	}

	/**
	 * Displays a dialog stating that a tool is busy.
	 * @param tool the tool to display that's busy.
	 */
	static void showBusyToolMessage(PluginTool tool) {
		JFrame toolFrame = tool.getToolFrame();
		tool.beep();
		Msg.showInfo(VTPlugin.class, toolFrame, "Tool \"" + tool.getName() + "\" Busy",
			"You must stop all background tasks before exiting.");
	}
}
| |
/*
* Copyright (c) 2012, Cloudera, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.recordbreaker.analyzer;
import java.sql.SQLException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;
import java.sql.DriverManager;
import org.apache.hadoop.fs.permission.FsPermission;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.util.Map;
import java.util.Set;
import java.util.List;
import java.util.Random;
import java.util.HashMap;
import java.util.HashSet;
import java.util.ArrayList;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.avro.Schema;
/*******************************************************
* DataQuery handles Hive data-importation and query processing.
* Assumes Hive is running locally at port 10000.
*
* @author "Michael Cafarella" <mjc@lofie.local>
* @version 1.0
* @since 1.0
********************************************************/
public class DataQuery implements Serializable {
  private static final Log LOG = LogFactory.getLog(DataQuery.class);
  private static boolean inited = false;
  private static DataQuery dataQuery;
  // Impala speaks the HiveServer2 wire protocol, so both connections
  // deliberately use the same Hive JDBC driver class.
  private static String hiveDriverName = "org.apache.hive.jdbc.HiveDriver";
  private static String impalaDriverName = "org.apache.hive.jdbc.HiveDriver";
  private static String tmpTablesDir = "/tmp/tmptables";
  String hiveConnectString;
  String impalaConnectString;
  Configuration conf;
  Connection hiveCon;
  Connection impalaCon;
  HiveTableCache tableCache;
  Random r = new Random();
  // Bookkeeping of created tables / loaded paths (package-visible for callers).
  Map<Path, String> tables;
  Set<Path> isLoaded;

  /** Returns the shared instance, creating it on first use. */
  public synchronized static DataQuery getInstance() {
    return DataQuery.getInstance(false);
  }

  /**
   * Returns the shared instance.
   *
   * @param force when true, any existing instance is closed and rebuilt
   */
  public synchronized static DataQuery getInstance(boolean force) {
    if (force && dataQuery != null) {
      try {
        dataQuery.close();
      } catch (SQLException sqe) {
        // Best-effort close of the stale instance; log rather than silently drop.
        LOG.warn("Failed to close previous DataQuery instance", sqe);
      }
      dataQuery = null;
    }
    if (force || (!inited)) {
      try {
        dataQuery = new DataQuery();
      } catch (SQLException se) {
        se.printStackTrace();
      } finally {
        // Mark initialization attempted even on failure so we don't retry forever.
        inited = true;
      }
    }
    return dataQuery;
  }

  /**
   * Opens JDBC connections to Hive and Impala (connect strings are read from the
   * Hadoop configuration, with localhost defaults) and asks Impala to refresh its
   * metadata. A failed Hive connection is tolerated; Impala connection errors are
   * printed and leave {@code impalaCon} null.
   */
  public DataQuery() throws SQLException {
    try {
      this.conf = new Configuration();
      Class.forName(hiveDriverName);
      Class.forName(impalaDriverName);
      this.hiveConnectString = conf.get("hive.connectstring", "jdbc:hive2://localhost:10000/default");
      this.impalaConnectString = conf.get("impala.connectstring", "jdbc:hive2://localhost:21050/;auth=noSasl");
      LOG.info("Hive connect string: " + hiveConnectString);
      LOG.info("Impala connect string: " + impalaConnectString);
      this.tableCache = new HiveTableCache();
      try {
        this.hiveCon = DriverManager.getConnection(hiveConnectString, "cloudera", "cloudera");
      } catch (Exception ex) {
        ex.printStackTrace();
      }
      this.impalaCon = DriverManager.getConnection(impalaConnectString, "cloudera", "cloudera");
    } catch (ClassNotFoundException e) {
      e.printStackTrace();
    } catch (Exception ex) {
      ex.printStackTrace();
    }
    // Force impala to refresh metadata
    if (impalaCon != null) {
      invalidateImpalaMetadata();
    }
    // Misc data structures
    this.tables = new HashMap<Path, String>();
    this.isLoaded = new HashSet<Path>();
  }

  /**
   * Runs INVALIDATE METADATA on the Impala connection, logging (not propagating)
   * any query failure. The Statement itself is always closed.
   */
  private void invalidateImpalaMetadata() throws SQLException {
    try (Statement stmt = impalaCon.createStatement()) {
      try {
        LOG.info("Rebuilding Impala metadata...");
        stmt.execute("INVALIDATE METADATA");
      } catch (Exception iex) {
        LOG.info("Impala metadata rebuild failed: " + iex.toString());
      }
    }
  }

  /**
   * Returns a random non-negative table-name suffix.
   * Math.abs(r.nextInt()) is avoided because it returns a negative value for
   * Integer.MIN_VALUE, which would produce an invalid table name.
   */
  private int nextTableId() {
    return r.nextInt(Integer.MAX_VALUE);
  }

  /** Closes both JDBC connections; safe to call when either is already null. */
  public void close() throws SQLException {
    if (hiveCon != null) {
      this.hiveCon.close();
    }
    this.hiveCon = null;
    if (impalaCon != null) {
      this.impalaCon.close();
    }
    this.impalaCon = null;
  }

  /**
   * Connection string for Hive
   */
  public String getHiveConnectionString() {
    return hiveConnectString;
  }

  /**
   * Run a sample set of Hive test queries to check whether the Hive server is up and active
   */
  public boolean testQueryServer() {
    if (hiveCon == null) {
      return false;
    }
    try {
      // Create, then drop, a throwaway table; success means the server is live.
      String tablename = "test_datatable" + nextTableId();
      try (Statement stmt = hiveCon.createStatement()) {
        stmt.execute("CREATE TABLE " + tablename + "(a int, b int, c int)");
      }
      try (Statement stmt = hiveCon.createStatement()) {
        stmt.execute("DROP TABLE " + tablename);
      }
      return true;
    } catch (Exception ex) {
      ex.printStackTrace();
      return false;
    }
  }

  /**
   * Ensures a Hive table exists for the given data descriptor and returns its name.
   * On first sight of a path this creates the table, stages an Avro copy of the
   * data in a world-writable temp dir, imports it, and refreshes Impala metadata.
   */
  String grabTable(DataDescriptor desc) throws SQLException, IOException {
    // Set up Hive table
    Path p = desc.getFilename();
    String tablename = tableCache.get(p);
    if (tablename == null) {
      tablename = "datatable" + nextTableId();
      try (Statement stmt = hiveCon.createStatement()) {
        String creatTxt = desc.getHiveCreateTableStatement(tablename);
        LOG.info("Create: " + creatTxt);
        stmt.execute(creatTxt);
        tables.put(p, tablename);
      }
      // Copy avro version of data into secret location prior to Hive import
      FileSystem fs = FileSystem.get(conf);
      Path tmpTables = new Path(tmpTablesDir);
      if (! fs.exists(tmpTables)) {
        fs.mkdirs(tmpTables, new FsPermission("-rwxrwxrwx"));
      }
      Path secretDst = new Path(tmpTables, "r" + r.nextInt());
      LOG.info("Preparing Avro data at " + secretDst);
      desc.prepareAvroFile(fs, fs, secretDst, conf);
      fs.setPermission(secretDst, new FsPermission("-rwxrwxrwx"));
      // Import data
      try (Statement stmt = hiveCon.createStatement()) {
        LOG.info("Import data into Hive: " + desc.getHiveImportDataStatement(tablename, secretDst));
        stmt.execute(desc.getHiveImportDataStatement(tablename, secretDst));
        isLoaded.add(p);
      }
      // Refresh impala metadata so the new table is visible there too.
      invalidateImpalaMetadata();
      // Insert into table cache
      tableCache.put(p, tablename);
    }
    return tablename;
  }

  /**
   * Builds and runs a SELECT over one or two descriptor-backed tables.
   * The literal token "DATA" in the clauses is replaced by the single table name
   * (one-table case); with two tables the aliases DATA1/DATA2 are available.
   * Impala is tried first; on any failure the query falls back to Hive.
   *
   * @return rows as lists of objects; the first row holds the column labels
   */
  public List<List<Object>> query(DataDescriptor desc1, DataDescriptor desc2, String projectionClause, String selectionClause) throws SQLException, IOException {
    String tablename1 = grabTable(desc1);
    String tablename2 = null;
    if (desc2 != null) {
      tablename2 = grabTable(desc2);
    }
    //
    // Build the SQL query against the table
    //
    if (projectionClause == null || projectionClause.trim().length() == 0) {
      projectionClause = "*";
    }
    if (selectionClause == null) {
      selectionClause = "";
    }
    if (tablename2 == null) {
      projectionClause = projectionClause.replaceAll("DATA", tablename1);
      selectionClause = selectionClause.replaceAll("DATA", tablename1);
    }
    projectionClause = projectionClause.trim();
    selectionClause = selectionClause.trim();
    String query;
    if (tablename2 == null) {
      query = "SELECT " + projectionClause + " FROM " + tablename1;
    } else {
      query = "SELECT " + projectionClause + " FROM " + tablename1 + " DATA1" + ", " + tablename2 + " DATA2";
    }
    if (selectionClause.length() > 0) {
      query = query + " WHERE " + selectionClause;
    }
    //
    // Try to run it first with the impala connection.
    // If that fails, try hive.
    //
    List<List<Object>> result = new ArrayList<List<Object>>();
    Statement stmt = impalaCon.createStatement();
    LOG.info("Processing: " + query);
    try {
      ResultSet res = null;
      try {
        res = stmt.executeQuery(query);
        LOG.info("Ran Impala query: " + query);
      } catch (Exception iex) {
        iex.printStackTrace();
        // Fail back to Hive! (closing the Impala stmt also releases its ResultSet)
        stmt.close();
        stmt = hiveCon.createStatement();
        res = stmt.executeQuery(query);
        LOG.info("Ran Hive query: " + query);
      }
      // First result row is the list of column labels.
      ResultSetMetaData rsmd = res.getMetaData();
      List<Object> metatuple = new ArrayList<Object>();
      for (int i = 1; i <= rsmd.getColumnCount(); i++) {
        metatuple.add(rsmd.getColumnLabel(i));
      }
      result.add(metatuple);
      while (res.next()) {
        List<Object> tuple = new ArrayList<Object>();
        for (int i = 1; i <= rsmd.getColumnCount(); i++) {
          tuple.add(res.getObject(i));
        }
        result.add(tuple);
      }
      return result;
    } finally {
      // Closing the Statement also closes its open ResultSet (JDBC contract).
      stmt.close();
    }
  }
}
| |
package com.xhh.demo.http.algorithms.util;
/*************************************************************************
* Compilation: javac Picture.java
* Execution: java Picture imagename
*
* Data type for manipulating individual pixels of an image. The original
* image can be read from a file in jpg, gif, or png format, or the
* user can create a blank image of a given size. Includes methods for
* displaying the image in a window on the screen or saving to a file.
*
* % java Picture mandrill.jpg
*
* Remarks
* -------
* - pixel (x, y) is column x and row y, where (0, 0) is upper left
*
* - see also GrayPicture.java for a grayscale version
*
*************************************************************************/
import java.awt.Color;
import java.awt.FileDialog;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import javax.imageio.ImageIO;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.KeyStroke;
/**
* This class provides methods for manipulating individual pixels of
* an image. The original image can be read from a <tt>.jpg</tt>, <tt>.gif</tt>,
* or <tt>.png</tt> file or the user can create a blank image of a given size.
* This class includes methods for displaying the image in a window on
* the screen or saving it to a file.
 * <p>
* Pixel (<em>x</em>, <em>y</em>) is column <em>x</em> and row <em>y</em>.
* By default, the origin (0, 0) is upper left, which is a common convention
* in image processing.
* The method <tt>setOriginLowerLeft()</tt> change the origin to the lower left.
 * <p>
* For additional documentation, see
* <a href="http://introcs.cs.princeton.edu/31datatype">Section 3.1</a> of
* <i>Introduction to Programming in Java: An Interdisciplinary Approach</i>
* by Robert Sedgewick and Kevin Wayne.
*
* @author Robert Sedgewick
* @author Kevin Wayne
*/
public final class Picture implements ActionListener {
private BufferedImage image; // the rasterized image
private JFrame frame; // on-screen view
private String filename; // name of file
private boolean isOriginUpperLeft = true; // location of origin
private final int width, height; // width and height
/**
* Initializes a blank <tt>w</tt>-by-<tt>h</tt> picture, where each pixel is black.
*/
public Picture(int w, int h) {
if (w < 0) throw new IllegalArgumentException("width must be nonnegative");
if (h < 0) throw new IllegalArgumentException("height must be nonnegative");
width = w;
height = h;
image = new BufferedImage(w, h, BufferedImage.TYPE_INT_RGB);
// set to TYPE_INT_ARGB to support transparency
filename = w + "-by-" + h;
}
/**
* Initializes a new picture that is a deep copy of <tt>pic</tt>.
*/
public Picture(Picture pic) {
width = pic.width();
height = pic.height();
image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
filename = pic.filename;
for (int x = 0; x < width(); x++)
for (int y = 0; y < height(); y++)
image.setRGB(x, y, pic.get(x, y).getRGB());
}
/**
* Initializes a picture by reading in a .png, .gif, or .jpg from
* the given filename or URL name.
*/
public Picture(String filename) {
this.filename = filename;
try {
// try to read from file in working directory
File file = new File(filename);
if (file.isFile()) {
image = ImageIO.read(file);
}
// now try to read from file in same directory as this .class file
else {
URL url = getClass().getResource(filename);
if (url == null) {
url = new URL(filename);
}
image = ImageIO.read(url);
}
width = image.getWidth(null);
height = image.getHeight(null);
} catch (IOException e) {
// e.printStackTrace();
throw new RuntimeException("Could not open file: " + filename);
}
}
/**
* Initializes a picture by reading in a .png, .gif, or .jpg from a File.
*/
public Picture(File file) {
try {
image = ImageIO.read(file);
} catch (IOException e) {
e.printStackTrace();
throw new RuntimeException("Could not open file: " + file);
}
if (image == null) {
throw new RuntimeException("Invalid image file: " + file);
}
width = image.getWidth(null);
height = image.getHeight(null);
filename = file.getName();
}
/**
* Returns a JLabel containing this picture, for embedding in a JPanel,
* JFrame or other GUI widget.
*/
public JLabel getJLabel() {
if (image == null) {
return null;
} // no image available
ImageIcon icon = new ImageIcon(image);
return new JLabel(icon);
}
/**
* Sets the origin to be the upper left pixel.
*/
public void setOriginUpperLeft() {
isOriginUpperLeft = true;
}
/**
* Sets the origin to be the lower left pixel.
*/
public void setOriginLowerLeft() {
isOriginUpperLeft = false;
}
/**
* Displays the picture in a window on the screen.
*/
public void show() {
// create the GUI for viewing the image if needed
if (frame == null) {
frame = new JFrame();
JMenuBar menuBar = new JMenuBar();
JMenu menu = new JMenu("File");
menuBar.add(menu);
JMenuItem menuItem1 = new JMenuItem(" Save... ");
menuItem1.addActionListener(this);
menuItem1.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S,
Toolkit.getDefaultToolkit().getMenuShortcutKeyMask()));
menu.add(menuItem1);
frame.setJMenuBar(menuBar);
frame.setContentPane(getJLabel());
// f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
frame.setTitle(filename);
frame.setResizable(false);
frame.pack();
frame.setVisible(true);
}
// draw
frame.repaint();
}
/**
* Returns the height of the picture (in pixels).
*/
public int height() {
return height;
}
/**
* Returns the width of the picture (in pixels).
*/
public int width() {
return width;
}
/**
* Returns the color of pixel (<em>x</em>, <em>y</em>).
*/
public Color get(int x, int y) {
if (x < 0 || x >= width()) throw new IndexOutOfBoundsException("x must be between 0 and " + (width() - 1));
if (y < 0 || y >= height()) throw new IndexOutOfBoundsException("y must be between 0 and " + (height() - 1));
if (isOriginUpperLeft) return new Color(image.getRGB(x, y));
else return new Color(image.getRGB(x, height - y - 1));
}
/**
* Sets the color of pixel (<em>x</em>, <em>y</em>) to given color.
*/
public void set(int x, int y, Color color) {
if (x < 0 || x >= width()) throw new IndexOutOfBoundsException("x must be between 0 and " + (width() - 1));
if (y < 0 || y >= height()) throw new IndexOutOfBoundsException("y must be between 0 and " + (height() - 1));
if (color == null) throw new NullPointerException("can't set Color to null");
if (isOriginUpperLeft) image.setRGB(x, y, color.getRGB());
else image.setRGB(x, height - y - 1, color.getRGB());
}
/**
* Is this Picture equal to obj?
*/
public boolean equals(Object obj) {
if (obj == this) return true;
if (obj == null) return false;
if (obj.getClass() != this.getClass()) return false;
Picture that = (Picture) obj;
if (this.width() != that.width()) return false;
if (this.height() != that.height()) return false;
for (int x = 0; x < width(); x++)
for (int y = 0; y < height(); y++)
if (!this.get(x, y).equals(that.get(x, y))) return false;
return true;
}
/**
* Saves the picture to a file in a standard image format.
* The filetype must be .png or .jpg.
*/
public void save(String name) {
save(new File(name));
}
/**
* Saves the picture to a file in a standard image format.
*/
public void save(File file) {
this.filename = file.getName();
if (frame != null) {
frame.setTitle(filename);
}
String suffix = filename.substring(filename.lastIndexOf('.') + 1);
suffix = suffix.toLowerCase();
if (suffix.equals("jpg") || suffix.equals("png")) {
try {
ImageIO.write(image, suffix, file);
} catch (IOException e) {
e.printStackTrace();
}
} else {
System.out.println("Error: filename must end in .jpg or .png");
}
}
/**
* Opens a save dialog box when the user selects "Save As" from the menu.
*/
public void actionPerformed(ActionEvent e) {
FileDialog chooser = new FileDialog(frame,
"Use a .png or .jpg extension", FileDialog.SAVE);
chooser.setVisible(true);
if (chooser.getFile() != null) {
save(chooser.getDirectory() + File.separator + chooser.getFile());
}
}
/**
* Tests this <tt>Picture</tt> data type. Reads a picture specified by the command-line argument,
* and shows it in a window on the screen.
*/
public static void main(String[] args) {
Picture pic = new Picture(args[0]);
System.out.printf("%d-by-%d\n", pic.width(), pic.height());
pic.show();
}
}
| |
/*
* Copyright (c) 2014 Spotify AB.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.helios.common.descriptors;
import com.google.common.base.Joiner;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.jetbrains.annotations.Nullable;
import java.util.Collections;
import java.util.Map;
import static com.google.common.base.Optional.fromNullable;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Gives the Helios host status for the agent, which includes all jobs, their statuses, as well
* as host and agent information.
* <pre>
* {
* "agentInfo" : { #... see the description of AgentInfo },
* "environment" : {
* "SYSLOG_HOST_PORT" : "10.99.0.1:514",
* },
* "hostInfo" : { #... see the description of HostInfo },
* "jobs" : {
* "myservice:0.5:3539b7bc2235d53f79e6e8511942bbeaa8816265" : {
* "goal" : "START",
* "jobId" : "myservice:0.5:3539b7bc2235d53f79e6e8511942bbeaa8816265",
* }
* },
* "labels" : {
* "role" : "foo",
* "xyz" : "123"
* },
* "status" : "UP",
* "statuses" : {
* "elva:0.0.4:9f64cf43353c55c36276b7df76b066584f9c49aa" : {
* "containerId" : "5a31d4fd48b5b4349980175e2f865494146704e684d89b6a95a9a766cc2f43a3",
* "env" : {
* "SYSLOG_HOST_PORT" : "10.99.0.1:514",
* },
* "goal" : "START",
* "job" : { #... See definition of Job },
* "state" : "RUNNING",
* "throttled" : "NO"
* }
* }
* }
* </pre>
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class HostStatus extends Descriptor {

    /** Whether the agent on the host is reachable. */
    public enum Status {
        UP,
        DOWN
    }

    private final Status status;
    private final HostInfo hostInfo;
    private final AgentInfo agentInfo;
    private final Map<JobId, Deployment> jobs;
    private final Map<JobId, TaskStatus> statuses;
    private final Map<String, String> environment;
    private final Map<String, String> labels;

    /**
     * Constructor.
     *
     * @param jobs        Map of jobs and their deployments for this host.
     * @param statuses    The statuses of jobs on this host.
     * @param status      The up/down status of this host.
     * @param hostInfo    The host information.
     * @param agentInfo   The agent information.
     * @param environment The environment provided to the agent on its command line.
     * @param labels      The labels assigned to the agent.
     */
    public HostStatus(@JsonProperty("jobs") final Map<JobId, Deployment> jobs,
                      @JsonProperty("statuses") final Map<JobId, TaskStatus> statuses,
                      @JsonProperty("status") final Status status,
                      @JsonProperty("hostInfo") final HostInfo hostInfo,
                      @JsonProperty("agentInfo") final AgentInfo agentInfo,
                      @JsonProperty("environment") final Map<String, String> environment,
                      @JsonProperty("labels") final Map<String, String> labels) {
        this.status = checkNotNull(status, "status");
        this.jobs = checkNotNull(jobs, "jobs");
        this.statuses = checkNotNull(statuses, "statuses");
        // Host info, agent info, environment and labels might not be available.
        this.hostInfo = hostInfo;
        this.agentInfo = agentInfo;
        this.environment = fromNullable(environment).or(Collections.<String, String>emptyMap());
        this.labels = fromNullable(labels).or(Collections.<String, String>emptyMap());
    }

    /** Returns the agent's environment; never null, possibly empty. */
    public Map<String, String> getEnvironment() {
        return environment;
    }

    /** Returns the agent's labels; never null, possibly empty. */
    public Map<String, String> getLabels() {
        return labels;
    }

    /** Returns the up/down status of the host. */
    public Status getStatus() {
        return status;
    }

    /** Returns host information, or null when unavailable. */
    @Nullable
    public HostInfo getHostInfo() {
        return hostInfo;
    }

    /** Returns agent information, or null when unavailable. */
    @Nullable
    public AgentInfo getAgentInfo() {
        return agentInfo;
    }

    /** Returns the deployments on this host, keyed by job id. */
    public Map<JobId, Deployment> getJobs() {
        return jobs;
    }

    /** Returns the task statuses on this host, keyed by job id. */
    public Map<JobId, TaskStatus> getStatuses() {
        return statuses;
    }

    /** Creates an empty builder for assembling a {@link HostStatus}. */
    public static Builder newBuilder() {
        return new Builder();
    }

    /** Fluent builder for {@link HostStatus}. */
    public static class Builder {
        private Map<JobId, Deployment> jobs;
        private Map<JobId, TaskStatus> statuses;
        private Status status;
        private HostInfo hostInfo;
        private AgentInfo agentInfo;
        private Map<String, String> environment;
        private Map<String, String> labels;

        public Builder setJobs(final Map<JobId, Deployment> jobs) {
            this.jobs = jobs;
            return this;
        }

        public Builder setStatuses(final Map<JobId, TaskStatus> statuses) {
            this.statuses = statuses;
            return this;
        }

        public Builder setStatus(final Status status) {
            this.status = status;
            return this;
        }

        public Builder setHostInfo(final HostInfo hostInfo) {
            this.hostInfo = hostInfo;
            return this;
        }

        public Builder setAgentInfo(final AgentInfo agentInfo) {
            this.agentInfo = agentInfo;
            return this;
        }

        public Builder setEnvironment(final Map<String, String> environment) {
            this.environment = environment;
            return this;
        }

        public Builder setLabels(final Map<String, String> labels) {
            this.labels = labels;
            return this;
        }

        public HostStatus build() {
            return new HostStatus(jobs, statuses, status, hostInfo, agentInfo, environment, labels);
        }
    }

    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final HostStatus other = (HostStatus) o;
        // Enum identity comparison is safe for status; the remaining fields
        // use null-tolerant equality, matching the hash computation below.
        return status == other.status
               && java.util.Objects.equals(hostInfo, other.hostInfo)
               && java.util.Objects.equals(jobs, other.jobs)
               && java.util.Objects.equals(agentInfo, other.agentInfo)
               && java.util.Objects.equals(statuses, other.statuses)
               && java.util.Objects.equals(environment, other.environment)
               && java.util.Objects.equals(labels, other.labels);
    }

    @Override
    public int hashCode() {
        // Same 31-based chain (and therefore same values) as the conventional
        // hand-rolled version; Objects.hashCode maps null to 0.
        int result = java.util.Objects.hashCode(status);
        result = 31 * result + java.util.Objects.hashCode(hostInfo);
        result = 31 * result + java.util.Objects.hashCode(agentInfo);
        result = 31 * result + java.util.Objects.hashCode(jobs);
        result = 31 * result + java.util.Objects.hashCode(statuses);
        result = 31 * result + java.util.Objects.hashCode(environment);
        result = 31 * result + java.util.Objects.hashCode(labels);
        return result;
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("HostStatus{");
        sb.append("status=").append(status)
          .append(", hostInfo=").append(hostInfo)
          .append(", agentInfo=").append(agentInfo)
          .append(", jobs=").append(jobs)
          .append(", statuses=").append(statuses)
          .append(", environment=").append(stringMapToString(environment))
          .append(", labels=").append(stringMapToString(labels))
          .append('}');
        return sb.toString();
    }

    /** Renders a string map as {k1=v1, k2=v2}. */
    private static String stringMapToString(final Map<String, String> map) {
        final Joiner.MapJoiner joiner = Joiner.on(", ").withKeyValueSeparator("=");
        return "{" + joiner.join(map) + "}";
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.mediastore;
import javax.annotation.Generated;
import com.amazonaws.services.mediastore.model.*;
/**
* Interface for accessing MediaStore asynchronously. Each asynchronous method will return a Java Future object
* representing the asynchronous operation; overloads which accept an {@code AsyncHandler} can be used to receive
* notification when an asynchronous operation completes.
* <p>
* <b>Note:</b> Do not directly implement this interface, new methods are added to it regularly. Extend from
* {@link com.amazonaws.services.mediastore.AbstractAWSMediaStoreAsync} instead.
* </p>
* <p>
* <p>
* An AWS Elemental MediaStore container is a namespace that holds folders and objects. You use a container endpoint to
* create, read, and delete objects.
* </p>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public interface AWSMediaStoreAsync extends AWSMediaStore {
/**
* <p>
* Creates a storage container to hold objects. A container is similar to a bucket in the Amazon S3 service.
* </p>
*
* @param createContainerRequest
* @return A Java Future containing the result of the CreateContainer operation returned by the service.
* @sample AWSMediaStoreAsync.CreateContainer
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/CreateContainer" target="_top">AWS API
* Documentation</a>
*/
java.util.concurrent.Future<CreateContainerResult> createContainerAsync(CreateContainerRequest createContainerRequest);
/**
* <p>
* Creates a storage container to hold objects. A container is similar to a bucket in the Amazon S3 service.
* </p>
*
* @param createContainerRequest
* @param asyncHandler
* Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
* implementation of the callback methods in this interface to receive notification of successful or
* unsuccessful completion of the operation.
* @return A Java Future containing the result of the CreateContainer operation returned by the service.
* @sample AWSMediaStoreAsyncHandler.CreateContainer
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/CreateContainer" target="_top">AWS API
* Documentation</a>
*/
java.util.concurrent.Future<CreateContainerResult> createContainerAsync(CreateContainerRequest createContainerRequest,
com.amazonaws.handlers.AsyncHandler<CreateContainerRequest, CreateContainerResult> asyncHandler);
/**
* <p>
* Deletes the specified container. Before you make a <code>DeleteContainer</code> request, delete any objects in
* the container or in any folders in the container. You can delete only empty containers.
* </p>
*
* @param deleteContainerRequest
* @return A Java Future containing the result of the DeleteContainer operation returned by the service.
* @sample AWSMediaStoreAsync.DeleteContainer
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/DeleteContainer" target="_top">AWS API
* Documentation</a>
*/
java.util.concurrent.Future<DeleteContainerResult> deleteContainerAsync(DeleteContainerRequest deleteContainerRequest);
/**
* <p>
* Deletes the specified container. Before you make a <code>DeleteContainer</code> request, delete any objects in
* the container or in any folders in the container. You can delete only empty containers.
* </p>
*
* @param deleteContainerRequest
* @param asyncHandler
* Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
* implementation of the callback methods in this interface to receive notification of successful or
* unsuccessful completion of the operation.
* @return A Java Future containing the result of the DeleteContainer operation returned by the service.
* @sample AWSMediaStoreAsyncHandler.DeleteContainer
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/DeleteContainer" target="_top">AWS API
* Documentation</a>
*/
java.util.concurrent.Future<DeleteContainerResult> deleteContainerAsync(DeleteContainerRequest deleteContainerRequest,
com.amazonaws.handlers.AsyncHandler<DeleteContainerRequest, DeleteContainerResult> asyncHandler);
/**
* <p>
* Deletes the access policy that is associated with the specified container.
* </p>
*
* @param deleteContainerPolicyRequest
* @return A Java Future containing the result of the DeleteContainerPolicy operation returned by the service.
* @sample AWSMediaStoreAsync.DeleteContainerPolicy
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/DeleteContainerPolicy"
* target="_top">AWS API Documentation</a>
*/
java.util.concurrent.Future<DeleteContainerPolicyResult> deleteContainerPolicyAsync(DeleteContainerPolicyRequest deleteContainerPolicyRequest);
/**
* <p>
* Deletes the access policy that is associated with the specified container.
* </p>
*
* @param deleteContainerPolicyRequest
* @param asyncHandler
* Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
* implementation of the callback methods in this interface to receive notification of successful or
* unsuccessful completion of the operation.
* @return A Java Future containing the result of the DeleteContainerPolicy operation returned by the service.
* @sample AWSMediaStoreAsyncHandler.DeleteContainerPolicy
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/DeleteContainerPolicy"
* target="_top">AWS API Documentation</a>
*/
java.util.concurrent.Future<DeleteContainerPolicyResult> deleteContainerPolicyAsync(DeleteContainerPolicyRequest deleteContainerPolicyRequest,
com.amazonaws.handlers.AsyncHandler<DeleteContainerPolicyRequest, DeleteContainerPolicyResult> asyncHandler);
/**
* <p>
* Deletes the cross-origin resource sharing (CORS) configuration information that is set for the container.
* </p>
* <p>
* To use this operation, you must have permission to perform the <code>MediaStore:DeleteCorsPolicy</code> action.
* The container owner has this permission by default and can grant this permission to others.
* </p>
*
* @param deleteCorsPolicyRequest
* @return A Java Future containing the result of the DeleteCorsPolicy operation returned by the service.
* @sample AWSMediaStoreAsync.DeleteCorsPolicy
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/DeleteCorsPolicy" target="_top">AWS
* API Documentation</a>
*/
java.util.concurrent.Future<DeleteCorsPolicyResult> deleteCorsPolicyAsync(DeleteCorsPolicyRequest deleteCorsPolicyRequest);
/**
* <p>
* Deletes the cross-origin resource sharing (CORS) configuration information that is set for the container.
* </p>
* <p>
* To use this operation, you must have permission to perform the <code>MediaStore:DeleteCorsPolicy</code> action.
* The container owner has this permission by default and can grant this permission to others.
* </p>
*
* @param deleteCorsPolicyRequest
* @param asyncHandler
* Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
* implementation of the callback methods in this interface to receive notification of successful or
* unsuccessful completion of the operation.
* @return A Java Future containing the result of the DeleteCorsPolicy operation returned by the service.
* @sample AWSMediaStoreAsyncHandler.DeleteCorsPolicy
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/DeleteCorsPolicy" target="_top">AWS
* API Documentation</a>
*/
java.util.concurrent.Future<DeleteCorsPolicyResult> deleteCorsPolicyAsync(DeleteCorsPolicyRequest deleteCorsPolicyRequest,
com.amazonaws.handlers.AsyncHandler<DeleteCorsPolicyRequest, DeleteCorsPolicyResult> asyncHandler);
/**
* <p>
* Removes an object lifecycle policy from a container. It takes up to 20 minutes for the change to take effect.
* </p>
*
* @param deleteLifecyclePolicyRequest
* @return A Java Future containing the result of the DeleteLifecyclePolicy operation returned by the service.
* @sample AWSMediaStoreAsync.DeleteLifecyclePolicy
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/DeleteLifecyclePolicy"
* target="_top">AWS API Documentation</a>
*/
java.util.concurrent.Future<DeleteLifecyclePolicyResult> deleteLifecyclePolicyAsync(DeleteLifecyclePolicyRequest deleteLifecyclePolicyRequest);
/**
* <p>
* Removes an object lifecycle policy from a container. It takes up to 20 minutes for the change to take effect.
* </p>
*
* @param deleteLifecyclePolicyRequest
* @param asyncHandler
* Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
* implementation of the callback methods in this interface to receive notification of successful or
* unsuccessful completion of the operation.
* @return A Java Future containing the result of the DeleteLifecyclePolicy operation returned by the service.
* @sample AWSMediaStoreAsyncHandler.DeleteLifecyclePolicy
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/DeleteLifecyclePolicy"
* target="_top">AWS API Documentation</a>
*/
java.util.concurrent.Future<DeleteLifecyclePolicyResult> deleteLifecyclePolicyAsync(DeleteLifecyclePolicyRequest deleteLifecyclePolicyRequest,
com.amazonaws.handlers.AsyncHandler<DeleteLifecyclePolicyRequest, DeleteLifecyclePolicyResult> asyncHandler);
/**
* <p>
* Deletes the metric policy that is associated with the specified container. If there is no metric policy
* associated with the container, MediaStore doesn't send metrics to CloudWatch.
* </p>
*
* @param deleteMetricPolicyRequest
* @return A Java Future containing the result of the DeleteMetricPolicy operation returned by the service.
* @sample AWSMediaStoreAsync.DeleteMetricPolicy
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/DeleteMetricPolicy" target="_top">AWS
* API Documentation</a>
*/
java.util.concurrent.Future<DeleteMetricPolicyResult> deleteMetricPolicyAsync(DeleteMetricPolicyRequest deleteMetricPolicyRequest);
/**
* <p>
* Deletes the metric policy that is associated with the specified container. If there is no metric policy
* associated with the container, MediaStore doesn't send metrics to CloudWatch.
* </p>
*
* @param deleteMetricPolicyRequest
* @param asyncHandler
* Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
* implementation of the callback methods in this interface to receive notification of successful or
* unsuccessful completion of the operation.
* @return A Java Future containing the result of the DeleteMetricPolicy operation returned by the service.
* @sample AWSMediaStoreAsyncHandler.DeleteMetricPolicy
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/DeleteMetricPolicy" target="_top">AWS
* API Documentation</a>
*/
java.util.concurrent.Future<DeleteMetricPolicyResult> deleteMetricPolicyAsync(DeleteMetricPolicyRequest deleteMetricPolicyRequest,
com.amazonaws.handlers.AsyncHandler<DeleteMetricPolicyRequest, DeleteMetricPolicyResult> asyncHandler);
/**
* <p>
* Retrieves the properties of the requested container. This request is commonly used to retrieve the endpoint of a
* container. An endpoint is a value assigned by the service when a new container is created. A container's endpoint
* does not change after it has been assigned. The <code>DescribeContainer</code> request returns a single
* <code>Container</code> object based on <code>ContainerName</code>. To return all <code>Container</code> objects
* that are associated with a specified AWS account, use <a>ListContainers</a>.
* </p>
*
* @param describeContainerRequest
* @return A Java Future containing the result of the DescribeContainer operation returned by the service.
* @sample AWSMediaStoreAsync.DescribeContainer
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/DescribeContainer" target="_top">AWS
* API Documentation</a>
*/
java.util.concurrent.Future<DescribeContainerResult> describeContainerAsync(DescribeContainerRequest describeContainerRequest);
/**
* <p>
* Retrieves the properties of the requested container. This request is commonly used to retrieve the endpoint of a
* container. An endpoint is a value assigned by the service when a new container is created. A container's endpoint
* does not change after it has been assigned. The <code>DescribeContainer</code> request returns a single
* <code>Container</code> object based on <code>ContainerName</code>. To return all <code>Container</code> objects
* that are associated with a specified AWS account, use <a>ListContainers</a>.
* </p>
*
* @param describeContainerRequest
* @param asyncHandler
* Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
* implementation of the callback methods in this interface to receive notification of successful or
* unsuccessful completion of the operation.
* @return A Java Future containing the result of the DescribeContainer operation returned by the service.
* @sample AWSMediaStoreAsyncHandler.DescribeContainer
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/DescribeContainer" target="_top">AWS
* API Documentation</a>
*/
java.util.concurrent.Future<DescribeContainerResult> describeContainerAsync(DescribeContainerRequest describeContainerRequest,
com.amazonaws.handlers.AsyncHandler<DescribeContainerRequest, DescribeContainerResult> asyncHandler);
/**
* <p>
* Retrieves the access policy for the specified container. For information about the data that is included in an
* access policy, see the <a href="https://aws.amazon.com/documentation/iam/">AWS Identity and Access Management
* User Guide</a>.
* </p>
*
* @param getContainerPolicyRequest
* @return A Java Future containing the result of the GetContainerPolicy operation returned by the service.
* @sample AWSMediaStoreAsync.GetContainerPolicy
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/GetContainerPolicy" target="_top">AWS
* API Documentation</a>
*/
java.util.concurrent.Future<GetContainerPolicyResult> getContainerPolicyAsync(GetContainerPolicyRequest getContainerPolicyRequest);
/**
* <p>
* Retrieves the access policy for the specified container. For information about the data that is included in an
* access policy, see the <a href="https://aws.amazon.com/documentation/iam/">AWS Identity and Access Management
* User Guide</a>.
* </p>
*
* @param getContainerPolicyRequest
* @param asyncHandler
* Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
* implementation of the callback methods in this interface to receive notification of successful or
* unsuccessful completion of the operation.
* @return A Java Future containing the result of the GetContainerPolicy operation returned by the service.
* @sample AWSMediaStoreAsyncHandler.GetContainerPolicy
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/GetContainerPolicy" target="_top">AWS
* API Documentation</a>
*/
java.util.concurrent.Future<GetContainerPolicyResult> getContainerPolicyAsync(GetContainerPolicyRequest getContainerPolicyRequest,
com.amazonaws.handlers.AsyncHandler<GetContainerPolicyRequest, GetContainerPolicyResult> asyncHandler);
/**
* <p>
* Returns the cross-origin resource sharing (CORS) configuration information that is set for the container.
* </p>
* <p>
* To use this operation, you must have permission to perform the <code>MediaStore:GetCorsPolicy</code> action. By
* default, the container owner has this permission and can grant it to others.
* </p>
*
* @param getCorsPolicyRequest
* @return A Java Future containing the result of the GetCorsPolicy operation returned by the service.
* @sample AWSMediaStoreAsync.GetCorsPolicy
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/GetCorsPolicy" target="_top">AWS API
* Documentation</a>
*/
java.util.concurrent.Future<GetCorsPolicyResult> getCorsPolicyAsync(GetCorsPolicyRequest getCorsPolicyRequest);
/**
* <p>
* Returns the cross-origin resource sharing (CORS) configuration information that is set for the container.
* </p>
* <p>
* To use this operation, you must have permission to perform the <code>MediaStore:GetCorsPolicy</code> action. By
* default, the container owner has this permission and can grant it to others.
* </p>
*
* @param getCorsPolicyRequest
* @param asyncHandler
* Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
* implementation of the callback methods in this interface to receive notification of successful or
* unsuccessful completion of the operation.
* @return A Java Future containing the result of the GetCorsPolicy operation returned by the service.
* @sample AWSMediaStoreAsyncHandler.GetCorsPolicy
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/GetCorsPolicy" target="_top">AWS API
* Documentation</a>
*/
java.util.concurrent.Future<GetCorsPolicyResult> getCorsPolicyAsync(GetCorsPolicyRequest getCorsPolicyRequest,
com.amazonaws.handlers.AsyncHandler<GetCorsPolicyRequest, GetCorsPolicyResult> asyncHandler);
    /**
     * <p>
     * Retrieves the object lifecycle policy that is assigned to a container.
     * </p>
     *
     * @param getLifecyclePolicyRequest
     *        The request object for the GetLifecyclePolicy operation.
     * @return A Java Future containing the result of the GetLifecyclePolicy operation returned by the service.
     * @sample AWSMediaStoreAsync.GetLifecyclePolicy
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/GetLifecyclePolicy" target="_top">AWS
     *      API Documentation</a>
     */
    java.util.concurrent.Future<GetLifecyclePolicyResult> getLifecyclePolicyAsync(GetLifecyclePolicyRequest getLifecyclePolicyRequest);
    /**
     * <p>
     * Retrieves the object lifecycle policy that is assigned to a container.
     * </p>
     *
     * @param getLifecyclePolicyRequest
     *        The request object for the GetLifecyclePolicy operation.
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the GetLifecyclePolicy operation returned by the service.
     * @sample AWSMediaStoreAsyncHandler.GetLifecyclePolicy
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/GetLifecyclePolicy" target="_top">AWS
     *      API Documentation</a>
     */
    java.util.concurrent.Future<GetLifecyclePolicyResult> getLifecyclePolicyAsync(GetLifecyclePolicyRequest getLifecyclePolicyRequest,
            com.amazonaws.handlers.AsyncHandler<GetLifecyclePolicyRequest, GetLifecyclePolicyResult> asyncHandler);
    /**
     * <p>
     * Returns the metric policy for the specified container.
     * </p>
     *
     * @param getMetricPolicyRequest
     *        The request object for the GetMetricPolicy operation.
     * @return A Java Future containing the result of the GetMetricPolicy operation returned by the service.
     * @sample AWSMediaStoreAsync.GetMetricPolicy
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/GetMetricPolicy" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<GetMetricPolicyResult> getMetricPolicyAsync(GetMetricPolicyRequest getMetricPolicyRequest);
    /**
     * <p>
     * Returns the metric policy for the specified container.
     * </p>
     *
     * @param getMetricPolicyRequest
     *        The request object for the GetMetricPolicy operation.
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the GetMetricPolicy operation returned by the service.
     * @sample AWSMediaStoreAsyncHandler.GetMetricPolicy
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/GetMetricPolicy" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<GetMetricPolicyResult> getMetricPolicyAsync(GetMetricPolicyRequest getMetricPolicyRequest,
            com.amazonaws.handlers.AsyncHandler<GetMetricPolicyRequest, GetMetricPolicyResult> asyncHandler);
    /**
     * <p>
     * Lists the properties of all containers in AWS Elemental MediaStore.
     * </p>
     * <p>
     * You can query to receive all the containers in one response. Or you can include the <code>MaxResults</code>
     * parameter to receive a limited number of containers in each response. In this case, the response includes a
     * token. To get the next set of containers, send the command again, this time with the <code>NextToken</code>
     * parameter (with the returned token as its value). The next set of responses appears, with a token if there are
     * still more containers to receive.
     * </p>
     * <p>
     * See also <a>DescribeContainer</a>, which gets the properties of one container.
     * </p>
     *
     * @param listContainersRequest
     *        The request object for the ListContainers operation.
     * @return A Java Future containing the result of the ListContainers operation returned by the service.
     * @sample AWSMediaStoreAsync.ListContainers
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/ListContainers" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<ListContainersResult> listContainersAsync(ListContainersRequest listContainersRequest);
    /**
     * <p>
     * Lists the properties of all containers in AWS Elemental MediaStore.
     * </p>
     * <p>
     * You can query to receive all the containers in one response. Or you can include the <code>MaxResults</code>
     * parameter to receive a limited number of containers in each response. In this case, the response includes a
     * token. To get the next set of containers, send the command again, this time with the <code>NextToken</code>
     * parameter (with the returned token as its value). The next set of responses appears, with a token if there are
     * still more containers to receive.
     * </p>
     * <p>
     * See also <a>DescribeContainer</a>, which gets the properties of one container.
     * </p>
     *
     * @param listContainersRequest
     *        The request object for the ListContainers operation.
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the ListContainers operation returned by the service.
     * @sample AWSMediaStoreAsyncHandler.ListContainers
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/ListContainers" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<ListContainersResult> listContainersAsync(ListContainersRequest listContainersRequest,
            com.amazonaws.handlers.AsyncHandler<ListContainersRequest, ListContainersResult> asyncHandler);
    /**
     * <p>
     * Returns a list of the tags assigned to the specified container.
     * </p>
     *
     * @param listTagsForResourceRequest
     *        The request object for the ListTagsForResource operation.
     * @return A Java Future containing the result of the ListTagsForResource operation returned by the service.
     * @sample AWSMediaStoreAsync.ListTagsForResource
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/ListTagsForResource" target="_top">AWS
     *      API Documentation</a>
     */
    java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest listTagsForResourceRequest);
    /**
     * <p>
     * Returns a list of the tags assigned to the specified container.
     * </p>
     *
     * @param listTagsForResourceRequest
     *        The request object for the ListTagsForResource operation.
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the ListTagsForResource operation returned by the service.
     * @sample AWSMediaStoreAsyncHandler.ListTagsForResource
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/ListTagsForResource" target="_top">AWS
     *      API Documentation</a>
     */
    java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest listTagsForResourceRequest,
            com.amazonaws.handlers.AsyncHandler<ListTagsForResourceRequest, ListTagsForResourceResult> asyncHandler);
    /**
     * <p>
     * Creates an access policy for the specified container to restrict the users and clients that can access it. For
     * information about the data that is included in an access policy, see the <a
     * href="https://aws.amazon.com/documentation/iam/">AWS Identity and Access Management User Guide</a>.
     * </p>
     * <p>
     * For this release of the REST API, you can create only one policy for a container. If you enter
     * <code>PutContainerPolicy</code> twice, the second command modifies the existing policy.
     * </p>
     *
     * @param putContainerPolicyRequest
     *        The request object for the PutContainerPolicy operation, carrying the policy to apply.
     * @return A Java Future containing the result of the PutContainerPolicy operation returned by the service.
     * @sample AWSMediaStoreAsync.PutContainerPolicy
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/PutContainerPolicy" target="_top">AWS
     *      API Documentation</a>
     */
    java.util.concurrent.Future<PutContainerPolicyResult> putContainerPolicyAsync(PutContainerPolicyRequest putContainerPolicyRequest);
    /**
     * <p>
     * Creates an access policy for the specified container to restrict the users and clients that can access it. For
     * information about the data that is included in an access policy, see the <a
     * href="https://aws.amazon.com/documentation/iam/">AWS Identity and Access Management User Guide</a>.
     * </p>
     * <p>
     * For this release of the REST API, you can create only one policy for a container. If you enter
     * <code>PutContainerPolicy</code> twice, the second command modifies the existing policy.
     * </p>
     *
     * @param putContainerPolicyRequest
     *        The request object for the PutContainerPolicy operation, carrying the policy to apply.
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the PutContainerPolicy operation returned by the service.
     * @sample AWSMediaStoreAsyncHandler.PutContainerPolicy
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/PutContainerPolicy" target="_top">AWS
     *      API Documentation</a>
     */
    java.util.concurrent.Future<PutContainerPolicyResult> putContainerPolicyAsync(PutContainerPolicyRequest putContainerPolicyRequest,
            com.amazonaws.handlers.AsyncHandler<PutContainerPolicyRequest, PutContainerPolicyResult> asyncHandler);
    /**
     * <p>
     * Sets the cross-origin resource sharing (CORS) configuration on a container so that the container can service
     * cross-origin requests. For example, you might want to enable a request whose origin is http://www.example.com to
     * access your AWS Elemental MediaStore container at my.example.container.com by using the browser's XMLHttpRequest
     * capability.
     * </p>
     * <p>
     * To enable CORS on a container, you attach a CORS policy to the container. In the CORS policy, you configure rules
     * that identify origins and the HTTP methods that can be executed on your container. The policy can contain up to
     * 398,000 characters. You can add up to 100 rules to a CORS policy. If more than one rule applies, the service uses
     * the first applicable rule listed.
     * </p>
     * <p>
     * To learn more about CORS, see <a
     * href="https://docs.aws.amazon.com/mediastore/latest/ug/cors-policy.html">Cross-Origin Resource Sharing (CORS) in
     * AWS Elemental MediaStore</a>.
     * </p>
     *
     * @param putCorsPolicyRequest
     *        The request object for the PutCorsPolicy operation, carrying the CORS policy to attach.
     * @return A Java Future containing the result of the PutCorsPolicy operation returned by the service.
     * @sample AWSMediaStoreAsync.PutCorsPolicy
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/PutCorsPolicy" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<PutCorsPolicyResult> putCorsPolicyAsync(PutCorsPolicyRequest putCorsPolicyRequest);
    /**
     * <p>
     * Sets the cross-origin resource sharing (CORS) configuration on a container so that the container can service
     * cross-origin requests. For example, you might want to enable a request whose origin is http://www.example.com to
     * access your AWS Elemental MediaStore container at my.example.container.com by using the browser's XMLHttpRequest
     * capability.
     * </p>
     * <p>
     * To enable CORS on a container, you attach a CORS policy to the container. In the CORS policy, you configure rules
     * that identify origins and the HTTP methods that can be executed on your container. The policy can contain up to
     * 398,000 characters. You can add up to 100 rules to a CORS policy. If more than one rule applies, the service uses
     * the first applicable rule listed.
     * </p>
     * <p>
     * To learn more about CORS, see <a
     * href="https://docs.aws.amazon.com/mediastore/latest/ug/cors-policy.html">Cross-Origin Resource Sharing (CORS) in
     * AWS Elemental MediaStore</a>.
     * </p>
     *
     * @param putCorsPolicyRequest
     *        The request object for the PutCorsPolicy operation, carrying the CORS policy to attach.
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the PutCorsPolicy operation returned by the service.
     * @sample AWSMediaStoreAsyncHandler.PutCorsPolicy
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/PutCorsPolicy" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<PutCorsPolicyResult> putCorsPolicyAsync(PutCorsPolicyRequest putCorsPolicyRequest,
            com.amazonaws.handlers.AsyncHandler<PutCorsPolicyRequest, PutCorsPolicyResult> asyncHandler);
    /**
     * <p>
     * Writes an object lifecycle policy to a container. If the container already has an object lifecycle policy, the
     * service replaces the existing policy with the new policy. It takes up to 20 minutes for the change to take
     * effect.
     * </p>
     * <p>
     * For information about how to construct an object lifecycle policy, see <a
     * href="https://docs.aws.amazon.com/mediastore/latest/ug/policies-object-lifecycle-components.html">Components of
     * an Object Lifecycle Policy</a>.
     * </p>
     *
     * @param putLifecyclePolicyRequest
     *        The request object for the PutLifecyclePolicy operation, carrying the lifecycle policy to apply.
     * @return A Java Future containing the result of the PutLifecyclePolicy operation returned by the service.
     * @sample AWSMediaStoreAsync.PutLifecyclePolicy
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/PutLifecyclePolicy" target="_top">AWS
     *      API Documentation</a>
     */
    java.util.concurrent.Future<PutLifecyclePolicyResult> putLifecyclePolicyAsync(PutLifecyclePolicyRequest putLifecyclePolicyRequest);
    /**
     * <p>
     * Writes an object lifecycle policy to a container. If the container already has an object lifecycle policy, the
     * service replaces the existing policy with the new policy. It takes up to 20 minutes for the change to take
     * effect.
     * </p>
     * <p>
     * For information about how to construct an object lifecycle policy, see <a
     * href="https://docs.aws.amazon.com/mediastore/latest/ug/policies-object-lifecycle-components.html">Components of
     * an Object Lifecycle Policy</a>.
     * </p>
     *
     * @param putLifecyclePolicyRequest
     *        The request object for the PutLifecyclePolicy operation, carrying the lifecycle policy to apply.
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the PutLifecyclePolicy operation returned by the service.
     * @sample AWSMediaStoreAsyncHandler.PutLifecyclePolicy
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/PutLifecyclePolicy" target="_top">AWS
     *      API Documentation</a>
     */
    java.util.concurrent.Future<PutLifecyclePolicyResult> putLifecyclePolicyAsync(PutLifecyclePolicyRequest putLifecyclePolicyRequest,
            com.amazonaws.handlers.AsyncHandler<PutLifecyclePolicyRequest, PutLifecyclePolicyResult> asyncHandler);
    /**
     * <p>
     * Writes a metric policy to the specified container. A metric policy allows AWS Elemental MediaStore to send
     * metrics to Amazon CloudWatch. It takes up to 20 minutes for the new policy to take effect.
     * </p>
     *
     * @param putMetricPolicyRequest
     *        The request object for the PutMetricPolicy operation, carrying the metric policy to apply.
     * @return A Java Future containing the result of the PutMetricPolicy operation returned by the service.
     * @sample AWSMediaStoreAsync.PutMetricPolicy
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/PutMetricPolicy" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<PutMetricPolicyResult> putMetricPolicyAsync(PutMetricPolicyRequest putMetricPolicyRequest);
    /**
     * <p>
     * Writes a metric policy to the specified container. A metric policy allows AWS Elemental MediaStore to send
     * metrics to Amazon CloudWatch. It takes up to 20 minutes for the new policy to take effect.
     * </p>
     *
     * @param putMetricPolicyRequest
     *        The request object for the PutMetricPolicy operation, carrying the metric policy to apply.
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the PutMetricPolicy operation returned by the service.
     * @sample AWSMediaStoreAsyncHandler.PutMetricPolicy
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/PutMetricPolicy" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<PutMetricPolicyResult> putMetricPolicyAsync(PutMetricPolicyRequest putMetricPolicyRequest,
            com.amazonaws.handlers.AsyncHandler<PutMetricPolicyRequest, PutMetricPolicyResult> asyncHandler);
    /**
     * <p>
     * Starts access logging on the specified container. When you enable access logging on a container, MediaStore
     * delivers access logs for objects stored in that container to Amazon CloudWatch Logs.
     * </p>
     *
     * @param startAccessLoggingRequest
     *        The request object for the StartAccessLogging operation.
     * @return A Java Future containing the result of the StartAccessLogging operation returned by the service.
     * @sample AWSMediaStoreAsync.StartAccessLogging
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/StartAccessLogging" target="_top">AWS
     *      API Documentation</a>
     */
    java.util.concurrent.Future<StartAccessLoggingResult> startAccessLoggingAsync(StartAccessLoggingRequest startAccessLoggingRequest);
    /**
     * <p>
     * Starts access logging on the specified container. When you enable access logging on a container, MediaStore
     * delivers access logs for objects stored in that container to Amazon CloudWatch Logs.
     * </p>
     *
     * @param startAccessLoggingRequest
     *        The request object for the StartAccessLogging operation.
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the StartAccessLogging operation returned by the service.
     * @sample AWSMediaStoreAsyncHandler.StartAccessLogging
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/StartAccessLogging" target="_top">AWS
     *      API Documentation</a>
     */
    java.util.concurrent.Future<StartAccessLoggingResult> startAccessLoggingAsync(StartAccessLoggingRequest startAccessLoggingRequest,
            com.amazonaws.handlers.AsyncHandler<StartAccessLoggingRequest, StartAccessLoggingResult> asyncHandler);
    /**
     * <p>
     * Stops access logging on the specified container. When you stop access logging on a container, MediaStore stops
     * sending access logs to Amazon CloudWatch Logs. These access logs are not saved and are not retrievable.
     * </p>
     *
     * @param stopAccessLoggingRequest
     *        The request object for the StopAccessLogging operation.
     * @return A Java Future containing the result of the StopAccessLogging operation returned by the service.
     * @sample AWSMediaStoreAsync.StopAccessLogging
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/StopAccessLogging" target="_top">AWS
     *      API Documentation</a>
     */
    java.util.concurrent.Future<StopAccessLoggingResult> stopAccessLoggingAsync(StopAccessLoggingRequest stopAccessLoggingRequest);
    /**
     * <p>
     * Stops access logging on the specified container. When you stop access logging on a container, MediaStore stops
     * sending access logs to Amazon CloudWatch Logs. These access logs are not saved and are not retrievable.
     * </p>
     *
     * @param stopAccessLoggingRequest
     *        The request object for the StopAccessLogging operation.
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the StopAccessLogging operation returned by the service.
     * @sample AWSMediaStoreAsyncHandler.StopAccessLogging
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/StopAccessLogging" target="_top">AWS
     *      API Documentation</a>
     */
    java.util.concurrent.Future<StopAccessLoggingResult> stopAccessLoggingAsync(StopAccessLoggingRequest stopAccessLoggingRequest,
            com.amazonaws.handlers.AsyncHandler<StopAccessLoggingRequest, StopAccessLoggingResult> asyncHandler);
    /**
     * <p>
     * Adds tags to the specified AWS Elemental MediaStore container. Tags are key:value pairs that you can associate
     * with AWS resources. For example, the tag key might be "customer" and the tag value might be "companyA." You can
     * specify one or more tags to add to each container. You can add up to 50 tags to each container. For more
     * information about tagging, including naming and usage conventions, see <a
     * href="https://docs.aws.amazon.com/mediastore/latest/ug/tagging.html">Tagging Resources in MediaStore</a>.
     * </p>
     *
     * @param tagResourceRequest
     *        The request object for the TagResource operation, carrying the tags to add.
     * @return A Java Future containing the result of the TagResource operation returned by the service.
     * @sample AWSMediaStoreAsync.TagResource
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/TagResource" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest tagResourceRequest);
    /**
     * <p>
     * Adds tags to the specified AWS Elemental MediaStore container. Tags are key:value pairs that you can associate
     * with AWS resources. For example, the tag key might be "customer" and the tag value might be "companyA." You can
     * specify one or more tags to add to each container. You can add up to 50 tags to each container. For more
     * information about tagging, including naming and usage conventions, see <a
     * href="https://docs.aws.amazon.com/mediastore/latest/ug/tagging.html">Tagging Resources in MediaStore</a>.
     * </p>
     *
     * @param tagResourceRequest
     *        The request object for the TagResource operation, carrying the tags to add.
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the TagResource operation returned by the service.
     * @sample AWSMediaStoreAsyncHandler.TagResource
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/TagResource" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest tagResourceRequest,
            com.amazonaws.handlers.AsyncHandler<TagResourceRequest, TagResourceResult> asyncHandler);
    /**
     * <p>
     * Removes tags from the specified container. You can specify one or more tags to remove.
     * </p>
     *
     * @param untagResourceRequest
     *        The request object for the UntagResource operation, naming the tags to remove.
     * @return A Java Future containing the result of the UntagResource operation returned by the service.
     * @sample AWSMediaStoreAsync.UntagResource
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/UntagResource" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest untagResourceRequest);
    /**
     * <p>
     * Removes tags from the specified container. You can specify one or more tags to remove.
     * </p>
     *
     * @param untagResourceRequest
     *        The request object for the UntagResource operation, naming the tags to remove.
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the UntagResource operation returned by the service.
     * @sample AWSMediaStoreAsyncHandler.UntagResource
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-2017-09-01/UntagResource" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest untagResourceRequest,
            com.amazonaws.handlers.AsyncHandler<UntagResourceRequest, UntagResourceResult> asyncHandler);
}
| |
package org.json.zip;
/*
Copyright (c) 2013 JSON.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The Software shall be used for Good, not Evil.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
/**
 * JSONzip is a binary-encoded JSON dialect, intended to compress the messages
 * in a session. It is adaptive: with each message seen, its compression should
 * improve. It minimizes JSON's overhead by reducing punctuation to a small
 * number of bits, uses Huffman encoding to shrink the average character size,
 * and uses caches (or Keeps) of recently seen strings and values so that
 * repetitive content (such as object keys) can be substantially reduced. It
 * also uses a character encoding called Kim (Keep it minimal) that is smaller
 * than UTF-8 for most East European, African, and Asian scripts.
 *
 * JSONzip tends to reduce most content by about half. If there is a lot of
 * recurring information, the reduction can be much more dramatic.
 *
 * FOR EVALUATION PURPOSES ONLY. THIS PACKAGE HAS NOT YET BEEN TESTED ADEQUATELY
 * FOR PRODUCTION USE.
 *
 * @author JSON.org
 * @version 2013-04-18
 */
public abstract class JSONzip implements None, PostMortem {

    /** Powers of 2. */
    public static final int[] twos = { 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536 };

    /** The characters in JSON numbers can be reduced to 4 bits each. */
    public static final byte[] bcd = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '.', '-', '+', 'E' };

    /** The number of integers that can be encoded in 4 bits. */
    public static final long int4 = 16;

    /** The number of integers that can be encoded in 7 bits. */
    public static final long int7 = 128;

    /** The number of integers that can be encoded in 14 bits. */
    public static final long int14 = 16384;

    /** The end of string code. */
    public static final int end = 256;

    /** The end of number code. */
    public static final int endOfNumber = bcd.length;

    /**
     * The maximum substring length when registering many. The registration of
     * one substring may be longer.
     */
    public static final int maxSubstringLength = 10;

    /** The minimum substring length. */
    public static final int minSubstringLength = 3;

    /** The package supports tracing for debugging. */
    public static final boolean probe = false;

    /** The maximum number of substrings added to the substrings keep per string. */
    public static final int substringLimit = 40;

    /** The value code for an empty object. */
    public static final int zipEmptyObject = 0;

    /** The value code for an empty array. */
    public static final int zipEmptyArray = 1;

    /** The value code for true. */
    public static final int zipTrue = 2;

    /** The value code for false. */
    public static final int zipFalse = 3;

    /** The value code for null. */
    public static final int zipNull = 4;

    /** The value code for a non-empty object. */
    public static final int zipObject = 5;

    /** The value code for an array with a string as its first element. */
    public static final int zipArrayString = 6;

    /** The value code for an array with a non-string value as its first element. */
    public static final int zipArrayValue = 7;

    /** A Huffman encoder for names. */
    protected final Huff namehuff;

    /** A place to keep the names (keys). */
    protected final MapKeep namekeep;

    /** A place to keep the strings. */
    protected final MapKeep stringkeep;

    /** A Huffman encoder for string values. */
    protected final Huff substringhuff;

    /** A place to keep the substrings. */
    protected final TrieKeep substringkeep;

    /** A place to keep the values. */
    protected final MapKeep values;

    /**
     * Initialize the data structures.
     */
    protected JSONzip() {
        this.namehuff = new Huff(end + 1);
        this.namekeep = new MapKeep(9);
        this.stringkeep = new MapKeep(11);
        this.substringhuff = new Huff(end + 1);
        this.substringkeep = new TrieKeep(12);
        this.values = new MapKeep(10);

        // Seed both Huffman encoders with extra weight on the printable ASCII
        // range (and on the lowercase letters a second time), because those
        // characters are highly likely to occur more frequently. Each
        // character's weight increases as it is used, and the encoder tends to
        // use fewer bits for heavier characters.
        this.namehuff.tick(' ', '}');
        this.namehuff.tick('a', 'z');
        this.namehuff.tick(end);
        this.namehuff.tick(end);
        this.substringhuff.tick(' ', '}');
        this.substringhuff.tick('a', 'z');
        this.substringhuff.tick(end);
        this.substringhuff.tick(end);
    }

    /**
     * Finish initialization by generating both Huffman encodings from the
     * seeded weights.
     */
    protected void begin() {
        this.namehuff.generate();
        this.substringhuff.generate();
    }

    /**
     * Write an end-of-line to the console.
     */
    static void log() {
        log("\n");
    }

    /**
     * Write an integer to the console.
     *
     * @param integer
     *            The integer to write.
     */
    static void log(int integer) {
        log(integer + " ");
    }

    /**
     * Write two integers, separated by ':' to the console.
     *
     * @param integer
     *            The first integer.
     * @param width
     *            The second integer.
     */
    static void log(int integer, int width) {
        log(integer + ":" + width + " ");
    }

    /**
     * Write a string to the console.
     *
     * @param string
     *            The string to write.
     */
    static void log(String string) {
        System.out.print(string);
    }

    /**
     * Write a character or its code to the console. Printable characters above
     * the space are shown quoted; everything else falls back to numeric form.
     *
     * @param integer
     *            The character code.
     * @param width
     *            The width value to write alongside it.
     */
    static void logchar(int integer, int width) {
        if (integer >= '!' && integer <= '}') {
            log("'" + (char) integer + "':" + width + " ");
        } else {
            log(integer, width);
        }
    }

    /**
     * This method is used for testing the implementation of JSONzip. It is not
     * suitable for any other purpose. It is used to compare a Compressor and a
     * Decompressor, verifying that the data structures that were built during
     * zipping and unzipping were the same.
     *
     * @param pm
     *            The other JSONzip to compare against.
     * @return true if the structures match.
     */
    public boolean postMortem(PostMortem pm) {
        JSONzip that = (JSONzip) pm;
        if (!this.namehuff.postMortem(that.namehuff)) {
            return false;
        }
        if (!this.namekeep.postMortem(that.namekeep)) {
            return false;
        }
        if (!this.stringkeep.postMortem(that.stringkeep)) {
            return false;
        }
        if (!this.substringhuff.postMortem(that.substringhuff)) {
            return false;
        }
        if (!this.substringkeep.postMortem(that.substringkeep)) {
            return false;
        }
        return this.values.postMortem(that.values);
    }
}
| |
package org.apache.helix.manager.zk;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Semaphore;
import org.apache.helix.PreConnectCallback;
import org.apache.helix.PropertyKey;
import org.apache.helix.TestHelper;
import org.apache.helix.ZkTestHelper;
import org.apache.helix.common.ZkTestBase;
import org.apache.helix.integration.manager.MockParticipantManager;
import org.apache.helix.model.InstanceConfig;
import org.apache.helix.model.LiveInstance;
import org.apache.helix.task.TaskConstants;
import org.apache.helix.zookeeper.datamodel.ZNRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Integration tests around ParticipantManager's handling of ZooKeeper session
 * expiry, in particular that ephemeral LIVEINSTANCE znodes are only created
 * under the session that is still valid.
 */
public class TestParticipantManager extends ZkTestBase {
  private static Logger LOG = LoggerFactory.getLogger(TestParticipantManager.class);

  /*
   * Simulates zk session expiry before creating live instance in participant manager. This test
   * makes sure the session aware create ephemeral API is called, which validates the expected zk
   * session.
   * What this test does is:
   * 1. Sets up live instance with session S0
   * 2. Expires S0 and gets new session S1
   * 3. S1 is blocked before creating live instance in participant manager
   * 4. Expires S1 and gets new session S2
   * 5. Proceeds S1 to create live instance, which will fail because session S1 is expired
   * 6. Proceeds S2 to create live instance, which will succeed
   */
  @Test
  public void testSessionExpiryCreateLiveInstance() throws Exception {
    final String className = TestHelper.getTestClassName();
    final String methodName = TestHelper.getTestMethodName();
    final String clusterName = className + "_" + methodName;
    final ZKHelixDataAccessor accessor =
        new ZKHelixDataAccessor(clusterName, new ZkBaseDataAccessor<>(ZK_ADDR));
    final PropertyKey.Builder keyBuilder = accessor.keyBuilder();

    TestHelper.setupCluster(clusterName, ZK_ADDR,
        12918, // participant port
        "localhost", // participant name prefix
        "TestDB", // resource name prefix
        1, // resources
        10, // partitions per resource
        5, // number of nodes
        3, // replicas
        "MasterSlave",
        true); // do rebalance

    final String instanceName = "localhost_12918";
    final MockParticipantManager manager =
        new MockParticipantManager(ZK_ADDR, clusterName, instanceName);
    manager.syncStart();

    // Snapshot the live instance created under the original session S0.
    // NOTE(review): getStat() is dereferenced before the assertNotNull below,
    // so a missing live instance would surface as an NPE rather than an
    // assertion failure — confirm whether that ordering is intentional.
    final LiveInstance liveInstance = accessor.getProperty(keyBuilder.liveInstance(instanceName));
    final long originalCreationTime = liveInstance.getStat().getCreationTime();
    final String originalSessionId = manager.getSessionId();

    // Verify current live instance.
    Assert.assertNotNull(liveInstance);
    Assert.assertEquals(liveInstance.getEphemeralOwner(), originalSessionId);

    final CountDownLatch startCountdown = new CountDownLatch(1);
    final CountDownLatch endCountdown = new CountDownLatch(1);
    final Semaphore semaphore = new Semaphore(0);

    // The callback blocks the manager's new-session handling so the test can
    // expire the session again inside that window.
    manager.addPreConnectCallback(
        new BlockingPreConnectCallback(instanceName, startCountdown, endCountdown, semaphore));

    // Expire S0 and new session S1 will be created.
    ZkTestHelper.asyncExpireSession(manager.getZkClient());
    // Wait for onPreConnect to start
    semaphore.acquire();

    // New session S1 should not be equal to S0.
    Assert.assertFalse(originalSessionId.equals(manager.getSessionId()));
    // Live instance should be gone as original session S0 is expired.
    Assert.assertNull(accessor.getProperty(keyBuilder.liveInstance(instanceName)));

    final String sessionOne = manager.getSessionId();

    // Expire S1 when S1 is blocked in onPreConnect().
    // New session S2 will be created.
    ZkTestHelper.asyncExpireSession(manager.getZkClient());
    // Poll until the zk client reports a session id different from S1.
    TestHelper.verify(
        () -> !(ZKUtil.toHexSessionId(manager.getZkClient().getSessionId()).equals(sessionOne)),
        TestHelper.WAIT_DURATION);

    // New session S2 should not be equal to S1.
    final String sessionTwo = ZKUtil.toHexSessionId(manager.getZkClient().getSessionId());
    Assert.assertFalse(sessionOne.equals(sessionTwo));

    // Proceed S1 to create live instance, which will fail.
    startCountdown.countDown();

    // Wait until S2 starts onPreConnect, which indicates S1's handling new session is completed.
    semaphore.acquire();

    // Live instance should not be created because zk session is expired.
    Assert.assertNull(accessor.getProperty(keyBuilder.liveInstance(instanceName)),
        "Live instance should not be created because zk session is expired!");

    // Proceed S2 to create live instance.
    endCountdown.countDown();

    TestHelper.verify(() -> {
      // Newly created live instance should be created by the latest session S2
      // and have a new creation time.
      LiveInstance newLiveInstance =
          accessor.getProperty(keyBuilder.liveInstance(instanceName));
      return newLiveInstance != null
          && newLiveInstance.getStat().getCreationTime() != originalCreationTime
          && newLiveInstance.getEphemeralOwner().equals(sessionTwo);
    }, TestHelper.WAIT_DURATION);

    // Clean up.
    manager.syncStop();
    deleteCluster(clusterName);
  }

  /**
   * Verifies that a participant publishes its configured target task thread
   * pool size into its LIVEINSTANCE znode on startup.
   */
  @Test(dependsOnMethods = "testSessionExpiryCreateLiveInstance")
  public void testCurrentTaskThreadPoolSizeCreation() throws Exception {
    // Using a pool sized different from the default value to verify correctness
    final int testThreadPoolSize = TaskConstants.DEFAULT_TASK_THREAD_POOL_SIZE + 1;
    final String className = TestHelper.getTestClassName();
    final String methodName = TestHelper.getTestMethodName();
    final String clusterName = className + "_" + methodName;
    final ZKHelixDataAccessor accessor = new ZKHelixDataAccessor(clusterName,
        new ZkBaseDataAccessor.Builder<ZNRecord>().setZkAddress(ZK_ADDR).build());
    final PropertyKey.Builder keyBuilder = accessor.keyBuilder();

    TestHelper.setupCluster(clusterName, ZK_ADDR, 12918, // participant port
        "localhost", // participant name prefix
        "TestDB", // resource name prefix
        1, // resources
        10, // partitions per resource
        5, // number of nodes
        3, // replicas
        "MasterSlave", true); // do rebalance

    final String instanceName = "localhost_12918";
    final MockParticipantManager manager =
        new MockParticipantManager(ZK_ADDR, clusterName, instanceName);

    // Configure the target pool size before the participant connects, so it is
    // picked up during live-instance creation.
    InstanceConfig instanceConfig = accessor.getProperty(keyBuilder.instanceConfig(instanceName));
    instanceConfig.setTargetTaskThreadPoolSize(testThreadPoolSize);
    accessor.setProperty(keyBuilder.instanceConfig(instanceName), instanceConfig);

    manager.syncStart();

    final LiveInstance liveInstance = accessor.getProperty(keyBuilder.liveInstance(instanceName));
    Assert.assertNotNull(liveInstance);
    Assert.assertEquals(liveInstance.getCurrentTaskThreadPoolSize(), testThreadPoolSize);

    // Clean up.
    manager.syncStop();
    deleteCluster(clusterName);
  }

  /*
   * Mocks PreConnectCallback to insert session expiry during ParticipantManager#handleNewSession()
   */
  static class BlockingPreConnectCallback implements PreConnectCallback {
    private final String instanceName;
    // Released (via countDown) by the test to let the first blocked session proceed.
    private final CountDownLatch startCountDown;
    // Released by the test to let the second session finish and create the live instance.
    private final CountDownLatch endCountDown;
    // Signals the test that onPreConnect has started.
    private final Semaphore semaphore;
    // False for the first invocation (session S1), true afterwards — so only
    // the second invocation additionally waits on endCountDown.
    private boolean canCreateLiveInstance;

    BlockingPreConnectCallback(String instanceName, CountDownLatch startCountdown,
        CountDownLatch endCountdown, Semaphore semaphore) {
      this.instanceName = instanceName;
      this.startCountDown = startCountdown;
      this.endCountDown = endCountdown;
      this.semaphore = semaphore;
    }

    @Override
    public void onPreConnect() {
      LOG.info("Handling new session for instance: {}", instanceName);
      // Tell the test we are inside the pre-connect window.
      semaphore.release();
      try {
        LOG.info("Waiting session expiry to happen.");
        startCountDown.await();
        if (canCreateLiveInstance) {
          LOG.info("Waiting to continue creating live instance.");
          endCountDown.await();
        }
      } catch (InterruptedException ex) {
        LOG.error("Interrupted in waiting", ex);
      }
      canCreateLiveInstance = true;
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
import org.apache.hadoop.yarn.server.utils.Lock;
import org.apache.hadoop.yarn.util.resource.ResourceCalculator;
import org.apache.hadoop.yarn.util.resource.Resources;
class CSQueueUtils {
private static final Log LOG = LogFactory.getLog(CSQueueUtils.class);
final static float EPSILON = 0.0001f;
/*
* Used only by tests
*/
public static void checkMaxCapacity(String queueName,
float capacity, float maximumCapacity) {
if (maximumCapacity < 0.0f || maximumCapacity > 1.0f) {
throw new IllegalArgumentException(
"Illegal value of maximumCapacity " + maximumCapacity +
" used in call to setMaxCapacity for queue " + queueName);
}
}
/*
* Used only by tests
*/
public static void checkAbsoluteCapacity(String queueName,
float absCapacity, float absMaxCapacity) {
if (absMaxCapacity < (absCapacity - EPSILON)) {
throw new IllegalArgumentException("Illegal call to setMaxCapacity. "
+ "Queue '" + queueName + "' has " + "an absolute capacity (" + absCapacity
+ ") greater than " + "its absolute maximumCapacity (" + absMaxCapacity
+ ")");
}
}
/**
* Check sanity of capacities:
* - capacity <= maxCapacity
* - absCapacity <= absMaximumCapacity
*/
private static void capacitiesSanityCheck(String queueName,
QueueCapacities queueCapacities) {
for (String label : queueCapacities.getExistingNodeLabels()) {
float capacity = queueCapacities.getCapacity(label);
float maximumCapacity = queueCapacities.getMaximumCapacity(label);
if (capacity > maximumCapacity) {
throw new IllegalArgumentException("Illegal queue capacity setting, "
+ "(capacity=" + capacity + ") > (maximum-capacity="
+ maximumCapacity + "). When label=[" + label + "]");
}
// Actually, this may not needed since we have verified capacity <=
// maximumCapacity. And the way we compute absolute capacity (abs(x) =
// cap(x) * cap(x.parent) * ...) is a monotone increasing function. But
// just keep it here to make sure our compute abs capacity method works
// correctly.
float absCapacity = queueCapacities.getAbsoluteCapacity(label);
float absMaxCapacity = queueCapacities.getAbsoluteMaximumCapacity(label);
if (absCapacity > absMaxCapacity) {
throw new IllegalArgumentException("Illegal queue capacity setting, "
+ "(abs-capacity=" + absCapacity + ") > (abs-maximum-capacity="
+ absMaxCapacity + "). When label=[" + label + "]");
}
}
}
public static float computeAbsoluteMaximumCapacity(
float maximumCapacity, CSQueue parent) {
float parentAbsMaxCapacity =
(parent == null) ? 1.0f : parent.getAbsoluteMaximumCapacity();
return (parentAbsMaxCapacity * maximumCapacity);
}
/**
* This method intends to be used by ReservationQueue, ReservationQueue will
* not appear in configuration file, so we shouldn't do load capacities
* settings in configuration for reservation queue.
*/
public static void updateAndCheckCapacitiesByLabel(String queuePath,
QueueCapacities queueCapacities, QueueCapacities parentQueueCapacities) {
updateAbsoluteCapacitiesByNodeLabels(queueCapacities, parentQueueCapacities);
capacitiesSanityCheck(queuePath, queueCapacities);
}
/**
* Do following steps for capacities
* - Load capacities from configuration
* - Update absolute capacities for new capacities
* - Check if capacities/absolute-capacities legal
*/
public static void loadUpdateAndCheckCapacities(String queuePath,
Set<String> accessibleLabels, CapacitySchedulerConfiguration csConf,
QueueCapacities queueCapacities, QueueCapacities parentQueueCapacities,
RMNodeLabelsManager nlm) {
loadCapacitiesByLabelsFromConf(queuePath, accessibleLabels, nlm,
queueCapacities, csConf);
updateAbsoluteCapacitiesByNodeLabels(queueCapacities, parentQueueCapacities);
capacitiesSanityCheck(queuePath, queueCapacities);
}
// Considered NO_LABEL, ANY and null cases
private static Set<String> normalizeAccessibleNodeLabels(Set<String> labels,
RMNodeLabelsManager mgr) {
Set<String> accessibleLabels = new HashSet<String>();
if (labels != null) {
accessibleLabels.addAll(labels);
}
if (accessibleLabels.contains(CommonNodeLabelsManager.ANY)) {
accessibleLabels.addAll(mgr.getClusterNodeLabels());
}
accessibleLabels.add(CommonNodeLabelsManager.NO_LABEL);
return accessibleLabels;
}
private static void loadCapacitiesByLabelsFromConf(String queuePath,
Set<String> labels, RMNodeLabelsManager mgr,
QueueCapacities queueCapacities, CapacitySchedulerConfiguration csConf) {
queueCapacities.clearConfigurableFields();
labels = normalizeAccessibleNodeLabels(labels, mgr);
for (String label : labels) {
if (label.equals(CommonNodeLabelsManager.NO_LABEL)) {
queueCapacities.setCapacity(CommonNodeLabelsManager.NO_LABEL,
csConf.getNonLabeledQueueCapacity(queuePath) / 100);
queueCapacities.setMaximumCapacity(CommonNodeLabelsManager.NO_LABEL,
csConf.getNonLabeledQueueMaximumCapacity(queuePath) / 100);
} else {
queueCapacities.setCapacity(label,
csConf.getLabeledQueueCapacity(queuePath, label) / 100);
queueCapacities.setMaximumCapacity(label,
csConf.getLabeledQueueMaximumCapacity(queuePath, label) / 100);
}
}
}
// Set absolute capacities for {capacity, maximum-capacity}
private static void updateAbsoluteCapacitiesByNodeLabels(
QueueCapacities queueCapacities, QueueCapacities parentQueueCapacities) {
for (String label : queueCapacities.getExistingNodeLabels()) {
float capacity = queueCapacities.getCapacity(label);
if (capacity > 0f) {
queueCapacities.setAbsoluteCapacity(
label,
capacity
* (parentQueueCapacities == null ? 1 : parentQueueCapacities
.getAbsoluteCapacity(label)));
}
float maxCapacity = queueCapacities.getMaximumCapacity(label);
if (maxCapacity > 0f) {
queueCapacities.setAbsoluteMaximumCapacity(
label,
maxCapacity
* (parentQueueCapacities == null ? 1 : parentQueueCapacities
.getAbsoluteMaximumCapacity(label)));
}
}
}
@Lock(CSQueue.class)
public static void updateQueueStatistics(
final ResourceCalculator calculator,
final CSQueue childQueue, final CSQueue parentQueue,
final Resource clusterResource, final Resource minimumAllocation) {
Resource queueLimit = Resources.none();
Resource usedResources = childQueue.getUsedResources();
float absoluteUsedCapacity = 0.0f;
float usedCapacity = 0.0f;
if (Resources.greaterThan(
calculator, clusterResource, clusterResource, Resources.none())) {
queueLimit =
Resources.multiply(clusterResource, childQueue.getAbsoluteCapacity());
absoluteUsedCapacity =
Resources.divide(calculator, clusterResource,
usedResources, clusterResource);
usedCapacity =
Resources.equals(queueLimit, Resources.none()) ? 0 :
Resources.divide(calculator, clusterResource,
usedResources, queueLimit);
}
childQueue.setUsedCapacity(usedCapacity);
childQueue.setAbsoluteUsedCapacity(absoluteUsedCapacity);
Resource available = Resources.subtract(queueLimit, usedResources);
childQueue.getMetrics().setAvailableResourcesToQueue(
Resources.max(
calculator,
clusterResource,
available,
Resources.none()
)
);
}
}
| |
package devopsdistilled.operp.server.data.service.impl;
import java.io.Serializable;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

import org.springframework.beans.MutablePropertyValues;
import org.springframework.beans.factory.config.AutowireCapableBeanFactory;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.GenericBeanDefinition;
import org.springframework.context.ApplicationContext;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.remoting.RemoteConnectFailureException;
import org.springframework.remoting.rmi.RmiProxyFactoryBean;

import devopsdistilled.operp.client.abstracts.EntityModel;
import devopsdistilled.operp.server.ServerApp;
import devopsdistilled.operp.server.data.entity.Entiti;
import devopsdistilled.operp.server.data.service.EntityService;
public abstract class AbstractEntityService<E extends Entiti<?>, ID extends Serializable, ER extends JpaRepository<E, ID>, EM extends EntityModel<E, ?, ?>>
implements EntityService<E, ID> {
private static final long serialVersionUID = 4892118695516828793L;
protected abstract ER getRepo();
protected List<EM> entityModels = new LinkedList<>();
public void registerEntityModel(EM entityModel) {
entityModels.add(entityModel);
}
public void removeEntityModel(EM entityModel) {
int i = entityModels.indexOf(entityModel);
if (i >= 0) {
entityModels.remove(i);
}
}
@Override
public void notifyClientsForUpdate() {
for (EM entityModel : entityModels) {
try {
entityModel.update();
} catch (RemoteConnectFailureException e) {
entityModels.remove(entityModel);
}
}
}
@Override
public boolean isEntityNameExists(String entityName) {
E entity = findByEntityName(entityName);
if (entity != null)
return true;
return false;
}
@Override
public boolean isEntityNameValidForTheEntity(ID id, String entityName) {
E entity = findByEntityName(entityName);
if (entity == null)
return true;
if (entity.id().equals(id))
return true;
return false;
}
protected abstract E findByEntityName(String entityName);
@Override
public List<E> findAll() {
return getRepo().findAll();
}
@Override
public List<E> findAll(Sort sort) {
return getRepo().findAll(sort);
}
@Override
public <S extends E> List<S> save(Iterable<S> entities) {
return getRepo().save(entities);
}
@Override
public void flush() {
getRepo().flush();
}
@Override
public E saveAndFlush(E entity) {
return getRepo().saveAndFlush(entity);
}
@Override
public void deleteInBatch(Iterable<E> entities) {
getRepo().deleteInBatch(entities);
}
@Override
public void deleteAllInBatch() {
getRepo().deleteAllInBatch();
}
@Override
public Page<E> findAll(Pageable pageable) {
return getRepo().findAll(pageable);
}
@Override
public <S extends E> S save(S entity) {
return getRepo().save(entity);
}
@Override
public E findOne(ID id) {
return getRepo().findOne(id);
}
@Override
public boolean exists(ID id) {
return getRepo().exists(id);
}
@Override
public long count() {
return getRepo().count();
}
@Override
public void delete(ID id) {
getRepo().delete(id);
}
@Override
public void delete(E entity) {
getRepo().delete(entity);
}
@Override
public void delete(Iterable<? extends E> entities) {
getRepo().delete(entities);
}
@Override
public void deleteAll() {
getRepo().deleteAll();
}
@Override
public Iterable<E> findAll(Iterable<ID> ids) {
return getRepo().findAll(ids);
}
@Override
public void registerClient(String clientAddress) {
System.out.println("Client from " + clientAddress);
ApplicationContext context = ServerApp.getApplicationContext();
AutowireCapableBeanFactory factory = context
.getAutowireCapableBeanFactory();
BeanDefinitionRegistry registry = (BeanDefinitionRegistry) factory;
GenericBeanDefinition beanDefinition = new GenericBeanDefinition();
beanDefinition.setBeanClass(RmiProxyFactoryBean.class);
beanDefinition.setAutowireCandidate(true);
Class<EM> entityModelInterfaceClass = getEntityModelInterfaceClass();
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.addPropertyValue("serviceInterface",
entityModelInterfaceClass);
propertyValues.addPropertyValue("serviceUrl", "rmi://" + clientAddress
+ ":1099/" + entityModelInterfaceClass.getCanonicalName());
beanDefinition.setPropertyValues(propertyValues);
registry.registerBeanDefinition(
entityModelInterfaceClass.getCanonicalName(), beanDefinition);
EM entityModel = context.getBean(entityModelInterfaceClass);
registerEntityModel(entityModel);
System.out.println(entityModel);
}
@SuppressWarnings("unchecked")
protected Class<EM> getEntityModelInterfaceClass() {
Type superclass = getClass().getGenericSuperclass();
Type[] typeArguments = ((ParameterizedType) superclass)
.getActualTypeArguments();
Class<EM> observerClass = (Class<EM>) (typeArguments[typeArguments.length - 1]);
return observerClass;
}
}
| |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.kernel.impl.util;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import org.neo4j.kernel.lifecycle.LifecycleAdapter;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static org.neo4j.helpers.NamedThreadFactory.daemon;
import static org.neo4j.kernel.impl.util.DebugUtil.trackTest;
import static org.neo4j.kernel.impl.util.JobScheduler.Group.NO_METADATA;
/**
 * {@link JobScheduler} backed by a cached thread pool for one-off pooled jobs
 * and a small {@link ScheduledThreadPoolExecutor} for delayed/recurring jobs.
 * Not usable until {@link #init()} has been called.
 */
public class Neo4jJobScheduler extends LifecycleAdapter implements JobScheduler
{
    private ExecutorService globalPool;
    private ScheduledThreadPoolExecutor scheduledExecutor;

    @Override
    public void init()
    {
        this.globalPool = newCachedThreadPool( daemon( "neo4j.Pooled" + trackTest() ) );
        this.scheduledExecutor = new ScheduledThreadPoolExecutor( 2, daemon( "neo4j.Scheduled" + trackTest() ) );
    }

    @Override
    public Executor executor( final Group group )
    {
        // Adapts this scheduler to the Executor interface for the given group.
        return new Executor()
        {
            @Override
            public void execute( Runnable command )
            {
                schedule( group, command );
            }
        };
    }

    @Override
    public ThreadFactory threadFactory( final Group group )
    {
        return new ThreadFactory()
        {
            @Override
            public Thread newThread( Runnable r )
            {
                return createNewThread( group, r, NO_METADATA );
            }
        };
    }

    @Override
    public JobHandle schedule( Group group, Runnable job )
    {
        return schedule( group, job, NO_METADATA );
    }

    @Override
    public JobHandle schedule( Group group, Runnable job, Map<String,String> metadata )
    {
        if ( globalPool == null )
        {
            throw new RejectedExecutionException( "Scheduler is not started" );
        }

        switch( group.strategy() )
        {
        case POOLED:
            return new PooledJobHandle( this.globalPool.submit( job ) );
        case NEW_THREAD:
            Thread thread = createNewThread( group, job, metadata );
            thread.start();
            return new SingleThreadHandle( thread );
        default:
            throw new IllegalArgumentException( "Unsupported strategy for scheduling job: " + group.strategy() );
        }
    }

    @Override
    public JobHandle scheduleRecurring( Group group, final Runnable runnable, long period, TimeUnit timeUnit )
    {
        return scheduleRecurring( group, runnable, 0, period, timeUnit );
    }

    @Override
    public JobHandle scheduleRecurring( Group group, final Runnable runnable, long initialDelay, long period,
                                        TimeUnit timeUnit )
    {
        // Consistent with schedule(): reject instead of NPE-ing when not started.
        if ( scheduledExecutor == null )
        {
            throw new RejectedExecutionException( "Scheduler is not started" );
        }

        switch ( group.strategy() )
        {
        case POOLED:
            return new PooledJobHandle( scheduledExecutor.scheduleAtFixedRate( runnable, initialDelay, period, timeUnit ) );
        default:
            throw new IllegalArgumentException( "Unsupported strategy to use for recurring jobs: " + group.strategy() );
        }
    }

    @Override
    public JobHandle schedule( Group group, final Runnable runnable, long initialDelay, TimeUnit timeUnit )
    {
        // Consistent with schedule(): reject instead of NPE-ing when not started.
        if ( scheduledExecutor == null )
        {
            throw new RejectedExecutionException( "Scheduler is not started" );
        }

        switch ( group.strategy() )
        {
        case POOLED:
            return new PooledJobHandle( scheduledExecutor.schedule( runnable, initialDelay, timeUnit ) );
        default:
            throw new IllegalArgumentException( "Unsupported strategy to use for delayed jobs: " + group.strategy() );
        }
    }

    /**
     * Shuts both pools down, waiting up to 5 seconds for each. Any failure is
     * collected and rethrown after both pools have been given a chance to stop.
     */
    @Override
    public void shutdown()
    {
        RuntimeException exception = null;
        try
        {
            if( globalPool != null)
            {
                globalPool.shutdownNow();
                globalPool.awaitTermination( 5, TimeUnit.SECONDS );
                globalPool = null;
            }
        } catch(RuntimeException e)
        {
            exception = e;
        }
        catch ( InterruptedException e )
        {
            // Restore the interrupt flag so callers up the stack can observe it.
            Thread.currentThread().interrupt();
            exception = new RuntimeException(e);
        }

        try
        {
            if(scheduledExecutor != null)
            {
                scheduledExecutor.shutdown();
                scheduledExecutor.awaitTermination( 5, TimeUnit.SECONDS );
                scheduledExecutor = null;
            }
        } catch(RuntimeException e)
        {
            exception = e;
        }
        catch ( InterruptedException e )
        {
            // Restore the interrupt flag so callers up the stack can observe it.
            Thread.currentThread().interrupt();
            exception = new RuntimeException(e);
        }

        if(exception != null)
        {
            throw new RuntimeException( "Unable to shut down job scheduler properly.", exception);
        }
    }

    /**
     * Used to spin up new threads for groups or access-patterns that don't use the pooled thread options.
     * The returned thread is not started, to allow users to modify it before setting it in motion.
     */
    private Thread createNewThread( Group group, Runnable job, Map<String,String> metadata )
    {
        Thread thread = new Thread( null, job, group.threadName( metadata ) );
        thread.setDaemon( true );
        return thread;
    }

    /** Handle for jobs submitted to one of the executors. */
    private static class PooledJobHandle implements JobHandle
    {
        private final Future<?> job;

        public PooledJobHandle( Future<?> job )
        {
            this.job = job;
        }

        @Override
        public void cancel( boolean mayInterruptIfRunning )
        {
            job.cancel( mayInterruptIfRunning );
        }
    }

    /** Handle for jobs running on their own dedicated thread. */
    private static class SingleThreadHandle implements JobHandle
    {
        private final Thread thread;

        public SingleThreadHandle( Thread thread )
        {
            this.thread = thread;
        }

        @Override
        public void cancel( boolean mayInterruptIfRunning )
        {
            if ( mayInterruptIfRunning )
            {
                thread.interrupt();
            }
        }
    }
}
| |
package com.freud.tool;
import java.awt.event.ActionListener;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.naming.ConfigurationException;
import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JTabbedPane;
import javax.swing.UIManager;
import javax.swing.UnsupportedLookAndFeelException;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.dom4j.DocumentException;
import com.freud.tool.extension.MenuItemActionListener;
import com.freud.tool.extension.ShutterPanel;
import com.freud.tool.ui.model.MenuItemProp;
import com.freud.tool.ui.model.MenuProp;
import com.freud.tool.ui.model.ShutterProp;
import com.freud.tool.ui.utils.ToolsUtil;
import com.fu.log.FULogger;
import com.fu.log.LogLevel;
/**
* Entrance Class
*
* @author Freud
*
*/
public class BeyondTools {
	/**
	 * Main Frame Drawer
	 */
	private JFrame mainFrame;
	/**
	 * Menu bar boxes
	 */
	private JMenuBar menuBar;
	/**
	 * MenuItem Listeners
	 */
	// NOTE(review): not referenced anywhere inside this class — presumably
	// consumed by extension code elsewhere; confirm before removing.
	static Map<String, MenuItemActionListener> menuItemActionListenerMap = new HashMap<String, MenuItemActionListener>();
	/**
	 * Shutter panel map
	 */
	static Map<String, ShutterPanel> shutterPanelMap = new HashMap<String, ShutterPanel>();
	/**
	 * Current selected panel name
	 */
	// Updated by the tabbed-pane ChangeListener whenever the selection changes.
	public static String currentPanelName;

	/**
	 * Installs the Metal look-and-feel before any UI components are built.
	 */
	public BeyondTools() throws ClassNotFoundException, InstantiationException,
			IllegalAccessException, UnsupportedLookAndFeelException {
		UIManager.setLookAndFeel("javax.swing.plaf.metal.MetalLookAndFeel");
	}

	/**
	 * Initialize the log level
	 */
	private void initLogLevel() {
		FULogger.setConsoleLevel(LogLevel.INFO);
		FULogger.setFileLevel(LogLevel.FATAL);
	}

	/**
	 * Initialize UI Components: builds the main frame, menu bar, and shutter
	 * panels, then shows the frame.
	 *
	 * @throws ConfigurationException
	 * @throws DocumentException
	 * @throws IOException
	 */
	public void init() throws ConfigurationException, DocumentException,
			IOException {
		/**
		 * Main Component
		 */
		mainFrame = new JFrame();
		mainFrame.setBounds(100, 100, 700, 500);
		// mainFrame.setVisible(true);
		mainFrame.setTitle("Utility Tools");
		mainFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
		mainFrame.setResizable(false);
		// Absolute positioning throughout; all bounds are set manually below.
		mainFrame.setLayout(null);
		menuBar = new JMenuBar();
		menuBar.setBounds(0, 0, 700, 30);
		/**
		 * Initialize the menu bars
		 */
		initMenus();
		/**
		 * Initialize the shutters
		 */
		initShutters();
		mainFrame.setJMenuBar(menuBar);
		mainFrame.setVisible(true);
	}

	/**
	 * Add shutterPanel
	 *
	 * @param name panel name used as the lookup key
	 * @param pane panel instance to register
	 */
	private void addShutterPane(String name, ShutterPanel pane) {
		shutterPanelMap.put(name, pane);
	}

	/**
	 * Get shutter panel
	 *
	 * @param name panel name used at registration time
	 * @return the registered panel, or null if none with that name
	 */
	public static ShutterPanel getShutterPane(String name) {
		return shutterPanelMap.get(name);
	}

	/**
	 * Initialize the Menu and Menu items. Each menu item's ActionListener
	 * class is instantiated reflectively from configuration; a failure to
	 * build one item or menu is logged and does not abort the others.
	 *
	 * @throws ConfigurationException
	 * @throws DocumentException
	 * @throws IOException
	 */
	public void initMenus() throws ConfigurationException, DocumentException,
			IOException {
		/**
		 * Get configured menu and menu items
		 */
		List<MenuProp> list = ToolsUtil.getToolBarsConfigure();
		for (MenuProp menu : list) {
			try {
				JMenu jMenu = new JMenu(menu.getName());
				jMenu.setName(menu.getName());
				jMenu.setLayout(null);
				jMenu.setBounds(0, 0, 100, 30);
				for (MenuItemProp menuItem : menu.getDescendants()) {
					try {
						JMenuItem jMenuItem = new JMenuItem(menuItem.getName());
						jMenuItem.setName(menuItem.getName());
						jMenuItem.setBounds(0, 0, 100, 30);
						// Listener class name comes from configuration and is
						// instantiated via its no-arg constructor.
						jMenuItem
								.addActionListener(((ActionListener) Class
										.forName(menuItem.getClassName())
										.newInstance()));
						jMenu.add(jMenuItem);
					} catch (Exception e) {
						FULogger.log(
								LogLevel.ERROR,
								new Date(),
								this.getClass(),
								"MenuItem : [" + menuItem.getName()
										+ "] at Menu:[" + menu.getName()
										+ "] was initialized failed!\r\n"
										+ e.getMessage(), e);
					}
				}
				menuBar.add(jMenu);
			} catch (Exception e) {
				FULogger.log(
						LogLevel.ERROR,
						new Date(),
						this.getClass(),
						"Menu:[" + menu.getName()
								+ "] was initialized failed!\r\n"
								+ e.getMessage(), e);
			}
		}
		mainFrame.repaint();
	}

	/**
	 * Initialize the shutters panel: builds one tab per configured shutter,
	 * instantiating each panel class reflectively. A failing panel is logged
	 * and skipped. Also records the selected tab's name in currentPanelName.
	 *
	 * @throws DocumentException
	 * @throws IOException
	 */
	public void initShutters() throws DocumentException, IOException {
		List<ShutterProp> list = ToolsUtil.getShutterConfigure();
		JTabbedPane tabbedPane = new JTabbedPane();
		tabbedPane.setBounds(0, 0, 700, 470);
		tabbedPane.addChangeListener(new ChangeListener() {
			@Override
			public void stateChanged(ChangeEvent event) {
				JTabbedPane pane = (JTabbedPane) event.getSource();
				BeyondTools.currentPanelName = pane.getSelectedComponent()
						.getName();
			}
		});
		for (ShutterProp shutter : list) {
			try {
				ShutterPanel pane = (ShutterPanel) (Class.forName(shutter
						.getClassName()).newInstance());
				pane.init();
				String name = shutter.getName();
				pane.setName(name);
				tabbedPane.addTab(name, pane);
				addShutterPane(name, pane);
			} catch (Exception e) {
				FULogger.log(
						LogLevel.ERROR,
						new Date(),
						this.getClass(),
						"Panel [" + shutter.getName()
								+ "] cannot be initilized!\r\n"
								+ e.getMessage(), e);
			}
		}
		mainFrame.add(tabbedPane);
		mainFrame.repaint();
	}

	/**
	 * Start the beyond tool from here
	 *
	 * @param args
	 * @throws ConfigurationException
	 * @throws DocumentException
	 * @throws IOException
	 */
	public void start(String[] args) throws ConfigurationException,
			DocumentException, IOException {
		/** Log */
		initLogLevel();
		/** UI */
		init();
	}

	/**
	 * Deprecated, Due to need create a new classloader to load the class under
	 * bin folder
	 *
	 * @param args
	 * @throws UnsupportedLookAndFeelException
	 * @throws IllegalAccessException
	 * @throws InstantiationException
	 * @throws ClassNotFoundException
	 */
	public static void main(String[] args) throws ClassNotFoundException,
			InstantiationException, IllegalAccessException,
			UnsupportedLookAndFeelException {
		try {
			new BeyondTools().start(args);
		} catch (ConfigurationException e) {
			e.printStackTrace();
		} catch (DocumentException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}
| |
/********************************************************************************
* Copyright (c) 2011-2017 Red Hat Inc. and/or its affiliates and others
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* http://www.apache.org/licenses/LICENSE-2.0
*
* SPDX-License-Identifier: Apache-2.0
********************************************************************************/
package org.eclipse.ceylon.compiler.typechecker.analyzer;
import static java.lang.Integer.parseInt;
import static org.eclipse.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.correctionMessage;
import static org.eclipse.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.getPackageTypeDeclaration;
import static org.eclipse.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.getPackageTypedDeclaration;
import static org.eclipse.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.getTypeArguments;
import static org.eclipse.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.getTypeDeclaration;
import static org.eclipse.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.getTypeMember;
import static org.eclipse.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.getTypedDeclaration;
import static org.eclipse.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.isVeryAbstractClass;
import static org.eclipse.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.memberCorrectionMessage;
import static org.eclipse.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.typeParametersString;
import static org.eclipse.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.unwrapAliasedTypeConstructor;
import static org.eclipse.ceylon.compiler.typechecker.tree.TreeUtil.name;
import static org.eclipse.ceylon.compiler.typechecker.tree.TreeUtil.unwrapExpressionUntilTerm;
import static org.eclipse.ceylon.model.typechecker.model.ModelUtil.appliedType;
import static org.eclipse.ceylon.model.typechecker.model.ModelUtil.getContainingClassOrInterface;
import static org.eclipse.ceylon.model.typechecker.model.ModelUtil.getNativeDeclaration;
import static org.eclipse.ceylon.model.typechecker.model.ModelUtil.getNativeHeader;
import static org.eclipse.ceylon.model.typechecker.model.ModelUtil.intersection;
import static org.eclipse.ceylon.model.typechecker.model.ModelUtil.intersectionOfSupertypes;
import static org.eclipse.ceylon.model.typechecker.model.ModelUtil.isNativeForWrongBackend;
import static org.eclipse.ceylon.model.typechecker.model.ModelUtil.isTypeUnknown;
import static org.eclipse.ceylon.model.typechecker.model.ModelUtil.union;
import static org.eclipse.ceylon.model.typechecker.model.SiteVariance.IN;
import static org.eclipse.ceylon.model.typechecker.model.SiteVariance.OUT;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.ceylon.common.Backends;
import org.eclipse.ceylon.compiler.typechecker.tree.Node;
import org.eclipse.ceylon.compiler.typechecker.tree.Tree;
import org.eclipse.ceylon.compiler.typechecker.tree.Visitor;
import org.eclipse.ceylon.model.typechecker.model.Cancellable;
import org.eclipse.ceylon.model.typechecker.model.Class;
import org.eclipse.ceylon.model.typechecker.model.ClassAlias;
import org.eclipse.ceylon.model.typechecker.model.ClassOrInterface;
import org.eclipse.ceylon.model.typechecker.model.Constructor;
import org.eclipse.ceylon.model.typechecker.model.Declaration;
import org.eclipse.ceylon.model.typechecker.model.Function;
import org.eclipse.ceylon.model.typechecker.model.FunctionOrValue;
import org.eclipse.ceylon.model.typechecker.model.Interface;
import org.eclipse.ceylon.model.typechecker.model.NothingType;
import org.eclipse.ceylon.model.typechecker.model.Parameter;
import org.eclipse.ceylon.model.typechecker.model.Scope;
import org.eclipse.ceylon.model.typechecker.model.Specification;
import org.eclipse.ceylon.model.typechecker.model.Type;
import org.eclipse.ceylon.model.typechecker.model.TypeAlias;
import org.eclipse.ceylon.model.typechecker.model.TypeDeclaration;
import org.eclipse.ceylon.model.typechecker.model.TypeParameter;
import org.eclipse.ceylon.model.typechecker.model.TypedDeclaration;
import org.eclipse.ceylon.model.typechecker.model.Unit;
import org.eclipse.ceylon.model.typechecker.model.UnknownType;
import org.eclipse.ceylon.model.typechecker.model.Value;
/**
* Second phase of type analysis.
* Scan the compilation unit looking for literal type
* declarations and maps them to the associated model
* objects. Also builds up a list of imports for the
* compilation unit. Finally, assigns types to the
* associated model objects of declarations declared
* using an explicit type (this must be done in this
* phase, since shared declarations may be used out of
* order in expressions).
*
* @author Gavin King
*
*/
public class TypeVisitor extends Visitor {
// the compilation unit currently being analyzed; assigned either by
// the two-arg constructor or on visiting Tree.CompilationUnit
private Unit unit;
// lets the caller abort long-running analysis; may be null
private Cancellable cancellable;
// transient traversal state: where in the tree we currently are.
// Used by visitSimpleType() to decide whether a constructor name
// may legally appear in type position.
private boolean inDelegatedConstructor;
private boolean inTypeLiteral;
private boolean inExtendsOrClassAlias;
// The unit is picked up later from the Tree.CompilationUnit node.
public TypeVisitor(Cancellable cancellable) {
this.cancellable = cancellable;
}
/**
 * Creates a visitor bound to the given unit up front.
 */
public TypeVisitor(Unit unit, Cancellable cancellable) {
    this(cancellable);
    this.unit = unit;
}
// Capture the unit for the whole traversal before visiting children.
@Override public void visit(Tree.CompilationUnit that) {
unit = that.getUnit();
super.visit(that);
}
/**
 * A parenthesized type {@code (T)} carries exactly the type of the
 * grouped type expression.
 */
@Override // added: every sibling visit overload is annotated
public void visit(Tree.GroupedType that) {
    super.visit(that);
    Tree.StaticType type = that.getType();
    if (type!=null) {
        that.setTypeModel(type.getTypeModel());
    }
}
@Override
public void visit(Tree.UnionType that) {
    super.visit(that);
    // Collect the models of all child types. addToUnion() would try
    // to canonicalize, which is not yet possible in this phase.
    List<Tree.StaticType> children = that.getStaticTypes();
    List<Type> models = new ArrayList<Type>(children.size());
    for (Tree.StaticType child: children) {
        Type model = child.getTypeModel();
        if (model!=null) {
            models.add(model);
        }
    }
    that.setTypeModel(union(models, unit));
}
@Override
public void visit(Tree.IntersectionType that) {
    super.visit(that);
    // Collect the models of all child types. addToIntersection()
    // would canonicalize, which is not yet possible in this phase.
    List<Tree.StaticType> children = that.getStaticTypes();
    List<Type> models = new ArrayList<Type>(children.size());
    for (Tree.StaticType child: children) {
        Type model = child.getTypeModel();
        if (model!=null) {
            models.add(model);
        }
    }
    that.setTypeModel(intersection(models, unit));
}
// T[] / T[n]: without a length this is an open sequential type;
// with a literal length it becomes a fixed-size homogeneous tuple.
@Override
public void visit(Tree.SequenceType that) {
super.visit(that);
Tree.StaticType elementType = that.getElementType();
Tree.NaturalLiteral length = that.getLength();
Type et = elementType.getTypeModel();
if (et!=null) {
Type t;
if (length==null) {
t = unit.getSequentialType(et);
}
else {
final int len;
try {
len = parseInt(length.getText());
}
catch (NumberFormatException nfe) {
length.addError("must be a positive decimal integer");
return;
}
if (len<1) {
length.addError("must be positive");
return;
}
// cap keeps the generated Tuple nesting bounded
if (len>1000) {
length.addError("may not be greater than 1000");
return;
}
Class td = unit.getTupleDeclaration();
t = unit.getEmptyType();
// build Tuple<T,T,rest> of depth len, innermost (Empty) first
for (int i=0; i<len; i++) {
t = appliedType(td, et, et, t);
}
}
that.setTypeModel(t);
}
}
@Override
public void visit(Tree.IterableType that) {
    super.visit(that);
    Tree.Type elem = that.getElementType();
    if (elem==null) {
        // no element type at all: recover with {Nothing*} and report
        that.setTypeModel(unit.getIterableType(unit.getNothingType()));
        that.addError("iterable type must have an element type");
        return;
    }
    if (!(elem instanceof Tree.SequencedType)) {
        that.addError("malformed iterable type");
        return;
    }
    // {T*} maps to Iterable<T>, {T+} to a nonempty iterable
    Tree.SequencedType seq = (Tree.SequencedType) elem;
    Type elemModel = seq.getType().getTypeModel();
    if (elemModel!=null) {
        that.setTypeModel(seq.getAtLeastOne() ?
                unit.getNonemptyIterableType(elemModel) :
                unit.getIterableType(elemModel));
    }
}
@Override
public void visit(Tree.OptionalType that) {
    super.visit(that);
    // X? is sugar for Null|X
    List<Type> cases = new ArrayList<Type>(2);
    cases.add(unit.getNullType());
    Type definite = that.getDefiniteType().getTypeModel();
    if (definite!=null) {
        cases.add(definite);
    }
    that.setTypeModel(union(cases, unit));
}
@Override
public void visit(Tree.EntryType that) {
    super.visit(that);
    // K->V sugar; a missing item type degrades to "unknown"
    Type key = that.getKeyType().getTypeModel();
    Type item = that.getValueType()!=null ?
            that.getValueType().getTypeModel() :
            new UnknownType(unit).getType();
    that.setTypeModel(unit.getEntryType(key, item));
}
// An anonymous type constructor expression: the alias's extended
// type is the body, and the resulting type is flagged as a type
// constructor so it can only be applied, not used directly.
@Override
public void visit(Tree.TypeConstructor that) {
super.visit(that);
TypeAlias ta = that.getDeclarationModel();
ta.setExtendedType(that.getType().getTypeModel());
Type type = ta.getType();
type.setTypeConstructor(true);
that.setTypeModel(type);
}
@Override
public void visit(Tree.FunctionType that) {
    super.visit(that);
    // R(P1,P2) is sugar for Callable<R,[P1,P2]>
    Tree.StaticType ret = that.getReturnType();
    if (ret!=null) {
        Type argTuple = getTupleType(that.getArgumentTypes(), unit);
        Interface callable = unit.getCallableDeclaration();
        that.setTypeModel(appliedType(callable,
                ret.getTypeModel(), argTuple));
    }
}
@Override
public void visit(Tree.TupleType that) {
    super.visit(that);
    // [X,Y,Z] is sugar for nested Tuple instantiations
    that.setTypeModel(getTupleType(that.getElementTypes(), unit));
}
// Maps a literal tuple type like [X, Y=, Z*] onto Tuple/Sequential
// instantiations. Tracks the position of the first defaulted (Y=)
// element and whether the last element is variadic (Z* / Z+), and
// reports elements that appear in an illegal order. A spread
// element (*Zs) short-circuits: its own type is the whole tuple.
static Type getTupleType(List<Tree.Type> ets,
Unit unit) {
List<Type> args =
new ArrayList<Type>
(ets.size());
boolean sequenced = false;
boolean atleastone = false;
int firstDefaulted = -1;
for (int i=0; i<ets.size(); i++) {
Tree.Type st = ets.get(i);
Type arg = st==null ?
null : st.getTypeModel();
if (arg==null) {
// unresolvable element: degrade to "unknown"
arg = new UnknownType(unit).getType();
}
else if (st instanceof Tree.SpreadType) {
//currently we only allow a
//single spread type, but in
//future we should also allow
//X, Y, *Zs
return st.getTypeModel();
}
else if (st instanceof Tree.DefaultedType) {
if (firstDefaulted==-1) {
firstDefaulted = i;
}
}
else if (st instanceof Tree.SequencedType) {
if (i!=ets.size()-1) {
st.addError("variant element must occur last in a tuple type");
}
else {
sequenced = true;
Tree.SequencedType sst =
(Tree.SequencedType) st;
atleastone = sst.getAtLeastOne();
arg = sst.getType().getTypeModel();
}
if (firstDefaulted!=-1 && atleastone) {
st.addError("nonempty variadic element must occur after defaulted elements in a tuple type");
}
}
else {
// a plain required element after a defaulted one is illegal
if (firstDefaulted!=-1) {
st.addError("required element must occur after defaulted elements in a tuple type");
}
}
args.add(arg);
}
return getTupleType(args, sequenced, atleastone,
firstDefaulted, unit);
}
//Note: quite similar to Unit.getTupleType(), but does
//      not canonicalize (since aliases are not yet
//      resolvable in this phase)
// Builds the Tuple chain right-to-left: starting from Empty (or a
// Sequence/Sequential tail when variadic), each element wraps the
// accumulated rest in Tuple<union-so-far, element, rest>. Elements
// at or after firstDefaulted also admit Empty via an extra union.
private static Type getTupleType(
List<Type> elemTypes,
boolean variadic, boolean atLeastOne,
int firstDefaulted,
Unit unit) {
Class tupleDeclaration = unit.getTupleDeclaration();
Type emptyType = unit.getEmptyType();
Type result = emptyType;
Type union = unit.getNothingType();
int last = elemTypes.size()-1;
for (int i=last; i>=0; i--) {
Type elemType = elemTypes.get(i);
//can't use addToUnion() here
union =
addToUncanonicalizedUnion(unit,
union, elemType);
if (variadic && i==last) {
result = atLeastOne ?
unit.getSequenceType(elemType) :
unit.getSequentialType(elemType);
}
else {
result =
appliedType(tupleDeclaration,
union, elemType, result);
if (firstDefaulted>=0 && i>=firstDefaulted) {
//can't use addToUnion() here
result =
addToUncanonicalizedUnion(unit,
result, emptyType);
}
}
}
return result;
}
// Folds one more case into a union without canonicalizing:
// Nothing is the identity element, Anything absorbs everything.
private static Type addToUncanonicalizedUnion(Unit unit,
        Type union, Type type) {
    if (union.isNothing() || type.isAnything()) {
        return type;
    }
    if (union.isAnything() || type.isNothing()) {
        return union;
    }
    List<Type> cases = new ArrayList<Type>();
    cases.add(type);
    cases.add(union);
    return union(cases, unit);
}
// Resolves an unqualified type name against the lexical scope (or
// the package root when package-qualified), reporting an undefined
// type unless the reference sits in code native to another backend.
@Override
public void visit(Tree.BaseType that) {
super.visit(that);
Tree.Identifier id = that.getIdentifier();
if (id!=null) {
String name = name(id);
Scope scope = that.getScope();
TypeDeclaration type;
if (that.getPackageQualified()) {
type = getPackageTypeDeclaration(name,
null, false, unit);
}
else {
type = getTypeDeclaration(scope, name,
null, false, unit);
}
if (type==null) {
if (!isNativeForWrongBackend(scope, unit)) {
that.addError("type is not defined: '"
+ name + "'"
+ correctionMessage(name, scope,
unit, cancellable),
102);
unit.setUnresolvedReferences();
}
}
else {
// handleNativeHeader() is defined elsewhere in this class;
// presumably swaps in the native header declaration — confirm
type = (TypeDeclaration)
handleNativeHeader(type, that);
Type outerType =
scope.getDeclaringType(type);
visitSimpleType(that, outerType, type);
}
}
}
/**
 * {@code super} in type position: denotes the intersection of the
 * supertypes of the containing type (within a constructor) or of the
 * type containing the current member type (otherwise).
 */
@Override // added: every sibling visit overload is annotated
public void visit(Tree.SuperType that) {
    //can't appear anywhere else in the tree, so no
    //explicit in-extends-clause check is needed
    Scope scope = that.getScope();
    ClassOrInterface ci =
            getContainingClassOrInterface(scope);
    if (ci!=null) {
        if (scope instanceof Constructor) {
            that.setTypeModel(intersectionOfSupertypes(ci));
        }
        else if (ci.isClassOrInterfaceMember()) {
            ClassOrInterface oci =
                    (ClassOrInterface)
                        ci.getContainer();
            that.setTypeModel(intersectionOfSupertypes(oci));
        }
        else {
            that.addError("super appears in extends for non-member class");
        }
    }
}
@Override
public void visit(Tree.MemberLiteral that) {
    super.visit(that);
    if (that.getType()==null) {
        return;
    }
    Type qualifier = that.getType().getTypeModel();
    if (qualifier==null) {
        return;
    }
    // a partially-unknown qualifier means its own type
    // arguments were not fully specified
    Tree.TypeArgumentList tal = that.getTypeArgumentList();
    if (tal!=null
            && isTypeUnknown(qualifier)
            && !qualifier.isUnknown()) {
        tal.addError("qualifying type does not fully-specify type arguments");
    }
}
// Resolves Outer.Inner: the context flags are saved and cleared
// before visiting children (the qualifier is not itself in a
// literal/extends/delegation position) and restored afterwards.
@Override
public void visit(Tree.QualifiedType that) {
boolean onl = inTypeLiteral;
boolean oiea = inExtendsOrClassAlias;
boolean oidc = inDelegatedConstructor;
inTypeLiteral = false;
inExtendsOrClassAlias = false;
inDelegatedConstructor = false;
super.visit(that);
inExtendsOrClassAlias = oiea;
inDelegatedConstructor = oidc;
inTypeLiteral = onl;
Tree.StaticType ot = that.getOuterType();
Type pt = ot.getTypeModel();
if (pt!=null) {
Tree.TypeArgumentList tal =
that.getTypeArgumentList();
if (that.getMetamodel() &&
tal!=null &&
isTypeUnknown(pt) &&
!pt.isUnknown()) {
tal.addError("qualifying type does not fully-specify type arguments");
}
TypeDeclaration d = pt.getDeclaration();
Tree.Identifier id = that.getIdentifier();
if (id!=null) {
String name = name(id);
Scope scope = that.getScope();
TypeDeclaration type =
getTypeMember(d, name,
null, false, unit, scope);
if (type==null) {
if (!isNativeForWrongBackend(scope, unit)) {
if (d.isMemberAmbiguous(name, unit, null, false)) {
that.addError("member type declaration is ambiguous: '" +
name + "' for type '" +
d.getName() + "'");
}
else {
that.addError("member type is not defined: '"
+ name + "' in type '"
+ d.getName() + "'"
+ memberCorrectionMessage(name, d,
null, unit, cancellable),
100);
unit.setUnresolvedReferences();
}
}
}
else {
visitSimpleType(that, pt, type);
// a static member type makes the qualifier a primary
if (type.isStatic()) {
ot.setStaticTypePrimary(true);
}
}
}
}
}
@Override
public void visit(Tree.TypeLiteral that) {
    // remember that children are inside a type literal
    inTypeLiteral = true;
    super.visit(that);
    inTypeLiteral = false;
    // literals that denote a declaration (rather than an applied
    // type) never yield a type constructor
    boolean denotesDeclaration =
            that instanceof Tree.InterfaceLiteral ||
            that instanceof Tree.ClassLiteral ||
            that instanceof Tree.AliasLiteral ||
            that instanceof Tree.TypeParameterLiteral;
    if (denotesDeclaration) {
        Tree.StaticType st = that.getType();
        if (st!=null) {
            Type model = st.getTypeModel();
            if (model!=null) {
                model.setTypeConstructor(false);
            }
        }
    }
}
// Shared finishing step for base and qualified types: rejects a
// constructor name used in type position (except in extends/class
// alias/constructor delegation), applies any type arguments,
// records use-site variance, and stores the resulting type and
// declaration on the node.
private void visitSimpleType(Tree.SimpleType that,
Type ot, TypeDeclaration dec) {
//check this isn't a constructor name masquerading
//as a type name via the Class.\Iconstructor syntax
if (dec.isConstructor()
//            //in a metamodel type literal, a constructor
//            //is allowed
//            && !inTypeLiteral
//for an extends clause or aliased class,
//either a class with parameters or a
//constructor is allowed
&& !inExtendsOrClassAlias
&& !inDelegatedConstructor) {
that.addError("constructor is not a type: '" +
dec.getName(unit) + "' is a constructor");
}
Tree.TypeArgumentList tal =
that.getTypeArgumentList();
if (tal!=null) {
dec = unwrapAliasedTypeConstructor(dec);
}
List<TypeParameter> params =
dec.getTypeParameters();
List<Type> typeArgs =
getTypeArguments(tal, ot, params);
//Note: we actually *check* these type arguments
//      later in ExpressionVisitor
Type pt = dec.appliedType(ot, typeArgs);
if (tal==null) {
if (!params.isEmpty()) {
// a bare generic name denotes a type constructor
pt.setTypeConstructor(true);
}
}
else {
//handled in ExpressionVisitor
/*if (params.isEmpty()) {
that.addError("type declaration does not accept type arguments: '" +
dec.getName(unit) +
"' is not a generic type");
}*/
tal.setTypeModels(typeArgs);
List<Tree.Type> args = tal.getTypes();
for (int i = 0;
i<args.size() &&
i<params.size();
i++) {
Tree.Type t = args.get(i);
if (t instanceof Tree.StaticType) {
Tree.StaticType st =
(Tree.StaticType) t;
Tree.TypeVariance variance =
st.getTypeVariance();
if (variance!=null) {
TypeParameter p = params.get(i);
String var = variance.getText();
if (var.equals("out")) {
pt.setVariance(p, OUT);
}
else if (var.equals("in")) {
pt.setVariance(p, IN);
}
if (!p.isInvariant()) {
//Type doesn't yet know
//how to reason about *runtime*
//instantiations of variant types
//since they are effectively
//invariant
variance.addUnsupportedError(
"use-site variant instantiation of declaration-site variant types is not supported: type parameter '" +
p.getName() + "' of '" +
dec.getName(unit) +
"' is declared " +
(p.isCovariant() ? "covariant" : "contravariant") +
" (remove the '" + var + "')");
}
}
}
}
}
that.setTypeModel(pt);
that.setDeclarationModel(dec);
}
@Override
public void visit(Tree.VoidModifier that) {
    // 'void' is modeled as Anything
    Class anything = unit.getAnythingDeclaration();
    if (anything!=null) {
        that.setTypeModel(anything.getType());
    }
}
@Override
public void visit(Tree.SequencedType that) {
    super.visit(that);
    // T* becomes a sequential type, T+ a nonempty sequence type
    Type elem = that.getType().getTypeModel();
    if (elem!=null) {
        that.setTypeModel(that.getAtLeastOne()
                ? unit.getSequenceType(elem)
                : unit.getSequentialType(elem));
    }
}
@Override
public void visit(Tree.DefaultedType that) {
    super.visit(that);
    // T= in a tuple type carries the type of T itself
    Type model = that.getType().getTypeModel();
    if (model!=null) {
        that.setTypeModel(model);
    }
}
@Override
public void visit(Tree.SpreadType that) {
    super.visit(that);
    // *T carries the type of T itself
    Tree.Type spread = that.getType();
    if (spread==null) {
        return;
    }
    Type model = spread.getTypeModel();
    if (model!=null) {
        that.setTypeModel(model);
    }
}
@Override
public void visit(Tree.TypedDeclaration that) {
    super.visit(that);
    TypedDeclaration model = that.getDeclarationModel();
    setType(that, that.getType(), model);
    if (model instanceof FunctionOrValue) {
        FunctionOrValue fov = (FunctionOrValue) model;
        // 'late' only makes sense for attributes, never parameters
        if (model.isLate() && fov.isParameter()) {
            that.addError("parameter may not be annotated late");
        }
    }
}
// A named argument with an explicit type gets its model typed too.
@Override
public void visit(Tree.TypedArgument that) {
super.visit(that);
setType(that, that.getType(),
that.getDeclarationModel());
}
// Assigns an explicitly declared type to the model. Inferred types
// (local modifiers such as 'value'/'function') are resolved later.
private void setType(Node that, Tree.Type type,
        TypedDeclaration td) {
    if (type==null) {
        that.addError("missing type of declaration: '" +
                td.getName() + "'");
        return;
    }
    if (type instanceof Tree.LocalModifier) {
        //if the type declaration is missing, we do type inference later
        return;
    }
    Type model = type.getTypeModel();
    if (model!=null) {
        td.setType(model);
    }
}
// object declaration: reset the anonymous class to extend Basic
// with no satisfied types (presumably clearing state from an
// earlier pass — confirm), then share its type with the value
// declaration and its type node.
@Override
public void visit(Tree.ObjectDefinition that) {
Class o = that.getAnonymousClass();
o.setExtendedType(unit.getBasicType());
o.getSatisfiedTypes().clear();
super.visit(that);
Type type = o.getType();
that.getDeclarationModel().setType(type);
that.getType().setTypeModel(type);
}
// object named argument: same treatment as Tree.ObjectDefinition —
// reset the anonymous class, then propagate its type to the value
// model and the type node.
@Override
public void visit(Tree.ObjectArgument that) {
Class o = that.getAnonymousClass();
o.setExtendedType(unit.getBasicType());
o.getSatisfiedTypes().clear();
super.visit(that);
Type type = o.getType();
that.getDeclarationModel().setType(type);
that.getType().setTypeModel(type);
}
// object expression: reset the anonymous class only; there is no
// separate value declaration or type node to propagate to.
@Override
public void visit(Tree.ObjectExpression that) {
Class o = that.getAnonymousClass();
o.setExtendedType(unit.getBasicType());
o.getSatisfiedTypes().clear();
super.visit(that);
}
// Class definition: default the extended type to Basic (unless the
// class is one of the very abstract root classes), then enforce
// that a class has either a parameter list or constructors — but
// not both — and that static members require constructors.
@Override
public void visit(Tree.ClassDefinition that) {
Class cd = that.getDeclarationModel();
if (!isVeryAbstractClass(that, unit)) {
cd.setExtendedType(unit.getBasicType());
}
else {
cd.setExtendedType(null);
}
cd.getSatisfiedTypes().clear();
super.visit(that);
Tree.ParameterList pl = that.getParameterList();
if (pl!=null) {
if (cd.hasConstructors()) {
pl.addError("class with parameters may not declare constructors: class '" +
cd.getName() +
"' has a parameter list and a constructor", 1002);
}
else if (cd.hasEnumerated()) {
pl.addError("class with parameters may not declare constructors: class '" +
cd.getName() +
"' has a parameter list and a value constructor", 1003);
}
else if (cd.hasStaticMembers()) {
pl.addError("class with parameters may not declare static members: class '" +
cd.getName() +
"' has a parameter list and a static member", 1003);
}
}
else {
if (!cd.hasConstructors() &&
!cd.hasEnumerated()) {
// No parameter list and no constructors or enumerated
// normally means something is wrong
boolean error = true;
// Check if the declaration is a native implementation
if (cd.isNativeImplementation()) {
Declaration hdr = getNativeHeader(cd);
// Check that it has a native header
if (hdr instanceof Class) {
Class hcd = (Class) hdr;
// And finally check if the header has any
// constructors or enumerated
if (hcd.hasConstructors() ||
hcd.hasEnumerated()) {
// In that case there's no error
error = false;
}
}
}
if (error) {
that.addError("class must have a parameter list or at least one constructor: class '" +
cd.getName() +
"' has neither parameter list nor constructor",
1001);
}
}
}
}
@Override
public void visit(Tree.InterfaceDefinition that) {
    // reset, then default the extended type to Object
    Interface model = that.getDeclarationModel();
    model.setExtendedType(null);
    model.getSatisfiedTypes().clear();
    Class object = unit.getObjectDeclaration();
    if (object!=null) {
        model.setExtendedType(object.getType());
    }
    super.visit(that);
}
// Type parameter: reset, default the upper bound to Anything, then
// validate a default type argument — it may not mention the very
// declaration that declares the parameter (a cycle).
@Override
public void visit(Tree.TypeParameterDeclaration that) {
TypeParameter p = that.getDeclarationModel();
p.setExtendedType(null);
p.getSatisfiedTypes().clear();
Class vd = unit.getAnythingDeclaration();
if (vd!=null) {
p.setExtendedType(vd.getType());
}
super.visit(that);
Tree.TypeSpecifier ts = that.getTypeSpecifier();
if (ts!=null) {
Tree.StaticType type = ts.getType();
if (type!=null) {
Type dta = type.getTypeModel();
Declaration dec = p.getDeclaration();
if (dta!=null &&
dta.involvesDeclaration(dec)) {
type.addError("default type argument involves parameterized type: '" +
dta.asString(unit) +
"' involves '" +
dec.getName(unit) +
"'");
// discard the cyclic default
dta = null;
}
p.setDefaultTypeArgument(dta);
}
}
}
// Walks the type parameters from last to first, accumulating the
// parameters seen so far (i.e. those declared at or after the
// current one); a default type argument that involves any of them
// presumably refers forward to a not-yet-declared parameter — confirm.
@Override
public void visit(Tree.TypeParameterList that) {
super.visit(that);
List<Tree.TypeParameterDeclaration> tpds =
that.getTypeParameterDeclarations();
List<TypeParameter> params =
new ArrayList<TypeParameter>
(tpds.size());
for (int i=tpds.size()-1; i>=0; i--) {
Tree.TypeParameterDeclaration tpd = tpds.get(i);
if (tpd!=null) {
TypeParameter tp =
tpd.getDeclarationModel();
Type dta = tp.getDefaultTypeArgument();
if (dta!=null) {
params.add(tp);
if (dta.involvesTypeParameters(params)) {
tpd.getTypeSpecifier()
.addError("default type argument involves a type parameter not yet declared");
}
}
}
}
}
// Class alias (class C => D or class C(...) => D(...)): validates
// that the alias has no extends/satisfies/cases of its own, that the
// aliased thing is a class (possibly via a callable constructor),
// and records the aliased class as the extended type.
@Override
public void visit(Tree.ClassDeclaration that) {
ClassAlias td =
(ClassAlias)
that.getDeclarationModel();
td.setExtendedType(null);
super.visit(that);
Tree.ClassSpecifier cs = that.getClassSpecifier();
if (cs==null) {
if (!td.isNativeHeader()) {
that.addError("missing class body or aliased class reference");
}
}
else {
Tree.ExtendedType et =
that.getExtendedType();
if (et!=null) {
et.addError("class alias may not extend a type");
}
Tree.SatisfiedTypes sts =
that.getSatisfiedTypes();
if (sts!=null) {
sts.addError("class alias may not satisfy a type");
}
Tree.CaseTypes cts =
that.getCaseTypes();
if (cts!=null) {
that.addError("class alias may not have cases or a self type");
}
Tree.SimpleType ct = cs.getType();
if (ct==null) {
//            that.addError("malformed aliased class");
}
else if (!(ct instanceof Tree.StaticType)) {
ct.addError("aliased type must be a class");
}
else {
Type type = ct.getTypeModel();
if (type!=null && !type.isUnknown()) {
TypeDeclaration dec =
type.getDeclaration();
td.setConstructor(dec);
if (dec.isConstructor()) {
// aliasing a callable constructor: step up to
// the class the constructor belongs to
if (dec.isValueConstructor()) {
ct.addError("aliases a value constructor");
}
else if (dec.isAbstract()) {
ct.addError("aliases a partial constructor: '" +
dec.getName(unit) +
"' is declared abstract");
}
if (td.isShared() && !dec.isShared()) {
ct.addError("shared alias of an unshared constructor: '" +
dec.getName(unit) +
"' is not shared");
}
type = type.getExtendedType();
dec = dec.getExtendedType()
.getDeclaration();
}
if (dec instanceof Class) {
td.setExtendedType(type);
}
else {
ct.addError("not a class: '" +
dec.getName(unit) + "'");
}
TypeDeclaration etd =
ct.getDeclarationModel();
if (etd==td) {
ct.addError("directly aliases itself: '" +
td.getName() + "'");
}
}
}
}
}
/**
 * Interface alias ({@code interface I => J}): validates the alias has
 * no satisfies/cases of its own and that the aliased type is an
 * interface, recording it as the extended type.
 */
@Override
public void visit(Tree.InterfaceDeclaration that) {
    Interface id = that.getDeclarationModel();
    id.setExtendedType(null);
    super.visit(that);
    Tree.TypeSpecifier typeSpecifier =
            that.getTypeSpecifier();
    if (typeSpecifier==null) {
        if (!id.isNativeHeader()) {
            that.addError("missing interface body or aliased interface reference");
        }
    }
    else {
        Tree.SatisfiedTypes sts =
                that.getSatisfiedTypes();
        if (sts!=null) {
            sts.addError("interface alias may not satisfy a type");
        }
        Tree.CaseTypes cts =
                that.getCaseTypes();
        if (cts!=null) {
            //fixed copy-paste from the class alias case: this is
            //an interface alias
            that.addError("interface alias may not have cases or a self type");
        }
        Tree.StaticType et =
                typeSpecifier.getType();
        if (et==null) {
//            that.addError("malformed aliased interface");
        }
        else if (!(et instanceof Tree.StaticType)) {
            //NOTE(review): 'et' is declared Tree.StaticType, so this
            //branch looks unreachable — confirm against the grammar
            typeSpecifier.addError("aliased type must be an interface");
        }
        else {
            Type type = et.getTypeModel();
            if (type!=null && !type.isUnknown()) {
                TypeDeclaration dec =
                        type.getDeclaration();
                if (dec instanceof Interface) {
                    id.setExtendedType(type);
                }
                else {
                    et.addError("not an interface: '" +
                            dec.getName(unit) +
                            "'");
                }
            }
        }
    }
}
@Override
public void visit(Tree.TypeAliasDeclaration that) {
    TypeAlias alias = that.getDeclarationModel();
    alias.setExtendedType(null);
    super.visit(that);
    Tree.TypeSpecifier spec = that.getTypeSpecifier();
    if (spec==null) {
        // a native header may legitimately omit the aliased type
        if (!alias.isNativeHeader()) {
            that.addError("missing aliased type");
        }
        return;
    }
    Tree.StaticType aliased = spec.getType();
    if (aliased==null) {
        that.addError("malformed aliased type");
    }
    else {
        alias.setExtendedType(aliased.getTypeModel());
    }
}
// True for a declaration that acts as a hidden initializer
// parameter: declared in the body, referenced from the parameter list.
private boolean isInitializerParameter(FunctionOrValue dec) {
    if (dec==null || !dec.isParameter()) {
        return false;
    }
    return dec.getInitializerParameter().isHidden();
}
@Override
public void visit(Tree.MethodDeclaration that) {
    super.visit(that);
    Function model = that.getDeclarationModel();
    // a hidden initializer parameter may not carry its own specifier
    if (isInitializerParameter(model)) {
        Tree.SpecifierExpression spec =
                that.getSpecifierExpression();
        if (spec!=null) {
            spec.addError("function is an initializer parameter and may not have an initial value: '" +
                    model.getName() + "'");
        }
    }
}
@Override
public void visit(Tree.MethodDefinition that) {
    super.visit(that);
    Function model = that.getDeclarationModel();
    // a hidden initializer parameter is declared, never defined
    if (isInitializerParameter(model)) {
        that.getBlock()
            .addError("function is an initializer parameter and may not have a body: '" +
                    model.getName() + "'");
    }
}
// Value declaration acting as a hidden initializer parameter:
// propagate variadic-ness (T* / T+) onto the parameter model and
// forbid an initial value (the argument supplies it).
@Override
public void visit(Tree.AttributeDeclaration that) {
super.visit(that);
Tree.SpecifierOrInitializerExpression sie =
that.getSpecifierOrInitializerExpression();
Value dec = that.getDeclarationModel();
if (isInitializerParameter(dec)) {
Parameter param = dec.getInitializerParameter();
Tree.Type type = that.getType();
if (type instanceof Tree.SequencedType) {
param.setSequenced(true);
Tree.SequencedType st =
(Tree.SequencedType) type;
param.setAtLeastOne(st.getAtLeastOne());
}
if (sie!=null) {
sie.addError("value is an initializer parameter and may not have an initial value: '" +
dec.getName() + "'");
}
}
}
@Override
public void visit(Tree.AttributeGetterDefinition that) {
    super.visit(that);
    Value model = that.getDeclarationModel();
    // a hidden initializer parameter is declared, never defined
    if (isInitializerParameter(model)) {
        that.getBlock()
            .addError("value is an initializer parameter and may not have a body: '" +
                    model.getName() + "'");
    }
}
// Validates the qualifier chain of an extends/delegation expression:
// every qualifier must be 'super', a static member, or a
// constructor; recurses outward through nested qualified types.
void checkExtendedTypeExpression(Tree.Type type) {
if (type instanceof Tree.QualifiedType) {
Tree.QualifiedType qualifiedType =
(Tree.QualifiedType) type;
Tree.StaticType outerType =
qualifiedType.getOuterType();
if (!(outerType instanceof Tree.SuperType)) {
TypeDeclaration otd =
qualifiedType.getDeclarationModel();
if (otd!=null) {
if (otd.isStatic() || otd.isConstructor()) {
// legal qualifier; keep checking outward
checkExtendedTypeExpression(outerType);
}
else {
outerType.addError("illegal qualifier in constructor delegation (must be super)");
}
}
}
}
}
// Flags a supertype expression — and, recursively, its qualifiers —
// as appearing in an inheritance position.
private static void inheritedType(Tree.StaticType st) {
    if (st instanceof Tree.SimpleType) {
        Tree.SimpleType simple = (Tree.SimpleType) st;
        simple.setInherited(true);
        if (simple instanceof Tree.QualifiedType) {
            Tree.QualifiedType qualified =
                    (Tree.QualifiedType) simple;
            inheritedType(qualified.getOuterType());
        }
    }
}
// Constructor delegation: the flag must be set while children are
// visited so visitSimpleType() accepts a constructor name here.
@Override
public void visit(Tree.DelegatedConstructor that) {
inDelegatedConstructor = true;
super.visit(that);
inDelegatedConstructor = false;
checkExtendedTypeExpression(that.getType());
inheritedType(that.getType());
}
// Class alias body (=> D(...)): the flag must be set while children
// are visited so visitSimpleType() accepts a constructor name here.
@Override
public void visit(Tree.ClassSpecifier that) {
inExtendsOrClassAlias = true;
super.visit(that);
inExtendsOrClassAlias = false;
checkExtendedTypeExpression(that.getType());
inheritedType(that.getType());
}
// extends clause: resolves the supertype expression (stepping from a
// constructor up to its class), rejects illegal supertype kinds
// (type constructors, type parameters, interfaces, aliases,
// Nothing), and records the extended type on the declaration.
@Override
public void visit(Tree.ExtendedType that) {
// a constructor name is only legal when there's an invocation
inExtendsOrClassAlias =
that.getInvocationExpression()!=null;
super.visit(that);
inExtendsOrClassAlias = false;
inheritedType(that.getType());
checkExtendedTypeExpression(that.getType());
TypeDeclaration td =
(TypeDeclaration)
that.getScope();
if (!td.isAlias()) {
Tree.SimpleType et = that.getType();
if (et!=null) {
Type type = et.getTypeModel();
if (type!=null) {
TypeDeclaration etd =
et.getDeclarationModel();
if (etd!=null &&
!(etd instanceof UnknownType)) {
if (etd.isConstructor()) {
// extending a constructor: the real supertype
// is the constructor's class
type = type.getExtendedType();
etd = etd.getExtendedType()
.getDeclaration();
}
if (type.isTypeConstructor()) {
et.addError("missing type arguments of generic type: '" +
etd.getName(unit) +
"' has type parameters " +
typeParametersString(etd) +
" (add missing type argument list)");
}
else if (etd==td) {
//unnecessary, handled by SupertypeVisitor
//                        et.addError("directly extends itself: '" +
//                                td.getName() + "'");
}
else if (etd instanceof TypeParameter) {
et.addError("directly extends a type parameter: '" +
type.getDeclaration().getName(unit) +
"'");
}
else if (etd instanceof Interface) {
et.addError("extends an interface: '" +
type.getDeclaration().getName(unit) +
"'");
}
else if (etd instanceof TypeAlias) {
et.addError("extends a type alias: '" +
type.getDeclaration().getName(unit) +
"'");
}
else if (etd instanceof NothingType) {
et.addError("extends the bottom type 'Nothing'");
}
else {
td.setExtendedType(type);
}
}
}
}
}
}
// satisfies clause: for ordinary types only interfaces are legal;
// for type parameters this is the upper-bound list, where limited
// combinations of classes, interfaces, and type parameters are
// accepted. Collects the legal types and stores them on the model.
@Override
public void visit(Tree.SatisfiedTypes that) {
super.visit(that);
TypeDeclaration td =
(TypeDeclaration)
that.getScope();
if (td.isAlias()) {
// alias satisfies-errors are reported elsewhere
return;
}
List<Tree.StaticType> types = that.getTypes();
List<Type> list =
new ArrayList<Type>
(types.size());
if (types.isEmpty()) {
that.addError("missing types in satisfies");
}
boolean foundTypeParam = false;
boolean foundClass = false;
boolean foundInterface = false;
for (Tree.StaticType st: types) {
inheritedType(st);
Type type = st.getTypeModel();
if (type!=null) {
TypeDeclaration std = type.getDeclaration();
if (type.isTypeConstructor()) {
st.addError("missing type arguments of generic type: '" +
std.getName(unit) +
"' has type parameters " +
typeParametersString(std) +
" (add missing type argument list)");
}
else if (std!=null &&
!(std instanceof UnknownType)) {
if (std==td) {
//unnecessary, handled by SupertypeVisitor
//                    st.addError("directly extends itself: '" +
//                            td.getName() + "'");
}
else if (std instanceof NothingType) {
st.addError("satisfies the bottom type 'Nothing'");
}
else if (std instanceof TypeAlias) {
st.addError("satisfies a type alias: '" +
std.getName(unit) +
"'");
}
else if (std instanceof Constructor) {
//nothing to do
}
else if (td instanceof TypeParameter) {
// upper bounds of a type parameter
if (foundTypeParam) {
st.addUnsupportedError("type parameter upper bounds are not yet supported in combination with other bounds");
}
else if (std instanceof TypeParameter) {
if (foundClass||foundInterface) {
st.addUnsupportedError("type parameter upper bounds are not yet supported in combination with other bounds");
}
foundTypeParam = true;
list.add(type);
}
else if (std instanceof Class) {
if (foundClass) {
st.addUnsupportedError("multiple class upper bounds are not yet supported");
}
foundClass = true;
list.add(type);
}
else if (std instanceof Interface) {
foundInterface = true;
list.add(type);
}
else {
st.addError("upper bound must be a class, interface, or type parameter");
}
}
else {
// ordinary class/interface: only interfaces allowed
if (std instanceof TypeParameter) {
st.addError("directly satisfies type parameter: '" +
std.getName(unit) + "'");
}
else if (std instanceof Class) {
st.addError("satisfies a class: '" +
std.getName(unit) + "'");
}
else if (std instanceof Interface) {
if (td.isDynamic() &&
!std.isDynamic()) {
st.addError("dynamic interface satisfies a non-dynamic interface: '" +
std.getName(unit) + "'");
}
else {
list.add(type);
}
}
else {
st.addError("satisfied type must be an interface");
}
}
}
}
}
td.setSatisfiedTypes(list);
}
@Override
public void visit(Tree.CaseTypes that) {
super.visit(that);
TypeDeclaration td =
(TypeDeclaration)
that.getScope();
List<Tree.StaticMemberOrTypeExpression> bmes =
that.getBaseMemberExpressions();
List<Tree.StaticType> cts = that.getTypes();
List<TypedDeclaration> caseValues =
new ArrayList<TypedDeclaration>
(bmes.size());
List<Type> caseTypes =
new ArrayList<Type>
(bmes.size()+cts.size());
if (td instanceof TypeParameter) {
if (!bmes.isEmpty()) {
that.addError("cases of type parameter must be a types");
}
}
else {
for (Tree.StaticMemberOrTypeExpression bme: bmes) {
//bmes have not yet been resolved
String name = name(bme.getIdentifier());
TypedDeclaration od =
bme instanceof Tree.BaseMemberExpression ?
getTypedDeclaration(bme.getScope(), name,
null, false, unit) :
getPackageTypedDeclaration(name,
null, false, unit);
if (od!=null) {
caseValues.add(od);
Type type = od.getType();
if (type!=null) {
caseTypes.add(type);
}
}
}
}
for (Tree.StaticType ct: cts) {
inheritedType(ct);
Type type = ct.getTypeModel();
if (!isTypeUnknown(type)) {
if (type.isUnion() ||
type.isIntersection() ||
type.isNothing()) {
//union/intersection types don't have equals()
if (td instanceof TypeParameter) {
ct.addError("enumerated bound must be a class or interface type");
}
else {
ct.addError("case type must be a class, interface, or self type");
}
}
else {
TypeDeclaration ctd = type.getDeclaration();
if (ctd.equals(td)) {
ct.addError("directly enumerates itself: '" +
td.getName() + "'");
}
else if (type.isClassOrInterface()) {
caseTypes.add(type);
}
else if (type.isTypeParameter()) {
if (td instanceof TypeParameter) {
caseTypes.add(type);
}
else {
TypeParameter tp =
(TypeParameter) ctd;
if (tp.isContravariant()) {
ct.addError("contravariant type parameter may not act as self type");
}
td.setSelfType(type);
if (tp.isSelfType()) {
ct.addError("type parameter may not act as self type for two different types");
}
else {
tp.setSelfTypedDeclaration(td);
caseTypes.add(type);
}
if (cts.size()>1) {
ct.addError("a type may not have more than one self type");
}
}
}
else {
if (td instanceof TypeParameter) {
ct.addError("enumerated bound must be a class or interface type");
}
else {
ct.addError("case type must be a class, interface, or self type");
}
}
}
}
}
if (!caseTypes.isEmpty()) {
TypeDeclaration first =
caseTypes.get(0)
.getDeclaration();
if (caseTypes.size() == 1
&& first.isSelfType()) {
//for a type family, the type that declares
//the type parameter may not be the same
//type for which it acts as a self type
Scope scope = first.getContainer();
if (scope instanceof ClassOrInterface) {
ClassOrInterface ci =
(ClassOrInterface) scope;
if (!ci.isAbstract()) {
Tree.StaticType ct = cts.get(0);
if (ci.equals(td)) {
ct.addError("concrete class parameterized by self type: '"
+ ci.getName()
+ "' is not abstract but has the self type '"
+ first.getName() +
"' (make '" + ci.getName() + "' abstract)",
905);
}
else {
//type family
ct.addError("concrete class parameterized by self type: '"
+ ci.getName()
+ "' is not abstract but declares the self type '"
+ first.getName()
+ "' of '" + td.getName()
+ "' (make '" + ci.getName() + "' abstract)",
905);
}
}
}
}
else {
if (td instanceof ClassOrInterface) {
ClassOrInterface ci =
(ClassOrInterface) td;
if (!ci.isAbstract()) {
Class c = (Class) ci;
if (!c.hasEnumerated()) {
that.addError("concrete class has enumerated subtypes: " +
"enumerated class '" + ci.getName() + "' is not abstract" +
" (make '" + ci.getName() + "' abstract)",
905);
}
}
}
}
td.setCaseTypes(caseTypes);
td.setCaseValues(caseValues);
}
}
    /**
     * Binds an initializer parameter to the value or function
     * declaration of the same name in the containing scope, reporting
     * errors for formal, duplicate, or non-reference declarations.
     */
    @Override
    public void visit(Tree.InitializerParameter that) {
        super.visit(that);
        Parameter p = that.getParameterModel();
        String name = p.getName();
        //look up the declaration this parameter references
        Declaration a =
                that.getScope()
                    .getDirectMember(name, null, false);
        if (a==null) {
            //Now done in ExpressionVisitor!
//            that.addError("parameter is not defined: '" + p.getName() + "'");
        }
        else if (!isLegalParameter(a)) {
            that.addError("parameter is not a reference value or function: '" +
                    name + "' is not a value or function");
        }
        else {
            if (a.isFormal()) {
                that.addError("parameter is a formal attribute: '" +
                        name + "' is annotated 'formal'", 320);
            }
            FunctionOrValue mov = (FunctionOrValue) a;
            //a declaration may back at most one parameter
            if (mov.getInitializerParameter()!=null) {
                that.addError("duplicate parameter: '" +
                        name + "' already occurs in the parameter list");
            }
            else {
                //wire model and parameter together in both directions
                mov.setInitializerParameter(p);
                p.setModel(mov);
            }
        }
        if (p.isDefaulted()) {
            checkDefaultArg(that.getSpecifierExpression(), p);
        }
    }
public boolean isLegalParameter(Declaration a) {
if (a instanceof Value) {
Value v = (Value) a;
if (v.isTransient()) {
return false;
}
else {
TypeDeclaration td = v.getTypeDeclaration();
return td==null || !td.isObjectClass();
}
}
else if (a instanceof Function) {
return true;
}
else {
return false;
}
}
@Override
public void visit(Tree.AnyAttribute that) {
super.visit(that);
Tree.Type type = that.getType();
if (type instanceof Tree.SequencedType) {
Value v = (Value) that.getDeclarationModel();
Parameter p = v.getInitializerParameter();
if (p==null) {
type.addError("value is not a parameter, so may not be variadic: '" +
v.getName() + "'");
}
else {
p.setSequenced(true);
}
}
}
@Override
public void visit(Tree.AnyMethod that) {
super.visit(that);
Tree.Type type = that.getType();
if (type instanceof Tree.SequencedType) {
type.addError("function type may not be variadic");
}
}
@Override
public void visit(Tree.QualifiedMemberOrTypeExpression that) {
Tree.Primary primary = that.getPrimary();
if (primary instanceof Tree.MemberOrTypeExpression) {
Tree.MemberOrTypeExpression mte =
(Tree.MemberOrTypeExpression) primary;
if (mte instanceof Tree.BaseTypeExpression ||
mte instanceof Tree.QualifiedTypeExpression) {
that.setStaticMethodReference(true);
mte.setStaticMethodReferencePrimary(true);
if (that.getDirectlyInvoked()) {
mte.setDirectlyInvoked(true);
}
}
if (that.getIndirectlyInvoked()) {
mte.setIndirectlyInvoked(true);
}
}
if (primary instanceof Tree.Package) {
Tree.Package pack = (Tree.Package) primary;
pack.setQualifier(true);
}
super.visit(that);
}
@Override
public void visit(Tree.InvocationExpression that) {
Tree.Term primary =
unwrapExpressionUntilTerm(that.getPrimary());
if (primary instanceof Tree.MemberOrTypeExpression) {
Tree.MemberOrTypeExpression mte =
(Tree.MemberOrTypeExpression) primary;
mte.setDirectlyInvoked(true);
mte.setIndirectlyInvoked(true);
}
super.visit(that);
}
private static Tree.SpecifierOrInitializerExpression
getSpecifier(Tree.ParameterDeclaration that) {
Tree.TypedDeclaration dec =
that.getTypedDeclaration();
if (dec instanceof Tree.AttributeDeclaration) {
Tree.AttributeDeclaration ad =
(Tree.AttributeDeclaration) dec;
return ad.getSpecifierOrInitializerExpression();
}
else if (dec instanceof Tree.MethodDeclaration) {
Tree.MethodDeclaration md =
(Tree.MethodDeclaration) dec;
return md.getSpecifierExpression();
}
else {
return null;
}
}
    /**
     * Reports an error when a default argument occurs where one is not
     * allowed: in the parameter list of a specification statement, or
     * on a parameter of an 'actual' (refining) declaration.
     */
    private void checkDefaultArg(
            Tree.SpecifierOrInitializerExpression se,
            Parameter p) {
        if (se!=null) {
            if (se.getScope()
                    .getContainer()
                        instanceof Specification) {
                //specification statements can't define defaults
                se.addError("parameter of specification statement may not define default value");
            }
            else {
                //refining declarations inherit defaults from the
                //refined declaration, so may not restate them
                Declaration d = p.getDeclaration();
                if (d.isActual()) {
                    se.addError("parameter of actual declaration may not define default value: parameter '" +
                            p.getName() + "' of '" +
                            p.getDeclaration().getName() +
                            "'");
                }
            }
        }
    }
    /**
     * Validates the default argument of an inline-declared parameter:
     * parameters of callable parameters may not have one, and general
     * default-argument rules are checked via checkDefaultArg().
     */
    @Override public void visit(Tree.ParameterDeclaration that) {
        super.visit(that);
        Parameter p = that.getParameterModel();
        if (p.isDefaulted()) {
            //NOTE(review): assumes a defaulted parameter always has a
            //specifier, so getSpecifier() is non-null here — TODO confirm
            if (p.getDeclaration().isParameter()) {
                getSpecifier(that)
                    .addError("parameter of callable parameter may not have default argument");
            }
            checkDefaultArg(getSpecifier(that), p);
        }
    }
private Declaration handleNativeHeader(Declaration hdr,
Node that) {
if (hdr.isNativeHeader()) {
Scope scope = that.getScope();
if (scope == hdr) {
scope = scope.getScope();
}
Backends inBackends = scope.getScopedBackends();
Backends backends =
inBackends.none() ?
unit.getSupportedBackends() :
inBackends;
Declaration impl =
getNativeDeclaration(hdr, backends);
return inBackends == null || impl==null ?
hdr : impl;
}
return hdr;
}
}
| |
package org.cugos.wkg;
import org.junit.Test;
import java.util.*;
import static org.junit.Assert.*;
/**
 * Unit tests for {@code GeoJSONWriter}, pinning the exact JSON produced for
 * each geometry type as well as for Feature and FeatureCollection wrappers.
 *
 * Behaviors documented by these assertions:
 * - feature "properties" values are serialized as JSON strings (e.g. the
 *   integer 1 becomes "1");
 * - measured (M) ordinates are dropped from coordinates, since GeoJSON has
 *   no representation for them;
 * - an empty point serializes with an empty coordinates array.
 */
public class GeoJSONWriterTest {
    // properties map attached via setData() is emitted, values stringified
    @Test
    public void writeFeatureWithProperties() {
        GeoJSONWriter writer = new GeoJSONWriter();
        Point point = new Point(Coordinate.create2D(122.34, -43.56), Dimension.Two);
        Map<String, Object> properties = new TreeMap<>();
        properties.put("id", 1);
        properties.put("name", "Seattle");
        point.setData(properties);
        String json = writer.writeFeature(point);
        assertEquals("{\"type\": \"Feature\", \"properties\": {\"id\": \"1\", \"name\": \"Seattle\"}, \"geometry\": {\"type\": \"Point\", \"coordinates\": [122.34, -43.56]}}", json);
    }
    // a feature without data gets an empty "properties" object
    @Test
    public void writeFeature() {
        GeoJSONWriter writer = new GeoJSONWriter();
        Point point = new Point(Coordinate.create2D(122.34, -43.56), Dimension.Two);
        String json = writer.writeFeature(point);
        assertEquals("{\"type\": \"Feature\", \"properties\": {}, \"geometry\": {\"type\": \"Point\", \"coordinates\": [122.34, -43.56]}}", json);
    }
    // a single geometry becomes a one-element FeatureCollection
    @Test
    public void writeFeatureCollection() {
        GeoJSONWriter writer = new GeoJSONWriter();
        Point point = new Point(Coordinate.create2D(122.34, -43.56), Dimension.Two);
        String json = writer.writeFeatureCollection(point);
        assertEquals("{\"type\": \"FeatureCollection\", \"features\": [{\"type\": \"Feature\", \"properties\": {}, \"geometry\": {\"type\": \"Point\", \"coordinates\": [122.34, -43.56]}}]}", json);
    }
    @Test
    public void writeFeatureCollectionWithProperties() {
        GeoJSONWriter writer = new GeoJSONWriter();
        Point point = new Point(Coordinate.create2D(122.34, -43.56), Dimension.Two);
        Map<String, Object> properties = new TreeMap<>();
        properties.put("id", 1);
        properties.put("name", "Seattle");
        point.setData(properties);
        String json = writer.writeFeatureCollection(point);
        assertEquals("{\"type\": \"FeatureCollection\", \"features\": [{\"type\": \"Feature\", \"properties\": {\"id\": \"1\", \"name\": \"Seattle\"}, \"geometry\": {\"type\": \"Point\", \"coordinates\": [122.34, -43.56]}}]}", json);
    }
    @Test
    public void writePoint() {
        GeoJSONWriter writer = new GeoJSONWriter();
        Point point = new Point(Coordinate.create2D(122.34, -43.56), Dimension.Two);
        String json = writer.write(point);
        assertEquals("{\"type\": \"Point\", \"coordinates\": [122.34, -43.56]}", json);
    }
    // a 3D point keeps its Z ordinate
    @Test
    public void writePointZ() {
        GeoJSONWriter writer = new GeoJSONWriter();
        Point point = new Point(Coordinate.create3D(122.34, -43.56, 56.7), Dimension.Three);
        String json = writer.write(point);
        assertEquals("{\"type\": \"Point\", \"coordinates\": [122.34, -43.56, 56.7]}", json);
    }
    // the measured (M) ordinate 12.2 is not present in the output
    @Test
    public void writePointZM() {
        GeoJSONWriter writer = new GeoJSONWriter();
        Point point = new Point(Coordinate.create3DM(122.34, -43.56, 56.7, 12.2), Dimension.ThreeMeasured);
        String json = writer.write(point);
        assertEquals("{\"type\": \"Point\", \"coordinates\": [122.34, -43.56, 56.7]}", json);
    }
    @Test
    public void writeEmptyPoint() {
        GeoJSONWriter writer = new GeoJSONWriter();
        Point point = Point.createEmpty();
        String json = writer.write(point);
        assertEquals("{\"type\": \"Point\", \"coordinates\": []}", json);
    }
    @Test
    public void writeLineString() {
        GeoJSONWriter writer = new GeoJSONWriter();
        LineString lineString = new LineString(Arrays.asList(
                Coordinate.create2D(0, 1),
                Coordinate.create2D(10, 11)
        ), Dimension.Two);
        String json = writer.write(lineString);
        assertEquals("{\"type\": \"LineString\", \"coordinates\": [[0.0, 1.0], [10.0, 11.0]]}", json);
    }
    // single exterior ring, no holes
    @Test
    public void writePolygon() {
        GeoJSONWriter writer = new GeoJSONWriter();
        Geometry polygon = new Polygon(
                new LinearRing(
                        Arrays.asList(
                                Coordinate.create2D(30,10),
                                Coordinate.create2D(40,40),
                                Coordinate.create2D(20,40),
                                Coordinate.create2D(10,20),
                                Coordinate.create2D(30,10)
                        ), Dimension.Two, "4326"
                ),
                new ArrayList<LinearRing>(),
                Dimension.Two, "4326"
        );
        String json = writer.write(polygon);
        assertEquals("{\"type\": \"Polygon\", \"coordinates\": [[[30.0, 10.0], [40.0, 40.0], [20.0, 40.0], [10.0, 20.0], [30.0, 10.0]]]}", json);
    }
    // exterior ring followed by one interior ring (hole)
    @Test
    public void writePolygonWithHoles() {
        GeoJSONWriter writer = new GeoJSONWriter();
        Geometry polygon = new Polygon(
                new LinearRing(
                        Arrays.asList(
                                Coordinate.create2D(35,10),
                                Coordinate.create2D(45,45),
                                Coordinate.create2D(15,40),
                                Coordinate.create2D(10,20),
                                Coordinate.create2D(35,10)
                        ), Dimension.Two, "4326"
                ),
                Arrays.asList(
                        new LinearRing(
                                Arrays.asList(
                                        Coordinate.create2D(20,30),
                                        Coordinate.create2D(35,35),
                                        Coordinate.create2D(30,20),
                                        Coordinate.create2D(20,30)
                                ), Dimension.Two, "4326"
                        )
                ),
                Dimension.Two, "4326"
        );
        String json = writer.write(polygon);
        assertEquals("{\"type\": \"Polygon\", \"coordinates\": [[[35.0, 10.0], [45.0, 45.0], [15.0, 40.0], [10.0, 20.0], [35.0, 10.0]], " +
                "[[20.0, 30.0], [35.0, 35.0], [30.0, 20.0], [20.0, 30.0]]]}", json);
    }
    @Test
    public void writeMultiPoint() {
        GeoJSONWriter writer = new GeoJSONWriter();
        MultiPoint multiPoint = new MultiPoint(
                Arrays.asList(
                        new Point(Coordinate.create2D(10,40), Dimension.Two),
                        new Point(Coordinate.create2D(40,30), Dimension.Two),
                        new Point(Coordinate.create2D(20,20), Dimension.Two),
                        new Point(Coordinate.create2D(30,10), Dimension.Two)
                ),
                Dimension.Two
        );
        String json = writer.write(multiPoint);
        assertEquals("{\"type\": \"MultiPoint\", \"coordinates\": [[10.0, 40.0], [40.0, 30.0], [20.0, 20.0], [30.0, 10.0]]}", json);
    }
    @Test
    public void writeMultiLineString() {
        GeoJSONWriter writer = new GeoJSONWriter();
        MultiLineString multiLineString = new MultiLineString(
                Arrays.asList(
                        new LineString(
                                Arrays.asList(
                                        Coordinate.create2D(10,10),
                                        Coordinate.create2D(20,20),
                                        Coordinate.create2D(10,40)
                                ),
                                Dimension.Two
                        ),
                        new LineString(
                                Arrays.asList(
                                        Coordinate.create2D(40,40),
                                        Coordinate.create2D(30,30),
                                        Coordinate.create2D(40,20),
                                        Coordinate.create2D(30,10)
                                ),
                                Dimension.Two
                        )
                ),
                Dimension.Two
        );
        String json = writer.write(multiLineString);
        assertEquals("{\"type\": \"MultiLineString\", \"coordinates\": [[[10.0, 10.0], [20.0, 20.0], [10.0, 40.0]], [[40.0, 40.0], [30.0, 30.0], [40.0, 20.0], [30.0, 10.0]]]}", json);
    }
    @Test
    public void writeMultiPolygon() {
        GeoJSONWriter writer = new GeoJSONWriter();
        Geometry polygon = new MultiPolygon(
                Arrays.asList(
                        new Polygon(
                                new LinearRing(
                                        Arrays.asList(
                                                Coordinate.create2D(30,20),
                                                Coordinate.create2D(45,40),
                                                Coordinate.create2D(10,40),
                                                Coordinate.create2D(30,20)
                                        ), Dimension.Two, "4326"
                                ),
                                new ArrayList<LinearRing>(),
                                Dimension.Two, "4326"
                        ),
                        new Polygon(
                                new LinearRing(
                                        Arrays.asList(
                                                Coordinate.create2D(15,5),
                                                Coordinate.create2D(40,10),
                                                Coordinate.create2D(10,20),
                                                Coordinate.create2D(5,10),
                                                Coordinate.create2D(15,5)
                                        ), Dimension.Two, "4326"
                                ),
                                new ArrayList<LinearRing>(),
                                Dimension.Two, "4326"
                        )
                ),
                Dimension.Two
        );
        String json = writer.write(polygon);
        assertEquals("{\"type\": \"MultiPolygon\", \"coordinates\": [[[[30.0, 20.0], [45.0, 40.0], [10.0, 40.0], [30.0, 20.0]]], " +
                "[[[15.0, 5.0], [40.0, 10.0], [10.0, 20.0], [5.0, 10.0], [15.0, 5.0]]]]}", json);
    }
    // second polygon of the collection carries a hole
    @Test
    public void writeMultiPolygonWithHoles() {
        GeoJSONWriter writer = new GeoJSONWriter();
        Geometry polygon = new MultiPolygon(
                Arrays.asList(
                        new Polygon(
                                new LinearRing(
                                        Arrays.asList(
                                                Coordinate.create2D(40,40),
                                                Coordinate.create2D(20,45),
                                                Coordinate.create2D(45,30),
                                                Coordinate.create2D(40,40)
                                        ), Dimension.Two, "4326"
                                ),
                                new ArrayList<LinearRing>(),
                                Dimension.Two, "4326"
                        ),
                        new Polygon(
                                new LinearRing(
                                        Arrays.asList(
                                                Coordinate.create2D(20,35),
                                                Coordinate.create2D(10,30),
                                                Coordinate.create2D(10,10),
                                                Coordinate.create2D(30,5),
                                                Coordinate.create2D(45,20),
                                                Coordinate.create2D(20,35)
                                        ),
                                        Dimension.Two, "4326"
                                ),
                                Arrays.asList(
                                        new LinearRing(
                                                Arrays.asList(
                                                        Coordinate.create2D(30,20),
                                                        Coordinate.create2D(20,15),
                                                        Coordinate.create2D(20,25),
                                                        Coordinate.create2D(30,20)
                                                ), Dimension.Two, "4326"
                                        )
                                ),
                                Dimension.Two, "4326"
                        )
                ),
                Dimension.Two
        );
        String json = writer.write(polygon);
        assertEquals("{\"type\": \"MultiPolygon\", \"coordinates\": [[[[40.0, 40.0], [20.0, 45.0], [45.0, 30.0], [40.0, 40.0]]], " +
                "[[[20.0, 35.0], [10.0, 30.0], [10.0, 10.0], [30.0, 5.0], [45.0, 20.0], [20.0, 35.0]], [[30.0, 20.0], [20.0, 15.0], [20.0, 25.0], [30.0, 20.0]]]]}", json);
    }
    // heterogeneous geometries nest under "geometries"
    @Test
    public void writeGeometryCollection() {
        GeoJSONWriter writer = new GeoJSONWriter();
        GeometryCollection geometryCollection = new GeometryCollection(
                Arrays.asList(
                        new Point(Coordinate.create2D(10,40), Dimension.Two),
                        new LineString(Arrays.asList(
                                Coordinate.create2D(10,10),
                                Coordinate.create2D(20,20)
                        ), Dimension.Two)
                ),
                Dimension.Two
        );
        String json = writer.write(geometryCollection);
        assertEquals("{\"type\": \"GeometryCollection\", \"geometries\": [{\"type\": \"Point\", \"coordinates\": [10.0, 40.0]}, " +
                "{\"type\": \"LineString\", \"coordinates\": [[10.0, 10.0], [20.0, 20.0]]}]}", json);
    }
}
| |
package in.co.liftplease.myapplication;
import android.app.FragmentManager;
import android.app.FragmentTransaction;
import android.content.Context;
import android.content.IntentSender;
import android.graphics.Color;
import android.location.Location;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.inputmethod.InputMethodManager;
import android.widget.AdapterView;
import android.widget.AutoCompleteTextView;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.PendingResult;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.location.LocationListener;
import com.google.android.gms.location.LocationRequest;
import com.google.android.gms.location.places.Place;
import com.google.android.gms.location.places.PlaceBuffer;
import com.google.android.gms.location.places.Places;
import com.google.android.gms.maps.CameraUpdate;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.MapFragment;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.LatLngBounds;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.google.android.gms.maps.model.Polyline;
import com.google.android.gms.maps.model.PolylineOptions;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
/**
 * Map screen: shows the user's current location, lets the user pick a
 * destination via Places autocomplete, fetches and draws the driving route
 * between the two points, and posts the encoded route to the backend.
 */
public class MapsActivity extends ActionBarActivity implements
        GoogleApiClient.ConnectionCallbacks,
        GoogleApiClient.OnConnectionFailedListener,
        LocationListener {
    //NOTE(review): a static reference to the options Menu outlives the
    //activity and leaks it — consider making this an instance field
    private static Menu mainMenu;
    private GoogleMap mMap; // Might be null if Google Play services APK is not available.
    private GoogleApiClient mGoogleApiClient;
    private PlaceAutocompleteAdapter mAdapter;
    private AutoCompleteTextView mAutocompleteView;
    private Location sLocation;         // current (source) location
    private LinearLayout mAutocompleteBox;
    private Location dLocation;         // NOTE(review): never assigned in this class
    private Marker sLocationMarker;
    private Marker dLocationMarker = null;
    private ImageButton clearButton;
    private List<Polyline> polylines = new ArrayList<Polyline>();
    private String encroute;            // encoded overview polyline of the fetched route
    private String session_id;
    private JSONArray listToDisplay;    // providers returned by the backend
    FragmentManager fm;
    SessionManager session;
    public static final String TAG = MapsActivity.class.getSimpleName();
    private final static int CONNECTION_FAILURE_RESOLUTION_REQUEST = 9000;
    // rough bounding box used to bias autocomplete suggestions
    private static final LatLngBounds BOUNDS_GREATER_INDIA = new LatLngBounds(
            new LatLng(8, 68), new LatLng(38, 98));
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_maps);
        session = new SessionManager(getApplicationContext());
        mGoogleApiClient = new GoogleApiClient.Builder(this)
                .addConnectionCallbacks(this)
                .addOnConnectionFailedListener(this)
                .addApi(Places.GEO_DATA_API)
                .build();
        clearButton = (ImageButton)findViewById(R.id.action_clear);
        clearButton.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                //clear the search box and re-enable typing
                mAutocompleteView.setText("");
                InputMethodManager in = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE);
                in.toggleSoftInput(InputMethodManager.SHOW_FORCED, 0);
                clearButton.setVisibility(View.INVISIBLE);
                mAutocompleteView.setFocusableInTouchMode(true);
                mAutocompleteView.setFocusable(true);
            }
        });
        mAutocompleteBox = (LinearLayout)findViewById(R.id.autocomplete_box);
        mAutocompleteView = (AutoCompleteTextView)findViewById(R.id.autocomplete_places);
        mAutocompleteView.setOnItemClickListener(mAutocompleteClickListener);
        mAdapter = new PlaceAutocompleteAdapter(this, android.R.layout.simple_list_item_1,
                mGoogleApiClient, BOUNDS_GREATER_INDIA, null);
        mAutocompleteView.setAdapter(mAdapter);
    }
    /** Swaps the map for the provider-list fragment after a successful subscription. */
    public void onSuccess(){
        fm = getFragmentManager();
        hideFragment(fm.findFragmentById(R.id.map));
        mAutocompleteBox.setVisibility(View.GONE);
        setTitle("All list provider");
        FragmentManager fragmentManager = getFragmentManager();
        FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
        ListViewDemoFragment newFragment = new ListViewDemoFragment();
        fragmentTransaction.add(R.id.fragment_container, newFragment, "LIST_FRAGMENT");
        fragmentTransaction.commit();
    }
    // callback invoked by MyLocation once a fix is available
    public MyLocation.LocationResult locationResult = new MyLocation.LocationResult(){
        public void gotLocation(final Location location){
            sLocation = location;
            if(dLocationMarker == null){
                //no destination chosen yet: just center on the user
                handleNewLocation(location);
            }else{
                showRoute();
            }
        }
    };
    public void showFragment(android.app.Fragment fragment){
        fm = getFragmentManager();
        fm.beginTransaction()
                .setCustomAnimations(android.R.animator.fade_in, android.R.animator.fade_out)
                .show(fragment)
                .commit();
    }
    public void hideFragment(android.app.Fragment fragment){
        fm = getFragmentManager();
        fm.beginTransaction()
                .setCustomAnimations(android.R.animator.fade_in, android.R.animator.fade_out)
                .hide(fragment)
                .commit();
    }
    /**
     * Animates the camera to a view containing both the source and
     * destination markers. Assumes both markers exist.
     */
    public void showRoute(){
        LatLngBounds.Builder builder = new LatLngBounds.Builder();
        builder.include(sLocationMarker.getPosition());
        builder.include(dLocationMarker.getPosition());
        LatLngBounds bounds = builder.build();
        int padding = 100; // px offset from screen edges
        CameraUpdate cu = CameraUpdateFactory.newLatLngBounds(bounds, padding);
        mMap.animateCamera(cu);
    }
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        //fix: static field accessed statically (was 'this.mainMenu')
        MapsActivity.mainMenu = menu;
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.menu_maps, menu);
        return super.onCreateOptionsMenu(menu);
    }
    // handles a selection from the autocomplete dropdown
    private AdapterView.OnItemClickListener mAutocompleteClickListener
            = new AdapterView.OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            final PlaceAutocompleteAdapter.PlaceAutocomplete item = mAdapter.getItem(position);
            final String placeId = String.valueOf(item.placeId);
            PendingResult<PlaceBuffer> placeResult = Places.GeoDataApi
                    .getPlaceById(mGoogleApiClient, placeId);
            placeResult.setResultCallback(mUpdatePlaceDetailsCallback);
            //clear any previously drawn route and destination
            for(Polyline line : polylines)
            {
                line.remove();
            }
            polylines.clear();
            if(dLocationMarker != null){
                dLocationMarker.remove();
            }
            mAutocompleteView.setSelection(0);
            mAutocompleteView.setFocusableInTouchMode(false);
            mAutocompleteView.setFocusable(false);
            clearButton.setVisibility(View.VISIBLE);
            InputMethodManager in = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE);
            in.hideSoftInputFromWindow(mAutocompleteView.getApplicationWindowToken(), 0);
        }
    };
    // places the destination marker and kicks off the directions download
    private ResultCallback<PlaceBuffer> mUpdatePlaceDetailsCallback
            = new ResultCallback<PlaceBuffer>() {
        @Override
        public void onResult(PlaceBuffer places) {
            //fix: also guard against an empty buffer before places.get(0)
            if (!places.getStatus().isSuccess() || places.getCount() == 0) {
                places.release();
                return;
            }
            final Place place = places.get(0);
            LatLng dest = place.getLatLng();
            dLocationMarker = mMap.addMarker(new MarkerOptions().position(dest).title("Destination"));
            dLocationMarker.showInfoWindow();
            showRoute();
            MapsActivity.mainMenu.findItem(R.id.action_done).setVisible(true);
            String url = getDirectionsUrl(dest);
            DownloadTask downloadTask = new DownloadTask();
            downloadTask.execute(url);
            places.release();
        }
    };
    /** Builds the Directions API request URL from the current location to dest. */
    private String getDirectionsUrl(LatLng dest){
        String str_origin = "origin="+sLocation.getLatitude()+","+sLocation.getLongitude();
        String str_dest = "destination="+dest.latitude+","+dest.longitude;
        String sensor = "sensor=false";
        String parameters = str_origin+"&"+str_dest+"&"+sensor;
        String output = "json";
        String url = "https://maps.googleapis.com/maps/api/directions/"+output+"?"+parameters;
        return url;
    }
    /**
     * Downloads the content of the given URL as a string; returns ""
     * when the download fails.
     */
    private String downloadUrl(String strUrl) throws IOException {
        String data = "";
        InputStream iStream = null;
        HttpURLConnection urlConnection = null;
        try{
            URL url = new URL(strUrl);
            urlConnection = (HttpURLConnection) url.openConnection();
            urlConnection.connect();
            iStream = urlConnection.getInputStream();
            BufferedReader br = new BufferedReader(new InputStreamReader(iStream));
            StringBuffer sb = new StringBuffer();
            String line = "";
            while( ( line = br.readLine()) != null){
                sb.append(line);
            }
            data = sb.toString();
            br.close();
        }catch(Exception e){
            Log.d("Exception while downloading url", e.toString());
        }finally{
            //fix: guard against NPE when the connection or stream was
            //never opened (e.g. an exception thrown before assignment)
            if (iStream != null) {
                iStream.close();
            }
            if (urlConnection != null) {
                urlConnection.disconnect();
            }
        }
        return data;
    }
    /** Fetches the Directions API response off the UI thread, then parses it. */
    private class DownloadTask extends AsyncTask<String, Void, String>{
        @Override
        protected String doInBackground(String... url) {
            String data = "";
            try{
                data = downloadUrl(url[0]);
            }catch(Exception e){
                Log.d("Background Task",e.toString());
            }
            return data;
        }
        @Override
        protected void onPostExecute(String result) {
            super.onPostExecute(result);
            ParserTask parserTask = new ParserTask();
            parserTask.execute(result);
        }
    }
    /** A class to parse the Google Places in JSON format */
    private class ParserTask extends AsyncTask<String, Integer, List<List<HashMap<String,String>>> >{
        @Override
        protected List<List<HashMap<String, String>>> doInBackground(String... jsonData) {
            JSONObject jObject;
            List<List<HashMap<String, String>>> routes = null;
            try{
                jObject = new JSONObject(jsonData[0]);
                JSONArray jRoutes = jObject.getJSONArray("routes");
                //remember the encoded polyline for the later subscription POST
                JSONObject jroute = ( (JSONObject)jRoutes.get(0)).getJSONObject("overview_polyline");
                encroute = jroute.getString("points");
                DirectionsJSONParser parser = new DirectionsJSONParser();
                routes = parser.parse(jObject);
            }catch(Exception e){
                e.printStackTrace();
            }
            return routes;
        }
        // Executes in UI thread, after the parsing process
        @Override
        protected void onPostExecute(List<List<HashMap<String, String>>> result) {
            //fix: parsing may have failed, leaving result null
            if (result == null || result.isEmpty()) {
                Log.e(TAG, "No routes parsed from directions response");
                return;
            }
            // Traversing through all the routes
            for(int i=0;i<result.size();i++){
                ArrayList<LatLng> points = new ArrayList<LatLng>();
                PolylineOptions lineOptions = new PolylineOptions();
                // Fetching i-th route
                List<HashMap<String, String>> path = result.get(i);
                // Fetching all the points in i-th route
                for(int j=0;j<path.size();j++){
                    HashMap<String,String> point = path.get(j);
                    double lat = Double.parseDouble(point.get("lat"));
                    double lng = Double.parseDouble(point.get("lng"));
                    points.add(new LatLng(lat, lng));
                }
                // Adding all the points in the route to LineOptions
                lineOptions.addAll(points);
                lineOptions.width(5);
                lineOptions.color(Color.BLUE);
                //fix: draw EACH route; previously addPolyline ran once
                //after the loop, so only the last route was drawn
                polylines.add(mMap.addPolyline(lineOptions));
            }
        }
    }
    @Override
    public void onConnected(Bundle bundle) {
    }
    /** Drops the "you are here" marker and zooms the camera to it. */
    private void handleNewLocation(Location location) {
        Log.d(TAG, location.toString());
        double currentLatitude = location.getLatitude();
        double currentLongitude = location.getLongitude();
        LatLng latLng = new LatLng(currentLatitude, currentLongitude);
        sLocationMarker = mMap.addMarker(new MarkerOptions()
                .position(latLng)
                .title("You are here"));
        mMap.animateCamera(CameraUpdateFactory.newLatLngZoom(latLng, 15F), new GoogleMap.CancelableCallback() {
            @Override
            public void onFinish() {
                sLocationMarker.showInfoWindow();
            }
            @Override
            public void onCancel() {
            }
        });
    }
    @Override
    public void onConnectionSuspended(int i) {
        Log.i(TAG, "Location services suspended. Please reconnect.");
    }
    @Override
    protected void onStart() {
        super.onStart();
    }
    @Override
    protected void onResume() {
        super.onResume();
        setUpMapIfNeeded();
        MyLocation myLocation = new MyLocation();
        myLocation.getLocation(this, locationResult);
        mGoogleApiClient.connect();
    }
    @Override
    protected void onPause() {
        super.onPause();
        if (mGoogleApiClient.isConnected()) {
            mGoogleApiClient.disconnect();
        }
    }
    private void setUpMapIfNeeded() {
        if (mMap == null) {
            mMap = ((MapFragment) getFragmentManager().findFragmentById(R.id.map)).getMap();
        }
    }
    @Override
    public void onConnectionFailed(ConnectionResult connectionResult) {
        if (connectionResult.hasResolution()) {
            try {
                // Start an Activity that tries to resolve the error
                connectionResult.startResolutionForResult(this, CONNECTION_FAILURE_RESOLUTION_REQUEST);
            } catch (IntentSender.SendIntentException e) {
                e.printStackTrace();
            }
        } else {
            Log.i(TAG, "Location services connection failed with code " + connectionResult.getErrorCode());
        }
    }
    @Override
    public void onLocationChanged(Location location) {
        handleNewLocation(location);
    }
    /** Posts the encoded route + session key to the subscriber endpoint. */
    private class MyAsyncTask extends AsyncTask<String, Integer, String>{
        @Override
        protected String doInBackground(String... params) {
            HttpClient httpclient = new DefaultHttpClient();
            HttpPost httppost = new HttpPost("http://whenisdryday.in:5000/subscriber");
            //fix: use a parameterized list instead of a raw type
            List<BasicNameValuePair> nameValuedPairs = new ArrayList<BasicNameValuePair>();
            nameValuedPairs.add(new BasicNameValuePair("route", params[0]));
            nameValuedPairs.add(new BasicNameValuePair("key", params[1]));
            try {
                // UrlEncodedFormEntity is an entity composed of a list of url-encoded pairs.
                //This is typically useful while sending an HTTP POST request.
                UrlEncodedFormEntity urlEncodedFormEntity = new UrlEncodedFormEntity(nameValuedPairs);
                // setEntity() hands the entity (here it is urlEncodedFormEntity) to the request.
                httppost.setEntity(urlEncodedFormEntity);
                try {
                    HttpResponse httpResponse = httpclient.execute(httppost);
                    InputStream inputStream = httpResponse.getEntity().getContent();
                    InputStreamReader inputStreamReader = new InputStreamReader(inputStream);
                    BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
                    StringBuilder stringBuilder = new StringBuilder();
                    String bufferedStrChunk = null;
                    while((bufferedStrChunk = bufferedReader.readLine()) != null){
                        stringBuilder.append(bufferedStrChunk);
                    }
                    return stringBuilder.toString();
                } catch (ClientProtocolException cpe) {
                    System.out.println("First Exception caz of HttpResponese :" + cpe);
                    cpe.printStackTrace();
                } catch (IOException ioe) {
                    System.out.println("Second Exception caz of HttpResponse :" + ioe);
                    ioe.printStackTrace();
                }
            } catch (UnsupportedEncodingException uee) {
                System.out.println("An Exception given because of UrlEncodedFormEntity argument :" + uee);
                uee.printStackTrace();
            }
            return null;
        }
        @Override
        protected void onPostExecute(String result) {
            //fix: doInBackground returns null on failure; JSONObject(null)
            //would throw a NullPointerException
            if (result == null) {
                Log.e(TAG, "Subscriber request failed; no response body");
                return;
            }
            try {
                JSONObject jObject = new JSONObject(result);
                JSONObject dataObject = jObject.getJSONObject("data");
                listToDisplay = dataObject.getJSONArray("providers");
                onSuccess();
            } catch (JSONException e) {
                Log.e("JSONException", "Error: " + e.toString());
            }
        }
    }
    public JSONArray getListData(){
        return this.listToDisplay;
    }
    /** Sends the currently-fetched route to the backend for this session. */
    public void addSubscriberToTable(){
        HashMap<String, String> user = session.getUserDetails();
        session_id = user.get(SessionManager.KEY_SESSION);
        new MyAsyncTask().execute(encroute,session_id);
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.action_done:
                addSubscriberToTable();
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.bwcompat;
import com.google.common.base.Predicate;
import com.google.common.util.concurrent.ListenableFuture;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
import org.elasticsearch.action.admin.indices.upgrade.UpgradeIT;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.MultiDataPathUpgrader;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.engine.EngineConfig;
import org.elasticsearch.index.mapper.string.StringFieldMapperPositionOffsetGapTests;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.shard.MergePolicyConfig;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import org.hamcrest.Matchers;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.DirectoryStream;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
/**
 * Backwards-compatibility integration test: restores pre-built index fixtures
 * ({@code index-<version>.zip} / {@code unsupported-<version>.zip}) into a live
 * cluster's data directories and verifies that supported old indexes still work
 * (search, aggregations, realtime get, replicas, upgrade) while unsupported ones
 * are rejected.
 */
// needs at least 2 nodes since it bumps replicas to 1
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0)
@LuceneTestCase.SuppressFileSystems("ExtrasFS")
public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
    // TODO: test for proper exception on unsupported indexes (maybe via separate test?)
    // We have a 0.20.6.zip etc for this.
    List<String> indexes;            // fixture names of supported old indexes ("index-*.zip")
    List<String> unsupportedIndexes; // fixture names of too-old indexes ("unsupported-*.zip")
    static Path singleDataPath;      // indices dir of the single-data-path node (set in setupCluster)
    static Path[] multiDataPath;     // indices dirs of the multi-data-path node (set in setupCluster)
    @Before
    public void initIndexesList() throws Exception {
        indexes = loadIndexesList("index");
        unsupportedIndexes = loadIndexesList("unsupported");
    }
    // Collects the file names of all "<prefix>-*.zip" fixtures next to this test class, sorted.
    private List<String> loadIndexesList(String prefix) throws IOException {
        List<String> indexes = new ArrayList<>();
        Path dir = getDataPath(".");
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, prefix + "-*.zip")) {
            for (Path path : stream) {
                indexes.add(path.getFileName().toString());
            }
        }
        Collections.sort(indexes);
        return indexes;
    }
    @AfterClass
    public static void tearDownStatics() {
        // release static Path references so the temp dirs can be reclaimed between suites
        singleDataPath = null;
        multiDataPath = null;
    }
    @Override
    public Settings nodeSettings(int ord) {
        return Settings.builder()
            .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) // disable merging so no segments will be upgraded
            .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, 30) // increase recovery speed for small files
            .build();
    }
    /**
     * Starts one plain node (for replicas), one node with a single data path and one
     * with two data paths, and records the (initially empty) indices directories of
     * the latter two in {@link #singleDataPath}/{@link #multiDataPath}.
     */
    void setupCluster() throws Exception {
        ListenableFuture<List<String>> replicas = internalCluster().startNodesAsync(1); // for replicas
        Path baseTempDir = createTempDir();
        // start single data path node
        Settings.Builder nodeSettings = Settings.builder()
            .put("path.data", baseTempDir.resolve("single-path").toAbsolutePath())
            .put("node.master", false); // workaround for dangling index loading issue when node is master
        ListenableFuture<String> singleDataPathNode = internalCluster().startNodeAsync(nodeSettings.build());
        // start multi data path node
        nodeSettings = Settings.builder()
            .put("path.data", baseTempDir.resolve("multi-path1").toAbsolutePath() + "," + baseTempDir.resolve("multi-path2").toAbsolutePath())
            .put("node.master", false); // workaround for dangling index loading issue when node is master
        ListenableFuture<String> multiDataPathNode = internalCluster().startNodeAsync(nodeSettings.build());
        // find single data path dir
        Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, singleDataPathNode.get()).nodeDataPaths();
        assertEquals(1, nodePaths.length);
        singleDataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER);
        assertFalse(Files.exists(singleDataPath));
        Files.createDirectories(singleDataPath);
        logger.info("--> Single data path: " + singleDataPath.toString());
        // find multi data path dirs
        nodePaths = internalCluster().getInstance(NodeEnvironment.class, multiDataPathNode.get()).nodeDataPaths();
        assertEquals(2, nodePaths.length);
        multiDataPath = new Path[] {nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER),
                                    nodePaths[1].resolve(NodeEnvironment.INDICES_FOLDER)};
        assertFalse(Files.exists(multiDataPath[0]));
        assertFalse(Files.exists(multiDataPath[1]));
        Files.createDirectories(multiDataPath[0]);
        Files.createDirectories(multiDataPath[1]);
        logger.info("--> Multi data paths: " + multiDataPath[0].toString() + ", " + multiDataPath[1].toString());
        replicas.get(); // wait for replicas
    }
    /**
     * Unzips the given fixture and copies its index directory into the data path of a
     * randomly chosen node (single- or multi-path). Returns the derived index name
     * ("index-<version>", lowercased).
     */
    String loadIndex(String indexFile) throws Exception {
        Path unzipDir = createTempDir();
        Path unzipDataDir = unzipDir.resolve("data");
        String indexName = indexFile.replace(".zip", "").toLowerCase(Locale.ROOT).replace("unsupported-", "index-");
        // decompress the index
        Path backwardsIndex = getDataPath(indexFile);
        try (InputStream stream = Files.newInputStream(backwardsIndex)) {
            TestUtil.unzip(stream, unzipDir);
        }
        // check it is unique
        assertTrue(Files.exists(unzipDataDir));
        Path[] list = FileSystemUtils.files(unzipDataDir);
        if (list.length != 1) {
            throw new IllegalStateException("Backwards index must contain exactly one cluster");
        }
        // the bwc scripts packs the indices under this path
        Path src = list[0].resolve("nodes/0/indices/" + indexName);
        assertTrue("[" + indexFile + "] missing index dir: " + src.toString(), Files.exists(src));
        if (randomBoolean()) {
            logger.info("--> injecting index [{}] into single data path", indexName);
            copyIndex(logger, src, indexName, singleDataPath);
        } else {
            logger.info("--> injecting index [{}] into multi data path", indexName);
            copyIndex(logger, src, indexName, multiDataPath);
        }
        return indexName;
    }
    /** Makes an injected index visible to the cluster (dangling-index import) and waits for green. */
    void importIndex(String indexName) throws IOException {
        final Iterable<NodeEnvironment> instances = internalCluster().getInstances(NodeEnvironment.class);
        for (NodeEnvironment nodeEnv : instances) { // upgrade multidata path
            MultiDataPathUpgrader.upgradeMultiDataPath(nodeEnv, logger);
        }
        // force reloading dangling indices with a cluster state republish
        client().admin().cluster().prepareReroute().get();
        ensureGreen(indexName);
    }
    // randomly distribute the files from src over dests paths
    public static void copyIndex(final ESLogger logger, final Path src, final String indexName, final Path... dests) throws IOException {
        for (Path dest : dests) {
            Path indexDir = dest.resolve(indexName);
            assertFalse(Files.exists(indexDir));
            Files.createDirectories(indexDir);
        }
        Files.walkFileTree(src, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
                // mirror the directory structure into every destination path
                Path relativeDir = src.relativize(dir);
                for (Path dest : dests) {
                    Path destDir = dest.resolve(indexName).resolve(relativeDir);
                    Files.createDirectories(destDir);
                }
                return FileVisitResult.CONTINUE;
            }
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                if (file.getFileName().toString().equals(IndexWriter.WRITE_LOCK_NAME)) {
                    // skip lock file, we don't need it
                    logger.trace("Skipping lock file: " + file.toString());
                    return FileVisitResult.CONTINUE;
                }
                // each regular file goes to exactly one randomly chosen destination
                Path relativeFile = src.relativize(file);
                Path destFile = dests[randomInt(dests.length - 1)].resolve(indexName).resolve(relativeFile);
                logger.trace("--> Moving " + relativeFile.toString() + " to " + destFile.toString());
                Files.move(file, destFile);
                assertFalse(Files.exists(file));
                assertTrue(Files.exists(destFile));
                return FileVisitResult.CONTINUE;
            }
        });
    }
    /** Removes the index created by {@link #loadIndex}. */
    void unloadIndex(String indexName) throws Exception {
        assertAcked(client().admin().indices().prepareDelete(indexName).get());
    }
    /** Verifies there is a bwc fixture zip for every released version this build should support. */
    public void testAllVersionsTested() throws Exception {
        SortedSet<String> expectedVersions = new TreeSet<>();
        for (Version v : VersionUtils.allVersions()) {
            if (v.snapshot()) continue; // snapshots are unreleased, so there is no backcompat yet
            if (v.onOrBefore(Version.V_0_20_6)) continue; // we can only test back one major lucene version
            if (v.equals(Version.CURRENT)) continue; // the current version is always compatible with itself
            expectedVersions.add("index-" + v.toString() + ".zip");
        }
        for (String index : indexes) {
            if (expectedVersions.remove(index) == false) {
                logger.warn("Old indexes tests contain extra index: " + index);
            }
        }
        if (expectedVersions.isEmpty() == false) {
            StringBuilder msg = new StringBuilder("Old index tests are missing indexes:");
            for (String expected : expectedVersions) {
                msg.append("\n" + expected);
            }
            fail(msg.toString());
        }
    }
    /** Runs the full check battery against every supported old-index fixture, in random order. */
    public void testOldIndexes() throws Exception {
        setupCluster();
        Collections.shuffle(indexes, getRandom());
        for (String index : indexes) {
            long startTime = System.currentTimeMillis();
            logger.info("--> Testing old index " + index);
            assertOldIndexWorks(index);
            logger.info("--> Done testing " + index + ", took " + ((System.currentTimeMillis() - startTime) / 1000.0) + " seconds");
        }
    }
    @Test
    public void testHandlingOfUnsupportedDanglingIndexes() throws Exception {
        setupCluster();
        Collections.shuffle(unsupportedIndexes, getRandom());
        for (String index : unsupportedIndexes) {
            assertUnsupportedIndexHandling(index);
        }
    }
    /**
     * Waits for the index to show up in the cluster state in closed state
     */
    void ensureClosed(final String index) throws InterruptedException {
        assertTrue(awaitBusy(new Predicate<Object>() {
            @Override
            public boolean apply(Object o) {
                ClusterState state = client().admin().cluster().prepareState().get().getState();
                return state.metaData().hasIndex(index) && state.metaData().index(index).getState() == IndexMetaData.State.CLOSE;
            }
        }));
    }
    /**
     * Checks that the given index cannot be opened due to incompatible version
     */
    void assertUnsupportedIndexHandling(String index) throws Exception {
        long startTime = System.currentTimeMillis();
        logger.info("--> Testing old index " + index);
        String indexName = loadIndex(index);
        // force reloading dangling indices with a cluster state republish
        client().admin().cluster().prepareReroute().get();
        ensureClosed(indexName);
        try {
            client().admin().indices().prepareOpen(indexName).get();
            fail("Shouldn't be able to open an old index");
        } catch (IllegalStateException ex) {
            assertThat(ex.getMessage(), containsString("was created before v0.90.0 and wasn't upgraded"));
        }
        unloadIndex(indexName);
        logger.info("--> Done testing " + index + ", took " + ((System.currentTimeMillis() - startTime) / 1000.0) + " seconds");
    }
    /** Restores one fixture and runs every per-index assertion against it, then deletes it. */
    void assertOldIndexWorks(String index) throws Exception {
        Version version = extractVersion(index);
        String indexName = loadIndex(index);
        importIndex(indexName);
        assertIndexSanity(indexName);
        assertBasicSearchWorks(indexName);
        assertBasicAggregationWorks(indexName);
        assertRealtimeGetWorks(indexName);
        assertNewReplicasWork(indexName);
        assertUpgradeWorks(indexName, isLatestLuceneVersion(version));
        assertDeleteByQueryWorked(indexName, version);
        assertPositionOffsetGapDefaults(indexName, version);
        unloadIndex(indexName);
    }
    /** Parses the version out of a fixture name like "index-1.3.2.zip". */
    Version extractVersion(String index) {
        return Version.fromString(index.substring(index.indexOf('-') + 1, index.lastIndexOf('.')));
    }
    /** True when the fixture's Lucene major.minor matches the current build's. */
    boolean isLatestLuceneVersion(Version version) {
        return version.luceneVersion.major == Version.CURRENT.luceneVersion.major &&
               version.luceneVersion.minor == Version.CURRENT.luceneVersion.minor;
    }
    /** Asserts the imported index exists, is searchable, and contains at least one document. */
    void assertIndexSanity(String indexName) {
        GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex().addIndices(indexName).get();
        assertEquals(1, getIndexResponse.indices().length);
        assertEquals(indexName, getIndexResponse.indices()[0]);
        ensureYellow(indexName);
        SearchResponse test = client().prepareSearch(indexName).get();
        assertThat(test.getHits().getTotalHits(), greaterThanOrEqualTo(1l));
    }
    void assertBasicSearchWorks(String indexName) {
        logger.info("--> testing basic search");
        SearchRequestBuilder searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery());
        SearchResponse searchRsp = searchReq.get();
        ElasticsearchAssertions.assertNoFailures(searchRsp);
        long numDocs = searchRsp.getHits().getTotalHits();
        logger.info("Found " + numDocs + " in old index");
        logger.info("--> testing basic search with sort");
        searchReq.addSort("long_sort", SortOrder.ASC);
        ElasticsearchAssertions.assertNoFailures(searchReq.get());
        logger.info("--> testing exists filter");
        searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.existsQuery("string"));
        searchRsp = searchReq.get();
        ElasticsearchAssertions.assertNoFailures(searchRsp);
        assertEquals(numDocs, searchRsp.getHits().getTotalHits());
        logger.info("--> testing missing filter");
        // the field for the missing filter here needs to be different than the exists filter above, to avoid being found in the cache
        searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.missingQuery("long_sort"));
        searchRsp = searchReq.get();
        ElasticsearchAssertions.assertNoFailures(searchRsp);
        assertEquals(0, searchRsp.getHits().getTotalHits());
    }
    void assertBasicAggregationWorks(String indexName) {
        // histogram on a long
        SearchResponse searchRsp = client().prepareSearch(indexName).addAggregation(AggregationBuilders.histogram("histo").field("long_sort").interval(10)).get();
        ElasticsearchAssertions.assertSearchResponse(searchRsp);
        Histogram histo = searchRsp.getAggregations().get("histo");
        assertNotNull(histo);
        // the bucket doc counts must add up to the total hit count
        long totalCount = 0;
        for (Histogram.Bucket bucket : histo.getBuckets()) {
            totalCount += bucket.getDocCount();
        }
        assertEquals(totalCount, searchRsp.getHits().getTotalHits());
        // terms on a boolean
        searchRsp = client().prepareSearch(indexName).addAggregation(AggregationBuilders.terms("bool_terms").field("bool")).get();
        Terms terms = searchRsp.getAggregations().get("bool_terms");
        totalCount = 0;
        for (Terms.Bucket bucket : terms.getBuckets()) {
            totalCount += bucket.getDocCount();
        }
        assertEquals(totalCount, searchRsp.getHits().getTotalHits());
    }
    void assertRealtimeGetWorks(String indexName) {
        // disable refresh so the following GET can only succeed via realtime get (translog)
        assertAcked(client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder()
                                                                                               .put("refresh_interval", -1)
                                                                                               .build()));
        SearchRequestBuilder searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery());
        SearchHit hit = searchReq.get().getHits().getAt(0);
        String docId = hit.getId();
        // foo is new, it is not a field in the generated index
        client().prepareUpdate(indexName, "doc", docId).setDoc("foo", "bar").get();
        GetResponse getRsp = client().prepareGet(indexName, "doc", docId).get();
        Map<String, Object> source = getRsp.getSourceAsMap();
        assertThat(source, Matchers.hasKey("foo"));
        assertAcked(client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder()
                                                                                               .put("refresh_interval", EngineConfig.DEFAULT_REFRESH_INTERVAL)
                                                                                               .build()));
    }
    void assertNewReplicasWork(String indexName) throws Exception {
        final int numReplicas = 1;
        final long startTime = System.currentTimeMillis();
        logger.debug("--> creating [{}] replicas for index [{}]", numReplicas, indexName);
        assertAcked(client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder()
                                                                                               .put("number_of_replicas", numReplicas)
        ).execute().actionGet());
        ensureGreen(TimeValue.timeValueMinutes(2), indexName);
        logger.debug("--> index [{}] is green, took [{}]", indexName, TimeValue.timeValueMillis(System.currentTimeMillis() - startTime));
        logger.debug("--> recovery status:\n{}", XContentHelper.toString(client().admin().indices().prepareRecoveries(indexName).get()));
        // TODO: do something with the replicas! query? index?
    }
    // #10067: create-bwc-index.py deleted any doc with long_sort:[10-20]
    void assertDeleteByQueryWorked(String indexName, Version version) throws Exception {
        if (version.onOrBefore(Version.V_1_0_0_Beta2)) {
            // TODO: remove this once #10262 is fixed
            return;
        }
        // these documents are supposed to be deleted by a delete by query operation in the translog
        SearchRequestBuilder searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.queryStringQuery("long_sort:[10 TO 20]"));
        assertEquals(0, searchReq.get().getHits().getTotalHits());
    }
    // the string position_offset_gap default changed between pre-2.0 (0) and 2.0+ (100)
    void assertPositionOffsetGapDefaults(String indexName, Version version) throws Exception {
        if (version.before(Version.V_2_0_0_beta1)) {
            StringFieldMapperPositionOffsetGapTests.assertGapIsZero(client(), indexName, "doc");
        } else {
            StringFieldMapperPositionOffsetGapTests.assertGapIsOneHundred(client(), indexName, "doc");
        }
    }
    /** Runs the upgrade API; if the index was not already on the latest Lucene version, it must need upgrading first. */
    void assertUpgradeWorks(String indexName, boolean alreadyLatest) throws Exception {
        if (alreadyLatest == false) {
            UpgradeIT.assertNotUpgraded(client(), indexName);
        }
        assertNoFailures(client().admin().indices().prepareUpgrade(indexName).get());
        UpgradeIT.assertUpgraded(client(), indexName);
    }
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2014 Adam Alyyan
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software
* and associated documentation files (the "Software"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify, merge, publish, distribute,
* sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
* is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
* BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package api;
import dto.staticdata.champion.ChampData;
import dto.staticdata.champion.Champion;
import dto.staticdata.champion.ChampionList;
import dto.staticdata.champion.ChampionQueryParams;
import dto.staticdata.item.Item;
import dto.staticdata.item.ItemList;
import dto.staticdata.item.ItemListData;
import dto.staticdata.item.ItemQueryParams;
import dto.staticdata.mastery.Mastery;
import dto.staticdata.mastery.MasteryList;
import dto.staticdata.mastery.MasteryListData;
import dto.staticdata.mastery.MasteryQueryParams;
import dto.staticdata.realm.Realm;
import dto.staticdata.rune.Rune;
import dto.staticdata.rune.RuneList;
import dto.staticdata.rune.RuneListData;
import dto.staticdata.rune.RuneQueryParams;
import dto.staticdata.summonerspell.SpellData;
import dto.staticdata.summonerspell.SummonerSpell;
import dto.staticdata.summonerspell.SummonerSpellList;
import dto.staticdata.summonerspell.SummonerSpellQueryParams;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.Iterator;
import java.util.List;
/**
 * Integration tests for the static-data endpoints of {@link api.Ulti}: champions,
 * items, masteries, realms, runes, summoner spells and versions. Each endpoint is
 * exercised both with default and with fully-populated query parameters.
 */
public class StaticAPITests {
    /** Locale used for every parameterized static-data request. */
    private static final String TEST_LOCALE = "en_US";
    /** Data version used for every parameterized static-data request. */
    private static final String TEST_VERSION = "4.19.2";
    private static Ulti ulti;

    @BeforeClass
    public static void setUp() throws Exception {
        // NOTE(review): constructed with an empty API key — presumably supplied
        // locally before running these tests; confirm.
        ulti = new Ulti("");
    }

    /** The default champion-list request should return a non-null list. */
    @Test
    public void testGetStaticChampionList() {
        Assert.assertNotNull(ulti.getStaticChampionList());
    }

    /** The champion-list request should also succeed with all query parameters set. */
    @Test
    public void testGetStaticChampionListWithParams() {
        ChampionQueryParams query = new ChampionQueryParams.Builder()
                .locale(TEST_LOCALE)
                .version(TEST_VERSION)
                .dataById(true)
                .champData(ChampData.ALL)
                .build();
        Assert.assertNotNull(ulti.getStaticChampionList(query));
    }

    /** Every champion id from the list endpoint should resolve via the single-champion endpoint. */
    @Test
    public void testGetStaticChampion() {
        ChampionList all = ulti.getStaticChampionList();
        for (Champion champion : all.getData().values()) {
            Assert.assertNotNull(ulti.getStaticChampion(champion.getId()));
        }
    }

    /** Single-champion lookups should also succeed with query parameters set. */
    @Test
    public void testGetStaticChampionWithParams() {
        ChampionList all = ulti.getStaticChampionList();
        ChampionQueryParams query = new ChampionQueryParams.Builder()
                .locale(TEST_LOCALE)
                .version(TEST_VERSION)
                .champData(ChampData.ALLYTIPS, ChampData.ENEMYTIPS)
                .build();
        for (Champion champion : all.getData().values()) {
            Assert.assertNotNull(ulti.getStaticChampion(champion.getId(), query));
        }
    }

    /** The default item-list request should return a non-null list. */
    @Test
    public void testGetStaticItemList() {
        Assert.assertNotNull(ulti.getStaticItemList());
    }

    /** The item-list request should also succeed with all query parameters set. */
    @Test
    public void testGetStaticItemListWithParams() {
        ItemQueryParams query = new ItemQueryParams.Builder()
                .locale(TEST_LOCALE)
                .version(TEST_VERSION)
                .itemListData(ItemListData.ALL)
                .build();
        Assert.assertNotNull(ulti.getStaticItemList(query));
    }

    /** Every item id from the list endpoint should resolve via the single-item endpoint. */
    @Test
    public void testGetStaticItem() {
        ItemList all = ulti.getStaticItemList();
        for (Item item : all.getData().values()) {
            Assert.assertNotNull(ulti.getStaticItem(item.getId()));
        }
    }

    /** Single-item lookups should also succeed with query parameters set. */
    @Test
    public void testGetStaticItemWithParams() {
        ItemList all = ulti.getStaticItemList();
        ItemQueryParams query = new ItemQueryParams.Builder()
                .locale(TEST_LOCALE)
                .version(TEST_VERSION)
                .itemListData(ItemListData.COLLOQ, ItemListData.CONSUME_ON_FULL)
                .build();
        for (Item item : all.getData().values()) {
            Assert.assertNotNull(ulti.getStaticItem(item.getId(), query));
        }
    }

    /** The default mastery-list request should return a non-null list. */
    @Test
    public void testGetStaticMasteryList() {
        Assert.assertNotNull(ulti.getStaticMasteryList());
    }

    /** The mastery-list request should also succeed with all query parameters set. */
    @Test
    public void testGetStaticMasteryListWithParams() {
        MasteryQueryParams query = new MasteryQueryParams.Builder()
                .locale(TEST_LOCALE)
                .version(TEST_VERSION)
                .masteryListData(MasteryListData.ALL)
                .build();
        Assert.assertNotNull(ulti.getStaticMasteryList(query));
    }

    /** Every mastery id from the list endpoint should resolve via the single-mastery endpoint. */
    @Test
    public void testGetStaticMastery() {
        MasteryList all = ulti.getStaticMasteryList();
        for (Mastery mastery : all.getData().values()) {
            Assert.assertNotNull(ulti.getStaticMastery(mastery.getId()));
        }
    }

    /** Single-mastery lookups should also succeed with query parameters set. */
    @Test
    public void testGetStaticMasteryWithParams() {
        MasteryList all = ulti.getStaticMasteryList();
        MasteryQueryParams query = new MasteryQueryParams.Builder()
                .locale(TEST_LOCALE)
                .version(TEST_VERSION)
                .masteryListData(MasteryListData.IMAGE, MasteryListData.SANITIZED_DESCRIPTION)
                .build();
        for (Mastery mastery : all.getData().values()) {
            Assert.assertNotNull(ulti.getStaticMastery(mastery.getId(), query));
        }
    }

    /** The realm endpoint should return a non-null realm descriptor. */
    @Test
    public void testGetStaticRealm() {
        Assert.assertNotNull(ulti.getStaticRealm());
    }

    /** The default rune-list request should return a non-null list. */
    @Test
    public void testGetStaticRuneList() {
        Assert.assertNotNull(ulti.getStaticRuneList());
    }

    /** The rune-list request should also succeed with all query parameters set. */
    @Test
    public void testGetStaticRuneListWithParams() {
        RuneQueryParams query = new RuneQueryParams.Builder()
                .locale(TEST_LOCALE)
                .version(TEST_VERSION)
                .runeListData(RuneListData.ALL)
                .build();
        Assert.assertNotNull(ulti.getStaticRuneList(query));
    }

    /** Every rune id from the list endpoint should resolve via the single-rune endpoint. */
    @Test
    public void testGetStaticRune() {
        RuneList all = ulti.getStaticRuneList();
        for (Rune rune : all.getData().values()) {
            Assert.assertNotNull(ulti.getStaticRune(rune.getId()));
        }
    }

    /** Single-rune lookups should also succeed with query parameters set. */
    @Test
    public void testGetStaticRuneWithParams() {
        RuneList all = ulti.getStaticRuneList();
        RuneQueryParams query = new RuneQueryParams.Builder()
                .locale(TEST_LOCALE)
                .version(TEST_VERSION)
                .runeListData(RuneListData.HIDE_FROM_ALL, RuneListData.GOLD)
                .build();
        for (Rune rune : all.getData().values()) {
            Assert.assertNotNull(ulti.getStaticRune(rune.getId(), query));
        }
    }

    /** The default summoner-spell-list request should return a non-null list. */
    @Test
    public void testGetStaticSummonerSpellList() {
        Assert.assertNotNull(ulti.getStaticSummonerSpellList());
    }

    /** The summoner-spell-list request should also succeed with all query parameters set. */
    @Test
    public void testGetStaticSummonerSpellListWithParams() {
        SummonerSpellQueryParams query = new SummonerSpellQueryParams.Builder()
                .locale(TEST_LOCALE)
                .version(TEST_VERSION)
                .spellData(SpellData.ALL)
                .build();
        Assert.assertNotNull(ulti.getStaticSummonerSpellList(query));
    }

    /** Every spell id from the list endpoint should resolve via the single-spell endpoint. */
    @Test
    public void testGetStaticSummonerSpell() {
        SummonerSpellList all = ulti.getStaticSummonerSpellList();
        for (SummonerSpell spell : all.getData().values()) {
            Assert.assertNotNull(ulti.getStaticSummonerSpell(spell.getId()));
        }
    }

    /** Single-spell lookups should also succeed with query parameters set. */
    @Test
    public void testGetStaticSummonerSpellWithParams() {
        SummonerSpellList all = ulti.getStaticSummonerSpellList();
        SummonerSpellQueryParams query = new SummonerSpellQueryParams.Builder()
                .locale(TEST_LOCALE)
                .version(TEST_VERSION)
                .spellData(SpellData.COOLDOWN_BURN, SpellData.COST_BURN, SpellData.EFFECT_BURN)
                .build();
        for (SummonerSpell spell : all.getData().values()) {
            Assert.assertNotNull(ulti.getStaticSummonerSpell(spell.getId(), query));
        }
    }

    /** The versions endpoint should return a non-null list of version strings. */
    @Test
    public void testGetStaticVersions() {
        Assert.assertNotNull(ulti.getStaticVersions());
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vfs.impl.wsl;
import com.intellij.execution.configurations.PathEnvironmentVariableUtil;
import com.intellij.execution.process.OSProcessHandler;
import com.intellij.execution.process.ProcessOutputTypes;
import com.intellij.notification.NotificationListener;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.NlsSafe;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.util.text.Strings;
import com.intellij.openapi.vfs.local.FileWatcherNotificationSink;
import com.intellij.openapi.vfs.local.PluggableFileWatcher;
import com.intellij.openapi.vfs.newvfs.ManagingFS;
import com.intellij.util.io.BaseDataReader;
import com.intellij.util.io.BaseOutputReader;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import static com.intellij.execution.wsl.WSLDistribution.UNC_PREFIX;
public class WslFileWatcher extends PluggableFileWatcher {
// Per-VM logger: a null vm yields the plain class logger; otherwise a
// sub-category logger is returned whose name is suffixed with the VM name.
private static Logger logger(@Nullable String vm) {
    if (vm == null) {
        return Logger.getInstance(WslFileWatcher.class);
    }
    return Logger.getInstance('#' + WslFileWatcher.class.getName() + '.' + vm);
}
// Base name of the bundled watcher binary resolved via PathManager.findBinFile().
private static final String FSNOTIFIER_WSL = "fsnotifier-wsl";
// Offset past the WSL UNC prefix at which the distribution name begins
// (presumably used to slice the VM name out of watch-root paths — confirm in later methods).
private static final int NAME_START = UNC_PREFIX.length();
// Protocol keywords; presumably sent to the watcher process's stdin — usage not visible in this chunk.
private static final String ROOTS_COMMAND = "ROOTS";
private static final String EXIT_COMMAND = "EXIT";
// Gives up relaunching a crashed watcher process after this many attempts.
private static final int MAX_PROCESS_LAUNCH_ATTEMPT_COUNT = 10;
// Sink for watcher events and failure notifications; set once in initialize().
private FileWatcherNotificationSink myNotificationSink;
// Resolved watcher executable, or null when the watcher is not operational on this system.
private Path myExecutable;
// Per-VM watcher state, keyed by VM identifier; concurrent because roots updates and dispose may race.
private final Map<String, VmData> myVMs = new ConcurrentHashMap<>();
// Count of in-flight root-setting operations; isSettingRoots() reports true while > 0.
private final AtomicInteger mySettingRoots = new AtomicInteger(0);
private volatile boolean myShuttingDown = false;
private volatile boolean myTestStarted = false;
@Override
public void initialize(@NotNull ManagingFS managingFS, @NotNull FileWatcherNotificationSink notificationSink) {
myNotificationSink = notificationSink;
if (SystemInfo.isWin10OrNewer && PathEnvironmentVariableUtil.findInPath("wsl.exe") != null) {
myExecutable = PathManager.findBinFile(FSNOTIFIER_WSL);
if (myExecutable != null) {
logger(null).info("WSL file watcher: " + myExecutable);
}
}
}
private void notifyOnFailure(@NlsSafe String vm, @NlsContexts.NotificationContent String cause, @Nullable NotificationListener listener) {
myNotificationSink.notifyUserOnFailure("[" + vm + "] " + cause, listener);
}
@Override
public void dispose() {
myShuttingDown = true;
for (Map.Entry<String, VmData> entry : myVMs.entrySet()) {
shutdownProcess(entry.getValue());
}
}
@Override
public boolean isOperational() {
return myExecutable != null && (!ApplicationManager.getApplication().isUnitTestMode() || myTestStarted);
}
@Override
public boolean isSettingRoots() {
return isOperational() && mySettingRoots.get() > 0;
}
@Override
public void setWatchRoots(@NotNull List<String> recursive, @NotNull List<String> flat) {
if (myShuttingDown) return;
Map<String, VmData> newVMs = new HashMap<>();
List<String> ignored = new ArrayList<>();
sortRoots(recursive, newVMs, ignored, true);
sortRoots(flat, newVMs, ignored, false);
myNotificationSink.notifyManualWatchRoots(this, ignored);
for (Map.Entry<String, VmData> entry : newVMs.entrySet()) {
VmData upcoming = entry.getValue(), vm = myVMs.computeIfAbsent(entry.getKey(), k -> upcoming);
assert vm != null : entry;
if (vm == upcoming) {
setupProcess(vm);
}
else if (!vm.recursive.equals(upcoming.recursive) || !vm.flat.equals(upcoming.flat)) {
vm.reload(upcoming);
setupProcess(vm);
}
else {
myNotificationSink.notifyManualWatchRoots(this, vm.ignored);
}
}
for (Iterator<Map.Entry<String, VmData>> iterator = myVMs.entrySet().iterator(); iterator.hasNext(); ) {
Map.Entry<String, VmData> entry = iterator.next();
if (!newVMs.containsKey(entry.getKey())) {
iterator.remove();
shutdownProcess(entry.getValue());
}
}
}
private static void sortRoots(List<String> roots, Map<String, VmData> vms, List<String> ignored, boolean recursive) {
for (String root : roots) {
int nameEnd;
if (StringUtil.startsWithIgnoreCase(root, UNC_PREFIX) && (nameEnd = root.indexOf('\\', NAME_START)) > NAME_START) {
String prefix = root.substring(0, nameEnd);
String name = root.substring(NAME_START, nameEnd);
VmData vm = vms.computeIfAbsent(name, k -> new VmData(k, prefix));
String path = root.substring(nameEnd).replace('\\', '/');
(recursive ? vm.recursive : vm.flat).add(path);
}
else {
ignored.add(root);
}
}
}
private void setupProcess(VmData vm) {
if (myShuttingDown || vm.shuttingDown) return;
MyProcessHandler handler = vm.handler;
if (handler == null) {
if (vm.startAttemptCount.incrementAndGet() > MAX_PROCESS_LAUNCH_ATTEMPT_COUNT) {
notifyOnFailure(vm.name, ApplicationBundle.message("watcher.bailed.out.10x", vm.name), null);
return;
}
try {
Path toolName = myExecutable.getFileName(), toolDir = myExecutable.getParent();
Process process = new ProcessBuilder("wsl", "-d", vm.name, "-e", "./" + toolName).directory(toolDir.toFile()).start();
vm.handler = handler = new MyProcessHandler(process, vm);
handler.startNotify();
}
catch (IOException e) {
vm.logger.error(e);
vm.startAttemptCount.set(MAX_PROCESS_LAUNCH_ATTEMPT_COUNT);
notifyOnFailure(vm.name, ApplicationBundle.message("watcher.failed.to.start", vm.name), null);
return;
}
}
mySettingRoots.incrementAndGet();
try {
handler.writeLine(ROOTS_COMMAND);
for (String path : vm.recursive) handler.writeLine(path);
for (String path : vm.flat) handler.writeLine('|' + path);
handler.writeLine("#");
}
catch (IOException e) {
vm.logger.error(e);
}
}
private void shutdownProcess(VmData vm) {
MyProcessHandler processHandler = vm.handler;
if (processHandler != null && !processHandler.isProcessTerminated()) {
vm.shuttingDown = true;
try { processHandler.writeLine(EXIT_COMMAND); }
catch (IOException ignore) { }
if (!processHandler.waitFor(10)) {
Runnable r = () -> {
if (!processHandler.waitFor(500)) {
vm.logger.warn("WSL file watcher is still alive, doing a force quit.");
processHandler.destroyProcess();
}
};
if (myShuttingDown) {
new Thread(r, FSNOTIFIER_WSL + " shutdown").start();
}
else {
ApplicationManager.getApplication().executeOnPooledThread(r);
}
}
}
vm.handler = null;
}
private static final class VmData {
final String name;
final String prefix;
final List<String> recursive = new ArrayList<>();
final List<String> flat = new ArrayList<>();
final Logger logger;
final AtomicInteger startAttemptCount = new AtomicInteger(0);
volatile MyProcessHandler handler;
volatile List<String> ignored = Collections.emptyList();
volatile boolean shuttingDown;
VmData(String name, String prefix) {
this.name = name;
this.prefix = prefix;
this.logger = logger(name);
}
void reload(VmData other) {
recursive.clear();
recursive.addAll(other.recursive);
flat.clear();
flat.addAll(other.flat);
ignored = Collections.emptyList();
}
}
private static final BaseOutputReader.Options READER_OPTIONS = new BaseOutputReader.Options() {
@Override public BaseDataReader.SleepingPolicy policy() { return BaseDataReader.SleepingPolicy.BLOCKING; }
@Override public boolean sendIncompleteLines() { return false; }
@Override public boolean withSeparators() { return false; }
};
@SuppressWarnings("SpellCheckingInspection")
private enum WatcherOp {GIVEUP, RESET, UNWATCHEABLE, MESSAGE, CREATE, DELETE, STATS, CHANGE}
private final class MyProcessHandler extends OSProcessHandler {
private final BufferedWriter myWriter;
private final VmData myVm;
private WatcherOp myLastOp;
private final List<String> myLines = new ArrayList<>();
MyProcessHandler(Process process, VmData vm) {
super(process, FSNOTIFIER_WSL + " @ " + vm.name, StandardCharsets.UTF_8);
myWriter = new BufferedWriter(new OutputStreamWriter(process.getOutputStream(), StandardCharsets.UTF_8));
myVm = vm;
}
void writeLine(String line) throws IOException {
if (myVm.logger.isTraceEnabled()) myVm.logger.trace("<< " + line);
myWriter.write(line);
myWriter.write('\n');
myWriter.flush();
}
@Override
protected @NotNull BaseOutputReader.Options readerOptions() {
return READER_OPTIONS;
}
@Override
protected void notifyProcessTerminated(int exitCode) {
super.notifyProcessTerminated(exitCode);
String message = "Watcher terminated with exit code " + exitCode;
if (myShuttingDown || myVm.shuttingDown) myVm.logger.info(message); else myVm.logger.warn(message);
myVm.handler = null;
setupProcess(myVm);
}
@Override
public void notifyTextAvailable(@NotNull String line, @NotNull Key outputType) {
if (outputType == ProcessOutputTypes.STDERR) {
myVm.logger.warn(line);
}
if (outputType != ProcessOutputTypes.STDOUT) {
return;
}
if (myVm.logger.isTraceEnabled()) myVm.logger.trace(">> " + line);
if (myLastOp == null) {
WatcherOp watcherOp;
try {
watcherOp = WatcherOp.valueOf(line);
}
catch (IllegalArgumentException e) {
String message = "Illegal watcher command: '" + line + "'";
if (line.length() <= 20) message += " " + Arrays.toString(line.chars().toArray());
myVm.logger.error(message);
return;
}
if (watcherOp == WatcherOp.GIVEUP) {
notifyOnFailure(myVm.name, ApplicationBundle.message("watcher.gave.up"), null);
}
else if (watcherOp == WatcherOp.RESET) {
myNotificationSink.notifyReset(Strings.trimEnd(myVm.prefix, '\\'));
}
else {
myLastOp = watcherOp;
}
}
else if (myLastOp == WatcherOp.MESSAGE) {
String localized = Objects.requireNonNullElse(ApplicationBundle.INSTANCE.messageOrNull(line), line); //NON-NLS
myVm.logger.warn(localized);
notifyOnFailure(myVm.name, localized, NotificationListener.URL_OPENING_LISTENER);
myLastOp = null;
}
else if (myLastOp == WatcherOp.UNWATCHEABLE) {
if ("#".equals(line)) {
mySettingRoots.decrementAndGet();
processUnwatchable();
myLines.clear();
myLastOp = null;
}
else {
myLines.add(line);
}
}
else {
String path = StringUtil.trimEnd(line.replace('\0', '\n'), File.separator); // unescape
processChange(path, myLastOp);
myLastOp = null;
}
}
private void processUnwatchable() {
List<String> roots = new ArrayList<>(myLines.size());
for (String line : myLines) roots.add(myVm.prefix + line.replace('/', '\\'));
myVm.ignored = new CopyOnWriteArrayList<>(roots);
myNotificationSink.notifyManualWatchRoots(WslFileWatcher.this, roots);
}
private void processChange(String path, WatcherOp op) {
String root = myVm.prefix + path.replace('/', '\\');
if (op == WatcherOp.STATS || op == WatcherOp.CHANGE) {
myNotificationSink.notifyDirtyPath(root);
}
else if (op == WatcherOp.CREATE || op == WatcherOp.DELETE) {
myNotificationSink.notifyPathCreatedOrDeleted(root);
}
else {
myVm.logger.error("unexpected op: " + op);
}
}
}
//<editor-fold desc="Test stuff.">
@Override
@TestOnly
public void startup() {
Application app = ApplicationManager.getApplication();
if (app == null || !app.isUnitTestMode()) throw new IllegalStateException();
myTestStarted = true;
myShuttingDown = false;
}
@Override
@TestOnly
public void shutdown() {
Application app = ApplicationManager.getApplication();
if (app == null || !app.isUnitTestMode()) throw new IllegalStateException();
myTestStarted = false;
myShuttingDown = true;
myVMs.forEach((key, value) -> shutdownProcess(value));
myVMs.clear();
}
//</editor-fold>
}
| |
/**
*
* Licensed under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*
 * Implements the Hadoop FS interfaces to allow applications to store
 * files in the GIGA+TableFS File System (GTFS).
*/
package org.apache.hadoop.fs.gtfs;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.gtfs.GTFSImpl;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Progressable;
/**
* A FileSystem backed by GIGA+TableFS FileSystem.
*
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public class GTFileSystem extends FileSystem {
private URI uri;
private Path workingDir = new Path("/");
private GTFSImpl gtfs_impl;
private FileSystem hdfs;
private int threshold = 4096;
public GTFileSystem() {
}
@Override
public URI getUri() {
return uri;
}
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
super.initialize(uri, conf);
try {
this.uri = URI.create(uri.getScheme() + "://"+ uri.getAuthority());
System.out.println("URI:"+uri.getScheme() + "://"+ uri.getAuthority());
this.hdfs = FileSystem.get(uri, conf);
this.workingDir = new Path("/");
this.gtfs_impl = new GTFSImpl();
setConf(conf);
} catch (Exception e) {
e.printStackTrace();
System.out.println("Unable to initialize GTFS");
System.exit(-1);
}
}
@Override
public Path getWorkingDirectory() {
return workingDir;
}
@Override
public void setWorkingDirectory(Path dir) {
workingDir = makeAbsolute(dir);
}
private Path makeAbsolute(Path path) {
if (path.isAbsolute()) {
return path;
}
return new Path(workingDir, path);
}
public boolean mkdir_recursive(Path path, FsPermission permission) {
Path parent_path = path.getParent();
if (parent_path != null) {
mkdir_recursive(parent_path, permission);
}
int res = gtfs_impl.mkDir(path.toString(), permission.toShort());
return res == 0;
}
@Override
public boolean mkdirs(Path path, FsPermission permission
) throws IOException {
Path absolute = makeAbsolute(path);
return mkdir_recursive(absolute, permission);
}
public boolean mknod(Path path, FsPermission permission) {
Path absolute = makeAbsolute(path);
return gtfs_impl.mkNod(absolute.toString(), permission.toShort()) == 0;
}
@Override
public FileStatus[] listStatus(Path path) throws IOException {
Path absolute = makeAbsolute(path);
return gtfs_impl.listStatus(path.toString());
}
public String getUser(int uid) {
return Integer.toString(uid);
}
public String getGroup(int gid) {
return Integer.toString(gid);
}
@Override
public FileStatus getFileStatus(Path path) throws IOException {
Path absolute = makeAbsolute(path);
GTFSImpl.Info info = new GTFSImpl.Info();
if (gtfs_impl.getInfo(absolute.toString(), info) == 0) {
return new FileStatus(info.size, info.is_dir > 0, 3, getDefaultBlockSize(),
info.ctime, info.atime,
new FsPermission((short) (info.permission)),
getUser(info.uid), getGroup(info.gid),
path.makeQualified(this.uri, path));
} else {
return null;
}
}
/**
* Set permission of a path.
* @param p
* @param permission
*/
public void setPermission(Path p, FsPermission permission
) throws IOException {
}
/**
* Set owner of a path (i.e. a file or a directory).
* The parameters username and groupname cannot both be null.
* @param p The path
* @param username If it is null, the original username remains unchanged.
* @param groupname If it is null, the original groupname remains unchanged.
*/
@Override
public void setOwner(Path p, String username, String groupname
) throws IOException {
}
/**
* Set access time of a file
* @param p The path
* @param mtime Set the modification time of this file.
* The number of milliseconds since Jan 1, 1970.
* A value of -1 means that this call should not set modification time.
* @param atime Set the access time of this file.
* The number of milliseconds since Jan 1, 1970.
* A value of -1 means that this call should not set access time.
*/
@Override
public void setTimes(Path p, long mtime, long atime
) throws IOException {
}
@Override
public FSDataOutputStream append(Path f, int bufferSize,
Progressable progress) throws IOException {
return null;
}
@Override
public FSDataOutputStream create(Path file, FsPermission permission,
boolean overwrite, int bufferSize,
short replication,
long blockSize, Progressable progress)
throws IOException {
int fd = gtfs_impl.create(file.toString(), permission.toShort());
if (fd < 0) {
throw new IOException("Cannot create the file:" + file);
}
int parent_id = gtfs_impl.getParentID(fd);
GTFSOutputStream out = new GTFSOutputStream(fd,
new Path("/"+parent_id+"/"+file.getName()),
permission, overwrite, bufferSize, replication, blockSize, 0,
threshold, this, this.gtfs_impl, progress);
return new FSDataOutputStream(out, statistics);
}
@Override
public FSDataInputStream open(Path path, int bufferSize) throws IOException
{
if (!exists(path))
throw new IOException("File does not exist: " + path);
byte [] buf = new byte[threshold];
GTFSImpl.FetchReply reply = new GTFSImpl.FetchReply();
gtfs_impl.fetch(path.toString(), buf, reply);
GTFSInputStream in;
if (reply.state == 1) {
in = new GTFSInputStream(buf, reply.buf_len);
} else {
in = new GTFSInputStream(hdfs.open(new Path(buf.toString())));
}
return new FSDataInputStream(in);
}
@Override
public boolean rename(Path src, Path dst) throws IOException {
return true;
}
// recursively delete the directory and its contents
@Override
public boolean delete(Path path, boolean recursive) throws IOException {
Path absolute = makeAbsolute(path);
if (!recursive) {
gtfs_impl.unlink(absolute.toString());
}
return true;
}
@Override
public short getDefaultReplication() {
return 3;
}
@Override
public boolean setReplication(Path path, short replication)
throws IOException {
return true;
}
// 64MB is the GTFS block size
@Override
public long getDefaultBlockSize() {
return 1 << 26;
}
@Deprecated
public void lock(Path path, boolean shared) throws IOException {
}
@Deprecated
public void release(Path path) throws IOException {
}
/**
* Return null if the file doesn't exist; otherwise, get the
* locations of the various chunks of the file file from KFS.
*/
@Override
public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
long len) throws IOException {
if (file == null) {
return null;
}
return null;
}
static void testServerConnection() {
try {
GTFileSystem fs = new GTFileSystem();
fs.initialize(new URI("hdfs://localhost/"), new Configuration());
Path root = new Path("/");
byte buf[] = new byte[1024];
byte inbuf[] = new byte[4096];
for (int i = 0; i < 10; ++i) {
Path path = new Path(root, Integer.toString(i));
for (int j = 0; j < 1024; ++j)
buf[j] = (byte) i;
FSDataOutputStream outs = fs.create(path,
FsPermission.getFileDefault(), true,
4096, (short) 3, fs.getDefaultBlockSize(),
new GTFSProgress());
outs.write(buf, 0, 1024);
outs.close();
FSDataInputStream ins = fs.open(path, 4096);
int ins_len = ins.read(inbuf);
if (ins_len != 1024) {
System.out.println("length is not match.");
System.exit(1);
}
for (int j = 0; j < 1024; ++j)
if (inbuf[j] != i) {
System.out.println("read data is wrong:"+inbuf[j]);
System.exit(1);
}
FileStatus status = fs.getFileStatus(path);
System.out.println(status.isDirectory());
System.out.println(status.getLen());
}
for (int i = 0; i < 10; ++i) {
Path path = new Path(root, "dir"+i);
fs.mkdirs(path, FsPermission.getFileDefault());
FileStatus status = fs.getFileStatus(path);
System.out.println(status.isDirectory());
System.out.println(status.getLen());
}
System.out.println("Finish creating entries");
FileStatus[] readdir_result = fs.listStatus(root);
for (int i = 0; i < readdir_result.length; ++i) {
System.out.println(readdir_result[i].getPath());
}
} catch (Exception e) {
e.printStackTrace();
}
}
public static void main(String[] args) {
testServerConnection();
}
}
/** No-op {@link Progressable} used by the manual test harness in {@code GTFileSystem}. */
class GTFSProgress implements Progressable {
  public void progress() {
    // Intentionally empty: progress reports are ignored.
  }
}
| |
/**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apereo.portal.portlet.marketplace;
import java.util.List;
import javax.portlet.PortletRequest;
import javax.servlet.http.HttpServletRequest;
import org.apereo.portal.portlet.PortletUtils;
import org.apereo.portal.portlet.om.IPortletDefinition;
import org.apereo.portal.portlet.om.IPortletWindow;
import org.apereo.portal.portlet.om.IPortletWindowId;
import org.apereo.portal.portlet.registry.IPortletCategoryRegistry;
import org.apereo.portal.portlet.registry.IPortletDefinitionRegistry;
import org.apereo.portal.portlet.registry.IPortletWindowRegistry;
import org.apereo.portal.portlets.groupselector.EntityEnum;
import org.apereo.portal.portlets.search.IPortalSearchService;
import org.apereo.portal.search.SearchResult;
import org.apereo.portal.security.IAuthorizationService;
import org.apereo.portal.url.IPortalRequestUtils;
import org.apereo.portal.url.IPortalUrlBuilder;
import org.apereo.portal.url.IPortalUrlProvider;
import org.apereo.portal.url.IPortletUrlBuilder;
import org.apereo.portal.url.UrlType;
import org.apereo.portal.search.PortletUrl;
import org.apereo.portal.search.PortletUrlType;
import org.apereo.portal.search.SearchResults;
import org.apereo.portal.search.SearchRequest;
import org.apereo.portal.search.PortletUrlParameter;
import org.springframework.beans.factory.annotation.Autowired;
/**
* The search service that captures marketplace entries.
* @author vertein
*/
/**
 * An {@link IPortalSearchService} that matches search terms against marketplace portlet
 * entries: name, title, description, fname, screen-shot captions, and release notes.
 * Only portlets the requesting user may BROWSE are returned.
 *
 * @author vertein
 */
public class MarketplaceSearchService implements IPortalSearchService {

    private IPortletDefinitionRegistry portletDefinitionRegistry;
    private IPortalUrlProvider portalUrlProvider;
    private IPortletWindowRegistry portletWindowRegistry;
    private IPortalRequestUtils portalRequestUtils;
    private IMarketplaceService marketplaceService;
    private IPortletCategoryRegistry portletCategoryRegistry;
    private IAuthorizationService authorizationService;

    @Autowired
    public void setPortletDefinitionRegistry(IPortletDefinitionRegistry registry) {
        this.portletDefinitionRegistry = registry;
    }

    @Autowired
    public void setPortalUrlProvider(IPortalUrlProvider provider) {
        this.portalUrlProvider = provider;
    }

    @Autowired
    public void setPortletWindowRegistry(IPortletWindowRegistry registry) {
        this.portletWindowRegistry = registry;
    }

    @Autowired
    public void setPortalRequestUtils(IPortalRequestUtils requestUtils) {
        this.portalRequestUtils = requestUtils;
    }

    @Autowired
    public void setPortletCategoryRegistry(IPortletCategoryRegistry registry) {
        this.portletCategoryRegistry = registry;
    }

    @Autowired
    public void setMarketplaceService(final IMarketplaceService service) {
        this.marketplaceService = service;
    }

    @Autowired
    public void setAuthorizationService(IAuthorizationService service) {
        this.authorizationService = service;
    }

    /**
     * Returns the marketplace entries matching the search terms.
     * Searches name, title, description, fname, and screen-shot captions / release notes.
     */
    @Override
    public SearchResults getSearchResults(PortletRequest request, SearchRequest query) {
        final String terms = query.getSearchTerms().toLowerCase();
        final HttpServletRequest servletRequest = this.portalRequestUtils.getPortletHttpRequest(request);
        final SearchResults searchResults = new SearchResults();

        for (final IPortletDefinition candidate : portletDefinitionRegistry.getAllPortletDefinitions()) {
            final MarketplacePortletDefinition marketplaceDefinition =
                new MarketplacePortletDefinition(candidate, this.marketplaceService, this.portletCategoryRegistry);
            if (!this.matches(terms, marketplaceDefinition)) {
                continue;
            }

            // window is null when the user has no access to the portlet; additionally
            // require BROWSE permission before exposing the entry in search results.
            final IPortletWindow window =
                this.portletWindowRegistry.getOrCreateDefaultPortletWindowByFname(servletRequest, candidate.getFName());
            if (window == null || !authorizationService.canPrincipalBrowse(
                    authorizationService.newPrincipal(request.getRemoteUser(), EntityEnum.PERSON.getClazz()),
                    candidate)) {
                continue;
            }

            final SearchResult entry = new SearchResult();
            entry.setTitle(candidate.getTitle());
            entry.setSummary(candidate.getDescription());
            entry.getType().add("marketplace");

            final IPortalUrlBuilder portalUrlBuilder = this.portalUrlProvider
                .getPortalUrlBuilderByPortletFName(servletRequest, candidate.getFName(), UrlType.RENDER);
            final IPortletUrlBuilder portletUrlBuilder =
                portalUrlBuilder.getPortletUrlBuilder(window.getPortletWindowId());
            portletUrlBuilder.setWindowState(PortletUtils.getWindowState("maximized"));
            entry.setExternalUrl(portalUrlBuilder.getUrlString());
            entry.setPortletUrl(buildRenderUrl(candidate.getFName()));

            searchResults.getSearchResult().add(entry);
        }

        return searchResults;
    }

    /** Builds the maximized RENDER {@link PortletUrl} pointing at the given fname. */
    private PortletUrl buildRenderUrl(String fname) {
        final PortletUrl url = new PortletUrl();
        url.setType(PortletUrlType.RENDER);
        url.setPortletMode("VIEW");
        url.setWindowState("maximized");

        final PortletUrlParameter actionParam = new PortletUrlParameter();
        actionParam.setName("action");
        actionParam.getValue().add("view");
        url.getParam().add(actionParam);

        final PortletUrlParameter fNameParam = new PortletUrlParameter();
        fNameParam.setName("fName");
        fNameParam.getValue().add(fname);
        url.getParam().add(fNameParam);

        return url;
    }

    /**
     * @param query the search text
     * @param portlet the marketplace definition to test
     * @return whether any searchable field of the definition contains the query (case-insensitive)
     */
    protected boolean matches(String query, MarketplacePortletDefinition portlet) {
        final String needle = query.toLowerCase();
        if (portlet.getTitle().toLowerCase().contains(needle)) {
            return true;
        }
        if (portlet.getName().toLowerCase().contains(needle)) {
            return true;
        }
        final String description = portlet.getDescription();
        if (description != null && description.toLowerCase().contains(needle)) {
            return true;
        }
        if (portlet.getFName().toLowerCase().contains(needle)) {
            return true;
        }
        return this.captionMatches(needle, portlet.getScreenShots())
            || this.releaseNotesMatches(needle, portlet.getPortletReleaseNotes());
    }

    /**
     * @param query lower-cased search text
     * @param screenShots screen shots whose captions are searched
     * @return whether any caption contains the query; used by {@link #matches}
     */
    protected boolean captionMatches(String query, List<ScreenShot> screenShots) {
        for (final ScreenShot shot : screenShots) {
            for (final String caption : shot.getCaptions()) {
                if (caption.toLowerCase().contains(query)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * @param query lower-cased search text
     * @param portletReleaseNotes release notes to search (its note list may be null)
     * @return whether any release note contains the query; used by {@link #matches}
     */
    protected boolean releaseNotesMatches(String query, PortletReleaseNotes portletReleaseNotes) {
        final List<String> notes = portletReleaseNotes.getReleaseNotes();
        if (notes == null) {
            return false;
        }
        for (final String note : notes) {
            if (note.toLowerCase().contains(query)) {
                return true;
            }
        }
        return false;
    }
}
| |
/*
* Copyright 2012-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.groovy.template;
import groovy.text.markup.MarkupTemplateEngine;
import java.io.File;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Collections;
import java.util.HashMap;
import java.util.Locale;
import javax.servlet.http.HttpServletRequest;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.springframework.boot.test.EnvironmentTestUtils;
import org.springframework.context.i18n.LocaleContextHolder;
import org.springframework.core.io.ClassPathResource;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.mock.web.MockServletContext;
import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
import org.springframework.web.servlet.View;
import org.springframework.web.servlet.ViewResolver;
import org.springframework.web.servlet.support.RequestContext;
import org.springframework.web.servlet.view.groovy.GroovyMarkupConfig;
import org.springframework.web.servlet.view.groovy.GroovyMarkupConfigurer;
import org.springframework.web.servlet.view.groovy.GroovyMarkupViewResolver;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;
/**
* Tests for {@link GroovyTemplateAutoConfiguration}.
*
* @author Dave Syer
*/
public class GroovyTemplateAutoConfigurationTests {
private AnnotationConfigWebApplicationContext context = new AnnotationConfigWebApplicationContext();
@Before
public void setupContext() {
this.context.setServletContext(new MockServletContext());
}
@After
public void close() {
LocaleContextHolder.resetLocaleContext();
if (this.context != null) {
this.context.close();
}
}
@Test
public void defaultConfiguration() {
registerAndRefreshContext();
assertThat(this.context.getBean(GroovyMarkupViewResolver.class), notNullValue());
}
@Test
public void emptyTemplateLocation() {
new File("target/test-classes/templates/empty-directory").mkdir();
registerAndRefreshContext("spring.groovy.template.prefix:"
+ "classpath:/templates/empty-directory/");
}
@Test
public void defaultViewResolution() throws Exception {
registerAndRefreshContext();
MockHttpServletResponse response = render("home");
String result = response.getContentAsString();
assertThat(result, containsString("home"));
assertThat(response.getContentType(), equalTo("text/html;charset=UTF-8"));
}
@Test
public void includesViewResolution() throws Exception {
registerAndRefreshContext();
MockHttpServletResponse response = render("includes");
String result = response.getContentAsString();
assertThat(result, containsString("here"));
assertThat(response.getContentType(), equalTo("text/html;charset=UTF-8"));
}
@Test
public void disableViewResolution() throws Exception {
EnvironmentTestUtils.addEnvironment(this.context,
"spring.groovy.template.enabled:false");
registerAndRefreshContext();
assertThat(this.context.getBeanNamesForType(ViewResolver.class).length,
equalTo(0));
}
@Test
public void localeViewResolution() throws Exception {
registerAndRefreshContext();
MockHttpServletResponse response = render("includes", Locale.FRENCH);
String result = response.getContentAsString();
assertThat(result, containsString("voila"));
assertThat(response.getContentType(), equalTo("text/html;charset=UTF-8"));
}
@Test
public void customContentType() throws Exception {
registerAndRefreshContext("spring.groovy.template.contentType:application/json");
MockHttpServletResponse response = render("home");
String result = response.getContentAsString();
assertThat(result, containsString("home"));
assertThat(response.getContentType(), equalTo("application/json;charset=UTF-8"));
}
@Test
public void customPrefix() throws Exception {
registerAndRefreshContext("spring.groovy.template.prefix:classpath:/templates/prefix/");
MockHttpServletResponse response = render("prefixed");
String result = response.getContentAsString();
assertThat(result, containsString("prefixed"));
}
@Test
public void customSuffix() throws Exception {
registerAndRefreshContext("spring.groovy.template.suffix:.groovytemplate");
MockHttpServletResponse response = render("suffixed");
String result = response.getContentAsString();
assertThat(result, containsString("suffixed"));
}
@Test
public void customTemplateLoaderPath() throws Exception {
registerAndRefreshContext("spring.groovy.template.prefix:classpath:/custom-templates/");
MockHttpServletResponse response = render("custom");
String result = response.getContentAsString();
assertThat(result, containsString("custom"));
}
@Test
public void disableCache() {
registerAndRefreshContext("spring.groovy.template.cache:false");
assertThat(this.context.getBean(GroovyMarkupViewResolver.class).getCacheLimit(),
equalTo(0));
}
@Test
public void renderTemplate() throws Exception {
registerAndRefreshContext();
GroovyMarkupConfig config = this.context.getBean(GroovyMarkupConfig.class);
MarkupTemplateEngine engine = config.getTemplateEngine();
Writer writer = new StringWriter();
engine.createTemplate(new ClassPathResource("templates/message.tpl").getFile())
.make(new HashMap<String, Object>(Collections.singletonMap("greeting",
"Hello World"))).writeTo(writer);
assertThat(writer.toString(), containsString("Hello World"));
}
@Test
public void customConfiguration() throws Exception {
registerAndRefreshContext("spring.groovy.template.configuration.auto-indent:true");
assertThat(this.context.getBean(GroovyMarkupConfigurer.class).isAutoIndent(),
is(true));
}
private void registerAndRefreshContext(String... env) {
EnvironmentTestUtils.addEnvironment(this.context, env);
this.context.register(GroovyTemplateAutoConfiguration.class);
this.context.refresh();
}
private MockHttpServletResponse render(String viewName) throws Exception {
return render(viewName, Locale.UK);
}
/**
 * Resolves {@code viewName} for the given locale and renders it into a fresh
 * mock response, exposing the test application context to the rendered view.
 *
 * @param viewName the name of the view to resolve and render.
 * @param locale the locale used for both view resolution and rendering.
 * @return the mock response the view was rendered into.
 */
private MockHttpServletResponse render(String viewName, Locale locale)
		throws Exception {
	LocaleContextHolder.setLocale(locale);
	View view = this.context.getBean(GroovyMarkupViewResolver.class)
			.resolveViewName(viewName, locale);
	assertThat(view, notNullValue());
	MockHttpServletRequest request = new MockHttpServletRequest();
	request.setAttribute(RequestContext.WEB_APPLICATION_CONTEXT_ATTRIBUTE,
			this.context);
	MockHttpServletResponse response = new MockHttpServletResponse();
	view.render(null, request, response);
	return response;
}
}
| |
/*
Copyright (C) 2013 the original author or authors.
See the LICENSE.txt file distributed with this work for additional
information regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package jcommon.process;
import jcommon.core.StringUtil;
import jcommon.core.platform.PlatformProviders;
import jcommon.process.platform.IProcessLauncher;
import java.util.*;
/**
 * Configures a child process for execution using the builder pattern.
 *
 * This class is <b>not</b> thread safe.
 */
@SuppressWarnings("unused")
public class ProcessBuilder implements Cloneable {
  private static final IProcessLauncher impl = PlatformProviders.find(IProcessLauncher.class, IProcessLauncher.DEFAULT);

  /**
   * Snapshot of the parent process' environment variable block; all reads and
   * writes are guarded by the {@code ProcessBuilder.class} monitor.
   */
  private static IEnvironmentVariableBlock cached_parent_environment;

  private IEnvironmentVariableBlock parent_environment;
  private boolean inherit_parent_environment = true;
  private String executable = StringUtil.empty;
  private List<String> arguments = new LinkedList<String>();
  private EnvironmentVariableBlockBuilder env = new EnvironmentVariableBlockBuilder();
  private List<IProcessListener> listeners = new LinkedList<IProcessListener>();

  static {
    refreshCachedParentEnvironmentVariableBlock();
  }

  /**
   * Prevent outside instantiation of {@link ProcessBuilder} instances.
   */
  private ProcessBuilder() {
    this.parent_environment = cachedParentEnvironment();
  }

  /**
   * Reads the cached parent environment under the same lock used when refreshing
   * it, so a concurrent refresh on another thread is never observed partially.
   */
  private static IEnvironmentVariableBlock cachedParentEnvironment() {
    synchronized (ProcessBuilder.class) {
      return cached_parent_environment;
    }
  }

  /**
   * Re-queries the platform for the parent process' current environment variable
   * block and caches it for use by subsequently created builders.
   */
  public static void refreshCachedParentEnvironmentVariableBlock() {
    synchronized (ProcessBuilder.class) {
      cached_parent_environment = impl.requestParentEnvironmentVariableBlock();
    }
  }

  /**
   * @see Cloneable#clone()
   */
  @Override
  public Object clone() throws CloneNotSupportedException {
    return copy();
  }

  /**
   * Creates a deep copy of this instance.
   *
   * @see Cloneable#clone()
   */
  public ProcessBuilder copy() {
    final ProcessBuilder builder = new ProcessBuilder();
    builder.inherit_parent_environment = inherit_parent_environment;
    builder.parent_environment = parent_environment;
    builder.executable = executable;
    builder.env = env.copy();
    builder.arguments.addAll(arguments);
    builder.listeners.addAll(listeners);
    return builder;
  }

  /**
   * Creates a new instance of {@link ProcessBuilder}.
   *
   * @return The new instance of {@link ProcessBuilder}.
   */
  public static ProcessBuilder create() {
    return new ProcessBuilder();
  }

  /**
   * Creates a new instance of {@link ProcessBuilder}.
   *
   * @param executable The name of an executable on the system path or the path to an executable.
   * @param arguments Zero or more arguments that will be passed to the executable.
   * @return The new instance of {@link ProcessBuilder}.
   */
  public static ProcessBuilder create(final String executable, final String...arguments) {
    return new ProcessBuilder()
      .withExecutable(executable)
      .withArguments(arguments)
    ;
  }

  /**
   * Indicates if the parent environment variables will be inherited by the child process.
   *
   * By default this is <code>true</code>.
   *
   * @return <code>true</code> if the child process will inherit its parent process'
   *         environment variables.
   */
  public boolean isParentEnvironmentInherited() {
    return this.inherit_parent_environment;
  }

  /**
   * Provides an instance of {@link IEnvironmentVariableBlock} representing multiple
   * environment variables defined for the current process that will be used when this
   * child process runs.
   *
   * @return An instance of {@link IEnvironmentVariableBlock}, or <code>null</code>
   *         when parent environment inheritance has been disabled or cleared.
   */
  public IEnvironmentVariableBlock getParentEnvironmentVariableBlock() {
    return parent_environment;
  }

  /**
   * The name or path to the executable that the parent process will launch.
   *
   * @return The name or path to the executable.
   */
  public String getExecutable() {
    return this.executable;
  }

  /**
   * Provides the list of arguments to the executable.
   *
   * @return A {@link String} array containing the list of arguments to the
   *         executable.
   */
  public String[] getArguments() {
    return this.arguments.toArray(new String[this.arguments.size()]);
  }

  /**
   * Constructs the full command line with the executable and arguments.
   * It does not include environment variables.
   *
   * @return A {@link String} array containing the executable and the list
   *         of arguments to the executable.
   */
  public String[] getCommandLine() {
    // Slot 0 holds the executable; arguments follow in insertion order.
    final String[] command_line = new String[arguments.size() + 1];
    command_line[0] = executable;
    int i = 1;
    for(String arg : arguments) {
      command_line[i] = arg;
      ++i;
    }
    return command_line;
  }

  /**
   * Returns an unmodifiable {@link Map} holding the list of environment variables
   * (their name and value) that will be provided to the child process upon creation.
   *
   * @return An unmodifiable {@link Map} containing the list of currently set environment
   *         variables.
   */
  public Map<String, String> produceEnvironmentVariableMap() {
    final Map<String, String> vars = new LinkedHashMap<String, String>(10, 0.8f);
    env.coalescedView(getParentEnvironmentVariableBlock(), new EnvironmentVariableBlockBuilder.IVisitor() {
      @Override
      public boolean visit(String name, String value) {
        vars.put(name, value);
        return true;
      }
    });
    return Collections.unmodifiableMap(vars);
  }

  /**
   * Returns a {@link Set} of {@link IEnvironmentVariable} instances representing environment
   * variables (their name and value) that will be provided to the child process upon creation.
   *
   * @return An unmodifiable {@link Set} of {@link IEnvironmentVariable} instances representing
   *         the list of currently set environment variables.
   */
  public Set<IEnvironmentVariable> getEnvironmentVariables() {
    final Set<IEnvironmentVariable> vars = new LinkedHashSet<IEnvironmentVariable>(10, 0.8f);
    env.coalescedView(getParentEnvironmentVariableBlock(), new EnvironmentVariableBlockBuilder.IVisitor() {
      @Override
      public boolean visit(String name, String value) {
        vars.add(new EnvironmentVariable(name, value));
        return true;
      }
    });
    return Collections.unmodifiableSet(vars);
  }

  /**
   * Returns an array of {@link IProcessListener} instances who are interested in events
   * on the child process.
   *
   * @return An array of {@link IProcessListener} instances.
   */
  public IProcessListener[] getListeners() {
    return listeners.toArray(new IProcessListener[listeners.size()]);
  }

  /**
   * Saves the name of an executable on the system path or the path to an executable.
   * <br /><br />
   * <b>Note:</b> this will overwrite the previously saved value.
   *
   * @param executable The name of an executable on the system path or the path to an executable.
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder withExecutable(final String executable) {
    if (StringUtil.isNullOrEmpty(executable)) {
      throw new IllegalArgumentException("executable cannot be null or empty");
    }
    this.executable = executable;
    return this;
  }

  /**
   * Saves the provided list of arguments.
   * <br /><br />
   * <b>Note:</b> this will overwrite all previously saved arguments.
   *
   * @param arguments Zero or more arguments that will be passed to the executable.
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder withArguments(final String...arguments) {
    this.arguments.clear();
    Collections.addAll(this.arguments, arguments);
    return this;
  }

  /**
   * Appends to the list of arguments.
   *
   * @param arguments Zero or more arguments that will be passed to the executable.
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder andArguments(final String...arguments) {
    Collections.addAll(this.arguments, arguments);
    return this;
  }

  /**
   * Appends to the list of arguments.
   *
   * @param argument A single additional argument that will be passed to the executable.
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder andArgument(final String argument) {
    this.arguments.add(argument);
    return this;
  }

  /**
   * Appends to the list of arguments.
   *
   * @param argument A single additional argument that will be passed to the executable.
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder addArgument(final String argument) {
    this.arguments.add(argument);
    return this;
  }

  /**
   * Appends to the list of arguments.
   *
   * @param arguments Zero or more arguments that will be passed to the executable.
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder addArguments(final String...arguments) {
    Collections.addAll(this.arguments, arguments);
    return this;
  }

  /**
   * Removes all saved arguments.
   *
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder clearArguments() {
    this.arguments.clear();
    return this;
  }

  /**
   * Considers the first value in the list to be the executable and then appends the
   * rest to the list of arguments.
   * <br /><br />
   * <b>Note:</b> this will overwrite all previously saved values for both the executable
   * and arguments.
   *
   * @param command_line One or more values that will set the executable and the rest to be passed to the executable.
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder withCommandLine(final String...command_line) {
    if (command_line == null || command_line.length < 1) {
      throw new IllegalArgumentException("Command line must at least include an executable");
    }
    this.arguments.clear();
    withExecutable(command_line[0]);
    for(int i = 1; i < command_line.length; ++i) {
      this.arguments.add(command_line[i]);
    }
    return this;
  }

  /**
   * Adds an environment variable with the provided name and value to the list of environment
   * variables to be set when the process is created.
   *
   * Unless configured otherwise, these will be appended to the parent process' environment
   * variables.
   *
   * @param name The name of the environment variable to add.
   * @param value The value of the environment variable to add.
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder addEnvironmentVariable(final String name, final String value) {
    if (StringUtil.isNullOrEmpty(name)) {
      throw new IllegalArgumentException("name cannot be null or empty");
    }
    if (value == null) {
      throw new IllegalArgumentException("value cannot be null");
    }
    this.env.addEnvironmentVariable(name, value);
    return this;
  }

  /**
   * Sets the list of environment variables when the process is created to be the provided name and value
   * pairs.
   * <br /><br />
   * <b>Note:</b> this will overwrite all previously saved environment variables.
   *
   * Unless configured otherwise, these will be appended to the parent process' environment
   * variables.
   *
   * @param name_value_pairs An alternating list of names and associated values.
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder withEnvironmentVariables(final String...name_value_pairs) {
    if (name_value_pairs == null || (name_value_pairs.length % 2) != 0) {
      throw new IllegalArgumentException("There must be a matching name and value (there should be an even number of provided arguments)");
    }
    this.env.clear();
    for(int i = 0; i < name_value_pairs.length; i += 2) {
      addEnvironmentVariable(name_value_pairs[i], name_value_pairs[i + 1]);
    }
    return this;
  }

  /**
   * Sets the list of environment variables when the process is created to be the provided name and value
   * pairs.
   * <br /><br />
   * <b>Note:</b> this will overwrite all previously saved environment variables.
   *
   * Unless configured otherwise, these will be appended to the parent process' environment
   * variables.
   *
   * @param name_value_pairs A {@link Map} of environment variable names to their values.
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder withEnvironmentVariables(final Map<String, String> name_value_pairs) {
    if (name_value_pairs == null) {
      throw new IllegalArgumentException("name_value_pairs cannot be null");
    }
    this.env.clear();
    for(Map.Entry<String, String> e : name_value_pairs.entrySet()) {
      addEnvironmentVariable(e.getKey(), e.getValue());
    }
    return this;
  }

  /**
   * Clears all environment variables.
   *
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder clearEnvironmentVariables() {
    this.parent_environment = null;
    this.env.clear();
    return this;
  }

  /**
   * Sets a flag indicating whether the created child process should inherit its parent
   * environment variables.
   *
   * @param inherit <code>true</code> if the child process will also include its parent
   *                environment variables, <code>false</code> otherwise.
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder inheritParentEnvironment(final boolean inherit) {
    this.inherit_parent_environment = inherit;
    this.parent_environment = (inherit ? cachedParentEnvironment() : null);
    return this;
  }

  /**
   * Adds a single listener to the list of {@link IProcessListener} listeners
   * interested in events on the child process.
   *
   * @param listener A single {@link IProcessListener} instance interested
   *                 in events on the child process.
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder addListener(final IProcessListener listener) {
    this.listeners.add(listener);
    return this;
  }

  /**
   * Sets the list of {@link IProcessListener} listeners interested in events
   * on the child process to a single {@link IProcessListener} instance.
   * <br /><br />
   * <b>Note:</b> this will overwrite all previously saved listeners.
   *
   * @param listener A single {@link IProcessListener} instance interested
   *                 in events on the child process.
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder withListener(final IProcessListener listener) {
    this.listeners.clear();
    this.listeners.add(listener);
    return this;
  }

  /**
   * Sets the list of {@link IProcessListener} listeners interested in events
   * on the child process.
   * <br /><br />
   * <b>Note:</b> this will overwrite all previously saved listeners.
   *
   * @param listeners A list of {@link IProcessListener} instances interested
   *                  in events on the child process.
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder withListeners(final IProcessListener...listeners) {
    this.listeners.clear();
    for(IProcessListener listener : listeners) {
      addListener(listener);
    }
    return this;
  }

  /**
   * Clears all {@link IProcessListener} listeners.
   *
   * @return This instance of {@link ProcessBuilder}.
   */
  public ProcessBuilder clearListeners() {
    this.listeners.clear();
    return this;
  }

  /**
   * Launches the configured child process.
   *
   * @return An {@link IProcess} handle on the launched child process.
   * @throws IllegalStateException if no executable has been configured.
   */
  public IProcess start() {
    // The executable field defaults to the empty string (never null), so the
    // original "== null" guard could never fire; reject both null and empty.
    if (StringUtil.isNullOrEmpty(executable)) {
      throw new IllegalStateException("executable must be set before launching a child process");
    }
    return impl.launch(inherit_parent_environment, env.toCoalescedEnvironmentVariableBlock(getParentEnvironmentVariableBlock()), getCommandLine(), getListeners());
  }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.action.admin.cluster.node.stats;
import org.elasticsearch.action.support.nodes.BaseNodeResponse;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodeRole;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.discovery.DiscoveryStats;
import org.elasticsearch.http.HttpStats;
import org.elasticsearch.index.stats.IndexingPressureStats;
import org.elasticsearch.indices.NodeIndicesStats;
import org.elasticsearch.indices.breaker.AllCircuitBreakerStats;
import org.elasticsearch.ingest.IngestStats;
import org.elasticsearch.monitor.fs.FsInfo;
import org.elasticsearch.monitor.jvm.JvmStats;
import org.elasticsearch.monitor.os.OsStats;
import org.elasticsearch.monitor.process.ProcessStats;
import org.elasticsearch.node.AdaptiveSelectionStats;
import org.elasticsearch.script.ScriptStats;
import org.elasticsearch.threadpool.ThreadPoolStats;
import org.elasticsearch.transport.TransportStats;
import java.io.IOException;
import java.util.Map;
/**
 * Node statistics (dynamic, changes depending on when created).
 */
public class NodeStats extends BaseNodeResponse implements ToXContentFragment {
    // Time at which this stats sample was taken, in milliseconds.
    private long timestamp;
    // Each section below is optional: a null field means that section was not
    // included when this sample was built or transmitted.
    @Nullable
    private NodeIndicesStats indices;
    @Nullable
    private OsStats os;
    @Nullable
    private ProcessStats process;
    @Nullable
    private JvmStats jvm;
    @Nullable
    private ThreadPoolStats threadPool;
    @Nullable
    private FsInfo fs;
    @Nullable
    private TransportStats transport;
    @Nullable
    private HttpStats http;
    @Nullable
    private AllCircuitBreakerStats breaker;
    @Nullable
    private ScriptStats scriptStats;
    @Nullable
    private DiscoveryStats discoveryStats;
    @Nullable
    private IngestStats ingestStats;
    @Nullable
    private AdaptiveSelectionStats adaptiveSelectionStats;
    @Nullable
    private IndexingPressureStats indexingPressureStats;

    /**
     * Deserialization constructor. The read order here must exactly mirror the
     * write order in {@link #writeTo(StreamOutput)}.
     */
    public NodeStats(StreamInput in) throws IOException {
        super(in);
        timestamp = in.readVLong();
        // Presence of the indices section is encoded with an explicit boolean
        // flag, unlike the optional writeables below.
        if (in.readBoolean()) {
            indices = new NodeIndicesStats(in);
        }
        os = in.readOptionalWriteable(OsStats::new);
        process = in.readOptionalWriteable(ProcessStats::new);
        jvm = in.readOptionalWriteable(JvmStats::new);
        threadPool = in.readOptionalWriteable(ThreadPoolStats::new);
        fs = in.readOptionalWriteable(FsInfo::new);
        transport = in.readOptionalWriteable(TransportStats::new);
        http = in.readOptionalWriteable(HttpStats::new);
        breaker = in.readOptionalWriteable(AllCircuitBreakerStats::new);
        scriptStats = in.readOptionalWriteable(ScriptStats::new);
        discoveryStats = in.readOptionalWriteable(DiscoveryStats::new);
        ingestStats = in.readOptionalWriteable(IngestStats::new);
        adaptiveSelectionStats = in.readOptionalWriteable(AdaptiveSelectionStats::new);
        indexingPressureStats = in.readOptionalWriteable(IndexingPressureStats::new);
    }

    /**
     * Creates a stats sample for {@code node}, taken at {@code timestamp}.
     * Any section may be {@code null} if it was not collected.
     */
    public NodeStats(DiscoveryNode node, long timestamp, @Nullable NodeIndicesStats indices,
                     @Nullable OsStats os, @Nullable ProcessStats process, @Nullable JvmStats jvm, @Nullable ThreadPoolStats threadPool,
                     @Nullable FsInfo fs, @Nullable TransportStats transport, @Nullable HttpStats http,
                     @Nullable AllCircuitBreakerStats breaker,
                     @Nullable ScriptStats scriptStats,
                     @Nullable DiscoveryStats discoveryStats,
                     @Nullable IngestStats ingestStats,
                     @Nullable AdaptiveSelectionStats adaptiveSelectionStats,
                     @Nullable IndexingPressureStats indexingPressureStats) {
        super(node);
        this.timestamp = timestamp;
        this.indices = indices;
        this.os = os;
        this.process = process;
        this.jvm = jvm;
        this.threadPool = threadPool;
        this.fs = fs;
        this.transport = transport;
        this.http = http;
        this.breaker = breaker;
        this.scriptStats = scriptStats;
        this.discoveryStats = discoveryStats;
        this.ingestStats = ingestStats;
        this.adaptiveSelectionStats = adaptiveSelectionStats;
        this.indexingPressureStats = indexingPressureStats;
    }

    /**
     * The time at which this sample was taken, in milliseconds.
     */
    public long getTimestamp() {
        return this.timestamp;
    }

    /**
     * The host name of the node this sample was taken from.
     */
    @Nullable
    public String getHostname() {
        return getNode().getHostName();
    }

    /**
     * Indices level stats.
     */
    @Nullable
    public NodeIndicesStats getIndices() {
        return this.indices;
    }

    /**
     * Operating System level statistics.
     */
    @Nullable
    public OsStats getOs() {
        return this.os;
    }

    /**
     * Process level statistics.
     */
    @Nullable
    public ProcessStats getProcess() {
        return process;
    }

    /**
     * JVM level statistics.
     */
    @Nullable
    public JvmStats getJvm() {
        return jvm;
    }

    /**
     * Thread Pool level statistics.
     */
    @Nullable
    public ThreadPoolStats getThreadPool() {
        return this.threadPool;
    }

    /**
     * File system level stats.
     */
    @Nullable
    public FsInfo getFs() {
        return fs;
    }

    /**
     * Transport level statistics.
     */
    @Nullable
    public TransportStats getTransport() {
        return this.transport;
    }

    /**
     * HTTP level statistics.
     */
    @Nullable
    public HttpStats getHttp() {
        return this.http;
    }

    /**
     * Circuit breaker statistics.
     */
    @Nullable
    public AllCircuitBreakerStats getBreaker() {
        return this.breaker;
    }

    /**
     * Script statistics.
     */
    @Nullable
    public ScriptStats getScriptStats() {
        return this.scriptStats;
    }

    /**
     * Discovery statistics.
     */
    @Nullable
    public DiscoveryStats getDiscoveryStats() {
        return this.discoveryStats;
    }

    /**
     * Ingest statistics.
     */
    @Nullable
    public IngestStats getIngestStats() {
        return ingestStats;
    }

    /**
     * Adaptive selection statistics.
     */
    @Nullable
    public AdaptiveSelectionStats getAdaptiveSelectionStats() {
        return adaptiveSelectionStats;
    }

    /**
     * Indexing pressure statistics.
     */
    @Nullable
    public IndexingPressureStats getIndexingPressureStats() {
        return indexingPressureStats;
    }

    /**
     * Serializes this sample. The write order here must exactly mirror the read
     * order in {@link #NodeStats(StreamInput)}.
     */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeVLong(timestamp);
        // indices presence is flagged explicitly; the rest use optional writeables.
        if (indices == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            indices.writeTo(out);
        }
        out.writeOptionalWriteable(os);
        out.writeOptionalWriteable(process);
        out.writeOptionalWriteable(jvm);
        out.writeOptionalWriteable(threadPool);
        out.writeOptionalWriteable(fs);
        out.writeOptionalWriteable(transport);
        out.writeOptionalWriteable(http);
        out.writeOptionalWriteable(breaker);
        out.writeOptionalWriteable(scriptStats);
        out.writeOptionalWriteable(discoveryStats);
        out.writeOptionalWriteable(ingestStats);
        out.writeOptionalWriteable(adaptiveSelectionStats);
        out.writeOptionalWriteable(indexingPressureStats);
    }

    /**
     * Renders the node's identity (name, address, roles, attributes) followed by
     * every non-null stats section.
     */
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.field("name", getNode().getName());
        builder.field("transport_address", getNode().getAddress().toString());
        builder.field("host", getNode().getHostName());
        builder.field("ip", getNode().getAddress());
        builder.startArray("roles");
        for (DiscoveryNodeRole role : getNode().getRoles()) {
            builder.value(role.roleName());
        }
        builder.endArray();
        if (getNode().getAttributes().isEmpty() == false) {
            builder.startObject("attributes");
            for (Map.Entry<String, String> attrEntry : getNode().getAttributes().entrySet()) {
                builder.field(attrEntry.getKey(), attrEntry.getValue());
            }
            builder.endObject();
        }
        if (getIndices() != null) {
            getIndices().toXContent(builder, params);
        }
        if (getOs() != null) {
            getOs().toXContent(builder, params);
        }
        if (getProcess() != null) {
            getProcess().toXContent(builder, params);
        }
        if (getJvm() != null) {
            getJvm().toXContent(builder, params);
        }
        if (getThreadPool() != null) {
            getThreadPool().toXContent(builder, params);
        }
        if (getFs() != null) {
            getFs().toXContent(builder, params);
        }
        if (getTransport() != null) {
            getTransport().toXContent(builder, params);
        }
        if (getHttp() != null) {
            getHttp().toXContent(builder, params);
        }
        if (getBreaker() != null) {
            getBreaker().toXContent(builder, params);
        }
        if (getScriptStats() != null) {
            getScriptStats().toXContent(builder, params);
        }
        if (getDiscoveryStats() != null) {
            getDiscoveryStats().toXContent(builder, params);
        }
        if (getIngestStats() != null) {
            getIngestStats().toXContent(builder, params);
        }
        if (getAdaptiveSelectionStats() != null) {
            getAdaptiveSelectionStats().toXContent(builder, params);
        }
        if (getIndexingPressureStats() != null) {
            getIndexingPressureStats().toXContent(builder, params);
        }
        return builder;
    }
}
| |
/*
* Copyright 2010-2011 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
/**
* <p>
* Represents an <i>Amazon Machine Image</i> (AMI) that can be run on an
* Amazon EC2 instance.
* </p>
*/
public class Image {
    /**
     * The unique ID of the AMI.
     */
    private String imageId;

    /**
     * The location of the AMI.
     */
    private String imageLocation;

    /**
     * Current state of the AMI. If the operation returns available, the
     * image is successfully registered and available for launching. If the
     * operation returns deregistered, the image is deregistered and no
     * longer available for launching.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>available, deregistered
     */
    private String state;

    /**
     * AWS Access Key ID of the image owner.
     */
    private String ownerId;

    /**
     * True if this image has public launch permissions. False if it only has
     * implicit and explicit launch permissions.
     */
    private Boolean publicValue;

    /**
     * Product codes of the AMI.
     */
    private java.util.List<ProductCode> productCodes;

    /**
     * The architecture of the image.
     */
    private String architecture;

    /**
     * The type of image (machine, kernel, or ramdisk).
     */
    private String imageType;

    /**
     * The kernel associated with the image, if any. Only applicable for
     * machine images.
     */
    private String kernelId;

    /**
     * The RAM disk associated with the image, if any. Only applicable for
     * machine images.
     */
    private String ramdiskId;

    /**
     * The operating platform of the AMI.
     */
    private String platform;

    /**
     * The reason for the state change.
     */
    private StateReason stateReason;

    /**
     * The AWS account alias (e.g., "amazon", "redhat", "self", etc.) or AWS
     * account ID that owns the AMI.
     */
    private String imageOwnerAlias;

    /**
     * The name of the AMI that was provided during image creation.
     */
    private String name;

    /**
     * The description of the AMI that was provided during image creation.
     */
    private String description;

    /**
     * The root device type used by the AMI. The AMI can use an Amazon EBS or
     * instance store root device.
     */
    private String rootDeviceType;

    /**
     * The root device name (e.g., <code>/dev/sda1</code>).
     */
    private String rootDeviceName;

    /**
     * Specifies how block devices are exposed to the instance.
     */
    private java.util.List<BlockDeviceMapping> blockDeviceMappings;

    /**
     * The virtualization type of the AMI.
     */
    private String virtualizationType;

    /**
     * A list of tags for the Image.
     */
    private java.util.List<Tag> tags;
/**
* The unique ID of the AMI.
*
* @return The unique ID of the AMI.
*/
public String getImageId() {
return imageId;
}
/**
* The unique ID of the AMI.
*
* @param imageId The unique ID of the AMI.
*/
public void setImageId(String imageId) {
this.imageId = imageId;
}
/**
* The unique ID of the AMI.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param imageId The unique ID of the AMI.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public Image withImageId(String imageId) {
this.imageId = imageId;
return this;
}
/**
* The location of the AMI.
*
* @return The location of the AMI.
*/
public String getImageLocation() {
return imageLocation;
}
/**
* The location of the AMI.
*
* @param imageLocation The location of the AMI.
*/
public void setImageLocation(String imageLocation) {
this.imageLocation = imageLocation;
}
/**
* The location of the AMI.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param imageLocation The location of the AMI.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public Image withImageLocation(String imageLocation) {
this.imageLocation = imageLocation;
return this;
}
/**
* Current state of the AMI. If the operation returns available, the
* image is successfully registered and available for launching. If the
* operation returns deregistered, the image is deregistered and no
* longer available for launching.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>available, deregistered
*
* @return Current state of the AMI. If the operation returns available, the
* image is successfully registered and available for launching. If the
* operation returns deregistered, the image is deregistered and no
* longer available for launching.
*
* @see ImageState
*/
public String getState() {
return state;
}
/**
* Current state of the AMI. If the operation returns available, the
* image is successfully registered and available for launching. If the
* operation returns deregistered, the image is deregistered and no
* longer available for launching.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>available, deregistered
*
* @param state Current state of the AMI. If the operation returns available, the
* image is successfully registered and available for launching. If the
* operation returns deregistered, the image is deregistered and no
* longer available for launching.
*
* @see ImageState
*/
public void setState(String state) {
this.state = state;
}
/**
* Current state of the AMI. If the operation returns available, the
* image is successfully registered and available for launching. If the
* operation returns deregistered, the image is deregistered and no
* longer available for launching.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>available, deregistered
*
* @param state Current state of the AMI. If the operation returns available, the
* image is successfully registered and available for launching. If the
* operation returns deregistered, the image is deregistered and no
* longer available for launching.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*
* @see ImageState
*/
public Image withState(String state) {
this.state = state;
return this;
}
/**
* AWS Access Key ID of the image owner.
*
* @return AWS Access Key ID of the image owner.
*/
public String getOwnerId() {
return ownerId;
}
/**
* AWS Access Key ID of the image owner.
*
* @param ownerId AWS Access Key ID of the image owner.
*/
public void setOwnerId(String ownerId) {
this.ownerId = ownerId;
}
/**
* AWS Access Key ID of the image owner.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param ownerId AWS Access Key ID of the image owner.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public Image withOwnerId(String ownerId) {
this.ownerId = ownerId;
return this;
}
/**
 * Indicates whether this image has public launch permissions, as opposed
 * to only implicit and explicit launch permissions.
 *
 * @return {@code true} if this image has public launch permissions.
 */
public Boolean isPublic() {
    return publicValue;
}

/**
 * Sets whether this image has public launch permissions.
 *
 * @param publicValue {@code true} if this image has public launch permissions,
 *        {@code false} if it only has implicit and explicit launch permissions.
 */
public void setPublic(Boolean publicValue) {
    this.publicValue = publicValue;
}

/**
 * Fluent setter for the public launch permission flag.
 *
 * @param publicValue {@code true} if this image has public launch permissions.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withPublic(Boolean publicValue) {
    this.publicValue = publicValue;
    return this;
}

/**
 * JavaBean-style alias of {@link #isPublic()}.
 *
 * @return {@code true} if this image has public launch permissions.
 */
public Boolean getPublic() {
    return publicValue;
}
/**
 * Product codes of the AMI.
 * <p>
 * Note: this returns the live internal list (lazily created); mutating it
 * mutates this object.
 *
 * @return Product codes of the AMI; never {@code null}.
 */
public java.util.List<ProductCode> getProductCodes() {
    if (productCodes == null) {
        productCodes = new java.util.ArrayList<ProductCode>();
    }
    return productCodes;
}

/**
 * Sets the product codes of the AMI. The supplied collection is copied
 * defensively; passing {@code null} clears the list.
 *
 * @param productCodes Product codes of the AMI.
 */
public void setProductCodes(java.util.Collection<ProductCode> productCodes) {
    java.util.List<ProductCode> productCodesCopy = new java.util.ArrayList<ProductCode>();
    if (productCodes != null) {
        productCodesCopy.addAll(productCodes);
    }
    this.productCodes = productCodesCopy;
}

/**
 * Appends the given product codes and returns this object so that calls
 * can be chained.
 * <p>
 * Fix: previously an explicitly-{@code null} array threw a
 * {@link NullPointerException}; it is now treated as "nothing to add".
 *
 * @param productCodes Product codes of the AMI.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withProductCodes(ProductCode... productCodes) {
    if (productCodes != null) {
        for (ProductCode value : productCodes) {
            getProductCodes().add(value);
        }
    }
    return this;
}

/**
 * Replaces the product codes with a copy of the given collection and
 * returns this object so that calls can be chained. Delegates to
 * {@link #setProductCodes(java.util.Collection)} (same semantics as before:
 * {@code null} clears the list).
 *
 * @param productCodes Product codes of the AMI.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withProductCodes(java.util.Collection<ProductCode> productCodes) {
    setProductCodes(productCodes);
    return this;
}
/**
 * @return the architecture of the image.
 */
public String getArchitecture() {
    return architecture;
}

/**
 * Sets the architecture of the image.
 *
 * @param architecture the architecture of the image.
 */
public void setArchitecture(String architecture) {
    this.architecture = architecture;
}

/**
 * Fluent setter for the architecture of the image.
 *
 * @param architecture the architecture of the image.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withArchitecture(String architecture) {
    this.architecture = architecture;
    return this;
}
/**
 * @return the type of image (machine, kernel, or ramdisk).
 */
public String getImageType() {
    return imageType;
}

/**
 * Sets the type of image (machine, kernel, or ramdisk).
 *
 * @param imageType the type of image.
 */
public void setImageType(String imageType) {
    this.imageType = imageType;
}

/**
 * Fluent setter for the type of image (machine, kernel, or ramdisk).
 *
 * @param imageType the type of image.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withImageType(String imageType) {
    this.imageType = imageType;
    return this;
}
/**
 * @return the kernel associated with the image, if any. Only applicable
 *         for machine images.
 */
public String getKernelId() {
    return kernelId;
}

/**
 * Sets the kernel associated with the image. Only applicable for machine
 * images.
 *
 * @param kernelId the kernel associated with the image, if any.
 */
public void setKernelId(String kernelId) {
    this.kernelId = kernelId;
}

/**
 * Fluent setter for the kernel associated with the image. Only applicable
 * for machine images.
 *
 * @param kernelId the kernel associated with the image, if any.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withKernelId(String kernelId) {
    this.kernelId = kernelId;
    return this;
}
/**
 * @return the RAM disk associated with the image, if any. Only applicable
 *         for machine images.
 */
public String getRamdiskId() {
    return ramdiskId;
}

/**
 * Sets the RAM disk associated with the image. Only applicable for machine
 * images.
 *
 * @param ramdiskId the RAM disk associated with the image, if any.
 */
public void setRamdiskId(String ramdiskId) {
    this.ramdiskId = ramdiskId;
}

/**
 * Fluent setter for the RAM disk associated with the image. Only
 * applicable for machine images.
 *
 * @param ramdiskId the RAM disk associated with the image, if any.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withRamdiskId(String ramdiskId) {
    this.ramdiskId = ramdiskId;
    return this;
}
/**
 * @return the operating platform of the AMI.
 */
public String getPlatform() {
    return platform;
}

/**
 * Sets the operating platform of the AMI.
 *
 * @param platform the operating platform of the AMI.
 */
public void setPlatform(String platform) {
    this.platform = platform;
}

/**
 * Fluent setter for the operating platform of the AMI.
 *
 * @param platform the operating platform of the AMI.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withPlatform(String platform) {
    this.platform = platform;
    return this;
}
/**
 * @return the reason for the state change.
 */
public StateReason getStateReason() {
    return stateReason;
}

/**
 * Sets the reason for the state change.
 *
 * @param stateReason the reason for the state change.
 */
public void setStateReason(StateReason stateReason) {
    this.stateReason = stateReason;
}

/**
 * Fluent setter for the reason for the state change.
 *
 * @param stateReason the reason for the state change.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withStateReason(StateReason stateReason) {
    this.stateReason = stateReason;
    return this;
}
/**
 * @return the AWS account alias (e.g., "amazon", "redhat", "self", etc.)
 *         or AWS account ID that owns the AMI.
 */
public String getImageOwnerAlias() {
    return imageOwnerAlias;
}

/**
 * Sets the AWS account alias (e.g., "amazon", "redhat", "self", etc.) or
 * AWS account ID that owns the AMI.
 *
 * @param imageOwnerAlias the account alias or account ID that owns the AMI.
 */
public void setImageOwnerAlias(String imageOwnerAlias) {
    this.imageOwnerAlias = imageOwnerAlias;
}

/**
 * Fluent setter for the AWS account alias (e.g., "amazon", "redhat",
 * "self", etc.) or AWS account ID that owns the AMI.
 *
 * @param imageOwnerAlias the account alias or account ID that owns the AMI.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withImageOwnerAlias(String imageOwnerAlias) {
    this.imageOwnerAlias = imageOwnerAlias;
    return this;
}
/**
 * @return the name of the AMI that was provided during image creation.
 */
public String getName() {
    return name;
}

/**
 * Sets the name of the AMI that was provided during image creation.
 *
 * @param name the name of the AMI.
 */
public void setName(String name) {
    this.name = name;
}

/**
 * Fluent setter for the name of the AMI that was provided during image
 * creation.
 *
 * @param name the name of the AMI.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withName(String name) {
    this.name = name;
    return this;
}
/**
 * @return the description of the AMI that was provided during image
 *         creation.
 */
public String getDescription() {
    return description;
}

/**
 * Sets the description of the AMI that was provided during image creation.
 *
 * @param description the description of the AMI.
 */
public void setDescription(String description) {
    this.description = description;
}

/**
 * Fluent setter for the description of the AMI that was provided during
 * image creation.
 *
 * @param description the description of the AMI.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withDescription(String description) {
    this.description = description;
    return this;
}
/**
 * @return the root device type used by the AMI. The AMI can use an Amazon
 *         EBS or instance store root device.
 */
public String getRootDeviceType() {
    return rootDeviceType;
}

/**
 * Sets the root device type used by the AMI (Amazon EBS or instance
 * store).
 *
 * @param rootDeviceType the root device type used by the AMI.
 */
public void setRootDeviceType(String rootDeviceType) {
    this.rootDeviceType = rootDeviceType;
}

/**
 * Fluent setter for the root device type used by the AMI (Amazon EBS or
 * instance store).
 *
 * @param rootDeviceType the root device type used by the AMI.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withRootDeviceType(String rootDeviceType) {
    this.rootDeviceType = rootDeviceType;
    return this;
}
/**
 * @return the root device name (e.g., <code>/dev/sda1</code>).
 */
public String getRootDeviceName() {
    return rootDeviceName;
}

/**
 * Sets the root device name (e.g., <code>/dev/sda1</code>).
 *
 * @param rootDeviceName the root device name.
 */
public void setRootDeviceName(String rootDeviceName) {
    this.rootDeviceName = rootDeviceName;
}

/**
 * Fluent setter for the root device name (e.g., <code>/dev/sda1</code>).
 *
 * @param rootDeviceName the root device name.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withRootDeviceName(String rootDeviceName) {
    this.rootDeviceName = rootDeviceName;
    return this;
}
/**
 * Specifies how block devices are exposed to the instance.
 * <p>
 * Note: this returns the live internal list (lazily created); mutating it
 * mutates this object.
 *
 * @return the block device mappings; never {@code null}.
 */
public java.util.List<BlockDeviceMapping> getBlockDeviceMappings() {
    if (blockDeviceMappings == null) {
        blockDeviceMappings = new java.util.ArrayList<BlockDeviceMapping>();
    }
    return blockDeviceMappings;
}

/**
 * Sets how block devices are exposed to the instance. The supplied
 * collection is copied defensively; passing {@code null} clears the list.
 *
 * @param blockDeviceMappings the block device mappings.
 */
public void setBlockDeviceMappings(java.util.Collection<BlockDeviceMapping> blockDeviceMappings) {
    java.util.List<BlockDeviceMapping> blockDeviceMappingsCopy = new java.util.ArrayList<BlockDeviceMapping>();
    if (blockDeviceMappings != null) {
        blockDeviceMappingsCopy.addAll(blockDeviceMappings);
    }
    this.blockDeviceMappings = blockDeviceMappingsCopy;
}

/**
 * Appends the given block device mappings and returns this object so that
 * calls can be chained.
 * <p>
 * Fix: previously an explicitly-{@code null} array threw a
 * {@link NullPointerException}; it is now treated as "nothing to add".
 *
 * @param blockDeviceMappings the block device mappings to append.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withBlockDeviceMappings(BlockDeviceMapping... blockDeviceMappings) {
    if (blockDeviceMappings != null) {
        for (BlockDeviceMapping value : blockDeviceMappings) {
            getBlockDeviceMappings().add(value);
        }
    }
    return this;
}

/**
 * Replaces the block device mappings with a copy of the given collection
 * and returns this object so that calls can be chained. Delegates to
 * {@link #setBlockDeviceMappings(java.util.Collection)} (same semantics as
 * before: {@code null} clears the list).
 *
 * @param blockDeviceMappings the block device mappings.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withBlockDeviceMappings(java.util.Collection<BlockDeviceMapping> blockDeviceMappings) {
    setBlockDeviceMappings(blockDeviceMappings);
    return this;
}
/**
 * Returns the VirtualizationType property for this object.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Allowed Values: </b>hvm, paravirtual
 *
 * @return the value of the VirtualizationType property.
 *
 * @see VirtualizationType
 */
public String getVirtualizationType() {
    return virtualizationType;
}

/**
 * Sets the VirtualizationType property for this object.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Allowed Values: </b>hvm, paravirtual
 *
 * @param virtualizationType the new value for the VirtualizationType property.
 *
 * @see VirtualizationType
 */
public void setVirtualizationType(String virtualizationType) {
    this.virtualizationType = virtualizationType;
}

/**
 * Fluent setter for the VirtualizationType property.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Allowed Values: </b>hvm, paravirtual
 *
 * @param virtualizationType the new value for the VirtualizationType property.
 * @return this {@code Image}, so calls can be chained.
 *
 * @see VirtualizationType
 */
public Image withVirtualizationType(String virtualizationType) {
    this.virtualizationType = virtualizationType;
    return this;
}
/**
 * A list of tags for the Image.
 * <p>
 * Note: this returns the live internal list (lazily created); mutating it
 * mutates this object.
 *
 * @return the tags for the Image; never {@code null}.
 */
public java.util.List<Tag> getTags() {
    if (tags == null) {
        tags = new java.util.ArrayList<Tag>();
    }
    return tags;
}

/**
 * Sets the tags for the Image. The supplied collection is copied
 * defensively; passing {@code null} clears the list.
 *
 * @param tags the tags for the Image.
 */
public void setTags(java.util.Collection<Tag> tags) {
    java.util.List<Tag> tagsCopy = new java.util.ArrayList<Tag>();
    if (tags != null) {
        tagsCopy.addAll(tags);
    }
    this.tags = tagsCopy;
}

/**
 * Appends the given tags and returns this object so that calls can be
 * chained.
 * <p>
 * Fix: previously an explicitly-{@code null} array threw a
 * {@link NullPointerException}; it is now treated as "nothing to add".
 *
 * @param tags the tags to append.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withTags(Tag... tags) {
    if (tags != null) {
        for (Tag value : tags) {
            getTags().add(value);
        }
    }
    return this;
}

/**
 * Replaces the tags with a copy of the given collection and returns this
 * object so that calls can be chained. Delegates to
 * {@link #setTags(java.util.Collection)} (same semantics as before:
 * {@code null} clears the list).
 *
 * @param tags the tags for the Image.
 * @return this {@code Image}, so calls can be chained.
 */
public Image withTags(java.util.Collection<Tag> tags) {
    setTags(tags);
    return this;
}
/**
 * Returns a string representation of this object; useful for testing and
 * debugging. The output format (field order, labels, and the trailing
 * comma before the closing brace) is identical to the previous
 * implementation.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    // Fix: each field was previously built as "label: " + value inside
    // append(), allocating a throwaway intermediate String per field.
    // Chained append() calls produce the same output without the garbage.
    StringBuilder sb = new StringBuilder();
    sb.append("{");
    sb.append("ImageId: ").append(imageId).append(", ");
    sb.append("ImageLocation: ").append(imageLocation).append(", ");
    sb.append("State: ").append(state).append(", ");
    sb.append("OwnerId: ").append(ownerId).append(", ");
    sb.append("Public: ").append(publicValue).append(", ");
    sb.append("ProductCodes: ").append(productCodes).append(", ");
    sb.append("Architecture: ").append(architecture).append(", ");
    sb.append("ImageType: ").append(imageType).append(", ");
    sb.append("KernelId: ").append(kernelId).append(", ");
    sb.append("RamdiskId: ").append(ramdiskId).append(", ");
    sb.append("Platform: ").append(platform).append(", ");
    sb.append("StateReason: ").append(stateReason).append(", ");
    sb.append("ImageOwnerAlias: ").append(imageOwnerAlias).append(", ");
    sb.append("Name: ").append(name).append(", ");
    sb.append("Description: ").append(description).append(", ");
    sb.append("RootDeviceType: ").append(rootDeviceType).append(", ");
    sb.append("RootDeviceName: ").append(rootDeviceName).append(", ");
    sb.append("BlockDeviceMappings: ").append(blockDeviceMappings).append(", ");
    sb.append("VirtualizationType: ").append(virtualizationType).append(", ");
    sb.append("Tags: ").append(tags).append(", ");
    sb.append("}");
    return sb.toString();
}
}
| |
package com.hackfmi.thejack.hackatron;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import android.app.Activity;
import android.content.Intent;
import android.hardware.Sensor;
import android.hardware.SensorManager;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.view.KeyEvent;
import android.view.View;
import android.view.WindowManager;
import android.widget.TextView;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.games.Games;
import com.google.android.gms.games.GamesActivityResultCodes;
import com.google.android.gms.games.multiplayer.Invitation;
import com.google.android.gms.games.multiplayer.Multiplayer;
import com.google.android.gms.games.multiplayer.Participant;
import com.google.android.gms.games.multiplayer.realtime.Room;
import com.google.example.games.basegameutils.BaseGameUtils;
import com.hackfmi.thejack.hackatron.Game.BodyPart;
public class AndroidLauncher extends Activity implements View.OnClickListener,
ConnectionHandler.ChangeListener {
// Log tag for this activity.
final static String TAG = "ButtonClicker2000";
// Request codes for the UIs that we show with startActivityForResult:
final static int RC_SELECT_PLAYERS = 10000;
final static int RC_INVITATION_INBOX = 10001;
final static int RC_WAITING_ROOM = 10002;
// Request code used to invoke sign in user interactions.
private static final int RC_SIGN_IN = 9001;
// Are we playing in multiplayer mode?
boolean mMultiplayer = false;
// The participants in the currently active game
ArrayList<Participant> mParticipants = null;
// My participant ID in the currently active game
String mMyId = null;
// If non-null, this is the id of the invitation we received via the
// invitation listener
String mIncomingInvitationId = null;
// Game model (rules/state); recreated by resetGameVars(). Null until the
// first game is started.
private Game currentGame;
// Connection wrapper around the Google Play Games client; obtained in onCreate().
private ConnectionHandler connectionHandler;
// Sensors.
// NOTE(review): mSensorManager/mSensor are never assigned anywhere in the
// visible portion of this class -- confirm they are still needed.
private SensorManager mSensorManager;
private Sensor mSensor;
/**
 * Sets up the layout, registers this activity with the ConnectionHandler,
 * and installs this activity as the click listener for every view in
 * {@link #CLICKABLES}.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.android_launcher);
connectionHandler = ConnectionHandler.registerContext(this);
connectionHandler.registerListener(this);
// set up a click listener for everything we care about
for (int id : CLICKABLES) {
findViewById(id).setOnClickListener(this);
}
mParticipants = new ArrayList<Participant>();
}
// Forwards a body-part button press to the game model.
// NOTE(review): currentGame is only assigned in resetGameVars(); if a
// body-part button is clicked before a game has started this throws a
// NullPointerException -- confirm the buttons are hidden until then.
private void useBodyPart(BodyPart bodyPart) {
currentGame.use(bodyPart);
}
/**
 * Central click handler for every view registered in {@link #CLICKABLES}.
 * Dispatches on the clicked view's id: menu buttons drive the sign-in /
 * matchmaking flow, body-part buttons drive gameplay.
 *
 * @param v the clicked view.
 */
@Override
public void onClick(View v) {
Intent intent;
switch (v.getId()) {
case R.id.button_single_player:
case R.id.button_single_player_2:
// play a single-player game
// NOTE(review): startGame(false) below also launches VoltronActivity,
// so this path starts the activity twice -- confirm that is intended.
Intent i = new Intent(this, VoltronActivity.class);
startActivity(i);
resetGameVars();
startGame(false);
break;
case R.id.button_sign_in:
// start the sign-in flow
if (!BaseGameUtils.verifySampleSetup(this, R.string.app_id)) {
Log.w(TAG, "*** Warning: setup problems detected. Sign in may not work!");
}
Log.d(TAG, "Sign-in button clicked");
connectionHandler.mSignInClicked = true;
connectionHandler.connect();
break;
case R.id.button_sign_out:
// user wants to sign out
// sign out.
Log.d(TAG, "Sign-out button clicked");
connectionHandler.mSignInClicked = false;
connectionHandler.disconnect();
switchToScreen(R.id.screen_sign_in);
break;
case R.id.button_invite_players:
// show list of invitable players
intent = connectionHandler.getSelectOpponentsIntent();
switchToScreen(R.id.screen_wait);
startActivityForResult(intent, RC_SELECT_PLAYERS);
break;
case R.id.button_see_invitations:
// show list of pending invitations
intent = connectionHandler.getInvitationInboxIntent();
switchToScreen(R.id.screen_wait);
startActivityForResult(intent, RC_INVITATION_INBOX);
break;
case R.id.button_accept_popup_invitation:
// user wants to accept the invitation shown on the invitation popup
// (the one we got through the OnInvitationReceivedListener).
acceptInviteToRoom(mIncomingInvitationId);
mIncomingInvitationId = null;
break;
case R.id.button_quick_game:
// user wants to play against a random opponent right now
switchToScreen(R.id.screen_wait);
keepScreenOn();
resetGameVars();
connectionHandler.startQuickGame();
break;
// Gameplay buttons: forward the chosen body part to the game model.
case R.id.button_head:
useBodyPart(Game.BodyPart.HEAD);
break;
case R.id.button_left_hand:
useBodyPart(Game.BodyPart.LEFT_HAND);
break;
case R.id.button_right_hand:
useBodyPart(Game.BodyPart.RIGHT_HAND);
break;
case R.id.button_left_foot:
useBodyPart(Game.BodyPart.LEFT_FOOT);
break;
case R.id.button_right_foot:
useBodyPart(Game.BodyPart.RIGHT_FOOT);
break;
}
}
/**
 * Routes results from the player-selection, invitation-inbox, waiting-room
 * and sign-in UIs back into the game flow.
 * <p>
 * Fix: {@code super.onActivityResult()} was previously invoked twice (once
 * at the top and once at the bottom of this method); it is now called
 * exactly once.
 *
 * @param requestCode one of RC_SELECT_PLAYERS, RC_INVITATION_INBOX,
 *        RC_WAITING_ROOM or RC_SIGN_IN.
 * @param responseCode the activity result code.
 * @param intent the result payload, if any.
 */
@Override
public void onActivityResult(int requestCode, int responseCode, Intent intent) {
    super.onActivityResult(requestCode, responseCode, intent);
    switch (requestCode) {
        case RC_SELECT_PLAYERS:
            // we got the result from the "select players" UI -- ready to create
            // the room
            handleSelectPlayersResult(responseCode, intent);
            break;
        case RC_INVITATION_INBOX:
            // we got the result from the "select invitation" UI (invitation
            // inbox). We're ready to accept the selected invitation:
            handleInvitationInboxResult(responseCode, intent);
            break;
        case RC_WAITING_ROOM:
            // we got the result from the "waiting room" UI.
            if (responseCode == Activity.RESULT_OK) {
                // ready to start playing
                Log.d(TAG, "Starting game (waiting room returned OK).");
                startGame(true);
            } else if (responseCode == GamesActivityResultCodes.RESULT_LEFT_ROOM) {
                // player indicated that they want to leave the room
                leaveRoom();
            } else if (responseCode == Activity.RESULT_CANCELED) {
                // Dialog was cancelled (user pressed back key, for instance).
                // In our game, this means leaving the room too.
                leaveRoom();
            }
            break;
        case RC_SIGN_IN:
            Log.d(TAG, "onActivityResult with requestCode == RC_SIGN_IN, responseCode=" + responseCode
                + ", intent=" + intent);
            connectionHandler.mSignInClicked = false;
            connectionHandler.mResolvingConnectionFailure = false;
            if (responseCode == RESULT_OK) {
                connectionHandler.connect();
            } else {
                BaseGameUtils.showActivityResultError(this, requestCode, responseCode,
                    R.string.signin_failure, R.string.signin_other_error);
            }
            break;
    }
}
// Handle the result of the "Select players UI" we launched when the user
// clicked the "Invite friends" button. We react by creating a room with
// those players and the automatch criteria carried in the result intent.
private void handleSelectPlayersResult(int response, Intent data) {
if (response != Activity.RESULT_OK) {
Log.w(TAG, "*** select players UI cancelled, " + response);
switchToMainScreen();
return;
}
Log.d(TAG, "Select players UI succeeded.");
// get the invitee list
final ArrayList<String> invitees = data.getStringArrayListExtra(Games.EXTRA_PLAYER_IDS);
Log.d(TAG, "Invitee count: " + invitees.size());
// get the automatch criteria
int minAutoMatchPlayers = data.getIntExtra(Multiplayer.EXTRA_MIN_AUTOMATCH_PLAYERS, 0);
int maxAutoMatchPlayers = data.getIntExtra(Multiplayer.EXTRA_MAX_AUTOMATCH_PLAYERS, 0);
// create the room
Log.d(TAG, "Creating room...");
connectionHandler.createRoom(invitees, minAutoMatchPlayers, maxAutoMatchPlayers);
switchToScreen(R.id.screen_wait);
keepScreenOn();
resetGameVars();
Log.d(TAG, "Room created, waiting for it to be ready...");
}
// Handle the result of the invitation inbox UI, where the player can pick
// an invitation to accept. We react by accepting the selected invitation,
// if any.
private void handleInvitationInboxResult(int response, Intent data) {
if (response != Activity.RESULT_OK) {
Log.w(TAG, "*** invitation inbox UI cancelled, " + response);
switchToMainScreen();
return;
}
Log.d(TAG, "Invitation inbox UI succeeded.");
// NOTE(review): data.getExtras() can be null in principle; presumably the
// inbox UI always supplies EXTRA_INVITATION on RESULT_OK -- confirm.
Invitation inv = data.getExtras().getParcelable(Multiplayer.EXTRA_INVITATION);
// accept invitation
acceptInviteToRoom(inv.getInvitationId());
}
// Accept the given invitation: keep the screen on, reset per-game state,
// and ask the connection layer to join the invited room.
void acceptInviteToRoom(String invId) {
// accept the invitation
keepScreenOn();
resetGameVars();
connectionHandler.acceptInviteToRoom(invId);
}
// Activity is going to the background. We have to leave the current room.
@Override
public void onStop() {
Log.d(TAG, "**** got onStop");
// if we're in a room, leave it.
leaveRoom();
// stop trying to keep the screen on
stopKeepingScreenOn();
// NOTE(review): this branch maps connected -> sign-in screen, which is the
// opposite of switchToMainScreen() (connected -> main screen). It looks
// like the two branches may be swapped -- confirm intent.
if (connectionHandler.isConnected()) {
switchToScreen(R.id.screen_sign_in);
} else {
switchToScreen(R.id.screen_wait);
}
super.onStop();
}
// Activity just got to the foreground. We switch to the wait screen because
// we will now go through the sign-in flow (remember that, yes, every time
// the Activity comes back to the foreground we go through the sign-in flow
// -- but if the user is already authenticated, this flow simply succeeds
// and is imperceptible).
@Override
public void onStart() {
switchToScreen(R.id.screen_wait);
if (connectionHandler.isConnected()) {
Log.w(TAG, "GameHelper: client was already connected on onStart()");
} else {
Log.d(TAG, "Connecting client.");
connectionHandler.connect();
}
super.onStart();
}
// Handle back key to make sure we cleanly leave a game if we are in the
// middle of one. Only intercepts BACK while the game screen is showing;
// everything else goes to the default handler.
@Override
public boolean onKeyDown(int keyCode, KeyEvent e) {
if (keyCode == KeyEvent.KEYCODE_BACK && mCurScreen == R.id.screen_game) {
leaveRoom();
return true;
}
return super.onKeyDown(keyCode, e);
}
// Leave the room: end the countdown, release the screen-on flag, and show
// the wait screen while the leave is in flight (or the main screen if we
// were not in a room).
void leaveRoom() {
Log.d(TAG, "Leaving room.");
mSecondsLeft = 0;
stopKeepingScreenOn();
if (connectionHandler.leaveRoom()) {
switchToScreen(R.id.screen_wait);
} else {
switchToMainScreen();
}
}
// Show the waiting room UI to track the progress of other players as they
// enter the room and get connected. Result is delivered to onActivityResult
// under RC_WAITING_ROOM.
void showWaitingRoom(Room room) {
// show waiting room UI
startActivityForResult(connectionHandler.getWaitingRoomIntent(room), RC_WAITING_ROOM);
}
// Invitation was withdrawn: redraw the current screen, which re-evaluates
// whether the invitation popup should be visible.
@Override
public void onInvitationRemoved(String invitationId) {
switchToScreen(mCurScreen); // This will hide the invitation popup
}
/*
* CALLBACKS SECTION. This section shows how we implement the several games
* API callbacks.
*/
// Show error message about game being cancelled and return to main screen.
@Override
public void showGameError() {
BaseGameUtils.makeSimpleDialog(this, getString(R.string.game_problem));
switchToMainScreen();
}
// Refresh our participant snapshot from the room (if any) and redraw the
// peer score labels.
@Override
public void updateRoom(Room room) {
if (room != null) {
mParticipants = room.getParticipants();
}
if (mParticipants != null) {
updatePeerScoresDisplay();
}
}
/*
* GAME LOGIC SECTION. Methods that implement the game's rules.
*/
// Current state of the game:
int mSecondsLeft = -1; // how long until the game ends (seconds)
final static int GAME_DURATION = 20; // game duration, seconds.
int mScore = 0; // user's current score
// Last device orientation received via moveInDirection(), in radians.
private float currentX;
private float currentY;
private float currentZ;
// Reset game variables in preparation for a new game: restart the clock,
// zero the scores, clear peer bookkeeping, and build a fresh Game model
// from the current participant list.
void resetGameVars() {
mSecondsLeft = GAME_DURATION;
mScore = 0;
mParticipantScore.clear();
mFinishedParticipants.clear();
currentGame = new Game(participantsToIds(mParticipants), mMyId);
}
/**
 * Maps a list of participants to their participant ids, preserving order.
 * <p>
 * Fix: the loop previously iterated over the {@code mParticipants} field
 * instead of the {@code participants} parameter, so any argument other
 * than {@code mParticipants} was silently ignored.
 *
 * @param participants the participants to convert; must not be null.
 * @return the participant ids, in the same order as the input.
 */
private List<String> participantsToIds(List<Participant> participants) {
    List<String> result = new ArrayList<String>(participants.size());
    for (Participant participant : participants) {
        result.add(participant.getParticipantId());
    }
    return result;
}
// Start the gameplay phase of the game: broadcast an initial score, launch
// the gameplay activity, reveal the body-part buttons, and schedule
// gameTick() to run once per second until the countdown expires.
void startGame(boolean multiplayer) {
mMultiplayer = multiplayer;
updateScoreDisplay();
connectionHandler.broadcastScore(false);
// switchToScreen(R.id.screen_game);
connectionHandler.inGame = true;
Intent i = new Intent(this, VoltronActivity.class);
startActivity(i);
findViewById(R.id.button_head).setVisibility(View.VISIBLE);
findViewById(R.id.button_left_hand).setVisibility(View.VISIBLE);
findViewById(R.id.button_right_hand).setVisibility(View.VISIBLE);
findViewById(R.id.button_left_foot).setVisibility(View.VISIBLE);
findViewById(R.id.button_right_foot).setVisibility(View.VISIBLE);
// run the gameTick() method every second to update the game.
final Handler h = new Handler();
h.postDelayed(new Runnable() {
@Override
public void run() {
// Stop rescheduling once the countdown has run out.
if (mSecondsLeft <= 0) {
return;
}
gameTick();
h.postDelayed(this, 1000);
}
}, 1000);
}
// Game tick -- update countdown, check if game ended. Runs once per second
// on the main thread via the Handler scheduled in startGame().
void gameTick() {
if (mSecondsLeft > 0) {
--mSecondsLeft;
}
// update countdown (zero-padded to two digits, e.g. "0:07")
((TextView) findViewById(R.id.countdown)).setText("0:" + (mSecondsLeft < 10 ? "0" : "")
+ String.valueOf(mSecondsLeft));
if (mSecondsLeft <= 0) {
// finish game: disable gameplay input and broadcast our final score.
findViewById(R.id.button_head).setEnabled(false);
findViewById(R.id.button_left_hand).setEnabled(false);
findViewById(R.id.button_right_hand).setEnabled(false);
findViewById(R.id.button_left_foot).setEnabled(false);
findViewById(R.id.button_right_foot).setEnabled(false);
connectionHandler.broadcastScore(true);
}
}
// indicates the player scored one point. Ignored once the countdown has
// expired; otherwise updates both score displays and broadcasts an interim
// score to the peers.
void scoreOnePoint() {
if (mSecondsLeft <= 0) {
return; // too late!
}
++mScore;
updateScoreDisplay();
updatePeerScoresDisplay();
// broadcast our new score to our peers
connectionHandler.broadcastScore(false);
}
// Records the latest device orientation (radians) and pushes it to the
// displays and peers. Ignored once the countdown has expired.
void moveInDirection(float x, float y, float z) {
if (mSecondsLeft <= 0) {
return; // too late!
}
currentX = x;
currentY = y;
currentZ = z;
updateScoreDisplay();
updatePeerScoresDisplay();
// broadcast our new score to our peers
connectionHandler.broadcastScore(false);
}
/*
* COMMUNICATIONS SECTION. Methods that implement the game's network protocol.
*/
// Score of other participants. We update this as we receive their scores
// from the network.
Map<String, Integer> mParticipantScore = new HashMap<String, Integer>();
// Participants who sent us their final score.
Set<String> mFinishedParticipants = new HashSet<String>();
// Messages in our game are made up of 2 bytes: the first one is 'F' or 'U'
// indicating whether it's a final or interim score. The second byte is the
// score. There is also the 'S' message, which indicates that the game
// should start.
// NOTE(review): no message-receive handler exists in the visible portion of
// this class -- presumably it lives in ConnectionHandler; confirm and keep
// this protocol description next to the actual handler.
/*
* UI SECTION. Methods that implement the game's UI.
*/
// This array lists everything that's clickable, so we can install click
// event handlers.
final static int[] CLICKABLES = { R.id.button_accept_popup_invitation,
R.id.button_invite_players, R.id.button_quick_game, R.id.button_see_invitations,
R.id.button_sign_in, R.id.button_sign_out, R.id.button_head, R.id.button_left_hand,
R.id.button_right_hand, R.id.button_left_foot, R.id.button_right_foot,
R.id.button_single_player, R.id.button_single_player_2 };
// This array lists all the individual screens our game has.
final static int[] SCREENS = { R.id.screen_game, R.id.screen_main, R.id.screen_sign_in,
R.id.screen_wait };
// Id of the screen currently shown; -1 before the first switchToScreen().
int mCurScreen = -1;
// Shows exactly one of the game's screens, hides the rest, and then
// decides whether the incoming-invitation popup should be visible.
void switchToScreen(int screenId) {
    // only the requested screen stays visible
    for (int id : SCREENS) {
        int visibility = (screenId == id) ? View.VISIBLE : View.GONE;
        findViewById(id).setVisibility(visibility);
    }
    mCurScreen = screenId;

    // The popup is shown only while an invitation is pending, and only on
    // screens where it makes sense: in multiplayer only on the main
    // screen, in single-player on the main and gameplay screens.
    boolean onMain = mCurScreen == R.id.screen_main;
    boolean onGame = mCurScreen == R.id.screen_game;
    boolean showInvPopup = mIncomingInvitationId != null
            && (mMultiplayer ? onMain : (onMain || onGame));
    findViewById(R.id.invitation_popup)
            .setVisibility(showInvPopup ? View.VISIBLE : View.GONE);
}
// Returns to the main screen when connected; otherwise the user must
// sign in first.
@Override
public void switchToMainScreen() {
    switchToScreen(connectionHandler.isConnected()
            ? R.id.screen_main
            : R.id.screen_sign_in);
}
// Updates the label that shows my current reading.
void updateScoreDisplay() {
    TextView myScore = (TextView) findViewById(R.id.my_score);
    myScore.setText(formatScore(currentX, currentY, currentZ));
}
// Formats an axis reading (radians) as degrees for display.
// NOTE(review): despite the name this overload formats orientation
// angles, not a score — the old comment was copied from formatScore(int).
String formatScore(float x, float y, float z) {
return String.format("X:%f Y:%f Z:%f", x * 180 / Math.PI, y * 180 / Math.PI, z * 180 / Math.PI);
}
// Formats a score as a zero-padded three-digit number ("007", "042").
// Negative scores are clamped to zero; four-or-more-digit scores are
// returned unpadded.
String formatScore(int i) {
    int clamped = Math.max(i, 0);
    String s = String.valueOf(clamped);
    while (s.length() < 3) {
        s = "0" + s;
    }
    return s;
}
// Updates the on-screen score labels. Label score0 always shows the
// local player; the remaining labels are filled with joined peers and
// any leftover labels are blanked out.
void updatePeerScoresDisplay() {
    ((TextView) findViewById(R.id.score0)).setText(formatScore(mScore) + " - Me");
    int[] arr = { R.id.score1, R.id.score2, R.id.score3 };
    int i = 0;
    if (connectionHandler.getRoomId() != null) {
        for (Participant p : mParticipants) {
            // Fix: stop once every label is used — with more than
            // arr.length joined peers the old code indexed past the array
            // and threw ArrayIndexOutOfBoundsException.
            if (i >= arr.length) {
                break;
            }
            String pid = p.getParticipantId();
            if (pid.equals(mMyId)) {
                continue; // we are already shown in score0
            }
            if (p.getStatus() != Participant.STATUS_JOINED) {
                continue; // only show peers that actually joined the room
            }
            int score = mParticipantScore.containsKey(pid) ? mParticipantScore.get(pid) : 0;
            ((TextView) findViewById(arr[i])).setText(formatScore(score) + " - " + p.getDisplayName());
            ++i;
        }
    }
    // blank out any unused labels
    for (; i < arr.length; ++i) {
        ((TextView) findViewById(arr[i])).setText("");
    }
}
/*
 * MISC SECTION. Miscellaneous methods.
 */
// Sets the flag that keeps this screen on. Recommended during the game
// handshake: if the screen turns off mid-setup, the game is cancelled.
void keepScreenOn() {
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
// Clears the flag that keeps the screen on (normal power behavior resumes).
void stopKeepingScreenOn() {
getWindow().clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
// Called when an invitation to play arrives; shows who invited us and
// re-runs switchToScreen() so the invitation popup becomes visible.
@Override
public void onInvitation(Invitation invitation) {
((TextView) findViewById(R.id.incoming_invitation_text)).setText(invitation.getInviter()
.getDisplayName() + " " + getString(R.string.is_inviting_you));
switchToScreen(mCurScreen); // This will show the invitation popup
}
// The invitation is gone; refresh the current screen to hide the popup.
@Override
public void onInvitationRemoved() {
switchToScreen(mCurScreen);
}
// Delegates sign-in failures to BaseGameUtils, which attempts resolution
// and returns whether it handled the failure.
@Override
public boolean onConnectionFailed(GoogleApiClient googleApiClient,
ConnectionResult connectionResult) {
return BaseGameUtils.resolveConnectionFailure(this, googleApiClient, connectionResult,
RC_SIGN_IN, getString(R.string.signin_other_error));
}
// NOTE(review): empty callback — presumably nothing to do when the wait
// screen is requested; confirm this is intentional.
@Override
public void onSwitchToWaitScreen() {
}
// Launches the standard waiting-room UI for the given room; the result
// comes back through onActivityResult with RC_WAITING_ROOM.
@Override
public void onShowWaitingRoom(Room room) {
Intent i = connectionHandler.getWaitingRoomIntent(room);
// show waiting room UI
startActivityForResult(i, RC_WAITING_ROOM);
}
// NOTE(review): method name is misspelled ("Dislay") but it overrides an
// interface method declared elsewhere, so it cannot be renamed here.
@Override
public void onPeerScoresDislay() {
}
// Shows the sign-in screen.
@Override
public void switchToSignInScreen() {
switchToScreen(R.id.screen_sign_in);
}
// NOTE(review): empty callback — confirm nothing needs to happen when the
// connection handler reports a successful connection.
@Override
public void connected() {
}
}
| |
/*
* Copyright 2017 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.ssl;
import io.netty.bootstrap.Bootstrap;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.buffer.ByteBufAllocator;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.DefaultEventLoopGroup;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.local.LocalAddress;
import io.netty.channel.local.LocalChannel;
import io.netty.channel.local.LocalServerChannel;
import io.netty.handler.ssl.util.InsecureTrustManagerFactory;
import io.netty.handler.ssl.util.SelfSignedCertificate;
import io.netty.handler.ssl.util.SimpleTrustManagerFactory;
import io.netty.util.ReferenceCountUtil;
import io.netty.util.concurrent.Promise;
import io.netty.util.internal.EmptyArrays;
import io.netty.util.internal.ThrowableUtil;
import org.junit.Assert;
import javax.net.ssl.ExtendedSSLSession;
import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.KeyManagerFactorySpi;
import javax.net.ssl.ManagerFactoryParameters;
import javax.net.ssl.SNIHostName;
import javax.net.ssl.SNIMatcher;
import javax.net.ssl.SNIServerName;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLParameters;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509ExtendedKeyManager;
import javax.net.ssl.X509ExtendedTrustManager;
import java.io.IOException;
import java.net.Socket;
import java.security.InvalidAlgorithmParameterException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.Principal;
import java.security.PrivateKey;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* In extra class to be able to run tests with java7 without trying to load classes that not exists in java7.
*/
/**
 * In extra class to be able to run tests with java7 without trying to load classes that not exists in java7.
 */
final class SniClientJava8TestUtil {

    // Static utility holder; never instantiated.
    private SniClientJava8TestUtil() { }

    /**
     * Spins up a local in-VM TLS server/client pair and verifies SNI matching.
     * The server installs an {@link SNIMatcher} whose result is forced to
     * {@code match}; the client connects with the SNI name "sni.netty.io".
     * When {@code match} is true the server-side handshake must succeed;
     * when false it must fail with an {@link SSLException}.
     */
    static void testSniClient(SslProvider sslClientProvider, SslProvider sslServerProvider, final boolean match)
            throws Exception {
        final String sniHost = "sni.netty.io";
        SelfSignedCertificate cert = new SelfSignedCertificate();
        LocalAddress address = new LocalAddress("test");
        EventLoopGroup group = new DefaultEventLoopGroup(1);
        SslContext sslServerContext = null;
        SslContext sslClientContext = null;
        Channel sc = null;
        Channel cc = null;
        try {
            sslServerContext = SslContextBuilder.forServer(cert.key(), cert.cert())
                    .sslProvider(sslServerProvider).build();
            // Completed once the server side observed the expected handshake outcome.
            final Promise<Void> promise = group.next().newPromise();
            ServerBootstrap sb = new ServerBootstrap();
            final SslContext finalContext = sslServerContext;
            sc = sb.group(group).channel(LocalServerChannel.class).childHandler(new ChannelInitializer<Channel>() {
                @Override
                protected void initChannel(Channel ch) throws Exception {
                    SslHandler handler = finalContext.newHandler(ch.alloc());
                    SSLParameters parameters = handler.engine().getSSLParameters();
                    // The matcher's verdict is forced by the 'match' flag so both
                    // the success and the failure path can be exercised.
                    SNIMatcher matcher = new SNIMatcher(0) {
                        @Override
                        public boolean matches(SNIServerName sniServerName) {
                            return match;
                        }
                    };
                    parameters.setSNIMatchers(Collections.singleton(matcher));
                    handler.engine().setSSLParameters(parameters);
                    ch.pipeline().addFirst(handler);
                    ch.pipeline().addLast(new ChannelInboundHandlerAdapter() {
                        @Override
                        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
                            if (evt instanceof SslHandshakeCompletionEvent) {
                                SslHandshakeCompletionEvent event = (SslHandshakeCompletionEvent) evt;
                                if (match) {
                                    // matching SNI: the handshake must succeed
                                    if (event.isSuccess()) {
                                        promise.setSuccess(null);
                                    } else {
                                        promise.setFailure(event.cause());
                                    }
                                } else {
                                    // non-matching SNI: the handshake must fail
                                    // with an SSLException specifically
                                    if (event.isSuccess()) {
                                        promise.setFailure(new AssertionError("expected SSLException"));
                                    } else {
                                        Throwable cause = event.cause();
                                        if (cause instanceof SSLException) {
                                            promise.setSuccess(null);
                                        } else {
                                            promise.setFailure(
                                                    new AssertionError("cause not of type SSLException: "
                                                            + ThrowableUtil.stackTraceToString(cause)));
                                        }
                                    }
                                }
                            }
                        }
                    });
                }
            }).bind(address).syncUninterruptibly().channel();
            sslClientContext = SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE)
                    .sslProvider(sslClientProvider).build();
            // The peer host passed to newEngine becomes the client's SNI name.
            SslHandler sslHandler = new SslHandler(
                    sslClientContext.newEngine(ByteBufAllocator.DEFAULT, sniHost, -1));
            Bootstrap cb = new Bootstrap();
            cc = cb.group(group).channel(LocalChannel.class).handler(sslHandler)
                    .connect(address).syncUninterruptibly().channel();
            // Wait for the server-side verdict first, then for the client handshake.
            promise.syncUninterruptibly();
            sslHandler.handshakeFuture().syncUninterruptibly();
        } finally {
            // Best-effort teardown: channels, reference-counted contexts,
            // the temporary certificate, and the event loop group.
            if (cc != null) {
                cc.close().syncUninterruptibly();
            }
            if (sc != null) {
                sc.close().syncUninterruptibly();
            }
            ReferenceCountUtil.release(sslServerContext);
            ReferenceCountUtil.release(sslClientContext);
            cert.delete();
            group.shutdownGracefully();
        }
    }

    /** Asserts that the session requested exactly the given SNI host name. */
    static void assertSSLSession(boolean clientSide, SSLSession session, String name) {
        assertSSLSession(clientSide, session, new SNIHostName(name));
    }

    // Checks the requested server names and signature-algorithm lists of an
    // ExtendedSSLSession; plain SSLSession instances are only null-checked.
    private static void assertSSLSession(boolean clientSide, SSLSession session, SNIServerName name) {
        Assert.assertNotNull(session);
        if (session instanceof ExtendedSSLSession) {
            ExtendedSSLSession extendedSSLSession = (ExtendedSSLSession) session;
            List<SNIServerName> names = extendedSSLSession.getRequestedServerNames();
            Assert.assertEquals(1, names.size());
            Assert.assertEquals(name, names.get(0));
            Assert.assertTrue(extendedSSLSession.getLocalSupportedSignatureAlgorithms().length > 0);
            if (clientSide) {
                Assert.assertEquals(0, extendedSSLSession.getPeerSupportedSignatureAlgorithms().length);
            } else {
                Assert.assertTrue(extendedSSLSession.getPeerSupportedSignatureAlgorithms().length >= 0);
            }
        }
    }

    /** Returns a trust manager factory that asserts the handshake's SNI name. */
    static TrustManagerFactory newSniX509TrustmanagerFactory(String name) {
        return new SniX509TrustmanagerFactory(new SNIHostName(name));
    }

    private static final class SniX509TrustmanagerFactory extends SimpleTrustManagerFactory {
        private final SNIServerName name;

        SniX509TrustmanagerFactory(SNIServerName name) {
            this.name = name;
        }

        @Override
        protected void engineInit(KeyStore keyStore) throws Exception {
            // NOOP
        }

        @Override
        protected void engineInit(ManagerFactoryParameters managerFactoryParameters) throws Exception {
            // NOOP
        }

        @Override
        protected TrustManager[] engineGetTrustManagers() {
            // Only checkServerTrusted(..., SSLEngine) is expected to run in
            // these tests; every other callback fails the test outright.
            return new TrustManager[] { new X509ExtendedTrustManager() {
                @Override
                public void checkClientTrusted(X509Certificate[] x509Certificates, String s, Socket socket)
                        throws CertificateException {
                    Assert.fail();
                }

                @Override
                public void checkServerTrusted(X509Certificate[] x509Certificates, String s, Socket socket)
                        throws CertificateException {
                    Assert.fail();
                }

                @Override
                public void checkClientTrusted(X509Certificate[] x509Certificates, String s, SSLEngine sslEngine)
                        throws CertificateException {
                    Assert.fail();
                }

                @Override
                public void checkServerTrusted(X509Certificate[] x509Certificates, String s, SSLEngine sslEngine)
                        throws CertificateException {
                    // Verify the SNI name recorded in the handshake session.
                    assertSSLSession(sslEngine.getUseClientMode(), sslEngine.getHandshakeSession(), name);
                }

                @Override
                public void checkClientTrusted(X509Certificate[] x509Certificates, String s)
                        throws CertificateException {
                    Assert.fail();
                }

                @Override
                public void checkServerTrusted(X509Certificate[] x509Certificates, String s)
                        throws CertificateException {
                    Assert.fail();
                }

                @Override
                public X509Certificate[] getAcceptedIssuers() {
                    return EmptyArrays.EMPTY_X509_CERTIFICATES;
                }
            } };
        }
    }

    /**
     * Returns a key manager factory that asserts the handshake's SNI name
     * while choosing the server-side alias, delegating everything else to
     * a factory built for the given self-signed certificate.
     */
    static KeyManagerFactory newSniX509KeyManagerFactory(SelfSignedCertificate cert, String hostname)
            throws NoSuchAlgorithmException, KeyStoreException, UnrecoverableKeyException,
            IOException, CertificateException {
        return new SniX509KeyManagerFactory(
                new SNIHostName(hostname), SslContext.buildKeyManagerFactory(
                        new X509Certificate[] { cert.cert() }, cert.key(), null, null));
    }

    private static final class SniX509KeyManagerFactory extends KeyManagerFactory {
        SniX509KeyManagerFactory(final SNIServerName name, final KeyManagerFactory factory) {
            super(new KeyManagerFactorySpi() {
                @Override
                protected void engineInit(KeyStore keyStore, char[] chars)
                        throws KeyStoreException, NoSuchAlgorithmException, UnrecoverableKeyException {
                    factory.init(keyStore, chars);
                }

                @Override
                protected void engineInit(ManagerFactoryParameters managerFactoryParameters)
                        throws InvalidAlgorithmParameterException {
                    factory.init(managerFactoryParameters);
                }

                @Override
                protected KeyManager[] engineGetKeyManagers() {
                    // Wrap every X509ExtendedKeyManager so that the server-alias
                    // selection additionally asserts the session's SNI name;
                    // all other calls are pure delegation.
                    List<KeyManager> managers = new ArrayList<KeyManager>();
                    for (final KeyManager km: factory.getKeyManagers()) {
                        if (km instanceof X509ExtendedKeyManager) {
                            managers.add(new X509ExtendedKeyManager() {
                                @Override
                                public String[] getClientAliases(String s, Principal[] principals) {
                                    return ((X509ExtendedKeyManager) km).getClientAliases(s, principals);
                                }

                                @Override
                                public String chooseClientAlias(String[] strings, Principal[] principals,
                                                                Socket socket) {
                                    return ((X509ExtendedKeyManager) km).chooseClientAlias(strings, principals, socket);
                                }

                                @Override
                                public String[] getServerAliases(String s, Principal[] principals) {
                                    return ((X509ExtendedKeyManager) km).getServerAliases(s, principals);
                                }

                                @Override
                                public String chooseServerAlias(String s, Principal[] principals, Socket socket) {
                                    return ((X509ExtendedKeyManager) km).chooseServerAlias(s, principals, socket);
                                }

                                @Override
                                public X509Certificate[] getCertificateChain(String s) {
                                    return ((X509ExtendedKeyManager) km).getCertificateChain(s);
                                }

                                @Override
                                public PrivateKey getPrivateKey(String s) {
                                    return ((X509ExtendedKeyManager) km).getPrivateKey(s);
                                }

                                @Override
                                public String chooseEngineClientAlias(String[] strings, Principal[] principals,
                                                                      SSLEngine sslEngine) {
                                    return ((X509ExtendedKeyManager) km)
                                            .chooseEngineClientAlias(strings, principals, sslEngine);
                                }

                                @Override
                                public String chooseEngineServerAlias(String s, Principal[] principals,
                                                                      SSLEngine sslEngine) {
                                    // The SNI assertion happens here, on the server side.
                                    SSLSession session = sslEngine.getHandshakeSession();
                                    assertSSLSession(sslEngine.getUseClientMode(), session, name);
                                    return ((X509ExtendedKeyManager) km)
                                            .chooseEngineServerAlias(s, principals, sslEngine);
                                }
                            });
                        } else {
                            managers.add(km);
                        }
                    }
                    return managers.toArray(new KeyManager[0]);
                }
            }, factory.getProvider(), factory.getAlgorithm());
        }
    }
}
| |
package de.mhu.os.fr.model;
import java.util.HashSet;
import java.util.LinkedList;
import de.mhus.lib.MEventHandler;
import de.mhus.lib.MThread;
/**
 * A toroidal (wrap-around) 2-D grid that holds a food value per cell,
 * the creatures standing on each cell, and the sources feeding the
 * world. Also drives the simulation loop and notifies listeners about
 * changes. Cells are indexed as {@code array[x * h + y]}.
 */
public class Surface {

    /** Exclusive upper bound for values accepted by {@link #drop}. */
    public static final byte MAX_VALUE = 10;

    /** Delay in ms between simulation steps; a negative value pauses the loop. */
    private int sleepTime = 100;
    private int h; // grid height
    private int w; // grid width
    /** Food value of every cell. */
    private byte[] values;
    /** Creatures located on every cell; same indexing as 'values'. */
    private HashSet<Creature>[] creaturesOnSurface;
    /** All creatures currently registered on the surface. */
    private HashSet<Creature> creatures;
    /** Sources that act once per simulation step. */
    private LinkedList<Source> sources;
    private MEventHandler<Listener> eventHandler = new MEventHandler<Listener>();
    private int removedChildCnt; // creatures removed during the current step
    private int addedChildCnt;   // creatures added during the current step
    private boolean finishRound; // set by finishRound() to stop loop()

    public Surface( int pWidth, int pHeight ) {
        w = pWidth;
        h = pHeight;
        values = new byte[ w * h ];
        creatures = new HashSet<Creature>();
        creaturesOnSurface = new HashSet[ w * h ];
        for ( int i = 0; i < creaturesOnSurface.length; i++ )
            creaturesOnSurface[i] = new HashSet<Creature>();
        sources = new LinkedList<Source>();
    }

    /** Wraps an x coordinate onto the surface (torus semantics). */
    public int transformX( int x ) {
        while ( x < 0 ) x = w+x;
        return x % w;
    }

    /** Wraps a y coordinate onto the surface (torus semantics). */
    public int transformY( int y ) {
        while ( y < 0 ) y = h+y;
        return y % h;
    }

    /** Returns the food value at (x,y); coordinates are wrapped. */
    public byte getValue( int x, int y ) {
        x = transformX( x );
        y = transformY( y );
        return values[ x * h + y ];
    }

    /** Sets the food value at (x,y), firing a change event only on change. */
    public void setValue( int x, int y, byte value ) {
        x = transformX( x );
        y = transformY( y );
        if ( values[ x * h + y ] != value ) {
            values[ x * h + y ] = value;
            fireChanged( x, y );
        }
    }

    /** Registers a source at the wrapped position (x,y). */
    public void addSource( int x, int y, Source s ) {
        x = transformX( x );
        y = transformY( y );
        s.setPosition(x, y);
        sources.add( s );
    }

    /**
     * Places a new creature at the wrapped position (x,y).
     *
     * @return false if the creature is already on the surface
     */
    public boolean addCreature( Creature c, int x, int y ) {
        x = transformX( x );
        y = transformY( y );
        synchronized ( this ) {
            if ( creatures.contains( c ) ) return false;
            addedChildCnt++;
            creatures.add( c );
            toPos( c, x, y );
        }
        return true;
    }

    /**
     * Moves a registered creature to the wrapped position (x,y).
     *
     * @return false if the creature is not on the surface
     */
    public boolean moveCreature( Creature c, int x, int y ) {
        x = transformX( x );
        y = transformY( y );
        synchronized ( this ) {
            if ( ! creatures.contains( c ) ) return false;
            removePos( c );
            toPos( c, x, y );
        }
        return true;
    }

    // Removes the creature from its current cell and marks it as
    // position-less (-1,-1). No-op for creatures without a position.
    private void removePos(Creature c) {
        int x = c.getPositionX();
        int y = c.getPositionY();
        if ( x < 0 || y < 0 ) return;
        creaturesOnSurface[ x * h + y ].remove( c );
        c.setPosition( -1, -1 );
        fireChanged( x, y );
    }

    // Puts the creature onto the (already wrapped) cell (x,y).
    private void toPos( Creature c, int x, int y ) {
        creaturesOnSurface[ x * h + y ].add( c );
        c.setPosition( x, y );
        fireChanged( x, y );
    }

    public int getHeight() {
        return h;
    }

    public int getWidth() {
        return w;
    }

    /** Returns the number of creatures on the wrapped cell (x,y). */
    public int getCreatureSize(int x, int y) {
        x = transformX( x );
        y = transformY( y );
        synchronized ( this ) {
            return creaturesOnSurface[ x * h + y ].size();
        }
    }

    /** Returns a snapshot copy of all creatures on the surface. */
    public HashSet<Creature> getCreatures() {
        synchronized ( this ) {
            return (HashSet<Creature>)creatures.clone();
        }
    }

    /** Removes and returns the food value at the wrapped cell (x,y). */
    public byte take( int x, int y ) {
        x = transformX( x );
        y = transformY( y );
        byte v = values[ x * h + y ];
        values[ x * h + y ] = 0;
        fireChanged( x, y );
        return v;
    }

    /**
     * Drops the value {@code v} onto the wrapped cell (x,y). Succeeds only
     * when the cell is currently empty and {@code 0 <= v < MAX_VALUE}.
     *
     * @return true if the value was stored
     */
    public boolean drop( int x, int y, byte v ) {
        if ( v < 0 || v >= MAX_VALUE ) return false;
        x = transformX( x );
        y = transformY( y );
        // only an empty cell can accept a drop
        if ( values[ x * h + y ] != 0 ) return false;
        // Fix: the old code contained a duplicated emptiness check that
        // returned true here before storing the value, so drop() reported
        // success without ever writing anything.
        values[ x * h + y ] = v;
        fireChanged( x, y );
        return true;
    }

    /** Unregisters the creature and frees its cell, if it was present. */
    public void removeCreature( Creature c ) {
        synchronized ( this ) {
            if ( creatures.remove( c ) ) {
                removePos( c );
                removedChildCnt++;
            }
        }
    }

    /** Returns the total number of creatures on the surface. */
    public int getCreatureSize() {
        return creatures.size();
    }

    public MEventHandler<Listener> eventHandler() {
        return eventHandler;
    }

    // Notifies all listeners that the cell (x,y) changed.
    private void fireChanged( int x, int y) {
        Object[] list = eventHandler.getListenersArray();
        for ( Object o : list )
            ((Listener)o).eventChanged( x, y );
    }

    // Notifies all listeners that the whole surface changed (e.g. restore).
    private void fireChangedAll() {
        Object[] list = eventHandler.getListenersArray();
        for ( Object o : list )
            ((Listener)o).eventChangedAll();
    }

    // Publishes the per-step statistics to all listeners.
    private void fireRoundFinished(int round, long age, int childSize, int addedChildCnt, int removedChildCnt, long healty, int healtyMin, int healtyMax) {
        Object[] list = eventHandler.getListenersArray();
        for ( Object o : list )
            ((Listener)o).eventRoundFinished( round, age, childSize, addedChildCnt, removedChildCnt, healty, healtyMin, healtyMax );
    }

    /** Observer interface for surface changes and per-step statistics. */
    public static interface Listener {
        void eventChanged(int x, int y);
        void eventRoundFinished(int round, long age, int childSize, int addedChildCnt,
                int removedChildCnt, long healty, int healtyMin, int healtyMax);
        void eventChangedAll();
    }

    /** Captures dimensions and cell values (creatures are not serialized). */
    public SerializedSurface serialize() {
        SerializedSurface out = new SerializedSurface();
        out.w = w;
        out.h = h;
        out.values = new byte[ values.length ];
        System.arraycopy( values, 0, out.values, 0, values.length );
        return out;
    }

    /**
     * Restores cell values from a snapshot of identical dimensions.
     *
     * @throws Exception when the snapshot's dimensions differ
     */
    public void restore(SerializedSurface save) throws Exception {
        if ( w != save.w || h != save.h ) throw new Exception( "Can't restore surface" );
        System.arraycopy( save.values, 0, values, 0, values.length );
        fireChangedAll();
    }

    /** Returns a snapshot array of the creatures on the wrapped cell (x,y). */
    public Creature[] getCreatures(int x, int y) {
        x = transformX( x );
        y = transformY( y );
        synchronized ( this ) {
            return creaturesOnSurface[ x * h + y ].toArray( new Creature[ creaturesOnSurface[ x * h + y ].size() ] );
        }
    }

    /**
     * Runs the simulation until finishRound() is called or no creatures
     * remain. Each step lets every source act, moves every creature,
     * aggregates health statistics, and notifies listeners. A negative
     * sleepTime pauses the simulation (polled every 200 ms).
     */
    public void loop( int round ) {
        long age = 0;
        finishRound = false;
        for ( Source s : sources )
            s.reset();
        while ( !finishRound && getCreatureSize() > 0 ) {
            if ( sleepTime >= 0 ) {
                for ( Source s : sources )
                    s.action( this );
                long healty = 0;
                int healtyMin = Integer.MAX_VALUE;
                int healtyMax = 0;
                removedChildCnt = 0;
                addedChildCnt = 0;
                for ( Creature c : getCreatures() ) {
                    c.move( this );
                    int h = c.getHealty();
                    healtyMin = Math.min( healtyMin, h );
                    healtyMax = Math.max( healtyMax, h );
                    healty+=h;
                }
                age++;
                fireRoundFinished( round, age, getCreatureSize(), addedChildCnt, removedChildCnt, healty, healtyMin, healtyMax );
                MThread.sleep( sleepTime+1 );
            } else {
                MThread.sleep( 200 );
            }
        }
    }

    public void setSleepTime(int val) {
        sleepTime = val;
    }

    /** Requests loop() to terminate after the current step. */
    public void finishRound() {
        finishRound = true;
    }

    /** Removes every creature from the surface and all cells. */
    public void clearCreatures() {
        synchronized ( this ) {
            for ( int i = 0; i < creaturesOnSurface.length; i++ )
                creaturesOnSurface[i].clear();
            creatures.clear();
        }
    }
}
| |
package io.scribeapp.input.suggestions;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
import java.util.ArrayList;
import java.util.List;
import io.scribeapp.R;
import io.scribeapp.input.MainInputService;
public class SuggestionView extends View {
private static final int OUT_OF_BOUNDS = -1;
private MainInputService service;
private List<String> mSuggestions;
private int selectedIndex;
private int mTouchX = OUT_OF_BOUNDS;
private Drawable mSelectionHighlight;
private boolean mTypedWordValid;
private Rect mBgPadding;
private static final int MAX_SUGGESTIONS = 32;
private static final int SCROLL_PIXELS = 20;
private int[] mWordWidth = new int[MAX_SUGGESTIONS];
private int[] mWordX = new int[MAX_SUGGESTIONS];
private static final int X_GAP = 10;
private static final List<String> EMPTY_LIST = new ArrayList<String>();
private int mColorNormal;
private int mColorRecommended;
private int mColorOther;
private int mVerticalPadding;
private Paint mPaint;
private boolean mScrolled;
private int mTargetScrollX;
private int mTotalWidth;
private GestureDetector gestureDetector;
public SuggestionView(Context context) {
super(context);
mSelectionHighlight = context.getResources().getDrawable(
android.R.drawable.list_selector_background);
mSelectionHighlight.setState(new int[] {
android.R.attr.state_enabled,
android.R.attr.state_focused,
android.R.attr.state_window_focused,
android.R.attr.state_pressed
});
Resources r = context.getResources();
setBackgroundColor(r.getColor(R.color.candidate_background));
mColorNormal = r.getColor(R.color.candidate_normal);
mColorRecommended = r.getColor(R.color.candidate_recommended);
mColorOther = r.getColor(R.color.candidate_other);
mVerticalPadding = r.getDimensionPixelSize(R.dimen.candidate_vertical_padding);
mPaint = new Paint();
mPaint.setColor(mColorNormal);
mPaint.setAntiAlias(true);
mPaint.setTextSize(r.getDimensionPixelSize(R.dimen.candidate_font_height));
mPaint.setStrokeWidth(0);
gestureDetector = new GestureDetector(new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onScroll(MotionEvent e1, MotionEvent e2,
float distanceX, float distanceY) {
mScrolled = true;
int sx = getScrollX();
sx += distanceX;
if (sx < 0) {
sx = 0;
}
if (sx + getWidth() > mTotalWidth) {
sx -= distanceX;
}
mTargetScrollX = sx;
scrollTo(sx, getScrollY());
invalidate();
return true;
}
});
setHorizontalFadingEdgeEnabled(true);
setWillNotDraw(false);
setHorizontalScrollBarEnabled(false);
setVerticalScrollBarEnabled(false);
}
public void setService(MainInputService listener) {
service = listener;
}
@Override
public int computeHorizontalScrollRange() {
return mTotalWidth;
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
int measuredWidth = resolveSize(50, widthMeasureSpec);
Rect padding = new Rect();
mSelectionHighlight.getPadding(padding);
final int desiredHeight = ((int)mPaint.getTextSize()) + mVerticalPadding
+ padding.top + padding.bottom;
setMeasuredDimension(measuredWidth,
resolveSize(desiredHeight, heightMeasureSpec));
}
@Override
protected void onDraw(Canvas canvas) {
if (canvas != null) {
super.onDraw(canvas);
}
mTotalWidth = 0;
if (mSuggestions == null) return;
if (mBgPadding == null) {
mBgPadding = new Rect(0, 0, 0, 0);
if (getBackground() != null) {
getBackground().getPadding(mBgPadding);
}
}
int x = 0;
final int count = mSuggestions.size();
final int height = getHeight();
final Rect bgPadding = mBgPadding;
final Paint paint = mPaint;
final int touchX = mTouchX;
final int scrollX = getScrollX();
final boolean scrolled = mScrolled;
final boolean typedWordValid = mTypedWordValid;
final int y = (int) (((height - mPaint.getTextSize()) / 2) - mPaint.ascent());
for (int i = 0; i < count; i++) {
String suggestion = mSuggestions.get(i);
float textWidth = paint.measureText(suggestion);
final int wordWidth = (int) textWidth + X_GAP * 2;
mWordX[i] = x;
mWordWidth[i] = wordWidth;
paint.setColor(mColorNormal);
if (touchX + scrollX >= x && touchX + scrollX < x + wordWidth && !scrolled) {
if (canvas != null) {
canvas.translate(x, 0);
mSelectionHighlight.setBounds(0, bgPadding.top, wordWidth, height);
mSelectionHighlight.draw(canvas);
canvas.translate(-x, 0);
}
selectedIndex = i;
}
if (canvas != null) {
if ((i == 1 && !typedWordValid) || (i == 0 && typedWordValid)) {
paint.setFakeBoldText(true);
paint.setColor(mColorRecommended);
} else if (i != 0) {
paint.setColor(mColorOther);
}
canvas.drawText(suggestion, x + X_GAP, y, paint);
paint.setColor(mColorOther);
canvas.drawLine(x + wordWidth + 0.5f, bgPadding.top,
x + wordWidth + 0.5f, height + 1, paint);
paint.setFakeBoldText(false);
}
x += wordWidth;
}
mTotalWidth = x;
if (mTargetScrollX != getScrollX()) {
scrollToTarget();
}
}
private void scrollToTarget() {
int sx = getScrollX();
if (mTargetScrollX > sx) {
sx += SCROLL_PIXELS;
if (sx >= mTargetScrollX) {
sx = mTargetScrollX;
requestLayout();
}
} else {
sx -= SCROLL_PIXELS;
if (sx <= mTargetScrollX) {
sx = mTargetScrollX;
requestLayout();
}
}
scrollTo(sx, getScrollY());
invalidate();
}
public void setSuggestions(List<String> suggestions, boolean typedWordValid) {
clear();
if (suggestions != null) {
mSuggestions = new ArrayList<String>(suggestions);
}
mTypedWordValid = typedWordValid;
scrollTo(0, 0);
mTargetScrollX = 0;
onDraw(null);
invalidate();
requestLayout();
}
public void clear() {
mSuggestions = EMPTY_LIST;
mTouchX = OUT_OF_BOUNDS;
selectedIndex = -1;
invalidate();
}
@Override
public boolean onTouchEvent(MotionEvent me) {
if (gestureDetector.onTouchEvent(me)) {
return true;
}
int action = me.getAction();
int x = (int) me.getX();
int y = (int) me.getY();
mTouchX = x;
switch (action) {
case MotionEvent.ACTION_DOWN:
mScrolled = false;
invalidate();
break;
case MotionEvent.ACTION_MOVE:
if (y <= 0) {
if (selectedIndex >= 0) {
service.pickSuggestion(mSuggestions.get(selectedIndex));
selectedIndex = -1;
}
}
invalidate();
break;
case MotionEvent.ACTION_UP:
if (!mScrolled) {
if (selectedIndex >= 0) {
service.pickSuggestion(mSuggestions.get(selectedIndex));
}
}
selectedIndex = -1;
removeHighlight();
requestLayout();
break;
}
return true;
}
public void takeSuggestionAt(float x) {
mTouchX = (int) x;
onDraw(null);
if (selectedIndex >= 0) {
service.pickSuggestion(mSuggestions.get(selectedIndex));
}
invalidate();
}
private void removeHighlight() {
mTouchX = OUT_OF_BOUNDS;
invalidate();
}
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.engine.cloud.entity.api;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.inject.Inject;
import org.apache.cloudstack.affinity.dao.AffinityGroupVMMapDao;
import org.apache.cloudstack.engine.cloud.entity.api.db.VMEntityVO;
import org.apache.cloudstack.engine.cloud.entity.api.db.VMReservationVO;
import org.apache.cloudstack.engine.cloud.entity.api.db.dao.VMEntityDao;
import org.apache.cloudstack.engine.cloud.entity.api.db.dao.VMReservationDao;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreManager;
import org.apache.cloudstack.storage.datastore.db.PrimaryDataStoreDao;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Component;
import com.cloud.dc.DataCenter;
import com.cloud.deploy.DataCenterDeployment;
import com.cloud.deploy.DeployDestination;
import com.cloud.deploy.DeploymentPlan;
import com.cloud.deploy.DeploymentPlanner;
import com.cloud.deploy.DeploymentPlanningManager;
import com.cloud.deploy.DeploymentPlanner.ExcludeList;
import com.cloud.exception.AffinityConflictException;
import com.cloud.exception.AgentUnavailableException;
import com.cloud.exception.ConcurrentOperationException;
import com.cloud.exception.InsufficientCapacityException;
import com.cloud.exception.InsufficientServerCapacityException;
import com.cloud.exception.OperationTimedoutException;
import com.cloud.exception.ResourceUnavailableException;
import com.cloud.network.dao.NetworkDao;
import com.cloud.org.Cluster;
import com.cloud.service.dao.ServiceOfferingDao;
import com.cloud.storage.StoragePool;
import com.cloud.storage.VolumeVO;
import com.cloud.storage.dao.DiskOfferingDao;
import com.cloud.storage.dao.VMTemplateDao;
import com.cloud.storage.dao.VolumeDao;
import com.cloud.user.dao.AccountDao;
import com.cloud.user.dao.UserDao;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.VirtualMachineManager;
import com.cloud.vm.VirtualMachineProfile;
import com.cloud.vm.VirtualMachineProfileImpl;
import com.cloud.vm.dao.VMInstanceDao;
@Component
public class VMEntityManagerImpl implements VMEntityManager {
private static final Logger s_logger = Logger.getLogger(VMEntityManagerImpl.class);
@Inject
protected VMInstanceDao _vmDao;
@Inject
protected VMTemplateDao _templateDao = null;
@Inject
protected ServiceOfferingDao _serviceOfferingDao;
@Inject
protected DiskOfferingDao _diskOfferingDao = null;
@Inject
protected NetworkDao _networkDao;
@Inject
protected AccountDao _accountDao = null;
@Inject
protected UserDao _userDao = null;
@Inject
protected VMEntityDao _vmEntityDao;
@Inject
protected VMReservationDao _reservationDao;
@Inject
protected VirtualMachineManager _itMgr;
@Inject
protected List<DeploymentPlanner> _planners;
@Inject
protected VolumeDao _volsDao;
@Inject
protected PrimaryDataStoreDao _storagePoolDao;
@Inject
DataStoreManager dataStoreMgr;
@Inject
DeploymentPlanningManager _dpMgr;
@Inject
protected AffinityGroupVMMapDao _affinityGroupVMMapDao;
// Looks up a VM entity by its UUID string (not the numeric database id).
@Override
public VMEntityVO loadVirtualMachine(String vmId) {
return _vmEntityDao.findByUuid(vmId);
}
// Persists the VM entity through its DAO.
@Override
public void saveVirtualMachine(VMEntityVO entity) {
_vmEntityDao.persist(entity);
}
// Returns true when the profile's VM belongs to at least one affinity group.
protected boolean areAffinityGroupsAssociated(VirtualMachineProfile<? extends VirtualMachine> vmProfile) {
    long vmGroupCount = _affinityGroupVMMapDao.countAffinityGroupsForVm(
            vmProfile.getVirtualMachine().getId());
    return vmGroupCount > 0;
}
@Override
public String reserveVirtualMachine(VMEntityVO vmEntityVO, String plannerToUse, DeploymentPlan planToDeploy, ExcludeList exclude)
throws InsufficientCapacityException, ResourceUnavailableException {
//call planner and get the deployDestination.
//load vm instance and offerings and call virtualMachineManagerImpl
//FIXME: profile should work on VirtualMachineEntity
VMInstanceVO vm = _vmDao.findByUuid(vmEntityVO.getUuid());
VirtualMachineProfileImpl<VMInstanceVO> vmProfile = new VirtualMachineProfileImpl<VMInstanceVO>(vm);
DataCenterDeployment plan = new DataCenterDeployment(vm.getDataCenterId(), vm.getPodIdToDeployIn(), null, null, null, null);
if(planToDeploy != null && planToDeploy.getDataCenterId() != 0){
plan = new DataCenterDeployment(planToDeploy.getDataCenterId(), planToDeploy.getPodId(), planToDeploy.getClusterId(), planToDeploy.getHostId(), planToDeploy.getPoolId(), planToDeploy.getPhysicalNetworkId());
}
boolean planChangedByReadyVolume = false;
List<VolumeVO> vols = _volsDao.findReadyRootVolumesByInstance(vm.getId());
if(!vols.isEmpty()){
VolumeVO vol = vols.get(0);
StoragePool pool = (StoragePool)this.dataStoreMgr.getPrimaryDataStore(vol.getPoolId());
if (!pool.isInMaintenance()) {
long rootVolDcId = pool.getDataCenterId();
Long rootVolPodId = pool.getPodId();
Long rootVolClusterId = pool.getClusterId();
if (planToDeploy != null && planToDeploy.getDataCenterId() != 0) {
Long clusterIdSpecified = planToDeploy.getClusterId();
if (clusterIdSpecified != null && rootVolClusterId != null) {
if (rootVolClusterId.longValue() != clusterIdSpecified.longValue()) {
// cannot satisfy the plan passed in to the
// planner
throw new ResourceUnavailableException("Root volume is ready in different cluster, Deployment plan provided cannot be satisfied, unable to create a deployment for "
+ vm, Cluster.class, clusterIdSpecified);
}
}
plan = new DataCenterDeployment(planToDeploy.getDataCenterId(), planToDeploy.getPodId(), planToDeploy.getClusterId(), planToDeploy.getHostId(), vol.getPoolId(), null, null);
}else{
plan = new DataCenterDeployment(rootVolDcId, rootVolPodId, rootVolClusterId, null, vol.getPoolId(), null, null);
planChangedByReadyVolume = true;
}
}
}
while (true) {
DeployDestination dest = null;
try {
dest = _dpMgr.planDeployment(vmProfile, plan, exclude, null);
} catch (AffinityConflictException e) {
throw new CloudRuntimeException(
"Unable to create deployment, affinity rules associted to the VM conflict");
}
if (dest != null) {
String reservationId = _dpMgr.finalizeReservation(dest, vmProfile, plan, exclude);
if(reservationId != null){
return reservationId;
} else {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Cannot finalize the VM reservation for this destination found, retrying");
}
exclude.addHost(dest.getHost().getId());
continue;
}
} else if (planChangedByReadyVolume) {
// we could not reserve in the Volume's cluster - let the deploy
// call retry it.
return UUID.randomUUID().toString();
} else {
throw new InsufficientServerCapacityException("Unable to create a deployment for " + vmProfile,
DataCenter.class, plan.getDataCenterId(), areAffinityGroupsAssociated(vmProfile));
}
}
}
@Override
public void deployVirtualMachine(String reservationId, VMEntityVO vmEntityVO, String caller, Map<VirtualMachineProfile.Param, Object> params) throws InsufficientCapacityException, ResourceUnavailableException{
//grab the VM Id and destination using the reservationId.
VMInstanceVO vm = _vmDao.findByUuid(vmEntityVO.getUuid());
VMReservationVO vmReservation = _reservationDao.findByReservationId(reservationId);
if(vmReservation != null){
// Pass it down
DataCenterDeployment reservedPlan = new DataCenterDeployment(vm.getDataCenterId(),
vmReservation.getPodId(), vmReservation.getClusterId(), vmReservation.getHostId(), null, null);
try {
VMInstanceVO vmDeployed = _itMgr.start(vm, params, _userDao.findById(new Long(caller)),
_accountDao.findById(vm.getAccountId()), reservedPlan);
} catch (Exception ex) {
// Retry the deployment without using the reservation plan
DataCenterDeployment plan = new DataCenterDeployment(0, null, null, null, null, null);
if (reservedPlan.getAvoids() != null) {
plan.setAvoids(reservedPlan.getAvoids());
}
_itMgr.start(vm, params, _userDao.findById(new Long(caller)), _accountDao.findById(vm.getAccountId()),
plan);
}
} else {
// no reservation found. Let VirtualMachineManager retry
_itMgr.start(vm, params, _userDao.findById(new Long(caller)), _accountDao.findById(vm.getAccountId()), null);
}
}
@Override
public boolean stopvirtualmachine(VMEntityVO vmEntityVO, String caller) throws ResourceUnavailableException {
VMInstanceVO vm = _vmDao.findByUuid(vmEntityVO.getUuid());
return _itMgr.stop(vm, _userDao.findById(new Long(caller)), _accountDao.findById(vm.getAccountId()));
}
@Override
public boolean destroyVirtualMachine(VMEntityVO vmEntityVO, String caller) throws AgentUnavailableException, OperationTimedoutException, ConcurrentOperationException{
VMInstanceVO vm = _vmDao.findByUuid(vmEntityVO.getUuid());
return _itMgr.destroy(vm, _userDao.findById(new Long(caller)), _accountDao.findById(vm.getAccountId()));
}
}
| |
package org.jchlabs.gharonda.domain.pom.base;
import org.hibernate.Hibernate;
import org.hibernate.Session;
import org.hibernate.criterion.Order;
import org.jchlabs.gharonda.domain.pom.dao.iface.NeightbourhoodDAO;
/**
 * Automatically generated Hibernate DAO base class for
 * {@code org.jchlabs.gharonda.domain.model.Neightbourhood} entities. It should not be edited
 * by hand — regeneration would overwrite any changes; customizations belong in the
 * {@code NeightbourhoodDAO} subclass.
 */
public abstract class BaseNeightbourhoodDAO extends org.jchlabs.gharonda.domain.pom.dao._RootDAO {

	/** Creates a DAO that obtains its own Hibernate session from the root DAO. */
	public BaseNeightbourhoodDAO() {
	}

	/** Creates a DAO bound to the given Hibernate session. */
	public BaseNeightbourhoodDAO(Session session) {
		super(session);
	}

	// query name references

	// Lazily created singleton; access via getInstance() rather than this field directly.
	public static NeightbourhoodDAO instance;

	/**
	 * Return a singleton of the DAO.
	 * <p>
	 * NOTE(review): the lazy initialization is not thread-safe — two threads racing here may
	 * each create an instance. Confirm whether this DAO is only ever used single-threaded.
	 */
	public static NeightbourhoodDAO getInstance() {
		if (null == instance)
			instance = new org.jchlabs.gharonda.domain.pom.dao.NeightbourhoodDAO();
		return instance;
	}

	/** @return the persistent entity class this DAO manages */
	public Class getReferenceClass() {
		return org.jchlabs.gharonda.domain.model.Neightbourhood.class;
	}

	/** @return the default ordering applied by findAll(); {@code null} means unordered */
	public Order getDefaultOrder() {
		return null;
	}

	/**
	 * Cast the object as a org.jchlabs.gharonda.domain.model.Neightbourhood
	 */
	public org.jchlabs.gharonda.domain.model.Neightbourhood cast(Object object) {
		return (org.jchlabs.gharonda.domain.model.Neightbourhood) object;
	}

	/** Retrieve the instance with the given identifier, or {@code null} if it does not exist. */
	public org.jchlabs.gharonda.domain.model.Neightbourhood get(java.lang.Integer key) {
		return (org.jchlabs.gharonda.domain.model.Neightbourhood) get(getReferenceClass(), key);
	}

	/** Retrieve the instance with the given identifier using the given session. */
	public org.jchlabs.gharonda.domain.model.Neightbourhood get(java.lang.Integer key, Session s) {
		return (org.jchlabs.gharonda.domain.model.Neightbourhood) get(getReferenceClass(), key, s);
	}

	/** Load a (possibly lazy) proxy for the instance with the given identifier. */
	public org.jchlabs.gharonda.domain.model.Neightbourhood load(java.lang.Integer key) {
		return (org.jchlabs.gharonda.domain.model.Neightbourhood) load(getReferenceClass(), key);
	}

	/** Load a (possibly lazy) proxy for the instance with the given identifier using the given session. */
	public org.jchlabs.gharonda.domain.model.Neightbourhood load(java.lang.Integer key, Session s) {
		return (org.jchlabs.gharonda.domain.model.Neightbourhood) load(getReferenceClass(), key, s);
	}

	/** Load the instance with the given identifier and force initialization of its lazy state. */
	public org.jchlabs.gharonda.domain.model.Neightbourhood loadInitialize(java.lang.Integer key, Session s) {
		org.jchlabs.gharonda.domain.model.Neightbourhood obj = load(key, s);
		if (!Hibernate.isInitialized(obj)) {
			Hibernate.initialize(obj);
		}
		return obj;
	}

	/* Generic methods */

	/**
	 * Return all objects related to the implementation of this DAO with no filter.
	 */
	public java.util.List<org.jchlabs.gharonda.domain.model.Neightbourhood> findAll() {
		return super.findAll();
	}

	/**
	 * Return all objects related to the implementation of this DAO with no filter,
	 * sorted by the given order.
	 */
	public java.util.List<org.jchlabs.gharonda.domain.model.Neightbourhood> findAll(Order defaultOrder) {
		return super.findAll(defaultOrder);
	}

	/**
	 * Return all objects related to the implementation of this DAO with no filter. Use the session given.
	 * @param s the Session
	 */
	public java.util.List<org.jchlabs.gharonda.domain.model.Neightbourhood> findAll(Session s, Order defaultOrder) {
		return super.findAll(s, defaultOrder);
	}

	/**
	 * Persist the given transient instance, first assigning a generated identifier. (Or using the current value of the
	 * identifier property if the assigned generator is used.)
	 * @param neightbourhood a transient instance of a persistent class
	 * @return the class identifier
	 */
	public java.lang.Integer save(org.jchlabs.gharonda.domain.model.Neightbourhood neightbourhood) {
		return (java.lang.Integer) super.save(neightbourhood);
	}

	/**
	 * Persist the given transient instance, first assigning a generated identifier. (Or using the current value of the
	 * identifier property if the assigned generator is used.) Use the Session given.
	 * @param neightbourhood a transient instance of a persistent class
	 * @param s the Session
	 * @return the class identifier
	 */
	public java.lang.Integer save(org.jchlabs.gharonda.domain.model.Neightbourhood neightbourhood, Session s) {
		return (java.lang.Integer) save((Object) neightbourhood, s);
	}

	/**
	 * Either save() or update() the given instance, depending upon the value of its identifier property. By default the
	 * instance is always saved. This behaviour may be adjusted by specifying an unsaved-value attribute of the
	 * identifier property mapping.
	 * @param neightbourhood a transient instance containing new or updated state
	 */
	public void saveOrUpdate(org.jchlabs.gharonda.domain.model.Neightbourhood neightbourhood) {
		saveOrUpdate((Object) neightbourhood);
	}

	/**
	 * Either save() or update() the given instance, depending upon the value of its identifier property. By default the
	 * instance is always saved. This behaviour may be adjusted by specifying an unsaved-value attribute of the
	 * identifier property mapping. Use the Session given.
	 * @param neightbourhood a transient instance containing new or updated state.
	 * @param s the Session.
	 */
	public void saveOrUpdate(org.jchlabs.gharonda.domain.model.Neightbourhood neightbourhood, Session s) {
		saveOrUpdate((Object) neightbourhood, s);
	}

	/**
	 * Update the persistent state associated with the given identifier. An exception is thrown if there is a persistent
	 * instance with the same identifier in the current session.
	 * @param neightbourhood a transient instance containing updated state
	 */
	public void update(org.jchlabs.gharonda.domain.model.Neightbourhood neightbourhood) {
		update((Object) neightbourhood);
	}

	/**
	 * Update the persistent state associated with the given identifier. An exception is thrown if there is a persistent
	 * instance with the same identifier in the current session. Use the Session given.
	 * @param neightbourhood a transient instance containing updated state
	 * @param s the Session
	 */
	public void update(org.jchlabs.gharonda.domain.model.Neightbourhood neightbourhood, Session s) {
		update((Object) neightbourhood, s);
	}

	/**
	 * Remove a persistent instance from the datastore. The argument may be an instance associated with the receiving
	 * Session or a transient instance with an identifier associated with existing persistent state.
	 * @param id the instance ID to be removed
	 */
	public void delete(java.lang.Integer id) {
		delete((Object) load(id));
	}

	/**
	 * Remove a persistent instance from the datastore. The argument may be an instance associated with the receiving
	 * Session or a transient instance with an identifier associated with existing persistent state. Use the Session
	 * given.
	 * @param id the instance ID to be removed
	 * @param s the Session
	 */
	public void delete(java.lang.Integer id, Session s) {
		delete((Object) load(id, s), s);
	}

	/**
	 * Remove a persistent instance from the datastore. The argument may be an instance associated with the receiving
	 * Session or a transient instance with an identifier associated with existing persistent state.
	 * @param neightbourhood the instance to be removed
	 */
	public void delete(org.jchlabs.gharonda.domain.model.Neightbourhood neightbourhood) {
		delete((Object) neightbourhood);
	}

	/**
	 * Remove a persistent instance from the datastore. The argument may be an instance associated with the receiving
	 * Session or a transient instance with an identifier associated with existing persistent state. Use the Session
	 * given.
	 * @param neightbourhood the instance to be removed
	 * @param s the Session
	 */
	public void delete(org.jchlabs.gharonda.domain.model.Neightbourhood neightbourhood, Session s) {
		delete((Object) neightbourhood, s);
	}

	/**
	 * Re-read the state of the given instance from the underlying database. It is inadvisable to use this to implement
	 * long-running sessions that span many business tasks. This method is, however, useful in certain special
	 * circumstances. For example
	 * <ul>
	 * <li>where a database trigger alters the object state upon insert or update</li>
	 * <li>after executing direct SQL (eg. a mass update) in the same session</li>
	 * <li>after inserting a Blob or Clob</li>
	 * </ul>
	 */
	public void refresh(org.jchlabs.gharonda.domain.model.Neightbourhood neightbourhood, Session s) {
		refresh((Object) neightbourhood, s);
	}
}
| |
/*
* Copyright 2011-2021 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.snomed.core.tree;
import static com.google.common.collect.Maps.newHashMap;
import static com.google.common.collect.Sets.newHashSet;
import java.util.*;
import java.util.stream.Collectors;
import com.b2international.collections.PrimitiveSets;
import com.b2international.collections.longs.LongCollection;
import com.b2international.commons.AlphaNumericComparator;
import com.b2international.commons.CompareUtils;
import com.b2international.commons.collect.LongSets;
import com.b2international.commons.http.ExtendedLocale;
import com.b2international.snowowl.core.ApplicationContext;
import com.b2international.snowowl.core.ResourceURI;
import com.b2international.snowowl.core.domain.IComponent;
import com.b2international.snowowl.eventbus.IEventBus;
import com.b2international.snowowl.snomed.common.SnomedConstants.Concepts;
import com.b2international.snowowl.snomed.core.domain.SnomedConcept;
import com.b2international.snowowl.snomed.datastore.request.SnomedRequests;
import com.google.common.collect.*;
/**
 * Skeletal {@link TreeBuilder} implementation that assembles a {@link TerminologyTree} from a
 * set of matching SNOMED CT concepts plus a set of top-level concepts, wiring supertype and
 * subtype edges in the hierarchy form reported by {@link #getForm()} (stated or inferred).
 *
 * @since 4.6
 */
abstract class TreeBuilderImpl implements TreeBuilder {

	// Locales used when fetching additional concepts via SnomedRequests.
	private final List<ExtendedLocale> locales;

	// Optional caller-supplied top level; when null, build() fetches the default top level
	// (the ROOT concept plus its descendants in the current form).
	private Collection<SnomedConcept> topLevelConcepts;

	TreeBuilderImpl(List<ExtendedLocale> locales) {
		this.locales = locales;
	}

	/** @return the hierarchy form this builder works in (a {@code Trees} constant). */
	abstract String getForm();

	/**
	 * Overrides the default top-level concepts used when building the tree.
	 *
	 * @param topLevelConcepts the top-level concepts to use
	 * @return this builder, for chaining
	 */
	@Override
	public final TreeBuilder withTopLevelConcepts(final Collection<SnomedConcept> topLevelConcepts) {
		this.topLevelConcepts = topLevelConcepts;
		return this;
	}

	/**
	 * Builds the tree for the given matching concepts.
	 * <p>
	 * Each matching concept is hooked under a parent that is either another match or a
	 * top-level concept; when no direct parent qualifies, an ancestor is used instead (see
	 * {@link #findParentInAncestors}). Concepts referenced by the edges but not yet loaded are
	 * fetched at the end so the returned tree is fully populated.
	 *
	 * @param resource the terminology resource to fetch missing concepts from
	 * @param nodes the matching concepts to place in the tree
	 * @param snomedDescriptionExpand expand expression used when fetching concepts
	 * @return the assembled tree (items by id, subtype map, supertype map)
	 */
	@Override
	public TerminologyTree build(final ResourceURI resource, final Iterable<SnomedConcept> nodes, final String snomedDescriptionExpand) {
		// Explicit override wins; otherwise fetch the default top level lazily.
		final Collection<SnomedConcept> topLevelConcepts = this.topLevelConcepts == null ?
				getDefaultTopLevelConcepts(resource, snomedDescriptionExpand) : this.topLevelConcepts;
		final Map<String, SnomedConcept> treeItemsById = newHashMap();
		// all matching concepts should be in the componentMap
		treeItemsById.putAll(FluentIterable.from(nodes).uniqueIndex(IComponent::getId));
		final Collection<String> requiredTopLevelConceptIds = topLevelConcepts.stream().map(IComponent::getId).collect(Collectors.toSet());
		// compute subType and superType maps for the tree
		final SetMultimap<String, String> superTypeMap = HashMultimap.create();
		final SetMultimap<String, String> subTypeMap = HashMultimap.create();
		for (SnomedConcept entry : nodes) {
			final LongCollection parentIds = getParents(entry);
			final LongCollection ancestorIds = getAncestors(entry);
			if (parentIds != null) {
				final Collection<String> parents = LongSets.toStringSet(parentIds);
				final Collection<String> selectedParents = newHashSet();
				// keep only parents that are themselves matches or TOP-level concepts
				for (String parent : parents) {
					if (treeItemsById.containsKey(parent) || requiredTopLevelConceptIds.contains(parent)) {
						selectedParents.add(parent);
					}
				}
				if (selectedParents.isEmpty()) {
					// no direct parent qualifies — fall back to the ancestor list
					findParentInAncestors(entry, treeItemsById, requiredTopLevelConceptIds, subTypeMap, superTypeMap);
				} else {
					for (String parent : selectedParents) {
						subTypeMap.put(parent, entry.getId());
						superTypeMap.put(entry.getId(), parent);
					}
				}
			} else if (ancestorIds != null) {
				findParentInAncestors(entry, treeItemsById, requiredTopLevelConceptIds, subTypeMap, superTypeMap);
			} else {
				// no parents or ancestors, root element
				subTypeMap.put(null, entry.getId());
			}
		}
		// add TOP levels: include a top-level concept only if something got hooked under it
		for (SnomedConcept entry : topLevelConcepts) {
			if (!Concepts.ROOT_CONCEPT.equals(entry.getId()) && !treeItemsById.containsKey(entry.getId())) {
				if (subTypeMap.containsKey(entry.getId())) {
					treeItemsById.put(entry.getId(), entry);
				}
			}
		}
		for (SnomedConcept entry : topLevelConcepts) {
			if (Concepts.ROOT_CONCEPT.equals(entry.getId())) {
				// find all top level child and connect them with the root
				for (SnomedConcept tl : topLevelConcepts) {
					if (!Concepts.ROOT_CONCEPT.equals(tl.getId()) && treeItemsById.containsKey(tl.getId())) {
						subTypeMap.put(entry.getId(), tl.getId());
						superTypeMap.put(tl.getId(), entry.getId());
					}
				}
				// only add root concept if the tree contains top level concepts
				if (subTypeMap.containsKey(Concepts.ROOT_CONCEPT)) {
					treeItemsById.put(entry.getId(), entry);
					subTypeMap.put(null, entry.getId());
				}
				break;
			}
		}
		// fetch all missing components to build the remaining part of the FULL tree
		final Set<String> allRequiredComponents = newHashSet();
		allRequiredComponents.addAll(superTypeMap.keySet());
		allRequiredComponents.addAll(subTypeMap.keySet());
		allRequiredComponents.removeAll(treeItemsById.keySet());
		allRequiredComponents.remove(null);
		// fetch required data for all unknown items
		for (SnomedConcept entry : getComponents(resource, allRequiredComponents, snomedDescriptionExpand)) {
			treeItemsById.put(entry.getId(), entry);
		}
		return new TerminologyTree(treeItemsById, subTypeMap, superTypeMap);
	}

	/**
	 * Fetches the given concepts (with parent and ancestor ids expanded) from the resource.
	 * Returns an empty set when there is nothing to fetch.
	 */
	private Iterable<SnomedConcept> getComponents(ResourceURI resource, Set<String> componentIds, final String snomedDescriptionExpand) {
		if (CompareUtils.isEmpty(componentIds)) {
			return Collections.emptySet();
		}
		return SnomedRequests.prepareSearchConcept()
			.all()
			.filterByIds(ImmutableSet.copyOf(componentIds))
			.setLocales(locales)
			.setExpand(snomedDescriptionExpand + ",parentIds(),ancestorIds()")
			.build(resource)
			.execute(getBus())
			.getSync();
	}

	/**
	 * Fetches the default top level: the ROOT concept plus its (stated or inferred,
	 * depending on the form) descendants, each expanded with {@code snomedDescriptionExpand}.
	 */
	private Collection<SnomedConcept> getDefaultTopLevelConcepts(final ResourceURI resource, final String snomedDescriptionExpand) {
		// NOTE(review): the format string mixes an explicit index (%2$s) with a relative %s;
		// it expands to "<descExpand>,<descendantsField>(direct:true,expand(<descExpand>))".
		final SnomedConcept root = SnomedRequests.prepareGetConcept(Concepts.ROOT_CONCEPT)
				.setExpand(String.format("%2$s,%s(direct:true,expand(%2$s))", Trees.STATED_FORM.equals(getForm()) ? "statedDescendants" : "descendants", snomedDescriptionExpand))
				.setLocales(locales)
				.build(resource)
				.execute(getBus())
				.getSync();
		final Collection<SnomedConcept> requiredTreeItemConcepts = newHashSet();
		requiredTreeItemConcepts.add(root);
		requiredTreeItemConcepts.addAll(root.getDescendants().getItems());
		return requiredTreeItemConcepts;
	}

	/** @return the application-wide event bus used to execute requests. */
	private IEventBus getBus() {
		return ApplicationContext.getInstance().getService(IEventBus.class);
	}

	/** Returns the entry's parent ids in the current form, or {@code null} for an unknown form. */
	private LongCollection getParents(SnomedConcept entry) {
		switch (getForm()) {
		case Trees.INFERRED_FORM: return PrimitiveSets.newLongOpenHashSet(entry.getParentIds());
		case Trees.STATED_FORM: return PrimitiveSets.newLongOpenHashSet(entry.getStatedParentIds());
		default: return null;
		}
	}

	/** Returns the entry's ancestor ids in the current form, or {@code null} for an unknown form. */
	private LongCollection getAncestors(SnomedConcept entry) {
		switch (getForm()) {
		case Trees.INFERRED_FORM: return PrimitiveSets.newLongOpenHashSet(entry.getAncestorIds());
		case Trees.STATED_FORM: return PrimitiveSets.newLongOpenHashSet(entry.getStatedAncestorIds());
		default: return null;
		}
	}

	/**
	 * Hooks the entry under one of its ancestors when none of its direct parents qualify.
	 * Preference order: a single matching non-top-level ancestor; then a top-level ancestor
	 * (excluding ROOT); then ROOT itself; otherwise the entry becomes a root element. With
	 * several candidates, the alphanumerically smallest one is chosen deterministically.
	 */
	private void findParentInAncestors(final SnomedConcept entry, final Map<String, SnomedConcept> treeItemsById,
			final Collection<String> requiredTopLevelConceptIds, final SetMultimap<String, String> subTypeMap, final SetMultimap<String, String> superTypeMap) {
		// try to find a single matching ancestor and hook into that, otherwise we will require additional parentage info about the ancestors
		final Collection<String> ancestors = LongSets.toStringSet(getAncestors(entry));
		final Collection<String> selectedAncestors = newHashSet();
		for (String ancestor : ancestors) {
			if (!requiredTopLevelConceptIds.contains(ancestor) && treeItemsById.containsKey(ancestor)) {
				selectedAncestors.add(ancestor);
			}
		}
		if (selectedAncestors.isEmpty()) {
			// no matching ancestor, try to find the TOP level and hook into that
			for (String ancestor : ancestors) {
				if (requiredTopLevelConceptIds.contains(ancestor) && !Concepts.ROOT_CONCEPT.equals(ancestor)) {
					selectedAncestors.add(ancestor);
				}
			}
			// still no matching ancestor hook into the ROOT if it's in the ancestor list
			if (selectedAncestors.isEmpty() && ancestors.contains(Concepts.ROOT_CONCEPT)) {
				selectedAncestors.add(Concepts.ROOT_CONCEPT);
			}
		}
		if (selectedAncestors.isEmpty()) {
			subTypeMap.put(null, entry.getId());
		} else if (selectedAncestors.size() == 1) {
			final String singleAncestor = selectedAncestors.iterator().next();
			subTypeMap.put(singleAncestor, entry.getId());
			superTypeMap.put(entry.getId(), singleAncestor);
		} else {
			final String firstAncestor = Ordering.from(new AlphaNumericComparator()).min(selectedAncestors);
			subTypeMap.put(firstAncestor, entry.getId());
			superTypeMap.put(entry.getId(), firstAncestor);
		}
	}
}
| |
package at.jku.sea.cloud.listeners;
import java.io.Serializable;
import java.rmi.RemoteException;
import java.util.Collection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import at.jku.sea.cloud.implementation.events.ArtifactCommitedEvent;
import at.jku.sea.cloud.implementation.events.ArtifactEvent;
import at.jku.sea.cloud.implementation.events.CollectionAddedElementEvent;
import at.jku.sea.cloud.implementation.events.CollectionAddedElementsEvent;
import at.jku.sea.cloud.implementation.events.CollectionRemovedElementEvent;
import at.jku.sea.cloud.implementation.events.MapClearedEvent;
import at.jku.sea.cloud.implementation.events.MapPutEvent;
import at.jku.sea.cloud.implementation.events.MapRemovedElementEvent;
import at.jku.sea.cloud.implementation.events.PrivateVersionAddedEvent;
import at.jku.sea.cloud.implementation.events.PrivateVersionDeletedEvent;
import at.jku.sea.cloud.implementation.events.PrivateVersionParentSetEvent;
import at.jku.sea.cloud.implementation.events.PrivateVersionRebasedEvent;
import at.jku.sea.cloud.implementation.events.PropertyAliveSetEvent;
import at.jku.sea.cloud.implementation.events.PropertyCommitedEvent;
import at.jku.sea.cloud.implementation.events.PropertyDeletedEvent;
import at.jku.sea.cloud.implementation.events.PropertyMapSetEvent;
import at.jku.sea.cloud.implementation.events.PropertyReferenceSetEvent;
import at.jku.sea.cloud.implementation.events.PropertyValueSetEvent;
import at.jku.sea.cloud.implementation.events.VersionCommitedEvent;
import at.jku.sea.cloud.implementation.events.VersionDeletedEvent;
/**
 * {@link DataStorageListener} adapter: when logging is enabled every received event is written
 * to the debug log; otherwise every callback is a no-op. Subclasses override the callbacks
 * they care about.
 */
public class DataStorageAdapter implements DataStorageListener, Serializable {
  private static final long serialVersionUID = 1L;
  private static final Logger logger = LoggerFactory.getLogger(DataStorageAdapter.class);

  /** When true, each callback logs its event at debug level. */
  private final boolean log;

  /** Creates a silent adapter (no event logging). */
  public DataStorageAdapter() {
    this(false);
  }

  /**
   * @param log whether received events should be written to the debug log
   */
  public DataStorageAdapter(final boolean log) {
    super();
    this.log = log;
  }

  @Override
  public void artifactEvent(Collection<ArtifactEvent> events) throws RemoteException {
    if (this.log) {
      for (ArtifactEvent event : events) {
        logger.debug("DataStorageAdapter: artifactEvent(eventType={}, version={}, owner={}, tool={}, id={}, type={}, packageId={}, alive={}, isDeceased={})", new Object[] { event.eventType,
            event.privateVersion, event.owner, event.tool, event.id, event.type, event.packageId, event.alive, event.isDeceased });
      }
    }
  }

  @Override
  public void collectionAddedElement(Collection<CollectionAddedElementEvent> events) throws RemoteException {
    if (this.log) {
      for (CollectionAddedElementEvent event : events) {
        logger.debug("DataStorageAdapter: collectionAddedElement(version={}, owner={}, tool={}, collectionId={}, elem={})", new Object[] { event.privateVersion, event.owner, event.tool,
            event.collectionId, event.elem });
      }
    }
  }

  @Override
  public void collectionAddedElements(Collection<CollectionAddedElementsEvent> events) throws RemoteException {
    if (this.log) {
      for (CollectionAddedElementsEvent event : events) {
        // Fixed copy-pasted message name (previously logged as "collectionAddedElement").
        logger.debug("DataStorageAdapter: collectionAddedElements(version={}, owner={}, tool={}, collectionId={}, elem={})", new Object[] { event.privateVersion, event.owner, event.tool,
            event.collectionId, event.elem });
      }
    }
  }

  @Override
  public void collectionRemovedElement(Collection<CollectionRemovedElementEvent> events) throws RemoteException {
    if (this.log) {
      for (CollectionRemovedElementEvent event : events) {
        logger.debug("DataStorageAdapter: collectionRemovedElement(version={}, owner={}, tool={}, collectionId={}, elem={})", new Object[] { event.privateVersion, event.owner, event.tool,
            event.collectionId, event.elem });
      }
    }
  }

  @Override
  public void mapCleared(Collection<MapClearedEvent> events) throws RemoteException {
    if (this.log) {
      for (MapClearedEvent event : events) {
        logger.debug("DataStorageAdapter: mapCleared(version={}, owner={}, tool={}, mapId={})", new Object[] { event.privateVersion, event.owner, event.tool, event.mapId });
      }
    }
  }

  @Override
  public void mapPut(Collection<MapPutEvent> events) throws RemoteException {
    if (this.log) {
      for (MapPutEvent event : events) {
        logger.debug("DataStorageAdapter: mapPut(version={}, owner={}, tool={}, mapId={}, key={}, keyReference={}, oldValue={}, oldValueReference={}, newValue={}, newValueReference={}, isAdded={})",
            new Object[] { event.privateVersion, event.owner, event.tool, event.mapId, event.key, event.isKeyReference, event.oldValue, event.isOldValueReference, event.newValue,
                event.isNewValueReference, event.isAdded });
      }
    }
  }

  @Override
  public void mapRemovedElement(Collection<MapRemovedElementEvent> events) throws RemoteException {
    if (this.log) {
      for (MapRemovedElementEvent event : events) {
        logger.debug("DataStorageAdapter: mapRemovedElement(version={}, owner={}, tool={}, mapId={}, key={}, keyReference={}, value={}, valueReference={})", new Object[] { event.privateVersion,
            event.owner, event.tool, event.mapId, event.key, event.isKeyReference, event.value, event.isValueReference });
      }
    }
  }

  @Override
  public void artifactCommited(ArtifactCommitedEvent event) throws RemoteException {
    if (this.log) {
      logger.debug("DataStorageAdapter: artifactCommited(privateVersion={}, id={}, type={}, message={}, version={})", new Object[] { event.privateVersion, event.id, event.type, event.message,
          event.version });
    }
  }

  @Override
  public void propertyAliveSet(Collection<PropertyAliveSetEvent> events) {
    if (this.log) {
      for (PropertyAliveSetEvent event : events) {
        logger.debug("DataStorageAdapter: propertyAliveSet(version={}, owner={}, tool={}, artifactId={}, property={}, alive={})", new Object[] { event.version, event.owner, event.tool,
            event.artifactId, event.propertyName, event.alive });
      }
    }
  }

  @Override
  public void propertyReferenceSet(Collection<PropertyReferenceSetEvent> events) {
    if (this.log) {
      for (PropertyReferenceSetEvent event : events) {
        logger.debug("DataStorageAdapter: propertyReferenceSet(version={}, owner={}, tool={}, id={}, property={}, oldReferenceId={}, newReferenceId={})", new Object[] { event.version, event.owner,
            event.tool, event.artifactId, event.propertyName, event.oldReferenceId, event.newReferenceId });
      }
    }
  }

  @Override
  public void propertyValueSet(Collection<PropertyValueSetEvent> events) {
    if (this.log) {
      for (PropertyValueSetEvent event : events) {
        // Fixed placeholder/argument mismatch: the message had an extra "wasReference={}"
        // placeholder with no corresponding argument (7 args, 8 placeholders).
        // NOTE(review): this callback reads event.origin where the sibling callbacks use
        // event.version — confirm that is intended.
        logger.debug("DataStorageAdapter: propertyValueSet(version={}, owner={}, tool={}, id={}, property={}, oldValue={}, newValue={})", new Object[] { event.origin, event.owner,
            event.tool, event.artifactId, event.propertyName, event.oldValue, event.value });
      }
    }
  }

  @Override
  public void propertyMapsSet(Collection<PropertyMapSetEvent> events) throws RemoteException {
    if (this.log) {
      for (PropertyMapSetEvent event : events) {
        // Fixed missing closing parenthesis in the log message.
        logger.debug("DataStorageAdapter: propertyMapsSet(valueMap={}, referenceMap={})", new Object[] { event.valueSet, event.referenceSet });
      }
    }
  }

  @Override
  public void propertyCommited(PropertyCommitedEvent event) {
    if (this.log) {
      logger.debug("DataStorageAdapter: propertyCommited(privateVersion={}, id={}, property={}, message={}, version={})", new Object[] { event.privateVersion, event.id, event.property, event.message,
          event.version });
    }
  }

  @Override
  public void versionCommited(VersionCommitedEvent event) {
    if (this.log) {
      logger.debug("DataStorageAdapter: versionCommited(privateVersion={}, message={}, version={})", new Object[] { event.privateVersion, event.message, event.version });
    }
  }

  @Override
  public void propertyDeleted(Collection<PropertyDeletedEvent> events) {
    if (this.log) {
      for (PropertyDeletedEvent event : events) {
        logger.debug("DataStorageAdapter: propertyDeleted(version={}, id={}, property={})", new Object[] { event.version, event.artifactId, event.propertyName });
      }
    }
  }

  @Override
  public void versionDeleted(VersionDeletedEvent event) {
    if (this.log) {
      logger.debug("DataStorageAdapter: versionDeleted(version={})", new Object[] { event.privateVersion });
    }
  }

  @Override
  public void privateVersionDeleted(PrivateVersionDeletedEvent event) {
    if (this.log) {
      logger.debug("DataStorageAdapter: privateVersionDeleted(privateVersion={})", new Object[] { event.privateVersion });
    }
  }

  @Override
  public void privateVersionAdded(PrivateVersionAddedEvent event) {
    if (this.log) {
      logger.debug("DataStorageAdapter: privateVersionAdded(privateVersion={})", new Object[] { event.privateVersion });
    }
  }

  @Override
  public void privateVersionRebased(PrivateVersionRebasedEvent event) throws RemoteException {
    if (this.log) {
      logger.debug("DataStorageAdapter: privateVersionRebased(privateVersion={}, oldBaseVersion={}, newBaseVersion={})", new Object[] { event.privateVersion, event.oldBaseVersion,
          event.newBaseVersion });
    }
  }

  @Override
  public void privateVersionParentSet(PrivateVersionParentSetEvent event) throws RemoteException {
    if (this.log) {
      // Fixed placeholder/argument mismatch (two args, one placeholder) and aligned the
      // logged name with the callback name.
      logger.debug("DataStorageAdapter: privateVersionParentSet(privateVersion={}, newParent={})", new Object[] { event.privateVersion, event.newParent });
    }
  }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.daemon.lambda;
import com.intellij.codeInsight.daemon.LightDaemonAnalyzerTestCase;
import com.intellij.codeInspection.LocalInspectionTool;
import com.intellij.codeInspection.deadCode.UnusedDeclarationInspection;
import com.intellij.codeInspection.uncheckedWarnings.UncheckedWarningLocalInspection;
import com.intellij.codeInspection.unusedImport.UnusedImportLocalInspection;
import com.intellij.openapi.projectRoots.JavaSdkVersion;
import com.intellij.openapi.roots.LanguageLevelProjectExtension;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.testFramework.IdeaTestUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
//javac option to dump bounds: -XDdumpInferenceGraphsTo=
public class GenericsHighlighting8Test extends LightDaemonAnalyzerTestCase {
@NonNls private static final String BASE_PATH = "/codeInsight/daemonCodeAnalyzer/genericsHighlighting8";
@Override
protected void setUp() throws Exception {
    super.setUp();
    // Unused-declaration analysis is needed by the fixtures in addition to the local
    // inspections returned from configureLocalInspectionTools().
    enableInspectionTool(new UnusedDeclarationInspection());
}
@NotNull
@Override
protected LocalInspectionTool[] configureLocalInspectionTools() {
    // The fixtures assert unchecked warnings and unused-import highlighting, so both
    // inspections must be active for every test in this class.
    return new LocalInspectionTool[]{new UncheckedWarningLocalInspection(), new UnusedImportLocalInspection()};
}
public void testReferenceTypeParams() {
doTest();
}
public void testTypeParameterBoundsList() {
doTest();
}
public void testClassInheritance() {
doTest();
}
public void testTypeInference() {
doTest();
}
public void testRaw() {
doTest(true);
}
public void testExceptions() {
doTest();
}
public void testExplicitMethodParameters() {
doTest();
}
public void testInferenceWithBounds() {
doTest();
}
public void testInferenceWithSuperBounds() {
doTest();
}
public void testInferenceWithUpperBoundPromotion() {
doTest();
}
public void testVariance() {
doTest();
}
public void testForeachTypes() {
doTest();
}
public void testRawOverridingMethods() {
doTest();
}
public void testAutoboxing() {
doTest();
}
public void testAutoboxingMethods() {
doTest();
}
public void testAutoboxingConstructors() {
doTest();
}
public void testEnumWithAbstractMethods() {
doTest();
}
public void testEnum() { doTest(); }
public void testEnum56239() {
doTest();
}
public void testSameErasure() {
doTest();
}
public void testMethods() {
doTest();
}
public void testFields() {
doTest();
}
public void testStaticImports() {
doTest(true);
}
public void testUncheckedCasts() {
doTest(true);
}
public void testUncheckedOverriding() {
doTest(true);
}
public void testWildcardTypes() {
doTest(true);
}
public void testConvertibleTypes() {
doTest(true);
}
public void testIntersectionTypes() {
doTest(true);
}
public void testVarargs() {
doTest(true);
}
public void testTypeArgsOnRaw() {
doTest();
}
public void testConditionalExpression() {
doTest();
}
public void testUnused() {
doTest(true);
}
public void testIDEADEV7337() {
doTest(true);
}
public void testIDEADEV10459() {
doTest(true);
}
public void testIDEADEV12951() {
doTest(true);
}
public void testIDEADEV13011() {
doTest(true);
}
public void testIDEADEV14006() {
doTest(true);
}
public void testIDEADEV14103() {
doTest(true);
}
public void testIDEADEV15534() {
doTest(true);
}
public void testIDEADEV23157() {
doTest(true);
}
public void testIDEADEV24166() {
doTest(true);
}
public void testIDEADEV57343() {
doTest();
}
public void testSOE() {
doTest(true);
}
public void testGenericExtendException() {
doTest();
}
public void testSameErasureDifferentReturnTypes() {
doTest();
}
public void testDeepConflictingReturnTypes() {
doTest();
}
public void testInheritFromTypeParameter() {
doTest();
}
public void testAnnotationsAsPartOfModifierList() {
doTest();
}
public void testImplementAnnotation() {
doTest();
}
public void testOverrideAtLanguageLevel6() {
doTest();
}
public void testSuperMethodCallWithErasure() {
doTest();
}
public void testWildcardCastConversion() {
doTest();
}
public void testTypeWithinItsWildcardBound() {
doTest();
}
public void testMethodSignatureEquality() {
doTest();
}
public void testInnerClassRef() {
doTest();
}
public void testPrivateInnerClassRef() {
doTest();
}
public void testWideningCastToTypeParam() {
doTest();
}
public void testCapturedWildcardAssignments() {
doTest();
}
public void testTypeParameterBoundVisibility() {
doTest();
}
public void testUncheckedWarningsLevel6() {
doTest(true);
}
public void testIDEA77991() {
doTest();
}
public void testIDEA80386() {
doTest();
}
public void testIDEA66311() {
doTest();
}
public void testIDEA67672() {
doTest();
}
public void testIDEA88895() {
doTest();
}
public void testIDEA67667() {
doTest();
}
public void testIDEA66311_16() {
doTest();
}
public void testIDEA76283() {
doTest();
}
public void testIDEA74899() {
doTest();
}
public void testIDEA63291() {
doTest();
}
public void testIDEA72912() {
doTest();
}
public void testIllegalGenericTypeInInstanceof() {
doTest();
}
public void testIDEA57339() {
doTest();
}
public void testIDEA57340() {
doTest();
}
public void testIDEA89771() {
doTest();
}
public void testIDEA89801() {
doTest();
}
public void testIDEA67681() {
doTest();
}
public void testIDEA67599() {
doTest();
}
public void testIDEA57668() {
doTest();
}
public void testIDEA57667() {
doTest();
}
public void testIDEA57650() {
doTest();
}
public void testIDEA57378() {
doTest();
}
public void testIDEA57557() {
doTest();
}
public void testIDEA57563() {
doTest();
}
public void testIDEA57275() {
doTest();
}
public void testIDEA57533() {
doTest();
}
public void testIDEA57509() {
doTest();
}
public void testIDEA57410() {
doTest();
}
public void testIDEA57411() {
doTest();
}
public void testIDEA57484() {
doTest();
}
public void testIDEA57485() {
doTest();
}
public void testIDEA57486() {
doTest();
}
//compiles with java 6
public void _testIDEA57492() {
doTest();
}
//compiles with java 6
public void _testIDEA57493() {
doTest();
}
public void testIDEA57495() {
doTest();
}
public void testIDEA57494() {
doTest();
}
public void testIDEA57496() {
doTest();
}
public void testIDEA57264() {
doTest();
}
public void testIDEA57315() {
doTest();
}
public void testIDEA57346() {
doTest();
}
public void testIDEA57284() {
doTest();
}
public void testIDEA57286() {
doTest();
}
public void testIDEA57307() {
doTest(true);
}
public void testIDEA57308() {
doTest();
}
public void testIDEA57310() {
doTest();
}
public void testIDEA57311() {
doTest();
}
public void testIDEA57309() {
doTest();
}
public void testIDEA90802() {
doTest();
}
public void testIDEA70370() {
doTest(true);
}
public void testInaccessibleThroughWildcard() {
doTest();
}
public void testInconvertibleTypes() {
doTest();
}
public void testIncompatibleReturnType() {
doTest();
}
public void testContinueInferenceAfterFirstRawResult() {
doTest();
}
public void testDoNotAcceptLowerBoundIfRaw() {
doTest();
}
public void testStaticOverride() {
doTest();
}
public void testTypeArgumentsGivenOnRawType() {
doTest();
}
public void testSelectFromTypeParameter() {
doTest();
}
public void testTypeArgumentsGivenOnAnonymousClassCreation() {
doTest();
}
public void testIDEA94011() {
doTest();
}
public void testDifferentTypeParamsInOverloadedMethods() {
doTest(true);
}
public void testIDEA91626() {
doTest(true);
}
public void testIDEA92022() {
doTest();
}
public void testRawOnParameterized() {
doTest();
}
public void testFailedInferenceWithBoxing() {
doTest();
}
public void testFixedFailedInferenceWithBoxing() {
doTest();
}
public void testInferenceWithBoxingCovariant() {
doTest();
}
public void testSuperWildcardIsNotWithinItsBound() {
doTest();
}
public void testSpecificReturnType() {
doTest();
}
public void testParameterizedParameterBound() {
doTest();
}
public void testInstanceClassInStaticContextAccess() {
doTest();
}
public void testFlattenIntersectionType() {
doTest();
}
public void testIDEA97276() {
doTest();
}
public void testWildcardsBoundsIntersection() {
doTest();
}
public void testOverrideWithMoreSpecificReturn() {
doTest();
}
public void testIDEA97888() {
doTest();
}
public void testMethodCallParamsOnRawType() {
doTest();
}
public void testIDEA98421() {
doTest();
}
public void testErasureTypeParameterBound() {
doTest();
}
public void testThisAsAccessObject() {
doTest();
}
public void testIDEA67861() {
doTest();
}
public void testIDEA67597() {
doTest();
}
public void testIDEA57539() {
doTest();
}
public void testIDEA67570() {
doTest();
}
public void testIDEA99061() {
doTest();
}
public void testIDEA99347() {
doTest();
}
public void testIDEA86875() {
doTest();
}
public void testIDEA103760(){
doTest();
}
public void testIDEA105846(){
doTest();
}
public void testIDEA105695(){
doTest();
}
public void testIDEA104992(){
doTest();
}
public void testIDEA57446(){
doTest();
}
public void testIDEA67677(){
doTest();
}
public void testIDEA67798(){
doTest();
}
public void testIDEA57534(){
doTest();
}
public void testIDEA57482(){
doTest();
}
public void testIDEA67577(){
doTest();
}
public void testIDEA57413(){
doTest();
}
public void testIDEA57265(){
doTest();
}
public void testIDEA57271(){
doTest();
}
public void testIDEA57272(){
doTest();
}
public void testIDEA57285(){
doTest();
}
public void testIDEA65066(){
doTest();
}
public void testIDEA67998(){
doTest();
}
public void testIDEA18425(){
doTest();
}
public void testIDEA27080(){
doTest();
}
public void testIDEA22079(){
doTest();
}
public void testIDEA21602(){
doTest();
}
public void testIDEA21602_7(){
doTest();
}
public void testIDEA21597() throws Exception {
doTest();
}
public void testIDEA20573() throws Exception {
doTest();
}
public void testIDEA20244() throws Exception {
doTest();
}
public void testIDEA22005() throws Exception {
doTest();
}
public void testIDEA57259() throws Exception {
doTest();
}
public void testIDEA107957() throws Exception {
doTest();
}
public void testIDEA109875() throws Exception {
doTest();
}
public void testIDEA106964() throws Exception {
doTest();
}
public void testIDEA107782() throws Exception {
doTest();
}
public void testInheritedWithDifferentArgsInTypeParams() throws Exception {
doTest();
}
public void testIllegalForwardReferenceInTypeParameterDefinition() throws Exception {
doTest();
}
public void testIDEA57877() throws Exception {
doTest();
}
public void testIDEA110568() throws Exception {
doTest();
}
public void testSelfRef() throws Exception {
doTest();
}
public void testTypeParamsCyclicInference() throws Exception {
doTest();
}
public void testCaptureTopLevelWildcardsForConditionalExpression() throws Exception {
doTest();
}
public void testGenericsOverrideMethodInRawInheritor() throws Exception {
doTest();
}
public void testIDEA107654() throws Exception {
doTest();
}
public void testIDEA55510() throws Exception {
doTest();
}
public void testIDEA27185(){
doTest();
}
public void testIDEA67571(){
doTest();
}
public void testTypeArgumentsOnRawType(){
doTest();
}
public void testTypeArgumentsOnRawType17(){
doTest();
}
public void testWildcardsOnRawTypes() {
doTest();
}
public void testDisableWithinBoundsCheckForSuperWildcards() {
doTest();
}
public void testIDEA108287() throws Exception {
doTest();
}
public void testIDEA77128() throws Exception {
doTest();
}
public void testDisableCastingToNestedWildcards() throws Exception {
doTest();
}
public void testBooleanInferenceFromIfCondition() throws Exception {
doTest();
}
public void testMethodCallOnRawTypesExtended() throws Exception {
doTest();
}
public void testIDEA104100() {
doTest();
}
public void testIDEA104160() {
doTest();
}
public void testSOEInLeastUpperClass() {
doTest();
}
public void testIDEA57334() {
doTest();
}
public void testIDEA57325() {
doTest();
}
public void testIDEA67835() {
doTest();
}
public void testIDEA67744() {
doTest();
}
public void testIDEA67682() {
doTest();
}
public void testIDEA57391() {
doTest();
}
public void testIDEA110869() {
doTest();
}
public void testIDEA110947() { doTest(false); }
public void testIDEA112122() {
doTest();
}
public void testNoInferenceFromTypeCast() {
doTest();
}
public void testCaptureWildcardsInTypeCasts() {
doTest();
}
public void testIDEA111085() {
doTest();
}
public void testIDEA109556() {
doTest();
}
public void testIDEA107440() {
doTest();
}
public void testIDEA57289() {
doTest();
}
public void testIDEA57439() {
doTest();
}
public void testIDEA57312() {
doTest();
}
public void testIDEA67865() {
doTest();
}
public void testBoxingSpecific() {
doTest();
}
public void testIDEA67843() { //fixme need to change test
doTest();
}
public void testAmbiguousTypeParamVsConcrete() {
doTest();
}
public void testRawAssignments() throws Exception {
doTest();
}
public void testIDEA87860() throws Exception {
doTest();
}
public void testIDEA114797() throws Exception {
doTest();
}
public void testCastToIntersectionType() throws Exception {
doTest();
}
public void testCastToIntersection() throws Exception {
doTest();
}
public void testIDEA122401() throws Exception {
doTest();
}
public void testCaptureInsideNestedCalls() throws Exception {
doTest();
}
public void testSuperWildcardWithBoundPromotion() { doTest();}
public void testErasure() throws Exception { doTest(); }
public void testWildcardBoundsCombination() throws Exception {
doTest();
}
public void testIDEA128333() throws Exception {
doTest();
}
public void testIDEA78402() { doTest(); }
public void testUncheckedWarningInsideLambdaReturnStatement() throws Exception {
doTest(true);
}
public void testInferredParameterInBoundsInRecursiveGenerics() {
doTest(false);
}
public void testSuperWildcardCapturedSuperExtendsWildcardCapturedExtends() throws Exception {
doTest(false);
}
public void testRejectContradictingEqualsBounds() throws Exception {
doTest(false);
}
public void testRejectEqualsBoundsContradictingLowerBound() throws Exception {
doTest(false);
}
public void testSuperInterfaceMethodCalledByMatterOfInterface() throws Exception {
doTest(false);
}
private void doTest() {
doTest(false);
}
private void doTest(boolean warnings) {
LanguageLevelProjectExtension.getInstance(getJavaFacade().getProject()).setLanguageLevel(LanguageLevel.JDK_1_8);
IdeaTestUtil.setTestVersion(JavaSdkVersion.JDK_1_8, getModule(), getTestRootDisposable());
doTest(BASE_PATH + "/" + getTestName(false) + ".java", warnings, false);
}
public void testIDEA67584() throws Exception {
doTest();
}
public void testIDEA113225() throws Exception {
doTest();
}
public void testIDEA139069() throws Exception {
doTest();
}
public void testIDEA67745() throws Exception {
doTest();
}
public void testIDEA57313() throws Exception {
doTest();
}
public void testIDEA57387() throws Exception {
doTest();
}
public void testIDEA57314() throws Exception {
doTest();
}
public void testIDEA57322() throws Exception {
doTest();
}
public void testIDEA57362() throws Exception {
doTest();
}
public void testIDEA57320() throws Exception {
doTest();
}
public void testIDEA139090() throws Exception {
doTest();
}
public void testIDEA57502() throws Exception {
doTest();
}
public void testIDEA67746() throws Exception {
doTest();
}
public void testIDEA67592() throws Exception {
doTest();
}
public void testIDEA93713() throws Exception {
doTest();
}
public void testIDEA107713() throws Exception {
doTest();
}
public void testExceptionCollectionWithLambda() throws Exception {
doTest();
}
public void testUncheckedWarningsWhenInferredTypeLeadsToRawRoGenericAssignment() throws Exception {
doTest(true);
}
public void testExpectedTypeBasedOnArrayCreationWithoutExplicitType() throws Exception {
doTest();
}
public void testIDEA148348() throws Exception {
doTest();
}
public void testIDEA148361() throws Exception {
doTest();
}
public void testIDEA134059() throws Exception {
doTest();
}
public void testIDEA139222() throws Exception {
doTest();
}
public void testIDEA139156() throws Exception {
doTest();
}
public void testIDEA139169() throws Exception {
doTest();
}
public void testIDEA131686() throws Exception {
doTest();
}
public void testIDEA56754() throws Exception {
doTest();
}
public void testAccessClassForWildcardCaptureType() throws Exception {
doTest();
}
public void testDistinguishTypeArgs() throws Exception {
doTest();
}
public void testRecursiveCapturedWildcardTypes() throws Exception {
doTest();
}
public void testRecursiveCapturedWildcardTypesIDEA139167() throws Exception {
doTest();
}
public void testRecursiveCapturedWildcardTypesIDEA139157() throws Exception {
doTest();
}
public void testIDEA146897() throws Exception {
doTest();
}
public void testIDEA139096() throws Exception {
doTest();
}
public void testCastingCapturedWildcardToPrimitive() throws Exception {
doTest();
}
public void testCastingCapturedWildcardToArray() throws Exception {
doTest();
}
public void testCheckUncheckedAssignmentDuringVariablesResaolution() throws Exception {
doTest(true);
}
public void testRetrieveInferenceErrorsFromContainingCallsIfCurrentDoesNotProvideAny() throws Exception {
doTest();
}
public void testForeachOverCapturedWildcardWithCollectionUpperBound() throws Exception {
doTest();
}
public void testCapturedWildcardWithPrimitiveTypesChecks() throws Exception {
doTest();
}
public void testCapturedWildcardPackageLocalAccess() throws Exception {
doTest();
}
public void testCapturedWildcardPassedThroughMethodCallChain() throws Exception {
doTest();
}
public void testIDEA152179() throws Exception {
doTest();
}
public void testLooseInvocationContextForProperPrimitiveTypes() throws Exception {
doTest();
}
public void testUncheckedWarningsInsideIncorporationPhase() throws Exception {
doTest();
}
public void testUnifiedSubstitutorUpInTheHierarchy() throws Exception {
doTest();
}
public void testNestedCaptures() throws Exception {
doTest();
}
public void testErasureOfReturnTypeOfNonGenericMethod() throws Exception {
doTest();
}
public void testUncheckedCastWithCapturedWildcards() throws Exception {
doTest(true);
}
public void testReifiableCapturedWildcards() throws Exception {
doTest(true);
}
public void testUncheckedWarningsInsideLambda() throws Exception {
doTest(true);
}
public void testLowerBoundOfCapturedWildcardInSubtypingConstraint() throws Exception {
doTest(true);
}
public void testMembersContainedInCapturedWildcardType() throws Exception {
doTest();
}
public void testTypeParameterBoundsWithSubstitutionWhenMethodHierarchyIsChecked() throws Exception {
doTest();
}
public void testBoundsPromotionForDerivedType() throws Exception {
doTest();
}
public void testSameErasureForStaticMethodsInInterfaces() throws Exception {
doTest();
}
}
| |
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.litho.widget;
import static com.facebook.yoga.YogaAlign.CENTER;
import static com.facebook.yoga.YogaEdge.START;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.ColorFilter;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PixelFormat;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.util.TypedValue;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListPopupWindow;
import android.widget.PopupWindow;
import androidx.annotation.ColorInt;
import androidx.annotation.RequiresApi;
import com.facebook.litho.AccessibilityRole;
import com.facebook.litho.ClickEvent;
import com.facebook.litho.Component;
import com.facebook.litho.ComponentContext;
import com.facebook.litho.EventHandler;
import com.facebook.litho.Row;
import com.facebook.litho.StateValue;
import com.facebook.litho.annotations.FromEvent;
import com.facebook.litho.annotations.LayoutSpec;
import com.facebook.litho.annotations.OnCreateInitialState;
import com.facebook.litho.annotations.OnCreateLayout;
import com.facebook.litho.annotations.OnEvent;
import com.facebook.litho.annotations.OnUpdateState;
import com.facebook.litho.annotations.Param;
import com.facebook.litho.annotations.Prop;
import com.facebook.litho.annotations.PropDefault;
import com.facebook.litho.annotations.ResType;
import com.facebook.litho.annotations.State;
import com.facebook.yoga.YogaJustify;
import java.util.List;
import javax.annotation.Nullable;
/**
* A simple spinner (dropdown) component. Derived from the standard Android {@link
* android.widget.Spinner}
*
* <p>Additionally added logic to flip the caret vertically once menu is shown.
*
* <p>If no optional values are provided the component will look like it's material design
* counterpart.
*
* @uidocs
* @prop-required selectedOption The initially selected option for the spinner
* @prop-required options The options available from the dropdown
* @prop-required onItemSelectedListener The listener for dropdown selections
* @prop-optional itemLayout The item layout for the drop down list
* android.R.layout.simple_dropdown_item_1line is used by default
* @prop-optional caret The spinner caret icon i.e. arrow at the far right. Notice that this
* drawable will be flipped vertically when the dropdown menu is shown
* @prop-optional selectedTextSize The text size of the selected value
* @prop-optional selectedTextColor The text color of the selected value
*/
@LayoutSpec(events = ItemSelectedEvent.class)
@RequiresApi(Build.VERSION_CODES.HONEYCOMB)
public class SpinnerSpec {
  // Start-edge padding of the spinner row, in dips.
  private static final float MARGIN_SMALL = 8;
  private static final int DEFAULT_CARET_COLOR = 0x8A000000; // 54% Black
  private static final int DEFAULT_TEXT_SIZE_SP = 16;
  // Minimum row height in dips; also reused as the caret icon's width and height.
  private static final int SPINNER_HEIGHT = 48;
  @PropDefault static final int itemLayout = android.R.layout.simple_dropdown_item_1line;
  // -1 is a sentinel meaning "not specified"; replaced with DEFAULT_TEXT_SIZE_SP in onCreateLayout.
  @PropDefault static final float selectedTextSize = -1;
  @PropDefault static final int selectedTextColor = 0xDE000000; // 87% Black
  // Seeds the selection state with the initially selected option prop.
  @OnCreateInitialState
  static void onCreateInitialState(StateValue<String> selection, @Prop String selectedOption) {
    selection.set(selectedOption);
  }
  /**
   * Builds the collapsed spinner row: the selected value on the left, the caret icon on the
   * right. Unset optional props are replaced here with their material-style defaults.
   */
  @OnCreateLayout
  static Component onCreateLayout(
      ComponentContext c,
      @State String selection,
      @State boolean isShowingDropDown,
      @Prop(resType = ResType.DIMEN_TEXT, optional = true) float selectedTextSize,
      @Prop(resType = ResType.COLOR, optional = true) int selectedTextColor,
      @Prop(resType = ResType.DRAWABLE, optional = true) @Nullable Drawable caret) {
    caret = caret == null ? new CaretDrawable(c.getAndroidContext(), DEFAULT_CARET_COLOR) : caret;
    // -1 is the @PropDefault sentinel for "no explicit text size".
    selectedTextSize =
        selectedTextSize == -1
            ? spToPx(c.getAndroidContext(), DEFAULT_TEXT_SIZE_SP)
            : selectedTextSize;
    return Row.create(c)
        .minHeightDip(SPINNER_HEIGHT)
        .justifyContent(YogaJustify.SPACE_BETWEEN)
        .paddingDip(START, MARGIN_SMALL)
        .backgroundAttr(android.R.attr.selectableItemBackground)
        .clickHandler(Spinner.onClick(c))
        .child(createSelectedItemText(c, selection, (int) selectedTextSize, selectedTextColor))
        .child(createCaret(c, caret, isShowingDropDown))
        .accessibilityRole(AccessibilityRole.DROP_DOWN_LIST)
        .build();
  }
  // The caret icon; flipped vertically (scale -1) while the dropdown menu is showing.
  private static Component createCaret(
      ComponentContext c, Drawable icon, boolean isShowingDropDown) {
    return Image.create(c)
        .drawable(icon)
        .widthDip(SPINNER_HEIGHT)
        .heightDip(SPINNER_HEIGHT)
        .flexShrink(0)
        .flexGrow(0)
        .scale(isShowingDropDown ? -1 : 1)
        .build();
  }
  // Text component showing the currently selected option, vertically centered in the row.
  private static Component createSelectedItemText(
      ComponentContext c, String selection, int textSizePx, @ColorInt int textColor) {
    return Text.create(c)
        .text(selection)
        .alignSelf(CENTER)
        .textSizePx(textSizePx)
        .textColor(textColor)
        .build();
  }
  /**
   * Opens the dropdown as a modal {@link ListPopupWindow} anchored to the clicked row.
   * Selecting an item dispatches ItemSelectedEvent, updates the selection state and dismisses
   * the popup; dismissal (for any reason) resets the isShowingDropDown state so the caret flips
   * back.
   */
  @OnEvent(ClickEvent.class)
  static void onClick(
      final ComponentContext c,
      @FromEvent final View view,
      @Prop final List<String> options,
      @Prop(resType = ResType.INT, optional = true) int itemLayout) {
    final EventHandler eventHandler = Spinner.getItemSelectedEventHandler(c);
    final ListPopupWindow popup = new ListPopupWindow(c.getAndroidContext());
    popup.setAnchorView(view);
    popup.setModal(true);
    popup.setPromptPosition(ListPopupWindow.POSITION_PROMPT_ABOVE);
    popup.setAdapter(new ArrayAdapter<>(c.getAndroidContext(), itemLayout, options));
    popup.setOnItemClickListener(
        new AdapterView.OnItemClickListener() {
          @Override
          public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            final String newSelection = options.get(position);
            // eventHandler is null when no listener prop was bound to this component.
            if (eventHandler != null) {
              Spinner.dispatchItemSelectedEvent(eventHandler, newSelection);
            }
            popup.dismiss();
            Spinner.updateSelectionSync(c, newSelection);
          }
        });
    popup.setOnDismissListener(
        new PopupWindow.OnDismissListener() {
          @Override
          public void onDismiss() {
            Spinner.updateIsShowingDropDownSync(c, false);
          }
        });
    popup.show();
    Spinner.updateIsShowingDropDownSync(c, true);
  }
  @OnUpdateState
  static void updateSelection(StateValue<String> selection, @Param String newSelection) {
    selection.set(newSelection);
  }
  @OnUpdateState
  static void updateIsShowingDropDown(
      StateValue<Boolean> isShowingDropDown, @Param boolean isShowing) {
    isShowingDropDown.set(isShowing);
  }
  // Converts a scaled-pixel (sp) text size to raw pixels via the device display metrics.
  private static float spToPx(Context context, int spValue) {
    return TypedValue.applyDimension(
        TypedValue.COMPLEX_UNIT_SP, spValue, context.getResources().getDisplayMetrics());
  }
  // Converts density-independent pixels (dp) to raw pixels.
  private static float dpToPx(Context context, int dpValue) {
    return TypedValue.applyDimension(
        TypedValue.COMPLEX_UNIT_DIP, dpValue, context.getResources().getDisplayMetrics());
  }
  /** Draws a simple triangle caret depicting if the Spinner is expanded or collapsed. */
  private static class CaretDrawable extends Drawable {
    private static final int CARET_WIDTH_DP = 5;
    private static final int CARET_HEIGHT_DP = 3;
    private final Paint paint = new Paint();
    // Half-extents of the triangle in pixels, converted from dp once at construction.
    private final int mWidth;
    private final int mHeight;
    // Triangle geometry
    private final Path mTrianglePath = new Path();
    private final Point mP1 = new Point();
    private final Point mP2 = new Point();
    private final Point mP3 = new Point();
    CaretDrawable(Context context, @ColorInt int caretColor) {
      paint.setColor(caretColor);
      paint.setFlags(Paint.ANTI_ALIAS_FLAG);
      mWidth = (int) dpToPx(context, CARET_WIDTH_DP);
      mHeight = (int) dpToPx(context, CARET_HEIGHT_DP);
    }
    // Recomputes the triangle path whenever the drawable's bounds change.
    @Override
    protected void onBoundsChange(Rect bounds) {
      super.onBoundsChange(bounds);
      final int cx = bounds.centerX();
      final int cy = bounds.centerY();
      // Setup points
      mP1.set(cx - mWidth, cy - mHeight);
      mP2.set(cx + mWidth, cy - mHeight);
      mP3.set(cx, cy + mHeight);
      // Setup triangle
      mTrianglePath.reset();
      mTrianglePath.setFillType(Path.FillType.EVEN_ODD);
      mTrianglePath.moveTo(mP1.x, mP1.y);
      mTrianglePath.lineTo(mP2.x, mP2.y);
      mTrianglePath.lineTo(mP3.x, mP3.y);
      mTrianglePath.close();
    }
    @Override
    public void draw(Canvas canvas) {
      canvas.drawPath(mTrianglePath, paint);
    }
    // Alpha and color filters are deliberately unsupported for this internal-only drawable.
    @Override
    public void setAlpha(int alpha) {
      throw new RuntimeException("Not supported");
    }
    @Override
    public void setColorFilter(@Nullable ColorFilter colorFilter) {
      throw new RuntimeException("Not supported");
    }
    @Override
    public int getOpacity() {
      return PixelFormat.OPAQUE;
    }
  }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.diff.merge;
import com.intellij.diff.DiffContext;
import com.intellij.diff.FrameDiffTool;
import com.intellij.diff.comparison.ByLine;
import com.intellij.diff.comparison.ComparisonMergeUtil;
import com.intellij.diff.comparison.ComparisonPolicy;
import com.intellij.diff.comparison.DiffTooBigException;
import com.intellij.diff.comparison.iterables.FairDiffIterable;
import com.intellij.diff.contents.DiffContent;
import com.intellij.diff.contents.DocumentContent;
import com.intellij.diff.fragments.MergeLineFragment;
import com.intellij.diff.requests.ContentDiffRequest;
import com.intellij.diff.requests.SimpleDiffRequest;
import com.intellij.diff.tools.simple.ThreesideTextDiffViewerEx;
import com.intellij.diff.tools.util.DiffNotifications;
import com.intellij.diff.tools.util.KeyboardModifierListener;
import com.intellij.diff.tools.util.base.TextDiffViewerUtil;
import com.intellij.diff.util.*;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.UndoConfirmationPolicy;
import com.intellij.openapi.command.undo.*;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.diff.DiffBundle;
import com.intellij.openapi.editor.Caret;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.fileEditor.TextEditor;
import com.intellij.openapi.fileEditor.impl.text.TextEditorProvider;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.ui.popup.BalloonBuilder;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.BooleanGetter;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.ui.HyperlinkAdapter;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.hash.HashSet;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.CalledInAwt;
import org.jetbrains.annotations.CalledWithWriteLock;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkListener;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.List;
import java.util.Set;
public class TextMergeTool implements MergeTool {
  // The tool itself is stateless; all per-merge state lives in the viewer it creates.
  public static final TextMergeTool INSTANCE = new TextMergeTool();
  public static final Logger LOG = Logger.getInstance(TextMergeTool.class);
  /**
   * Creates the three-pane merge UI for the given request. Only invoked when {@link #canShow}
   * returned true, so the cast to TextMergeRequest is safe.
   */
  @NotNull
  @Override
  public MergeViewer createComponent(@NotNull MergeContext context, @NotNull MergeRequest request) {
    return new TextMergeViewer(context, ((TextMergeRequest)request));
  }
  /** This tool handles only text (document-based) merge requests. */
  @Override
  public boolean canShow(@NotNull MergeContext context, @NotNull MergeRequest request) {
    return request instanceof TextMergeRequest;
  }
  public static class TextMergeViewer implements MergeViewer {
    @NotNull private final MergeContext myMergeContext;
    @NotNull private final TextMergeRequest myMergeRequest;
    // Diff-level wrappers around the merge context/request, fed to the inner three-side viewer.
    @NotNull private final DiffContext myDiffContext;
    @NotNull private final ContentDiffRequest myDiffRequest;
    @NotNull private final MyThreesideViewer myViewer;
    public TextMergeViewer(@NotNull MergeContext context, @NotNull TextMergeRequest request) {
      myMergeContext = context;
      myMergeRequest = request;
      myDiffContext = new MergeUtil.ProxyDiffContext(myMergeContext);
      myDiffRequest = new SimpleDiffRequest(myMergeRequest.getTitle(),
                                            getDiffContents(myMergeRequest),
                                            getDiffContentTitles(myMergeRequest));
      // Left and right revisions are read-only; only the middle (output) document is editable.
      myDiffRequest.putUserData(DiffUserDataKeys.FORCE_READ_ONLY_CONTENTS, new boolean[]{true, false, true});
      myViewer = new MyThreesideViewer(myDiffContext, myDiffRequest);
    }
    /** Orders contents as shown in the UI: left revision, editable output in the middle, right revision. */
    @NotNull
    private static List<DiffContent> getDiffContents(@NotNull TextMergeRequest mergeRequest) {
      List<DocumentContent> contents = mergeRequest.getContents();
      final DocumentContent left = ThreeSide.LEFT.select(contents);
      final DocumentContent right = ThreeSide.RIGHT.select(contents);
      final DocumentContent output = mergeRequest.getOutputContent();
      return ContainerUtil.<DiffContent>list(left, output, right);
    }
    /** Replaces the base-side title with "Result", since the middle pane shows the merge output. */
    @NotNull
    private static List<String> getDiffContentTitles(@NotNull TextMergeRequest mergeRequest) {
      List<String> titles = MergeUtil.notNullizeContentTitles(mergeRequest.getContentTitles());
      titles.set(ThreeSide.BASE.getIndex(), "Result");
      return titles;
    }
    //
    // Impl
    //
    // The MergeViewer surface mostly delegates to the inner three-side diff viewer.
    @NotNull
    @Override
    public JComponent getComponent() {
      return myViewer.getComponent();
    }
    @Nullable
    @Override
    public JComponent getPreferredFocusedComponent() {
      return myViewer.getPreferredFocusedComponent();
    }
    /**
     * Initializes the inner viewer and re-exports its status panel and toolbar actions,
     * adding a close handler that asks for confirmation before discarding an unfinished merge.
     */
    @Override
    public ToolbarComponents init() {
      ToolbarComponents components = new ToolbarComponents();
      FrameDiffTool.ToolbarComponents init = myViewer.init();
      components.statusPanel = init.statusPanel;
      components.toolbarActions = init.toolbarActions;
      components.closeHandler = new BooleanGetter() {
        @Override
        public boolean get() {
          // true = OK to close; shows the "exit without applying changes" dialog when needed.
          return MergeUtil.showExitWithoutApplyingChangesDialog(getComponent(), myMergeRequest, myMergeContext);
        }
      };
      return components;
    }
    @Nullable
    @Override
    public Action getResolveAction(@NotNull MergeResult result) {
      return myViewer.getResolveAction(result);
    }
    @Override
    public void dispose() {
      Disposer.dispose(myViewer);
    }
    //
    // Getters
    //
    @NotNull
    public MyThreesideViewer getViewer() {
      return myViewer;
    }
//
// Viewer
//
public class MyThreesideViewer extends ThreesideTextDiffViewerEx {
@NotNull private final ModifierProvider myModifierProvider;
@Nullable private final UndoManager myUndoManager;
// all changes - both applied and unapplied ones
@NotNull private final List<TextMergeChange> myAllMergeChanges = new ArrayList<TextMergeChange>();
private boolean myInitialRediffStarted;
private boolean myInitialRediffFinished;
private boolean myContentModified;
@Nullable private MergeCommandAction myCurrentMergeCommand;
private int myBulkChangeUpdateDepth;
private final Set<TextMergeChange> myChangesToUpdate = new HashSet<TextMergeChange>();
/**
 * Creates the three-side viewer, registers the side-specific apply/ignore
 * shortcut actions on the panel, and hooks undo/redo if an UndoManager exists.
 */
public MyThreesideViewer(@NotNull DiffContext context, @NotNull ContentDiffRequest request) {
super(context, request);
myModifierProvider = new ModifierProvider();
// Without a project, fall back to the application-wide undo stack.
myUndoManager = getProject() != null ? UndoManager.getInstance(getProject()) : UndoManager.getGlobalInstance();
DiffUtil.registerAction(new ApplySelectedChangesAction(Side.LEFT, true), myPanel);
DiffUtil.registerAction(new ApplySelectedChangesAction(Side.RIGHT, true), myPanel);
DiffUtil.registerAction(new IgnoreSelectedChangesAction(Side.LEFT, true), myPanel);
DiffUtil.registerAction(new IgnoreSelectedChangesAction(Side.RIGHT, true), myPanel);
if (myUndoManager != null) {
new UndoRedoAction(true).register();
new UndoRedoAction(false).register();
}
}
@Override
protected void onInit() {
super.onInit();
myModifierProvider.init();
}
@Override
protected void onDispose() {
// Every enterBulkChangeUpdateBlock() must have been balanced by now.
LOG.assertTrue(myBulkChangeUpdateDepth == 0);
super.onDispose();
}
/** Toolbar: scroll toggle, editor settings, pairwise diff views, then the bulk apply actions. */
@NotNull
@Override
protected List<AnAction> createToolbarActions() {
List<AnAction> group = new ArrayList<AnAction>();
group.add(new MyToggleAutoScrollAction());
group.add(myEditorSettingsAction);
group.add(Separator.getInstance());
group.add(new ShowLeftBasePartialDiffAction());
group.add(new ShowBaseRightPartialDiffAction());
group.add(new ShowLeftRightPartialDiffAction());
group.add(Separator.getInstance());
group.add(new ApplyNonConflictsAction());
group.add(new ApplySideNonConflictsAction(Side.LEFT));
group.add(new ApplySideNonConflictsAction(Side.RIGHT));
return group;
}
/** Editor context menu: per-side apply/ignore for the selection, plus the common editor actions. */
@NotNull
@Override
protected List<AnAction> createEditorPopupActions() {
List<AnAction> group = new ArrayList<AnAction>();
group.add(new ApplySelectedChangesAction(Side.LEFT, false));
group.add(new ApplySelectedChangesAction(Side.RIGHT, false));
group.add(new IgnoreSelectedChangesAction(Side.LEFT, false));
group.add(new IgnoreSelectedChangesAction(Side.RIGHT, false));
group.add(Separator.getInstance());
group.addAll(TextDiffViewerUtil.createEditorPopupActions());
return group;
}
// NOTE(review): declared @Nullable but always returns a non-null list here.
@Nullable
@Override
protected List<AnAction> createPopupActions() {
List<AnAction> group = new ArrayList<AnAction>();
group.add(Separator.getInstance());
group.add(new MyToggleAutoScrollAction());
return group;
}
/**
 * Builds the bottom-panel action for finishing the merge with the given result.
 * Shows confirmation dialogs when the chosen result would discard user edits
 * (LEFT/RIGHT after modifications, RESOLVED with unresolved changes, CANCEL),
 * then destroys the change highlighters and completes the merge.
 */
@Nullable
public Action getResolveAction(@NotNull final MergeResult result) {
String caption = MergeUtil.getResolveActionTitle(result, myMergeRequest, myMergeContext);
return new AbstractAction(caption) {
@Override
public void actionPerformed(ActionEvent e) {
// Taking one side wholesale throws away any manual edits - confirm first.
if ((result == MergeResult.LEFT || result == MergeResult.RIGHT) && myContentModified &&
Messages.showYesNoDialog(myPanel.getRootPane(),
DiffBundle.message("merge.dialog.resolve.side.with.discard.message", result == MergeResult.LEFT ? 0 : 1),
DiffBundle.message("merge.dialog.resolve.side.with.discard.title"), Messages.getQuestionIcon()) != Messages.YES) {
return;
}
if (result == MergeResult.RESOLVED) {
// Warn when accepting a result that still has unresolved changes/conflicts.
if ((getChangesCount() != 0 || getConflictsCount() != 0) &&
Messages.showYesNoDialog(myPanel.getRootPane(),
DiffBundle.message("merge.dialog.apply.partially.resolved.changes.confirmation.message", getChangesCount(), getConflictsCount()),
DiffBundle.message("apply.partially.resolved.merge.dialog.title"),
Messages.getQuestionIcon()) != Messages.YES) {
return;
}
}
if (result == MergeResult.CANCEL &&
!MergeUtil.showExitWithoutApplyingChangesDialog(getComponent(), myMergeRequest, myMergeContext)) {
return;
}
destroyChangedBlocks();
myMergeContext.finishMerge(result);
}
};
}
//
// Diff
//
/**
 * Fills the output document with the BASE content inside a single write command,
 * and marks that fill as non-undoable so it cannot be reverted by Ctrl+Z.
 */
private void setInitialOutputContent() {
final Document baseDocument = ThreeSide.BASE.select(myMergeRequest.getContents()).getDocument();
final Document outputDocument = myMergeRequest.getOutputContent().getDocument();
DiffUtil.executeWriteCommand(outputDocument, getProject(), "Init merge content", new Runnable() {
@Override
public void run() {
outputDocument.setText(baseDocument.getCharsSequence());
if (myUndoManager != null) {
DocumentReference ref = DocumentReferenceManager.getInstance().create(outputDocument);
myUndoManager.nonundoableActionPerformed(ref, false);
}
}
});
}
/**
 * Runs the one-shot initial rediff; subsequent calls are no-ops.
 * The {@code trySync} flag is not used by this implementation.
 */
@Override
@CalledInAwt
public void rediff(boolean trySync) {
if (myInitialRediffStarted) return;
myInitialRediffStarted = true;
assert myAllMergeChanges.isEmpty();
doRediff();
}
// Incremental rediff from the base class is not supported - the diff is computed once in doRediff().
@NotNull
@Override
protected Runnable performRediff(@NotNull ProgressIndicator indicator) {
throw new UnsupportedOperationException();
}
/**
 * Kicks off the initial diff computation: snapshots the three contents on the
 * EDT, then computes the merge fragments under a modal progress task and applies
 * the result (or finishes with CANCEL if the user aborts).
 */
@CalledInAwt
private void doRediff() {
myStatusPanel.setBusy(true);
// This is made to reduce unwanted modifications before rediff is finished.
// It could happen between this init() EDT chunk and invokeLater().
getEditor(ThreeSide.BASE).setViewer(true);
setInitialOutputContent();
// we have to collect contents here, because someone can modify document while we're starting rediff
List<DiffContent> contents = myRequest.getContents();
final List<CharSequence> sequences = ContainerUtil.map(contents, new Function<DiffContent, CharSequence>() {
@Override
public CharSequence fun(DiffContent diffContent) {
return ((DocumentContent)diffContent).getDocument().getImmutableCharSequence();
}
});
// Stamp lets apply() detect output modifications made between snapshot and apply.
final long outputModificationStamp = myMergeRequest.getOutputContent().getDocument().getModificationStamp();
// we need invokeLater() here because viewer is partially-initialized (ex: there are no toolbar or status panel)
// user can see this state while we're showing progress indicator, so we want let init() to finish.
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
ProgressManager.getInstance().run(new Task.Modal(getProject(), "Computing differences...", true) {
private Runnable myCallback;
@Override
public void run(@NotNull ProgressIndicator indicator) {
myCallback = doPerformRediff(sequences, outputModificationStamp, indicator);
}
@Override
public void onCancel() {
myMergeContext.finishMerge(MergeResult.CANCEL);
}
@Override
public void onSuccess() {
myCallback.run();
}
});
}
});
}
/**
 * Computes the three-way merge fragments on a background thread.
 * Sequences are ordered [left, base/output, right] (see getDiffContents), so the
 * base (index 1) is compared against left (0) and right (2), and the two fair
 * iterables are combined into merge fragments. Returns the EDT continuation.
 * Falls back to an error/notification runnable on failure.
 */
@NotNull
protected Runnable doPerformRediff(@NotNull List<CharSequence> sequences,
long outputModificationStamp,
@NotNull ProgressIndicator indicator) {
try {
indicator.checkCanceled();
FairDiffIterable fragments1 = ByLine.compareTwoStepFair(sequences.get(1), sequences.get(0), ComparisonPolicy.DEFAULT, indicator);
FairDiffIterable fragments2 = ByLine.compareTwoStepFair(sequences.get(1), sequences.get(2), ComparisonPolicy.DEFAULT, indicator);
List<MergeLineFragment> mergeFragments = ComparisonMergeUtil.buildFair(fragments1, fragments2, indicator);
return apply(mergeFragments, outputModificationStamp);
}
catch (DiffTooBigException e) {
return applyNotification(DiffNotifications.DIFF_TOO_BIG);
}
catch (ProcessCanceledException e) {
// Cancellation must propagate so the Task can invoke onCancel().
throw e;
}
catch (Throwable e) {
LOG.error(e);
return new Runnable() {
@Override
public void run() {
clearDiffPresentation();
myPanel.setErrorContent();
}
};
}
}
/**
 * Builds the EDT continuation that installs the computed merge fragments:
 * creates a TextMergeChange per fragment, restores scroll position, makes the
 * base editor editable again and marks the initial rediff as finished.
 */
@NotNull
private Runnable apply(@NotNull final List<MergeLineFragment> fragments, final long outputModificationStamp) {
return new Runnable() {
@Override
public void run() {
if (myMergeRequest.getOutputContent().getDocument().getModificationStamp() != outputModificationStamp) {
setInitialOutputContent(); // in case if anyone changed output content since init() call. (unlikely, but possible)
}
clearDiffPresentation();
resetChangeCounters();
for (MergeLineFragment fragment : fragments) {
TextMergeChange change = new TextMergeChange(fragment, TextMergeViewer.this);
myAllMergeChanges.add(change);
onChangeAdded(change);
}
myInitialScrollHelper.onRediff();
myContentPanel.repaintDividers();
myStatusPanel.update();
// Undo the temporary read-only mode set in doRediff().
getEditor(ThreeSide.BASE).setViewer(false);
myInitialRediffFinished = true;
}
};
}
/** Destroys all change highlighters and clears the change list. */
protected void destroyChangedBlocks() {
for (TextMergeChange change : myAllMergeChanges) {
change.destroyHighlighter();
}
myAllMergeChanges.clear();
}
//
// Impl
//
/**
 * Reacts to an upcoming edit of the base (output) document: shifts/invalidates
 * the line ranges of all merge changes and, when the edit is a plain user edit
 * (not one of our merge commands), registers an undoable action that restores
 * the corrupted change states on undo.
 */
@Override
@CalledInAwt
protected void onBeforeDocumentChange(@NotNull DocumentEvent e) {
super.onBeforeDocumentChange(e);
// Balanced by exitBulkChangeUpdateBlock() in onDocumentChange().
enterBulkChangeUpdateBlock();
if (myAllMergeChanges.isEmpty()) return;
ThreeSide side = null;
if (e.getDocument() == getEditor(ThreeSide.LEFT).getDocument()) side = ThreeSide.LEFT;
if (e.getDocument() == getEditor(ThreeSide.RIGHT).getDocument()) side = ThreeSide.RIGHT;
if (e.getDocument() == getEditor(ThreeSide.BASE).getDocument()) side = ThreeSide.BASE;
if (side == null) {
LOG.warn("Unknown document changed");
return;
}
if (side != ThreeSide.BASE) {
LOG.error("Non-base side was changed"); // unsupported operation
return;
}
if (myInitialRediffFinished) myContentModified = true;
int line1 = e.getDocument().getLineNumber(e.getOffset());
int line2 = e.getDocument().getLineNumber(e.getOffset() + e.getOldLength()) + 1;
int shift = DiffUtil.countLinesShift(e);
final List<Pair<TextMergeChange, TextMergeChange.State>> corruptedStates = ContainerUtil.newArrayList();
for (TextMergeChange change : myAllMergeChanges) {
TextMergeChange.State oldState = change.processBaseChange(line1, line2, shift);
if (oldState != null) {
// myCurrentMergeCommand handles its own state snapshots; only track plain user edits.
if (myCurrentMergeCommand == null) {
corruptedStates.add(Pair.create(change, oldState));
}
reinstallHighlighter(change); // document state is not updated yet - can't reinstall range here
}
}
if (!corruptedStates.isEmpty() && myUndoManager != null) {
// document undo is registered inside onDocumentChange, so our undo() will be called after its undo().
// thus we can avoid checks for isUndoInProgress() (to avoid modification of the same TextMergeChange by this listener)
myUndoManager.undoableActionPerformed(new BasicUndoableAction(getEditor(ThreeSide.BASE).getDocument()) {
@Override
public void undo() throws UnexpectedUndoException {
enterBulkChangeUpdateBlock();
for (Pair<TextMergeChange, TextMergeChange.State> pair : corruptedStates) {
restoreChangeState(pair.first, pair.second);
}
exitBulkChangeUpdateBlock();
}
@Override
public void redo() throws UnexpectedUndoException {
}
});
}
}
/** Closes the bulk-update block opened in onBeforeDocumentChange(). */
@Override
protected void onDocumentChange(@NotNull DocumentEvent e) {
super.onDocumentChange(e);
exitBulkChangeUpdateBlock();
}
public void repaintDividers() {
myContentPanel.repaintDividers();
}
/**
 * Re-creates the change's editor highlighter. Inside a bulk-update block the
 * work is deferred until exitBulkChangeUpdateBlock() to avoid repeated updates.
 */
@CalledInAwt
public void reinstallHighlighter(@NotNull TextMergeChange change) {
if (myBulkChangeUpdateDepth > 0) {
myChangesToUpdate.add(change);
}
else {
change.doReinstallHighlighter();
}
}
// Bulk-update blocks nest; deferred highlighter updates flush when the depth returns to zero.
@CalledInAwt
public void enterBulkChangeUpdateBlock() {
myBulkChangeUpdateDepth++;
}
@CalledInAwt
public void exitBulkChangeUpdateBlock() {
myBulkChangeUpdateDepth--;
LOG.assertTrue(myBulkChangeUpdateDepth >= 0);
if (myBulkChangeUpdateDepth == 0) {
for (TextMergeChange change : myChangesToUpdate) {
change.doReinstallHighlighter();
}
myChangesToUpdate.clear();
}
}
/**
 * Updates the change counters when a change flips between resolved/unresolved.
 * When the last change is resolved, shows a balloon over the base editor with
 * a link that finishes the merge with RESOLVED.
 */
private void onChangeResolved(@NotNull TextMergeChange change) {
if (change.isResolved()) {
onChangeRemoved(change);
}
else {
onChangeAdded(change);
}
if (getChangesCount() == 0 && getConflictsCount() == 0) {
LOG.assertTrue(getFirstUnresolvedChange(true, null) == null);
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
String message = "All changes have been processed.<br><a href=\"\">Save changes and finish merging</a>";
HyperlinkListener listener = new HyperlinkAdapter() {
@Override
protected void hyperlinkActivated(HyperlinkEvent e) {
destroyChangedBlocks();
myMergeContext.finishMerge(MergeResult.RESOLVED);
}
};
JComponent component = getEditor(ThreeSide.BASE).getComponent();
Point point = new Point(component.getWidth() / 2, JBUI.scale(5));
Color bgColor = MessageType.INFO.getPopupBackground();
BalloonBuilder balloonBuilder = JBPopupFactory.getInstance().createHtmlTextBalloonBuilder(message, null, bgColor, listener)
.setAnimationCycle(200);
Balloon balloon = balloonBuilder.createBalloon();
balloon.show(new RelativePoint(component, point), Balloon.Position.below);
// Tie balloon lifetime to the viewer so it is hidden on dispose.
Disposer.register(MyThreesideViewer.this, balloon);
}
});
}
}
//
// Getters
//
/** All merge changes - both resolved and unresolved ones. */
@NotNull
public List<TextMergeChange> getAllChanges() {
return myAllMergeChanges;
}
/** The unresolved changes only. */
@NotNull
public List<TextMergeChange> getChanges() {
return ContainerUtil.filter(myAllMergeChanges, new Condition<TextMergeChange>() {
@Override
public boolean value(TextMergeChange mergeChange) {
return !mergeChange.isResolved();
}
});
}
@NotNull
@Override
protected DiffDividerDrawUtil.DividerPaintable getDividerPaintable(@NotNull Side side) {
return new MyDividerPaintable(side);
}
@NotNull
public ModifierProvider getModifierProvider() {
return myModifierProvider;
}
/**
 * Finds the first unresolved change, optionally skipping conflicts
 * ({@code acceptConflicts == false}) and/or restricting to changes that touch
 * the given side. Returns null when nothing matches.
 */
@Nullable
private TextMergeChange getFirstUnresolvedChange(boolean acceptConflicts, @Nullable Side side) {
for (TextMergeChange candidate : getAllChanges()) {
if (candidate.isResolved()) continue;
if ((acceptConflicts || !candidate.isConflict()) && (side == null || candidate.isChange(side))) {
return candidate;
}
}
return null;
}
//
// Modification operations
//
/**
 * Restores a previously stored change state, reinstalls its highlighter and
 * updates the counters/"all done" notification if its resolved flag flipped.
 */
private void restoreChangeState(@NotNull TextMergeChange change, @NotNull TextMergeChange.State state) {
boolean wasResolved = change.isResolved();
change.restoreState(state);
reinstallHighlighter(change);
if (wasResolved != change.isResolved()) onChangeResolved(change);
}
/**
 * Write command that wraps a merge modification: snapshots the states of the
 * affected changes before and after execution and registers them with the
 * UndoManager so both undo and redo restore change metadata correctly.
 */
private abstract class MergeCommandAction extends DiffUtil.DiffCommandAction {
// Changes this command may touch; null means "all changes".
@Nullable private final List<TextMergeChange> myAffectedChanges;
public MergeCommandAction(@Nullable Project project,
@Nullable String commandName,
boolean underBulkUpdate,
@Nullable List<TextMergeChange> changes) {
this(project, commandName, null, UndoConfirmationPolicy.DEFAULT, underBulkUpdate, changes);
}
public MergeCommandAction(@Nullable Project project,
@Nullable String commandName,
@Nullable String commandGroupId,
@NotNull UndoConfirmationPolicy confirmationPolicy,
boolean underBulkUpdate,
@Nullable List<TextMergeChange> changes) {
super(project, getEditor(ThreeSide.BASE).getDocument(), commandName, commandGroupId, confirmationPolicy, underBulkUpdate);
myAffectedChanges = collectAffectedChanges(changes);
}
@Override
@CalledWithWriteLock
protected final void execute() {
LOG.assertTrue(myCurrentMergeCommand == null);
myContentModified = true;
// We should restore states after changes in document (by DocumentUndoProvider) to avoid corruption by our onBeforeDocumentChange()
// Undo actions are performed in backward order, while redo actions are performed in forward order.
// Thus we should register two UndoableActions.
myCurrentMergeCommand = this;
registerUndoRedo(true);
enterBulkChangeUpdateBlock();
try {
doExecute();
}
finally {
exitBulkChangeUpdateBlock();
registerUndoRedo(false);
myCurrentMergeCommand = null;
}
}
/** Snapshots the affected change states and registers a one-sided undoable action (undo xor redo). */
private void registerUndoRedo(final boolean undo) {
if (myUndoManager == null) return;
List<TextMergeChange> affectedChanges = getAffectedChanges();
final List<TextMergeChange.State> states = new ArrayList<TextMergeChange.State>(affectedChanges.size());
for (TextMergeChange change : affectedChanges) {
states.add(change.storeState());
}
myUndoManager.undoableActionPerformed(new BasicUndoableAction(myDocument) {
@Override
public void undo() throws UnexpectedUndoException {
if (undo) restoreStates(states);
}
@Override
public void redo() throws UnexpectedUndoException {
if (!undo) restoreStates(states);
}
});
}
private void restoreStates(@NotNull List<TextMergeChange.State> states) {
List<TextMergeChange> affectedChanges = getAffectedChanges();
enterBulkChangeUpdateBlock();
for (int i = 0; i < affectedChanges.size(); i++) {
restoreChangeState(affectedChanges.get(i), states.get(i));
}
exitBulkChangeUpdateBlock();
}
@NotNull
private List<TextMergeChange> getAffectedChanges() {
return myAffectedChanges != null ? myAffectedChanges : myAllMergeChanges;
}
/** The actual modification; runs inside the write command with undo/redo bookkeeping in place. */
@CalledWithWriteLock
protected abstract void doExecute();
}
/*
 * Runs 'task' as a merge write command with undo support.
 * Affected changes should be sorted (see collectAffectedChanges).
 */
public void executeMergeCommand(@Nullable String commandName,
boolean underBulkUpdate,
@Nullable List<TextMergeChange> affected,
@NotNull final Runnable task) {
new MergeCommandAction(getProject(), commandName, underBulkUpdate, affected) {
@Override
protected void doExecute() {
task.run();
}
}.run();
}
/** Convenience overload without a bulk document update. */
public void executeMergeCommand(@Nullable String commandName,
@Nullable List<TextMergeChange> affected,
@NotNull Runnable task) {
executeMergeCommand(commandName, false, affected, task);
}
/** Marks both sides of the change resolved and refreshes its highlighter. */
@CalledInAwt
public void markChangeResolved(@NotNull TextMergeChange change) {
if (change.isResolved()) return;
change.setResolved(Side.LEFT, true);
change.setResolved(Side.RIGHT, true);
onChangeResolved(change);
reinstallHighlighter(change);
}
/** Marks one side resolved; fires onChangeResolved() once the whole change becomes resolved. */
@CalledInAwt
public void markChangeResolved(@NotNull TextMergeChange change, @NotNull Side side) {
if (change.isResolved(side)) return;
change.setResolved(side, true);
if (change.isResolved()) onChangeResolved(change);
reinstallHighlighter(change);
}
/**
 * Resolves a change without applying its content. For a conflict (and no
 * modifier key) only the given side is marked resolved; otherwise the whole
 * change is marked resolved.
 */
public void ignoreChange(@NotNull TextMergeChange change, @NotNull Side side, boolean modifier) {
if (change.isConflict() && !modifier) {
markChangeResolved(change, side);
}
else {
markChangeResolved(change);
}
}
/**
 * Applies the content of the given side into the output (base) document for
 * this change. For conflicts, applying the second side appends below the
 * already-applied one; a half-applied conflict stays unresolved on the other
 * side unless the modifier forces full resolution.
 * Must run inside a MergeCommandAction (asserted via myCurrentMergeCommand).
 */
@CalledWithWriteLock
public void replaceChange(@NotNull TextMergeChange change, @NotNull Side side, boolean modifier) {
LOG.assertTrue(myCurrentMergeCommand != null);
if (change.isResolved(side)) return;
if (!change.isChange(side)) {
// Nothing to copy from this side - just resolve.
markChangeResolved(change);
return;
}
ThreeSide sourceSide = side.select(ThreeSide.LEFT, ThreeSide.RIGHT);
ThreeSide oppositeSide = side.select(ThreeSide.RIGHT, ThreeSide.LEFT);
ThreeSide outputSide = ThreeSide.BASE;
int outputStartLine = change.getStartLine(outputSide);
int outputEndLine = change.getEndLine(outputSide);
int sourceStartLine = change.getStartLine(sourceSide);
int sourceEndLine = change.getEndLine(sourceSide);
enterBulkChangeUpdateBlock();
try {
if (change.isConflict()) {
boolean append = change.isOnesideAppliedConflict();
int actualOutputStartLine = append ? outputEndLine : outputStartLine;
DiffUtil.applyModification(getContent(outputSide).getDocument(), actualOutputStartLine, outputEndLine,
getContent(sourceSide).getDocument(), sourceStartLine, sourceEndLine);
if (outputStartLine == outputEndLine || append) { // onBeforeDocumentChange() should process other cases correctly
int newOutputEndLine = actualOutputStartLine + (sourceEndLine - sourceStartLine);
moveChangesAfterInsertion(change, outputStartLine, newOutputEndLine);
}
if (modifier || change.getStartLine(oppositeSide) == change.getEndLine(oppositeSide)) {
// Either forced, or the opposite side is empty - the conflict is fully handled.
markChangeResolved(change);
} else {
change.markOnesideAppliedConflict();
markChangeResolved(change, side);
}
}
else {
DiffUtil.applyModification(getContent(outputSide).getDocument(), outputStartLine, outputEndLine,
getContent(sourceSide).getDocument(), sourceStartLine, sourceEndLine);
if (outputStartLine == outputEndLine) { // onBeforeDocumentChange() should process other cases correctly
int newOutputEndLine = outputStartLine + (sourceEndLine - sourceStartLine);
moveChangesAfterInsertion(change, outputStartLine, newOutputEndLine);
}
markChangeResolved(change);
}
}
finally {
exitBulkChangeUpdateBlock();
}
}
/*
 * We want to include inserted block into change, so we are updating endLine(BASE).
 *
 * It could break order of changes if there are other changes that starts/ends at this line.
 * So we should check all other changes and shift them if necessary.
 */
private void moveChangesAfterInsertion(@NotNull TextMergeChange change,
int newOutputStartLine,
int newOutputEndLine) {
LOG.assertTrue(myCurrentMergeCommand != null);
if (change.getStartLine(ThreeSide.BASE) != newOutputStartLine ||
change.getEndLine(ThreeSide.BASE) != newOutputEndLine) {
change.setStartLine(ThreeSide.BASE, newOutputStartLine);
change.setEndLine(ThreeSide.BASE, newOutputEndLine);
reinstallHighlighter(change);
}
// Changes are ordered by base line; clamp neighbors that overlap the inserted range:
// changes before 'change' are capped at the new start, changes after at the new end.
boolean beforeChange = true;
for (TextMergeChange otherChange : getAllChanges()) {
int startLine = otherChange.getStartLine(ThreeSide.BASE);
int endLine = otherChange.getEndLine(ThreeSide.BASE);
if (endLine < newOutputStartLine) continue;
if (startLine > newOutputEndLine) break;
if (otherChange == change) {
beforeChange = false;
continue;
}
int newStartLine = beforeChange ? Math.min(startLine, newOutputStartLine) : Math.max(startLine, newOutputEndLine);
int newEndLine = beforeChange ? Math.min(endLine, newOutputStartLine) : Math.max(endLine, newOutputEndLine);
if (startLine != newStartLine || endLine != newEndLine) {
otherChange.setStartLine(ThreeSide.BASE, newStartLine);
otherChange.setEndLine(ThreeSide.BASE, newEndLine);
reinstallHighlighter(otherChange);
}
}
}
/*
 * Nearby changes could be affected as well (ex: by moveChangesAfterInsertion)
 *
 * null means all changes could be affected
 */
@Nullable
private List<TextMergeChange> collectAffectedChanges(@Nullable List<TextMergeChange> directChanges) {
if (directChanges == null || directChanges.isEmpty()) return null;
List<TextMergeChange> result = new ArrayList<TextMergeChange>(directChanges.size());
// Merge-walk both sorted lists, keeping every direct change plus any change
// whose base range overlaps a direct change's base range.
int directIndex = 0;
int otherIndex = 0;
while (directIndex < directChanges.size() && otherIndex < myAllMergeChanges.size()) {
TextMergeChange directChange = directChanges.get(directIndex);
TextMergeChange otherChange = myAllMergeChanges.get(otherIndex);
if (directChange == otherChange) {
result.add(directChange);
otherIndex++;
continue;
}
int directStart = directChange.getStartLine(ThreeSide.BASE);
int directEnd = directChange.getEndLine(ThreeSide.BASE);
int otherStart = otherChange.getStartLine(ThreeSide.BASE);
int otherEnd = otherChange.getEndLine(ThreeSide.BASE);
if (otherEnd < directStart) {
otherIndex++;
continue;
}
if (otherStart > directEnd) {
directIndex++;
continue;
}
result.add(otherChange);
otherIndex++;
}
// Every direct change must have been included.
LOG.assertTrue(directChanges.size() <= result.size());
return result;
}
//
// Actions
//
/**
 * Base class for actions that apply/ignore the changes intersecting the current
 * editor selection (or caret line). Shortcut instances always report themselves
 * enabled so they consume the keystroke; menu instances update their presentation
 * from the focused editor side.
 */
private abstract class ApplySelectedChangesActionBase extends AnAction implements DumbAware {
private final boolean myShortcut;
public ApplySelectedChangesActionBase(boolean shortcut) {
myShortcut = shortcut;
}
@Override
public void update(@NotNull AnActionEvent e) {
if (myShortcut) {
// consume shortcut even if there are nothing to do - avoid calling some other action
e.getPresentation().setEnabledAndVisible(true);
return;
}
Presentation presentation = e.getPresentation();
Editor editor = e.getData(CommonDataKeys.EDITOR);
ThreeSide side = getEditorSide(editor);
if (side == null) {
presentation.setEnabledAndVisible(false);
return;
}
if (!isVisible(side)) {
presentation.setEnabledAndVisible(false);
return;
}
presentation.setVisible(true);
presentation.setEnabled(isSomeChangeSelected(side));
}
@Override
public void actionPerformed(@NotNull final AnActionEvent e) {
Editor editor = e.getData(CommonDataKeys.EDITOR);
final ThreeSide side = getEditorSide(editor);
if (editor == null || side == null) return;
final List<TextMergeChange> selectedChanges = getSelectedChanges(side);
if (selectedChanges.isEmpty()) return;
String title = e.getPresentation().getText() + " in merge";
executeMergeCommand(title, true, selectedChanges, new Runnable() {
@Override
public void run() {
apply(side, selectedChanges);
}
});
}
/** True when the selection/caret on the given side touches at least one enabled change. */
private boolean isSomeChangeSelected(@NotNull ThreeSide side) {
EditorEx editor = getEditor(side);
List<Caret> carets = editor.getCaretModel().getAllCarets();
if (carets.size() != 1) return true;
Caret caret = carets.get(0);
if (caret.hasSelection()) return true;
int line = editor.getDocument().getLineNumber(editor.getExpectedCaretOffset());
List<TextMergeChange> changes = getAllChanges();
for (TextMergeChange change : changes) {
if (!isEnabled(change)) continue;
int line1 = change.getStartLine(side);
int line2 = change.getEndLine(side);
if (DiffUtil.isSelectedByLine(line, line1, line2)) return true;
}
return false;
}
/** Unresolved, enabled changes intersecting the selected lines on the given side. */
@NotNull
@CalledInAwt
private List<TextMergeChange> getSelectedChanges(@NotNull ThreeSide side) {
final BitSet lines = DiffUtil.getSelectedLines(getEditor(side));
List<TextMergeChange> changes = getChanges();
List<TextMergeChange> affectedChanges = new ArrayList<TextMergeChange>();
for (TextMergeChange change : changes) {
if (!isEnabled(change)) continue;
int line1 = change.getStartLine(side);
int line2 = change.getEndLine(side);
if (DiffUtil.isSelectedByLine(lines, line1, line2)) {
affectedChanges.add(change);
}
}
return affectedChanges;
}
/** Whether the action applies to the given editor side at all. */
protected abstract boolean isVisible(@NotNull ThreeSide side);
/** Whether the given change can still be processed by this action. */
protected abstract boolean isEnabled(@NotNull TextMergeChange change);
/** Performs the modification for the selected changes; runs under the merge write command. */
@CalledWithWriteLock
protected abstract void apply(@NotNull ThreeSide side, @NotNull List<TextMergeChange> changes);
}
/** Resolves the selected changes on one side without copying any content. */
private class IgnoreSelectedChangesAction extends ApplySelectedChangesActionBase {
@NotNull private final Side mySide;
public IgnoreSelectedChangesAction(@NotNull Side side, boolean shortcut) {
super(shortcut);
mySide = side;
EmptyAction.setupAction(this, mySide.select("Diff.IgnoreLeftSide", "Diff.IgnoreRightSide"), null);
}
@Override
protected boolean isVisible(@NotNull ThreeSide side) {
if (side == ThreeSide.BASE) return true;
return side == mySide.select(ThreeSide.LEFT, ThreeSide.RIGHT);
}
@Override
protected boolean isEnabled(@NotNull TextMergeChange change) {
return !change.isResolved(mySide);
}
@Override
protected void apply(@NotNull ThreeSide side, @NotNull List<TextMergeChange> changes) {
for (TextMergeChange change : changes) {
ignoreChange(change, mySide, false);
}
}
}
/** Copies the selected changes from one side into the output document. */
private class ApplySelectedChangesAction extends ApplySelectedChangesActionBase {
@NotNull private final Side mySide;
public ApplySelectedChangesAction(@NotNull Side side, boolean shortcut) {
super(shortcut);
mySide = side;
EmptyAction.setupAction(this, mySide.select("Diff.ApplyLeftSide", "Diff.ApplyRightSide"), null);
}
@Override
protected boolean isVisible(@NotNull ThreeSide side) {
if (side == ThreeSide.BASE) return true;
return side == mySide.select(ThreeSide.LEFT, ThreeSide.RIGHT);
}
@Override
protected boolean isEnabled(@NotNull TextMergeChange change) {
return !change.isResolved(mySide);
}
@Override
protected void apply(@NotNull ThreeSide side, @NotNull List<TextMergeChange> changes) {
// Apply bottom-up so document shifts do not invalidate the not-yet-applied ranges.
for (int i = changes.size() - 1; i >= 0; i--) {
replaceChange(changes.get(i), mySide, false);
}
}
}
public abstract class ApplyNonConflictsActionBase extends DumbAwareAction {
public ApplyNonConflictsActionBase(@Nullable String text, @Nullable String description, @Nullable Icon icon) {
super(text, description, icon);
}
public void actionPerformed(AnActionEvent e) {
executeMergeCommand("Apply Non Conflicted Changes", true, null, new Runnable() {
@Override
public void run() {
doPerform();
}
});
TextMergeChange firstConflict = getFirstUnresolvedChange(true, null);
if (firstConflict != null) doScrollToChange(firstConflict, true);
}
@CalledWithWriteLock
protected abstract void doPerform();
}
/** Applies every non-conflicting, unresolved change from whichever side modified it. */
public class ApplyNonConflictsAction extends ApplyNonConflictsActionBase {
public ApplyNonConflictsAction() {
super(DiffBundle.message("merge.dialog.apply.all.non.conflicting.changes.action.name"), null, AllIcons.Diff.ApplyNotConflicts);
}
@Override
protected void doPerform() {
// Iterate over a copy - replaceChange() mutates change state while we walk.
List<TextMergeChange> allChanges = ContainerUtil.newArrayList(getAllChanges());
for (TextMergeChange change : allChanges) {
if (change.isConflict()) continue;
if (change.isResolved()) continue;
Side masterSide = change.isChange(Side.LEFT) ? Side.LEFT : Side.RIGHT;
replaceChange(change, masterSide, false);
}
}
@Override
public void update(AnActionEvent e) {
e.getPresentation().setEnabled(getFirstUnresolvedChange(false, null) != null);
}
}
/** Applies all non-conflicting, unresolved changes that come from one specific side. */
public class ApplySideNonConflictsAction extends ApplyNonConflictsActionBase {
@NotNull private final Side mySide;
public ApplySideNonConflictsAction(@NotNull Side side) {
super(side.select(DiffBundle.message("merge.dialog.apply.left.non.conflicting.changes.action.name"),
DiffBundle.message("merge.dialog.apply.right.non.conflicting.changes.action.name")),
null,
side.select(AllIcons.Diff.ApplyNotConflictsLeft, AllIcons.Diff.ApplyNotConflictsRight));
mySide = side;
}
@Override
protected void doPerform() {
// Iterate over a copy - replaceChange() mutates change state while we walk.
List<TextMergeChange> allChanges = ContainerUtil.newArrayList(getAllChanges());
for (TextMergeChange change : allChanges) {
if (change.isConflict()) continue;
if (change.isResolved(mySide)) continue;
if (!change.isChange(mySide)) continue;
replaceChange(change, mySide, false);
}
}
@Override
public void update(AnActionEvent e) {
e.getPresentation().setEnabled(getFirstUnresolvedChange(false, mySide) != null);
}
}
//
// Helpers
//
/** Routes the IDE undo/redo shortcuts to the viewer's UndoManager for the base editor. */
private class UndoRedoAction extends DumbAwareAction {
private final boolean myUndo;
public UndoRedoAction(boolean undo) {
myUndo = undo;
}
public void register() {
EmptyAction.setupAction(this, myUndo ? IdeActions.ACTION_UNDO : IdeActions.ACTION_REDO, myContentPanel);
}
@Override
public void update(AnActionEvent e) {
// Only registered when myUndoManager != null (see viewer constructor).
assert myUndoManager != null;
TextEditor textEditor = getTextEditor();
e.getPresentation().setEnabled(myUndo ? myUndoManager.isUndoAvailable(textEditor) : myUndoManager.isRedoAvailable(textEditor));
}
@Override
public void actionPerformed(AnActionEvent e) {
assert myUndoManager != null;
TextEditor textEditor = getTextEditor();
if (myUndo) {
myUndoManager.undo(textEditor);
}
else {
myUndoManager.redo(textEditor);
}
}
@NotNull
private TextEditor getTextEditor() {
EditorEx editor = getEditor(ThreeSide.BASE);
return TextEditorProvider.getInstance().getTextEditor(editor);
}
}
/** Paints the change stripes on the divider between one side pane and the base pane. */
private class MyDividerPaintable implements DiffDividerDrawUtil.DividerPaintable {
@NotNull private final Side mySide;
public MyDividerPaintable(@NotNull Side side) {
mySide = side;
}
@Override
public void process(@NotNull Handler handler) {
ThreeSide left = mySide.select(ThreeSide.LEFT, ThreeSide.BASE);
ThreeSide right = mySide.select(ThreeSide.BASE, ThreeSide.RIGHT);
for (TextMergeChange mergeChange : myAllMergeChanges) {
if (!mergeChange.isChange(mySide)) continue;
Color color = mergeChange.getDiffType().getColor(getEditor(ThreeSide.BASE));
boolean isResolved = mergeChange.isResolved(mySide);
if (!handler.process(mergeChange.getStartLine(left), mergeChange.getEndLine(left),
mergeChange.getStartLine(right), mergeChange.getEndLine(right),
color, isResolved)) {
return;
}
}
}
}
/** Refreshes gutter actions of all changes when keyboard modifiers change. */
public class ModifierProvider extends KeyboardModifierListener {
public void init() {
init(myPanel, TextMergeViewer.this);
}
@Override
public void onModifiersChanged() {
for (TextMergeChange change : myAllMergeChanges) {
change.updateGutterActions(false);
}
}
}
}
}
}
| |
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.sesame.marketdata.scenarios;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import org.joda.beans.Bean;
import org.joda.beans.BeanDefinition;
import org.joda.beans.ImmutableBean;
import org.joda.beans.ImmutableConstructor;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectFieldsBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.google.common.collect.ImmutableSet;
import com.opengamma.financial.currency.CurrencyPair;
import com.opengamma.sesame.marketdata.FxRateId;
import com.opengamma.sesame.marketdata.MarketDataId;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.money.Currency;
/**
* Market data filter that matches a currency pair or its inverse.
* <p>
* The {@code apply} methods return an instance of {@link CurrencyPairMatchDetails} which contains a flag
* indicating whether the pair or its inverse was matched.
*/
@BeanDefinition
public final class CurrencyPairFilter implements MarketDataFilter, ImmutableBean {
/** The currency pair matched by this filter. The inverse pair will also be matched. */
@PropertyDefinition(validate = "notNull")
private final CurrencyPair _currencyPair;
/**
* @param currencyPair the currency pair matched by this filter. The inverse pair will also be matched
*/
@ImmutableConstructor
public CurrencyPairFilter(CurrencyPair currencyPair) {
_currencyPair = ArgumentChecker.notNull(currencyPair, "currencyPair");
}
/**
* @param base the base currency of the pair matched by this filter. The inverse pair will also be matched
* @param counter the counter currency of the pair matched by this filter. The inverse pair will also be matched
*/
public CurrencyPairFilter(Currency base, Currency counter) {
this(CurrencyPair.of(base, counter));
}
@Override
public Set<? extends MatchDetails> apply(MarketDataId<?> marketDataId) {
// Unchecked cast: presumably the framework only routes IDs of getMarketDataIdType()
// (FxRateId) to this filter - TODO confirm against the filter dispatch code.
FxRateId rateId = (FxRateId) marketDataId;
CurrencyPair currencyPair = rateId.getCurrencyPair();
if (_currencyPair.equals(currencyPair)) {
// Direct match: the rate is quoted with the same base/counter order.
return ImmutableSet.of(new CurrencyPairMatchDetails(false));
} else if (_currencyPair.equals(currencyPair.inverse())) {
// Inverse match: the flag tells the consumer the quoted pair is inverted.
return ImmutableSet.of(new CurrencyPairMatchDetails(true));
} else {
// No match: an empty set means this filter does not apply to the ID.
return ImmutableSet.of();
}
}
@Override
public Set<? extends MatchDetails> apply(MarketDataId<?> marketDataId, Object marketData) {
// The market data value is irrelevant here; matching depends only on the ID.
return apply(marketDataId);
}
@Override
public Class<?> getMarketDataType() {
// This filter declares it applies to market data values of type Double.
return Double.class;
}
@Override
public Class<? extends MarketDataId<?>> getMarketDataIdType() {
return FxRateId.class;
}
//------------------------- AUTOGENERATED START -------------------------
// NOTE: everything between the AUTOGENERATED markers is produced by the
// Joda-Beans code generator - do not edit it by hand; regenerate instead.
///CLOVER:OFF
/**
* The meta-bean for {@code CurrencyPairFilter}.
* @return the meta-bean, not null
*/
public static CurrencyPairFilter.Meta meta() {
return CurrencyPairFilter.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(CurrencyPairFilter.Meta.INSTANCE);
}
/**
* Returns a builder used to create an instance of the bean.
* @return the builder, not null
*/
public static CurrencyPairFilter.Builder builder() {
return new CurrencyPairFilter.Builder();
}
@Override
public CurrencyPairFilter.Meta metaBean() {
return CurrencyPairFilter.Meta.INSTANCE;
}
@Override
public <R> Property<R> property(String propertyName) {
return metaBean().<R>metaProperty(propertyName).createProperty(this);
}
@Override
public Set<String> propertyNames() {
return metaBean().metaPropertyMap().keySet();
}
//-----------------------------------------------------------------------
/**
* Gets the currency pair matched by this filter. The inverse pair will also be matched.
* @return the value of the property, not null
*/
public CurrencyPair getCurrencyPair() {
return _currencyPair;
}
//-----------------------------------------------------------------------
/**
* Returns a builder that allows this bean to be mutated.
* @return the mutable builder, not null
*/
public Builder toBuilder() {
return new Builder(this);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
CurrencyPairFilter other = (CurrencyPairFilter) obj;
return JodaBeanUtils.equal(getCurrencyPair(), other.getCurrencyPair());
}
return false;
}
@Override
public int hashCode() {
int hash = getClass().hashCode();
hash = hash * 31 + JodaBeanUtils.hashCode(getCurrencyPair());
return hash;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(64);
buf.append("CurrencyPairFilter{");
buf.append("currencyPair").append('=').append(JodaBeanUtils.toString(getCurrencyPair()));
buf.append('}');
return buf.toString();
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code CurrencyPairFilter}.
*/
public static final class Meta extends DirectMetaBean {
/**
* The singleton instance of the meta-bean.
*/
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code currencyPair} property.
*/
private final MetaProperty<CurrencyPair> _currencyPair = DirectMetaProperty.ofImmutable(
this, "currencyPair", CurrencyPairFilter.class, CurrencyPair.class);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
this, null,
"currencyPair");
/**
* Restricted constructor.
*/
private Meta() {
}
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case 1005147787: // currencyPair
return _currencyPair;
}
return super.metaPropertyGet(propertyName);
}
@Override
public CurrencyPairFilter.Builder builder() {
return new CurrencyPairFilter.Builder();
}
@Override
public Class<? extends CurrencyPairFilter> beanType() {
return CurrencyPairFilter.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return _metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code currencyPair} property.
* @return the meta-property, not null
*/
public MetaProperty<CurrencyPair> currencyPair() {
return _currencyPair;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case 1005147787: // currencyPair
return ((CurrencyPairFilter) bean).getCurrencyPair();
}
return super.propertyGet(bean, propertyName, quiet);
}
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
metaProperty(propertyName);
if (quiet) {
return;
}
throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
}
}
//-----------------------------------------------------------------------
/**
* The bean-builder for {@code CurrencyPairFilter}.
*/
public static final class Builder extends DirectFieldsBeanBuilder<CurrencyPairFilter> {
private CurrencyPair _currencyPair;
/**
* Restricted constructor.
*/
private Builder() {
}
/**
* Restricted copy constructor.
* @param beanToCopy the bean to copy from, not null
*/
private Builder(CurrencyPairFilter beanToCopy) {
this._currencyPair = beanToCopy.getCurrencyPair();
}
//-----------------------------------------------------------------------
@Override
public Object get(String propertyName) {
switch (propertyName.hashCode()) {
case 1005147787: // currencyPair
return _currencyPair;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
}
@Override
public Builder set(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case 1005147787: // currencyPair
this._currencyPair = (CurrencyPair) newValue;
break;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
return this;
}
@Override
public Builder set(MetaProperty<?> property, Object value) {
super.set(property, value);
return this;
}
@Override
public Builder setString(String propertyName, String value) {
setString(meta().metaProperty(propertyName), value);
return this;
}
@Override
public Builder setString(MetaProperty<?> property, String value) {
super.setString(property, value);
return this;
}
@Override
public Builder setAll(Map<String, ? extends Object> propertyValueMap) {
super.setAll(propertyValueMap);
return this;
}
@Override
public CurrencyPairFilter build() {
return new CurrencyPairFilter(
_currencyPair);
}
//-----------------------------------------------------------------------
/**
* Sets the {@code currencyPair} property in the builder.
* @param currencyPair the new value, not null
* @return this, for chaining, not null
*/
public Builder currencyPair(CurrencyPair currencyPair) {
JodaBeanUtils.notNull(currencyPair, "currencyPair");
this._currencyPair = currencyPair;
return this;
}
//-----------------------------------------------------------------------
@Override
public String toString() {
StringBuilder buf = new StringBuilder(64);
buf.append("CurrencyPairFilter.Builder{");
buf.append("currencyPair").append('=').append(JodaBeanUtils.toString(_currencyPair));
buf.append('}');
return buf.toString();
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
| |
package org.keycloak.models.utils;
import org.keycloak.models.ClientModel;
import org.keycloak.models.UserConsentModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.UserCredentialModel;
import org.keycloak.models.UserCredentialValueModel;
import org.keycloak.models.UserModel;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
// Transparent decorator over a UserModel: every interface method forwards to the
// wrapped instance unchanged. Subclass this and override individual methods to
// intercept specific operations while inheriting pass-through behavior for the rest.
public class UserModelDelegate implements UserModel {
// The wrapped instance that receives every delegated call.
protected UserModel delegate;
public UserModelDelegate(UserModel delegate) {
this.delegate = delegate;
}
@Override
public String getId() {
return delegate.getId();
}
@Override
public String getUsername() {
return delegate.getUsername();
}
@Override
public void setUsername(String username) {
delegate.setUsername(username);
}
@Override
public boolean isEnabled() {
return delegate.isEnabled();
}
@Override
public boolean isOtpEnabled() {
return delegate.isOtpEnabled();
}
@Override
public void setEnabled(boolean enabled) {
delegate.setEnabled(enabled);
}
@Override
public void setSingleAttribute(String name, String value) {
delegate.setSingleAttribute(name, value);
}
@Override
public void setAttribute(String name, List<String> values) {
delegate.setAttribute(name, values);
}
@Override
public void removeAttribute(String name) {
delegate.removeAttribute(name);
}
@Override
public String getFirstAttribute(String name) {
return delegate.getFirstAttribute(name);
}
@Override
public List<String> getAttribute(String name) {
return delegate.getAttribute(name);
}
@Override
public Map<String, List<String>> getAttributes() {
return delegate.getAttributes();
}
@Override
public Set<String> getRequiredActions() {
return delegate.getRequiredActions();
}
@Override
public void addRequiredAction(String action) {
delegate.addRequiredAction(action);
}
@Override
public void removeRequiredAction(String action) {
delegate.removeRequiredAction(action);
}
@Override
public void addRequiredAction(RequiredAction action) {
delegate.addRequiredAction(action);
}
@Override
public void removeRequiredAction(RequiredAction action) {
delegate.removeRequiredAction(action);
}
@Override
public String getFirstName() {
return delegate.getFirstName();
}
@Override
public void setFirstName(String firstName) {
delegate.setFirstName(firstName);
}
@Override
public String getLastName() {
return delegate.getLastName();
}
@Override
public void setLastName(String lastName) {
delegate.setLastName(lastName);
}
@Override
public String getEmail() {
return delegate.getEmail();
}
@Override
public void setEmail(String email) {
delegate.setEmail(email);
}
@Override
public boolean isEmailVerified() {
return delegate.isEmailVerified();
}
@Override
public void setEmailVerified(boolean verified) {
delegate.setEmailVerified(verified);
}
@Override
public void setOtpEnabled(boolean totp) {
delegate.setOtpEnabled(totp);
}
@Override
public void updateCredential(UserCredentialModel cred) {
delegate.updateCredential(cred);
}
@Override
public List<UserCredentialValueModel> getCredentialsDirectly() {
return delegate.getCredentialsDirectly();
}
@Override
public void updateCredentialDirectly(UserCredentialValueModel cred) {
delegate.updateCredentialDirectly(cred);
}
@Override
public Set<RoleModel> getRealmRoleMappings() {
return delegate.getRealmRoleMappings();
}
@Override
public Set<RoleModel> getClientRoleMappings(ClientModel app) {
return delegate.getClientRoleMappings(app);
}
@Override
public boolean hasRole(RoleModel role) {
return delegate.hasRole(role);
}
@Override
public void grantRole(RoleModel role) {
delegate.grantRole(role);
}
@Override
public Set<RoleModel> getRoleMappings() {
return delegate.getRoleMappings();
}
@Override
public void deleteRoleMapping(RoleModel role) {
delegate.deleteRoleMapping(role);
}
@Override
public String getFederationLink() {
return delegate.getFederationLink();
}
@Override
public void setFederationLink(String link) {
delegate.setFederationLink(link);
}
@Override
public String getServiceAccountClientLink() {
return delegate.getServiceAccountClientLink();
}
@Override
public void setServiceAccountClientLink(String clientInternalId) {
delegate.setServiceAccountClientLink(clientInternalId);
}
@Override
public void addConsent(UserConsentModel consent) {
delegate.addConsent(consent);
}
@Override
public UserConsentModel getConsentByClient(String clientId) {
return delegate.getConsentByClient(clientId);
}
@Override
public List<UserConsentModel> getConsents() {
return delegate.getConsents();
}
@Override
public void updateConsent(UserConsentModel consent) {
delegate.updateConsent(consent);
}
@Override
public boolean revokeConsentForClient(String clientId) {
return delegate.revokeConsentForClient(clientId);
}
// Exposes the wrapped UserModel so callers can unwrap the delegation chain.
public UserModel getDelegate() {
return delegate;
}
@Override
public Long getCreatedTimestamp(){
return delegate.getCreatedTimestamp();
}
@Override
public void setCreatedTimestamp(Long timestamp){
delegate.setCreatedTimestamp(timestamp);
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* Describes a block device mapping.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/LaunchTemplateBlockDeviceMapping"
* target="_top">AWS API Documentation</a>
*/
// NOTE: generated by the AWS SDK code generator (see @Generated) - do not edit by hand.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class LaunchTemplateBlockDeviceMapping implements Serializable, Cloneable {
/**
* <p>
* The device name.
* </p>
*/
private String deviceName;
/**
* <p>
* The virtual device name (ephemeralN).
* </p>
*/
private String virtualName;
/**
* <p>
* Information about the block device for an EBS volume.
* </p>
*/
private LaunchTemplateEbsBlockDevice ebs;
/**
* <p>
* Suppresses the specified device included in the block device mapping of the AMI.
* </p>
*/
private String noDevice;
/**
* <p>
* The device name.
* </p>
*
* @param deviceName
* The device name.
*/
public void setDeviceName(String deviceName) {
this.deviceName = deviceName;
}
/**
* <p>
* The device name.
* </p>
*
* @return The device name.
*/
public String getDeviceName() {
return this.deviceName;
}
/**
* <p>
* The device name.
* </p>
*
* @param deviceName
* The device name.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public LaunchTemplateBlockDeviceMapping withDeviceName(String deviceName) {
setDeviceName(deviceName);
return this;
}
/**
* <p>
* The virtual device name (ephemeralN).
* </p>
*
* @param virtualName
* The virtual device name (ephemeralN).
*/
public void setVirtualName(String virtualName) {
this.virtualName = virtualName;
}
/**
* <p>
* The virtual device name (ephemeralN).
* </p>
*
* @return The virtual device name (ephemeralN).
*/
public String getVirtualName() {
return this.virtualName;
}
/**
* <p>
* The virtual device name (ephemeralN).
* </p>
*
* @param virtualName
* The virtual device name (ephemeralN).
* @return Returns a reference to this object so that method calls can be chained together.
*/
public LaunchTemplateBlockDeviceMapping withVirtualName(String virtualName) {
setVirtualName(virtualName);
return this;
}
/**
* <p>
* Information about the block device for an EBS volume.
* </p>
*
* @param ebs
* Information about the block device for an EBS volume.
*/
public void setEbs(LaunchTemplateEbsBlockDevice ebs) {
this.ebs = ebs;
}
/**
* <p>
* Information about the block device for an EBS volume.
* </p>
*
* @return Information about the block device for an EBS volume.
*/
public LaunchTemplateEbsBlockDevice getEbs() {
return this.ebs;
}
/**
* <p>
* Information about the block device for an EBS volume.
* </p>
*
* @param ebs
* Information about the block device for an EBS volume.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public LaunchTemplateBlockDeviceMapping withEbs(LaunchTemplateEbsBlockDevice ebs) {
setEbs(ebs);
return this;
}
/**
* <p>
* Suppresses the specified device included in the block device mapping of the AMI.
* </p>
*
* @param noDevice
* Suppresses the specified device included in the block device mapping of the AMI.
*/
public void setNoDevice(String noDevice) {
this.noDevice = noDevice;
}
/**
* <p>
* Suppresses the specified device included in the block device mapping of the AMI.
* </p>
*
* @return Suppresses the specified device included in the block device mapping of the AMI.
*/
public String getNoDevice() {
return this.noDevice;
}
/**
* <p>
* Suppresses the specified device included in the block device mapping of the AMI.
* </p>
*
* @param noDevice
* Suppresses the specified device included in the block device mapping of the AMI.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public LaunchTemplateBlockDeviceMapping withNoDevice(String noDevice) {
setNoDevice(noDevice);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getDeviceName() != null)
sb.append("DeviceName: ").append(getDeviceName()).append(",");
if (getVirtualName() != null)
sb.append("VirtualName: ").append(getVirtualName()).append(",");
if (getEbs() != null)
sb.append("Ebs: ").append(getEbs()).append(",");
if (getNoDevice() != null)
sb.append("NoDevice: ").append(getNoDevice());
sb.append("}");
return sb.toString();
}
// Field-by-field equality; each XOR check makes "exactly one side null" unequal
// before the non-null values are compared.
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof LaunchTemplateBlockDeviceMapping == false)
return false;
LaunchTemplateBlockDeviceMapping other = (LaunchTemplateBlockDeviceMapping) obj;
if (other.getDeviceName() == null ^ this.getDeviceName() == null)
return false;
if (other.getDeviceName() != null && other.getDeviceName().equals(this.getDeviceName()) == false)
return false;
if (other.getVirtualName() == null ^ this.getVirtualName() == null)
return false;
if (other.getVirtualName() != null && other.getVirtualName().equals(this.getVirtualName()) == false)
return false;
if (other.getEbs() == null ^ this.getEbs() == null)
return false;
if (other.getEbs() != null && other.getEbs().equals(this.getEbs()) == false)
return false;
if (other.getNoDevice() == null ^ this.getNoDevice() == null)
return false;
if (other.getNoDevice() != null && other.getNoDevice().equals(this.getNoDevice()) == false)
return false;
return true;
}
// Standard 31-based hash over all four properties (a null property contributes 0),
// consistent with equals() above.
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getDeviceName() == null) ? 0 : getDeviceName().hashCode());
hashCode = prime * hashCode + ((getVirtualName() == null) ? 0 : getVirtualName().hashCode());
hashCode = prime * hashCode + ((getEbs() == null) ? 0 : getEbs().hashCode());
hashCode = prime * hashCode + ((getNoDevice() == null) ? 0 : getNoDevice().hashCode());
return hashCode;
}
// Shallow clone via Object.clone(); the class implements Cloneable, so the
// CloneNotSupportedException branch should be unreachable in practice.
@Override
public LaunchTemplateBlockDeviceMapping clone() {
try {
return (LaunchTemplateBlockDeviceMapping) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hpsf.basic;
import java.io.ByteArrayOutputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import org.apache.poi.hpsf.PropertySet;
import org.apache.poi.poifs.eventfilesystem.POIFSReader;
import org.apache.poi.poifs.eventfilesystem.POIFSReaderEvent;
import org.apache.poi.poifs.eventfilesystem.POIFSReaderListener;
/**
* <p>Static utility methods needed by the HPSF test cases.</p>
*/
/**
 * <p>Static utility methods needed by the HPSF test cases.</p>
 */
final class Util {

    /** Utility class holding only static helpers; never instantiated. */
    private Util() {
    }

    /**
     * <p>Reads bytes from an input stream and writes them to an
     * output stream until end of file is encountered.</p>
     *
     * @param in the input stream to read from
     *
     * @param out the output stream to write to
     *
     * @exception IOException if an I/O exception occurs
     */
    public static void copy(final InputStream in, final OutputStream out)
        throws IOException
    {
        final int BUF_SIZE = 1000;
        final byte[] b = new byte[BUF_SIZE];
        try
        {
            int read;
            /* Per the InputStream contract, read() returns -1 at end of
             * stream; a return value of 0 only means "no bytes available
             * right now" and must NOT be treated as EOF (the previous
             * implementation stopped on 0). */
            while ((read = in.read(b, 0, BUF_SIZE)) != -1)
            {
                if (read > 0)
                    out.write(b, 0, read);
            }
        }
        catch (final EOFException ex)
        {
            /* Intentionally ignored: some streams signal end of data by
             * throwing EOFException instead of returning -1; treat it as a
             * normal EOF. */
        }
    }

    /**
     * <p>Reads all files from a POI filesystem and returns them as an
     * array of {@link POIFile} instances. This method loads all files
     * into memory and thus does not cope well with large POI
     * filesystems.</p>
     *
     * @param poiFs The name of the POI filesystem as seen by the
     * operating system. (This is the "filename".)
     *
     * @return The POI files. The elements are ordered in the same way
     * as the files in the POI filesystem.
     *
     * @exception FileNotFoundException if the file containing the POI
     * filesystem does not exist
     *
     * @exception IOException if an I/O exception occurs
     */
    public static POIFile[] readPOIFiles(final File poiFs)
        throws FileNotFoundException, IOException
    {
        /* A null name list means "read every file in the filesystem". */
        return readPOIFiles(poiFs, null);
    }

    /**
     * <p>Reads a set of files from a POI filesystem and returns them
     * as an array of {@link POIFile} instances. This method loads all
     * files into memory and thus does not cope well with large POI
     * filesystems.</p>
     *
     * @param poiFs The name of the POI filesystem as seen by the
     * operating system. (This is the "filename".)
     *
     * @param poiFiles The names of the POI files to be read, or
     * {@code null} to read all files.
     *
     * @return The POI files. The elements are ordered in the same way
     * as the files in the POI filesystem.
     *
     * @exception FileNotFoundException if the file containing the POI
     * filesystem does not exist
     *
     * @exception IOException if an I/O exception occurs
     */
    public static POIFile[] readPOIFiles(final File poiFs,
                                         final String[] poiFiles)
        throws FileNotFoundException, IOException
    {
        final List<POIFile> files = new ArrayList<POIFile>();
        final POIFSReader r = new POIFSReader();
        final POIFSReaderListener pfl = new POIFSReaderListener()
        {
            @Override
            public void processPOIFSReaderEvent(final POIFSReaderEvent event)
            {
                try
                {
                    /* Snapshot the stream's name, path and full contents
                     * into an in-memory POIFile. */
                    final POIFile f = new POIFile();
                    f.setName(event.getName());
                    f.setPath(event.getPath());
                    final InputStream in = event.getStream();
                    final ByteArrayOutputStream out =
                        new ByteArrayOutputStream();
                    Util.copy(in, out);
                    out.close();
                    f.setBytes(out.toByteArray());
                    files.add(f);
                }
                catch (IOException ex)
                {
                    /* The listener interface cannot throw checked
                     * exceptions; rethrow with the cause preserved. */
                    throw new RuntimeException(ex);
                }
            }
        };
        if (poiFiles == null)
            /* Register the listener for all POI files. */
            r.registerListener(pfl);
        else
            for (String poiFile : poiFiles)
                r.registerListener(pfl, poiFile);
        /* Read the POI filesystem. */
        final FileInputStream stream = new FileInputStream(poiFs);
        try {
            r.read(stream);
        } finally {
            stream.close();
        }
        /* toArray replaces the previous manual element-copy loop. */
        return files.toArray(new POIFile[files.size()]);
    }

    /**
     * <p>Reads all files from a POI filesystem which are property set
     * streams and returns them as an array of {@link POIFile} instances
     * holding the raw stream bytes (not parsed
     * {@link org.apache.poi.hpsf.PropertySet} objects).</p>
     *
     * @param poiFs The name of the POI filesystem as seen by the
     * operating system. (This is the "filename".)
     *
     * @return The property set streams. The elements are ordered in the
     * same way as the files in the POI filesystem.
     *
     * @exception FileNotFoundException if the file containing the POI
     * filesystem does not exist
     *
     * @exception IOException if an I/O exception occurs
     */
    public static POIFile[] readPropertySets(final File poiFs)
        throws FileNotFoundException, IOException
    {
        final List<POIFile> files = new ArrayList<POIFile>(7);
        final POIFSReader r = new POIFSReader();
        final POIFSReaderListener pfl = new POIFSReaderListener()
        {
            @Override
            public void processPOIFSReaderEvent(final POIFSReaderEvent event)
            {
                try
                {
                    final POIFile f = new POIFile();
                    f.setName(event.getName());
                    f.setPath(event.getPath());
                    final InputStream in = event.getStream();
                    /* Keep only streams that look like property sets. */
                    if (PropertySet.isPropertySetStream(in))
                    {
                        final ByteArrayOutputStream out =
                            new ByteArrayOutputStream();
                        Util.copy(in, out);
                        out.close();
                        f.setBytes(out.toByteArray());
                        files.add(f);
                    }
                }
                catch (Exception ex)
                {
                    throw new RuntimeException(ex);
                }
            }
        };
        /* Register the listener for all POI files. */
        r.registerListener(pfl);
        /* Read the POI filesystem. */
        final FileInputStream stream = new FileInputStream(poiFs);
        try {
            r.read(stream);
        } finally {
            stream.close();
        }
        /* toArray replaces the previous manual element-copy loop. */
        return files.toArray(new POIFile[files.size()]);
    }

    /**
     * <p>Prints the system properties to System.out.</p>
     */
    public static void printSystemProperties()
    {
        final Properties p = System.getProperties();
        /* Copy the property names into a list so they can be sorted. */
        final List<String> names = new ArrayList<String>(p.stringPropertyNames());
        Collections.sort(names);
        for (String name : names) {
            String value = p.getProperty(name);
            System.out.println(name + ": " + value);
        }
        System.out.println("Current directory: " +
                           System.getProperty("user.dir"));
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/pubsub/v1/pubsub.proto
package com.google.pubsub.v1;
/**
*
*
* <pre>
* Response for the `Seek` method (this response is empty).
* </pre>
*
* Protobuf type {@code google.pubsub.v1.SeekResponse}
*/
public final class SeekResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.pubsub.v1.SeekResponse)
SeekResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use SeekResponse.newBuilder() to construct.
private SeekResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor used by the parser and default instance; the message has no fields.
private SeekResponse() {}
// Returns fields that were present on the wire but not known to this generated class.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. SeekResponse declares no fields, so every tag
// read here is either 0 (end of message) or preserved as an unknown field.
private SeekResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 marks the end of the message.
done = true;
break;
default:
{
// Unrecognized field: keep its bytes so they round-trip on reserialization.
if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
// Always capture whatever unknown fields were read, even on error paths.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor plumbing: links this generated class to its entry in pubsub.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.pubsub.v1.PubsubProto
.internal_static_google_pubsub_v1_SeekResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.pubsub.v1.PubsubProto
.internal_static_google_pubsub_v1_SeekResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.pubsub.v1.SeekResponse.class,
com.google.pubsub.v1.SeekResponse.Builder.class);
}
// Memoized tri-state: -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields exist, so the message is always initialized.
memoizedIsInitialized = 1;
return true;
}
// Serialization: with no declared fields, only preserved unknown fields are written.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// memoizedSize of -1 means "not yet computed"; cache the result once known.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Two SeekResponse messages are equal when their unknown fields match
// (there are no declared fields to compare).
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.pubsub.v1.SeekResponse)) {
return super.equals(obj);
}
com.google.pubsub.v1.SeekResponse other = (com.google.pubsub.v1.SeekResponse) obj;
boolean result = true;
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
// memoizedHashCode of 0 means "not yet computed"; the hash is cached after first use.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Generated static parse entry points: one overload per supported input kind
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream; plain and
// length-delimited; with and without an extension registry).
public static com.google.pubsub.v1.SeekResponse parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.pubsub.v1.SeekResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.pubsub.v1.SeekResponse parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.pubsub.v1.SeekResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.pubsub.v1.SeekResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.pubsub.v1.SeekResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.pubsub.v1.SeekResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.pubsub.v1.SeekResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.pubsub.v1.SeekResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.pubsub.v1.SeekResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.pubsub.v1.SeekResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.pubsub.v1.SeekResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// Creates a fresh builder seeded from the singleton default instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated with the given prototype's state.
public static Builder newBuilder(com.google.pubsub.v1.SeekResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Skip the needless mergeFrom when this is the (empty) default instance.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response for the `Seek` method (this response is empty).
* </pre>
*
* Protobuf type {@code google.pubsub.v1.SeekResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.pubsub.v1.SeekResponse)
com.google.pubsub.v1.SeekResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.pubsub.v1.PubsubProto
.internal_static_google_pubsub_v1_SeekResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.pubsub.v1.PubsubProto
.internal_static_google_pubsub_v1_SeekResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.pubsub.v1.SeekResponse.class,
com.google.pubsub.v1.SeekResponse.Builder.class);
}
// Construct using com.google.pubsub.v1.SeekResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No field builders to eagerly create: SeekResponse declares no fields.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.pubsub.v1.PubsubProto
.internal_static_google_pubsub_v1_SeekResponse_descriptor;
}
@java.lang.Override
public com.google.pubsub.v1.SeekResponse getDefaultInstanceForType() {
return com.google.pubsub.v1.SeekResponse.getDefaultInstance();
}
// Builds and verifies required fields (trivially initialized for this type).
@java.lang.Override
public com.google.pubsub.v1.SeekResponse build() {
com.google.pubsub.v1.SeekResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.pubsub.v1.SeekResponse buildPartial() {
com.google.pubsub.v1.SeekResponse result = new com.google.pubsub.v1.SeekResponse(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return (Builder) super.clone();
}
// The reflective mutators below simply narrow the superclass return type.
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.pubsub.v1.SeekResponse) {
return mergeFrom((com.google.pubsub.v1.SeekResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.pubsub.v1.SeekResponse other) {
// Merging the default instance is a no-op.
if (other == com.google.pubsub.v1.SeekResponse.getDefaultInstance()) return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.pubsub.v1.SeekResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Keep whatever was parsed before the failure so the merge in the
// finally block preserves partial data, then rethrow as IOException.
parsedMessage = (com.google.pubsub.v1.SeekResponse) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFieldsProto3(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.pubsub.v1.SeekResponse)
}
// @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekResponse)
private static final com.google.pubsub.v1.SeekResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.pubsub.v1.SeekResponse();
}
// Returns the shared immutable default (empty) instance.
public static com.google.pubsub.v1.SeekResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless parser shared by all parseFrom overloads above.
private static final com.google.protobuf.Parser<SeekResponse> PARSER =
new com.google.protobuf.AbstractParser<SeekResponse>() {
@java.lang.Override
public SeekResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new SeekResponse(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<SeekResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SeekResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.pubsub.v1.SeekResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
package org.xwalk.core.extension;
import android.util.Log;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Map;
import org.xwalk.core.extension.ReflectionHelper.MemberInfo;
import org.xwalk.core.extension.ReflectionHelper.MemberType;
/**
 * Generates the JavaScript stub for an external Crosswalk extension from the
 * reflection metadata collected by a {@link ReflectionHelper} over the
 * extension's annotated Java members (properties, methods, constructors and
 * events). The produced script is evaluated in the renderer and forwards all
 * calls to the native side via the internal "jsStub" module.
 */
public class JsStubGenerator {
    public static String TAG = "JsStubGenerator";
    // Message routing targets understood by the native side.
    public static final String MSG_TO_OBJECT = "postMessageToObject";
    public static final String MSG_TO_CLASS = "postMessageToClass";
    public static final String MSG_TO_EXTENSION = "postMessageToExtension";
    // Reflection data of the extension's entry-point class.
    ReflectionHelper reflection;
    // Common preamble: loads the jsStub native module and creates the root stub.
    String jsHeader =
            "var v8tools = requireNative(\"v8tools\");\n" +
            "var jsStubModule = requireNative(\"jsStub\");\n" +
            "jsStubModule.init(extension, v8tools);\n" +
            "var jsStub = jsStubModule.jsStub;\n" +
            "var helper = jsStub.createRootStub(exports);\n";

    JsStubGenerator(ReflectionHelper extReflection) {
        reflection = extReflection;
    }

    /*
     * Typically, an external Crosswalk extension for Android will contain three parts:
     * 1. the java files of the native implementation
     * 2. a JavaScript stub for the extension
     * 3. a manifest.json file pointing out: extension name, main class of the extension,
     *    and the JavaScript stub file name.
     *
     * With the JavaScript stub auto-generation feature, you do not need to write a
     * JavaScript stub, nor to point out the JavaScript stub file in manifest.json.
     *
     * The parent class XWalkExternalExtension will generate the JavaScript stub if the
     * jsApi is found null or empty string in its constructor. The generated JavaScript
     * stub will also leverage an internal JavaScript module "jsStub"; please refer to
     * the following file for more information:
     * SOURCE/xwalk/extensions/renderer/xwalk_js_stub_wrapper.js
     *
     * What's more, this parent class also provides extra helper methods for descendants
     * to communicate with JavaScript in an object-oriented way, other than raw messages.
     * For example:
     *   invokeJsCallback()
     *   dispatchEvent()
     *   updateProperty()
     *   LogJs()
     *
     * This is the generation logic triggered if the jsApi is null or empty in the
     * constructor of XWalkExternalExtension.
     */
    String generate() {
        StringBuilder result = new StringBuilder();
        MemberInfo entry = reflection.getEntryPoint();
        if (entry != null) {
            result.append(generateEntryPoint(entry));
        }
        // Without an entry point the stub still needs the common preamble.
        if (result.length() == 0) {
            result.append(jsHeader);
        }
        if (reflection.getEventList() != null) {
            result.append(generateEventTarget(reflection));
        }
        // Emit glue code for every exported member except the entry point,
        // which was already handled above.
        Map<String, MemberInfo> members = reflection.getMembers();
        for (MemberInfo m : members.values()) {
            if (m.isEntryPoint) continue;
            switch (m.type) {
                case JS_PROPERTY:
                    result.append(generateProperty(MSG_TO_EXTENSION, m));
                    break;
                case JS_METHOD:
                    result.append(generateMethod(MSG_TO_EXTENSION, m, true));
                    break;
                case JS_CONSTRUCTOR:
                    result.append(generateConstructor(m, true));
                    break;
                default:
                    break;
            }
        }
        return result.append('\n').toString();
    }

    /**
     * Emits the code that exposes the extension's entry point on {@code exports}.
     * Returns "" when the entry point kind is not recognized.
     */
    String generateEntryPoint(MemberInfo entry) {
        // Entry point is a binding object: instantiate its prototype on exports.
        if (entry.type == MemberType.JS_PROPERTY) {
            Class<?> type = ((Field) (entry.accesser)).getType();
            String funcName = type.getSimpleName();
            return jsHeader + String.format("%s(exports, helper);\n", getPrototypeName(funcName));
        }
        // Entry point is a plain method: exports becomes that function.
        if (entry.type == MemberType.JS_METHOD) {
            return String.format("exports = %s;\n %s\n %s",
                    getInternalName(entry.jsName), jsHeader,
                    generateMethod(MSG_TO_EXTENSION, entry, false));
        }
        // Entry point is a constructor: exports becomes the constructor function.
        if (entry.type == MemberType.JS_CONSTRUCTOR) {
            return String.format("exports = %s;\n %s\n %s",
                    entry.jsName, jsHeader, generateConstructor(entry, false));
        }
        return "";
    }

    /**
     * Generates the member glue for a binding class.
     *
     * @return a two-element array: [0] instance members, [1] static members.
     */
    String[] classGenerator(ReflectionHelper targetReflect) {
        StringBuilder result = new StringBuilder();
        StringBuilder staticResult = new StringBuilder();
        if (targetReflect.getEventList() != null) {
            String eventStr = generateEventTarget(targetReflect);
            result.append(eventStr);
            staticResult.append(eventStr);
        }
        // @JsConstructor should always be used in the extension class, not in a
        // binding class, so constructor members are ignored here.
        Map<String, MemberInfo> members = targetReflect.getMembers();
        for (MemberInfo m : members.values()) {
            String msgType = m.isStatic ? MSG_TO_CLASS : MSG_TO_OBJECT;
            String memberStr;
            switch (m.type) {
                case JS_PROPERTY:
                    memberStr = generateProperty(msgType, m);
                    break;
                case JS_METHOD:
                    memberStr = generateMethod(msgType, m, true);
                    break;
                default:
                    memberStr = "";
                    break;
            }
            if (m.isStatic) {
                staticResult.append(memberStr);
            } else {
                result.append(memberStr);
            }
        }
        return new String[] {result.toString(), staticResult.toString()};
    }

    /** Emits a destroy() helper that detaches every exported member of a binding object. */
    String destroyBindingObject(ReflectionHelper targetReflect) {
        StringBuilder result = new StringBuilder("exports.destroy = function() {\n");
        for (String key : targetReflect.getMembers().keySet()) {
            result.append("delete exports[\"").append(key).append("\"];\n");
        }
        result.append("helper.destroy();\n");
        result.append("delete exports[\"__stubHelper\"];\n");
        result.append("delete exports[\"destroy\"];\n");
        result.append("};");
        return result.toString();
    }

    /** Turns {@code exports} into an event target and registers each supported event. */
    String generateEventTarget(ReflectionHelper targetReflect) {
        String[] eventList = targetReflect.getEventList();
        if (eventList == null || eventList.length == 0) {
            return "";
        }
        StringBuilder gen = new StringBuilder("jsStub.makeEventTarget(exports);\n");
        for (String e : eventList) {
            gen.append("helper.addEvent(\"").append(e).append("\");\n");
        }
        return gen.toString();
    }

    /** Emits a property definition routed through the given message channel. */
    String generateProperty(String msgType, MemberInfo m) {
        String name = m.jsName;
        return String.format(
                "jsStub.defineProperty(\"%s\", exports, \"%s\", %b);\n",
                msgType, name, m.isWritable);
    }

    /** Emits a method that returns a Promise, with optional argument/result wrappers. */
    String generatePromiseMethod(String msgType, MemberInfo mInfo) {
        String name = mInfo.jsName;
        String wrapArgs = mInfo.wrapArgs.length() > 0 ? mInfo.wrapArgs : "null";
        String wrapReturns = mInfo.wrapReturns.length() > 0 ? mInfo.wrapReturns : "null";
        return String.format(
                "jsStub.addMethodWithPromise(\"%s\", exports, \"%s\", %s, %s);\n",
                msgType, name, wrapArgs, wrapReturns);
    }

    /**
     * Builds the comma-separated JS argument list ("arg0_Type, arg1_Type, ...") for
     * a Java method. With a promise the trailing (callback) parameter is dropped.
     */
    String getArgString(Method m, boolean withPromise) {
        if (m == null) return "";
        Class<?>[] pTypes = m.getParameterTypes();
        StringBuilder jsArgs = new StringBuilder();
        int length = withPromise ? (pTypes.length - 1) : pTypes.length;
        for (int i = 0; i < length; ++i) {
            if (jsArgs.length() > 0) {
                jsArgs.append(", ");
            }
            jsArgs.append("arg").append(i).append("_").append(pTypes[i].getSimpleName());
        }
        return jsArgs.toString();
    }

    /**
     * Emits the stub function for a Java method. When {@code isMember} is true the
     * function is also attached to {@code exports} under its JS name.
     */
    String generateMethod(String msgType, MemberInfo mInfo, boolean isMember) {
        // Promise-returning methods get dedicated glue code.
        if (mInfo.withPromise) return generatePromiseMethod(msgType, mInfo);
        String name = mInfo.jsName;
        Method m = (Method) mInfo.accesser;
        String iName = getInternalName(name);
        String jsArgs = getArgString(m, mInfo.withPromise);
        // A non-void Java return type means the invocation is synchronous and its
        // result is passed back to the JavaScript caller.
        boolean isSync = !(m.getReturnType().equals(Void.TYPE));
        String funcBody = String.format(
                "function %s(%s) {\n" +
                ((isSync) ? " return " : " ") +
                "helper.invokeNative(\"%s\", \"%s\", [%s], %b);\n" +
                "};\n",
                iName, jsArgs, msgType, name, jsArgs, isSync);
        String memberStr = isMember ? String.format("exports[\"%s\"] = %s;\n", name, iName) : "";
        return funcBody + memberStr;
    }

    /** Internal (un-exported) name for a JS member. */
    String getInternalName(String name) {
        return "__" + name;
    }

    /** Name of the generated prototype-initializer function for a binding class. */
    String getPrototypeName(String funcName) {
        return "__" + funcName + "_prototype";
    }

    /**
     * Emits the stub for a @JsConstructor: a prototype initializer, the
     * constructor function itself (which asks the native side for an objectId),
     * and the static-member initializer.
     */
    String generateConstructor(MemberInfo mInfo, boolean isMember) {
        String name = mInfo.jsName;
        String protoFunc = getPrototypeName(name);
        String argStr = getArgString((Method) mInfo.accesser, false);
        ReflectionHelper targetReflect = reflection.getConstructorReflection(name);
        String[] classStr = classGenerator(targetReflect);
        String protoStr = String.format(
                "function %s(exports, helper){\n" + "%s\n" + "%s\n" + "}\n",
                protoFunc, classStr[0], destroyBindingObject(targetReflect));
        String self = String.format(
                "function %s(%s) {\n" +
                "var newObject = this;\n" +
                "var objectId =\n" +
                "Number(helper.invokeNative(\"%s\", \"+%s\", [%s], true));\n" +
                "if (!objectId) throw \"Error to create instance for constructor:%s.\";\n" +
                "var objectHelper = jsStub.getHelper(newObject, helper);\n" +
                "objectHelper.objectId = objectId;\n" +
                "objectHelper.constructorJsName = \"%s\";\n" +
                "objectHelper.registerLifecycleTracker();" +
                "%s(newObject, objectHelper);\n" +
                "helper.addBindingObject(objectId, newObject);}\n" +
                "helper.constructors[\"%s\"] = %s;\n",
                name, argStr, MSG_TO_EXTENSION, name, argStr, name, name,
                protoFunc, name, name);
        String staticStr = String.format(
                "(function(exports, helper){\n" +
                " helper.constructorJsName = \"%s\";\n" +
                "%s\n" +
                "})(%s, jsStub.getHelper(%s, helper));\n",
                name, classStr[1], name, name);
        String memberStr = isMember ? String.format("exports[\"%s\"] = %s;\n", name, name) : "";
        return protoStr + self + staticStr + memberStr;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.airavata.model.error;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
/**
* This exception is thrown for invalid authentication requests.
*
* message: contains the cause of the authorization failure.
*/
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
public class AuthenticationException extends TException implements org.apache.thrift.TBase<AuthenticationException, AuthenticationException._Fields>, java.io.Serializable, Cloneable, Comparable<AuthenticationException> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AuthenticationException");
private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)1);
// Maps each Thrift scheme type (standard/tuple) to the serializer factory for this struct.
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new AuthenticationExceptionStandardSchemeFactory());
schemes.put(TupleScheme.class, new AuthenticationExceptionTupleSchemeFactory());
}
private String message; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
MESSAGE((short)1, "message");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if it is not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // MESSAGE
return MESSAGE;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if it is not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
// Field metadata (name, requiredness, wire type) exposed to the Thrift runtime.
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.MESSAGE, new org.apache.thrift.meta_data.FieldMetaData("message", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(AuthenticationException.class, metaDataMap);
}
public AuthenticationException() {
}
public AuthenticationException(
String message)
{
this();
this.message = message;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public AuthenticationException(AuthenticationException other) {
if (other.isSetMessage()) {
this.message = other.message;
}
}
public AuthenticationException deepCopy() {
return new AuthenticationException(this);
}
@Override
public void clear() {
this.message = null;
}
public String getMessage() {
return this.message;
}
public void setMessage(String message) {
this.message = message;
}
public void unsetMessage() {
this.message = null;
}
/** Returns true if field message is set (has been assigned a value) and false otherwise */
public boolean isSetMessage() {
return this.message != null;
}
public void setMessageIsSet(boolean value) {
if (!value) {
this.message = null;
}
}
// Generic (reflective) field mutator used by the Thrift runtime.
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case MESSAGE:
if (value == null) {
unsetMessage();
} else {
setMessage((String)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case MESSAGE:
return getMessage();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case MESSAGE:
return isSetMessage();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof AuthenticationException)
return this.equals((AuthenticationException)that);
return false;
}
public boolean equals(AuthenticationException that) {
if (that == null)
return false;
// Fields compare equal when both are unset, or both are set and equal.
boolean this_present_message = true && this.isSetMessage();
boolean that_present_message = true && that.isSetMessage();
if (this_present_message || that_present_message) {
if (!(this_present_message && that_present_message))
return false;
if (!this.message.equals(that.message))
return false;
}
return true;
}
@Override
public int hashCode() {
List<Object> list = new ArrayList<Object>();
boolean present_message = true && (isSetMessage());
list.add(present_message);
if (present_message)
list.add(message);
return list.hashCode();
}
@Override
public int compareTo(AuthenticationException other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
// Unset fields sort before set fields, then values are compared.
lastComparison = Boolean.valueOf(isSetMessage()).compareTo(other.isSetMessage());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetMessage()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.message, other.message);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
// Serialization entry points: dispatch to the scheme matching the protocol.
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("AuthenticationException(");
boolean first = true;
sb.append("message:");
if (this.message == null) {
sb.append("null");
} else {
sb.append(this.message);
}
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
if (!isSetMessage()) {
throw new org.apache.thrift.protocol.TProtocolException("Required field 'message' is unset! Struct:" + toString());
}
// check for sub-struct validity
}
// Java serialization is implemented on top of the Thrift compact protocol.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class AuthenticationExceptionStandardSchemeFactory implements SchemeFactory {
public AuthenticationExceptionStandardScheme getScheme() {
return new AuthenticationExceptionStandardScheme();
}
}
// Standard scheme: self-describing field-by-field wire format.
private static class AuthenticationExceptionStandardScheme extends StandardScheme<AuthenticationException> {
public void read(org.apache.thrift.protocol.TProtocol iprot, AuthenticationException struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // MESSAGE
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.message = iprot.readString();
struct.setMessageIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
// Unknown fields are skipped for forward compatibility.
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, AuthenticationException struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.message != null) {
oprot.writeFieldBegin(MESSAGE_FIELD_DESC);
oprot.writeString(struct.message);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class AuthenticationExceptionTupleSchemeFactory implements SchemeFactory {
public AuthenticationExceptionTupleScheme getScheme() {
return new AuthenticationExceptionTupleScheme();
}
}
// Tuple scheme: compact positional format; required fields are written unconditionally.
private static class AuthenticationExceptionTupleScheme extends TupleScheme<AuthenticationException> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, AuthenticationException struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
oprot.writeString(struct.message);
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, AuthenticationException struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
struct.message = iprot.readString();
struct.setMessageIsSet(true);
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metron.management;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.ClassUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.metron.common.system.Clock;
import org.apache.metron.stellar.common.LambdaExpression;
import org.apache.metron.stellar.common.utils.ConversionUtils;
import org.apache.metron.stellar.common.utils.JSONUtils;
import org.apache.metron.stellar.dsl.Context;
import org.apache.metron.stellar.dsl.ParseException;
import org.apache.metron.stellar.dsl.Stellar;
import org.apache.metron.stellar.dsl.StellarFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import static java.lang.String.format;
import static org.apache.metron.stellar.dsl.Context.Capabilities.GLOBAL_CONFIG;
/**
* Defines the following Kafka-related functions available in Stellar.
*
* KAFKA_GET
* KAFKA_PUT
* KAFKA_TAIL
* KAFKA_FIND
* KAFKA_PROPS
*/
public class KafkaFunctions {
private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
/**
* The key for the property that defines the maximum amount of time
* to wait to receive messages.
*/
public static final String POLL_TIMEOUT_PROPERTY = "stellar.kafka.poll.timeout";
/**
* How long to wait on each poll request in milliseconds.
*
* <p>On each function call, there will likely be multiple poll requests, each
* waiting this period of time.
*/
private static final int DEFAULT_POLL_TIMEOUT = 500;
/**
* The key for the property that defines the maximum amount of time
* to wait to receive messages in milliseconds.
*/
public static final String MAX_WAIT_PROPERTY = "stellar.kafka.max.wait.millis";
/**
* The default max wait time in milliseconds.
*/
public static final int DEFAULT_MAX_WAIT = 5000;
/**
* The key for the global property that defines how a message is returned
* from the set of KAFKA functions.
*
* <p>simple - The result contains only the message value as a string.
* <p>rich - The result contains the message value, topic, partition, and offset.
*/
public static final String MESSAGE_VIEW_PROPERTY = "stellar.kafka.message.view";
/**
* An acceptable value for the 'stellar.kafka.message.view' property. The result
* provided will contain only the message value as a string.
*/
public static final String MESSAGE_VIEW_SIMPLE = "simple";
/**
* An acceptable value for the 'stellar.kafka.message.view' property.
*
* <p>Provides a view of each message with more detailed metadata beyond just the
* message value. The result provided will contain the message value, topic, partition,
* and offset.
*/
public static final String MESSAGE_VIEW_RICH = "rich";
/**
* The default set of Kafka properties.
*/
private static Properties defaultProperties = defaultKafkaProperties();
/**
* A clock to tell time.
*
* Allows any functions that depend on the system clock to be more readily tested.
*/
protected static Clock clock = new Clock();
/**
 * KAFKA_GET
 *
 * <p>Reads messages from a Kafka topic, resuming from the consumer group's
 * stored offset on each subsequent call.
 *
 * <p>Example: Read a single message.
 * <pre>
 * {@code
 * KAFKA_GET('topic')
 * }
 * </pre>
 *
 * <p>Example: Read 10 messages.
 * <pre>
 * {@code
 * KAFKA_GET('topic', 10)
 * }
 * </pre>
 *
 * <p>Example: Read from the beginning of the topic. Only effective on the first
 * retrieval; afterwards the stored consumer offset is used.
 * <pre>
 * {@code
 * KAFKA_GET('topic', 1, { "auto.offset.reset": "earliest" })
 * }
 * </pre>
 *
 * <p>The global property 'stellar.kafka.message.view' controls the result shape:
 * 'simple' (the default) returns just the message value; 'rich' returns a map
 * with the value plus topic, partition, offset, key, and timestamp.
 */
@Stellar(
    namespace = "KAFKA",
    name = "GET",
    description = "Retrieves messages from a Kafka topic. Subsequent calls will" +
        "continue retrieving messages sequentially from the original offset.",
    params = {
        "topic - The name of the Kafka topic",
        "count - The number of Kafka messages to retrieve",
        "config - Optional map of key/values that override any global properties."
    },
    returns = "The messages as a list of strings"
)
public static class KafkaGet implements StellarFunction {

  @Override
  public Object apply(List<Object> args, Context context) throws ParseException {
    // the topic name is the only required argument
    final String topic = getArg("topic", 0, String.class, args);

    // default to fetching a single message when no count is given
    final int count = (args.size() > 1) ? getArg("count", 1, Integer.class, args) : 1;

    // user-supplied property overrides are optional
    final Map<String, String> overrides = (args.size() > 2)
        ? getArg("overrides", 2, Map.class, args)
        : new HashMap<>();

    // assemble the effective Kafka configuration; never poll more than requested
    Properties props = buildKafkaProperties(overrides, context);
    props.put("max.poll.records", count);
    return doGet(topic, count, props);
  }

  /**
   * Polls a Kafka topic until enough messages arrive or the max wait elapses.
   *
   * @param topic The Kafka topic to read from.
   * @param count The maximum number of messages to return.
   * @param properties The effective function configuration.
   * @return The rendered messages; may be fewer than {@code count} on timeout.
   */
  private Object doGet(String topic, int count, Properties properties) {
    final int maxWait = getMaxWait(properties);
    final int pollTimeout = getPollTimeout(properties);
    final List<Object> results = new ArrayList<>();
    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {
      manualPartitionAssignment(topic, consumer);

      // keep polling until we have enough messages or run out of time
      final long start = clock.currentTimeMillis();
      long elapsed = 0L;
      while (results.size() < count && elapsed < maxWait) {
        for (ConsumerRecord<String, String> record : consumer.poll(pollTimeout)) {
          results.add(render(record, properties));
        }
        elapsed = clock.currentTimeMillis() - start;
        // persist the consumer offset so the next call resumes where this one stopped
        consumer.commitSync();
        LOG.debug("KAFKA_GET polled for messages; topic={}, count={}, waitTime={} ms",
            topic, results.size(), elapsed);
      }
    }
    return results;
  }

  @Override
  public void initialize(Context context) {
    // no initialization required
  }

  @Override
  public boolean isInitialized() {
    // no initialization required
    return true;
  }
}
/**
 * KAFKA_TAIL
 *
 * <p>Reads messages from a Kafka topic, always starting at the current end of
 * each partition (the most recently received data).
 *
 * <p>Example: Retrieve the latest message.
 * <pre>
 * {@code
 * KAFKA_TAIL('topic')
 * }
 * </pre>
 *
 * <p>Example: Retrieve 10 messages starting with the latest.
 * <pre>
 * {@code
 * KAFKA_TAIL('topic', 10)
 * }
 * </pre>
 *
 * <p>The global property 'stellar.kafka.message.view' controls the result shape:
 * 'simple' (the default) returns just the message value; 'rich' returns a map
 * with the value plus topic, partition, offset, key, and timestamp.
 */
@Stellar(
    namespace = "KAFKA",
    name = "TAIL",
    description = "Tails messages from a Kafka topic always starting with the most recently received message.",
    params = {
        "topic - The name of the Kafka topic",
        "count - The number of Kafka messages to retrieve",
        "config - Optional map of key/values that override any global properties."
    },
    returns = "The messages as a list of strings"
)
public static class KafkaTail implements StellarFunction {

  @Override
  public Object apply(List<Object> args, Context context) throws ParseException {
    // the topic name is required
    String topic = getArg("topic", 0, String.class, args);

    // optionally, how many messages to retrieve; defaults to one
    int count = 1;
    if (args.size() > 1) {
      count = getArg("count", 1, Integer.class, args);
    }

    // optionally, per-call property overrides
    Map<String, String> overrides = new HashMap<>();
    if (args.size() > 2) {
      overrides = getArg("overrides", 2, Map.class, args);
    }

    Properties props = buildKafkaProperties(overrides, context);
    props.put("max.poll.records", count);
    return doTail(topic, count, props);
  }

  /**
   * Seeks to the end of every partition and polls until enough messages
   * arrive or the max wait elapses.
   *
   * @param topic The name of the kafka topic.
   * @param count The maximum number of messages to return.
   * @param properties The effective function configuration.
   * @return The rendered messages; may be fewer than {@code count} on timeout.
   */
  private Object doTail(String topic, int count, Properties properties) {
    final int pollTimeout = getPollTimeout(properties);
    final int maxWait = getMaxWait(properties);
    List<Object> results = new ArrayList<>();
    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {
      // start at the tail of every partition so only new messages are seen
      consumer.seekToEnd(manualPartitionAssignment(topic, consumer));

      // keep polling until we have enough messages or run out of time
      long elapsed = 0L;
      final long start = clock.currentTimeMillis();
      while (results.size() < count && elapsed < maxWait) {
        for (ConsumerRecord<String, String> record : consumer.poll(pollTimeout)) {
          results.add(render(record, properties));
        }
        elapsed = clock.currentTimeMillis() - start;
        // persist the consumer offset for subsequent calls
        consumer.commitSync();
        LOG.debug("KAFKA_TAIL polled for messages; topic={}, count={}, waitTime={} ms",
            topic, results.size(), elapsed);
      }
    }
    return results;
  }

  @Override
  public void initialize(Context context) {
    // no initialization required
  }

  @Override
  public boolean isInitialized() {
    // no initialization required
    return true;
  }
}
/**
 * KAFKA_PUT
 *
 * <p>Sends messages to a Kafka topic.
 *
 * <p>Example: Put two messages on the topic 'topic'.
 * <pre>
 * {@code
 * KAFKA_PUT('topic', ["message1", "message2"])
 * }
 * </pre>
 *
 * <p>Example: Put a message on a topic and also define an alternative Kafka broker.
 * <pre>
 * {@code
 * KAFKA_PUT('topic', ["message1"], { "bootstrap.servers": "kafka-broker-1:6667" })
 * }
 * </pre>
 *
 */
@Stellar(
    namespace = "KAFKA",
    name = "PUT",
    description = "Sends messages to a Kafka topic. ",
    params = {
        "topic - The name of the Kafka topic.",
        "messages - A list of messages to write.",
        "config - An optional map of key/values that override any global properties."
    },
    returns = " "
)
public static class KafkaPut implements StellarFunction {

  @Override
  public Object apply(List<Object> args, Context context) throws ParseException {
    // required - the topic to write to
    String topic = ConversionUtils.convert(args.get(0), String.class);

    // accept either a single message or a list of messages
    List<String> messages;
    if (args.get(1) instanceof String) {
      String msg = getArg("message(s)", 1, String.class, args);
      messages = Collections.singletonList(msg);
    } else {
      messages = getArg("message(s)", 1, List.class, args);
    }

    // optional - property overrides provided by the user
    Map<String, String> overrides = new HashMap<>();
    if (args.size() > 2) {
      overrides = getArg("overrides", 2, Map.class, args);
    }

    // send the messages and render a view of what was written for the user
    Properties properties = buildKafkaProperties(overrides, context);
    List<RecordMetadata> records = putMessages(topic, messages, properties);
    return render(records, properties);
  }

  /**
   * Render a view of the {@link RecordMetadata} that resulted from writing
   * messages to Kafka.
   *
   * @param records Metadata for each message that was successfully written.
   * @param properties The function configuration properties.
   * @return In 'rich' view, a list of maps describing each write; otherwise
   *         the number of messages successfully written.
   */
  private Object render(List<RecordMetadata> records, Properties properties) {
    Object view;
    if (MESSAGE_VIEW_RICH.equals(getMessageView(properties))) {
      // build a 'rich' view of the messages that were written
      List<Object> responses = new ArrayList<>();
      for (RecordMetadata record : records) {
        Map<String, Object> richView = new HashMap<>();
        richView.put("topic", record.topic());
        richView.put("partition", record.partition());
        richView.put("offset", record.offset());
        richView.put("timestamp", record.timestamp());
        responses.add(richView);
      }
      // the rich view is a list of maps containing metadata about how each message was written
      view = responses;
    } else {
      // otherwise, the view is simply a count of the number of messages written
      view = CollectionUtils.size(records);
    }
    return view;
  }

  /**
   * Put messages to a Kafka topic.
   *
   * <p>Sends are issued asynchronously, then each response is awaited so that
   * the returned metadata reflects completed writes only.
   *
   * @param topic The topic to send messages to.
   * @param messages The messages to send.
   * @param properties The properties to use with Kafka.
   * @return Metadata about each record successfully written to Kafka. Failed
   *         or timed-out sends are logged and omitted from the result.
   */
  private List<RecordMetadata> putMessages(String topic, List<String> messages, Properties properties) {
    LOG.debug("KAFKA_PUT sending messages; topic={}, count={}", topic, messages.size());
    List<RecordMetadata> records = new ArrayList<>();
    try (KafkaProducer<String, String> producer = new KafkaProducer<>(properties)) {
      List<Future<RecordMetadata>> futures = new ArrayList<>();

      // send each message
      for (String msg : messages) {
        futures.add(producer.send(new ProducerRecord<>(topic, msg)));
      }

      // wait for the sends to complete; a failed send yields null which must
      // not be added, otherwise the rich render view would NPE on it
      for (Future<RecordMetadata> future : futures) {
        RecordMetadata record = waitForResponse(future, properties);
        if (record != null) {
          records.add(record);
        }
      }
      producer.flush();
    }
    return records;
  }

  /**
   * Wait for response to the message being sent.
   *
   * @param future The future for the message being sent.
   * @param properties The configuration properties.
   * @return Metadata about the record that was written to Kafka, or null if
   *         the send failed, timed out, or was interrupted.
   */
  private RecordMetadata waitForResponse(Future<RecordMetadata> future, Properties properties) {
    RecordMetadata record = null;
    int maxWait = getMaxWait(properties);
    try {
      // wait for the record and then render it for the user
      record = future.get(maxWait, TimeUnit.MILLISECONDS);
      LOG.debug("KAFKA_PUT message sent; topic={}, partition={}, offset={}",
          record.topic(), record.partition(), record.offset());
    } catch (InterruptedException e) {
      // restore the interrupt status so callers up the stack can observe it
      Thread.currentThread().interrupt();
      LOG.error("KAFKA_PUT message send failure", e);
    } catch (TimeoutException | ExecutionException e) {
      LOG.error("KAFKA_PUT message send failure", e);
    }
    return record;
  }

  @Override
  public void initialize(Context context) {
    // no initialization required
  }

  @Override
  public boolean isInitialized() {
    // no initialization required
    return true;
  }
}
/**
 * KAFKA_PROPS
 *
 * Returns the effective Kafka properties used by the other KAFKA_* functions
 * (KAFKA_GET, KAFKA_PUT, etc). The result is the layered combination of the
 * built-in defaults, the global configuration, and any caller overrides.
 *
 * Example: Retrieve the current Kafka properties.
 * KAFKA_PROPS()
 *
 * Example: Retrieve the current Kafka properties taking into account a set of overrides.
 * KAFKA_PROPS({ "max.poll.records": 1 })
 */
@Stellar(
    namespace = "KAFKA",
    name = "PROPS",
    description = "Retrieves the Kafka properties that are used by other KAFKA_* functions " +
        "like KAFKA_GET and KAFKA_PUT. The Kafka properties are compiled from a " +
        "set of default properties, the global properties, and any overrides.",
    params = { "config - An optional map of key/values that override any global properties." },
    returns = " "
)
public static class KafkaProps implements StellarFunction {

  @Override
  public Object apply(List<Object> args, Context context) throws ParseException {
    // overrides are the only (optional) argument
    final Map<String, String> overrides = args.isEmpty()
        ? new HashMap<>()
        : getArg("overrides", 0, Map.class, args);
    return buildKafkaProperties(overrides, context);
  }

  @Override
  public void initialize(Context context) {
    // no initialization required
  }

  @Override
  public boolean isInitialized() {
    // no initialization required
    return true;
  }
}
/**
 * KAFKA_FIND
 *
 * <p>Returns messages that satisfy a filter expression, scanning forward from
 * the latest offset.
 *
 * <p>Example: Retrieve a 'bro' message.
 * <pre>
 * {@code
 * KAFKA_FIND('topic', m -> MAP_GET('source.type', m) == 'bro')
 * }
 * </pre>
 *
 * <p>Example: Find 10 messages that contain geo-location data.
 * <pre>
 * {@code
 * KAFKA_FIND('topic', m -> MAP_EXISTS('geo', m), 10)
 * }
 * </pre>
 *
 * <p>The global property 'stellar.kafka.message.view' controls the result shape:
 * 'simple' (the default) returns just the message value; 'rich' returns a map
 * with the value plus topic, partition, offset, key, and timestamp.
 */
@Stellar(
    namespace = "KAFKA",
    name = "FIND",
    description = "Find messages that satisfy a given filter expression. Messages are filtered starting from " +
        "the latest offset.",
    params = {
        "topic - The name of the Kafka topic",
        "filter - A lambda expression that filters messages. Messages are presented as a map of fields to the expression.",
        "count - The number of Kafka messages to retrieve",
        "config - Optional map of key/values that override any global properties."
    },
    returns = "The messages as a list of strings"
)
public static class KafkaFind implements StellarFunction {

  @Override
  public Object apply(List<Object> args, Context context) throws ParseException {
    // required - the topic to scan and the filter to apply
    final String topic = getArg("topic", 0, String.class, args);
    final LambdaExpression filter = getArg("filter", 1, LambdaExpression.class, args);

    // optional - how many matches to collect; defaults to one
    final int count = (args.size() > 2) ? getArg("count", 2, Integer.class, args) : 1;

    // optional - per-call property overrides
    final Map<String, String> overrides = (args.size() > 3)
        ? getArg("overrides", 3, Map.class, args)
        : new HashMap<>();

    Properties props = buildKafkaProperties(overrides, context);
    // fetch more than 'count' per poll since most messages will be filtered out
    props.put("max.poll.records", 10 * count);
    return findMessages(topic, filter, count, props);
  }

  /**
   * Find messages in Kafka that satisfy a filter expression.
   *
   * @param topic The kafka topic.
   * @param filter The filter expression.
   * @param count The maximum number of messages to find.
   * @param properties Function configuration values.
   * @return The matching messages; may be fewer than {@code count} on timeout.
   */
  private List<Object> findMessages(String topic, LambdaExpression filter, int count, Properties properties) {
    final int pollTimeout = getPollTimeout(properties);
    final int maxWait = getMaxWait(properties);
    final List<Object> matches = new ArrayList<>();
    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {
      // only new messages are scanned; start at the tail of every partition
      consumer.seekToEnd(manualPartitionAssignment(topic, consumer));

      // keep polling until we have enough matches or run out of time
      long elapsed = 0L;
      final long start = clock.currentTimeMillis();
      while (matches.size() < count && elapsed < maxWait) {
        ConsumerRecords<String, String> batch = consumer.poll(pollTimeout);
        for (ConsumerRecord<String, String> record : batch) {
          // keep only messages that satisfy the filter expression
          if (isSatisfied(filter, record.value())) {
            matches.add(render(record, properties));
            // stop scanning this batch once we have enough
            if (matches.size() >= count) {
              break;
            }
          }
        }
        elapsed = clock.currentTimeMillis() - start;
        consumer.commitSync();
        LOG.debug("KAFKA_FIND polled for messages; topic={}, count={}, waitTime={} ms",
            topic, matches.size(), elapsed);
      }
    }
    return matches;
  }

  /**
   * Executes a given expression on a message.
   *
   * @param expr The filter expression to execute.
   * @param message The message that the expression is executed on.
   * @return Returns true, only if the expression returns true. If the expression
   *         returns false or fails to execute, false is returned.
   */
  public boolean isSatisfied(LambdaExpression expr, String message) {
    try {
      // transform the message to a map of fields and apply the filter to it
      Map<String, Object> fields = JSONUtils.INSTANCE.load(message, JSONUtils.MAP_SUPPLIER);
      Object out = expr.apply(Collections.singletonList(fields));
      if (out instanceof Boolean) {
        return (Boolean) out;
      }
      LOG.error("Expected boolean from filter expression, got {}", ClassUtils.getShortClassName(out, "null"));
    } catch (IOException e) {
      LOG.error("Unable to parse message", e);
    }
    return false;
  }

  @Override
  public void initialize(Context context) {
    // no initialization required
  }

  @Override
  public boolean isInitialized() {
    // no initialization required
    return true;
  }
}
/**
 * KAFKA_SEEK
 *
 * <p>Seeks to a specific offset and returns the message.
 *
 * <p>Example: Find the message in 'topic', partition 1, offset 1001.
 * <pre>
 * {@code
 * KAFKA_SEEK('topic', 1, 1001)
 * }
 * </pre>
 *
 * <p>By default, only the message value is returned. By setting the global property
 * 'stellar.kafka.message.view' = 'rich' the function will return additional Kafka metadata
 * including the topic, partition, offset, key, and timestamp contained in a map. Setting
 * this property value to 'simple' or simply not setting the property value, will result
 * in the default view behavior.
 */
@Stellar(
namespace = "KAFKA",
name = "SEEK",
description = "Seeks to an offset within a topic and returns the message.",
params = {
"topic - The name of the Kafka topic",
"partition - The partition identifier; starts at 0.",
"offset - The offset within the partition; starts at 0.",
"config - Optional map of key/values that override any global properties."
},
returns = "The message at the given offset, if the offset exists. Otherwise, returns null."
)
public static class KafkaSeek implements StellarFunction {
@Override
public Object apply(List<Object> args, Context context) throws ParseException {
// required - the topic, partition, and offset are all required
String topic = getArg("topic", 0, String.class, args);
int partition = getArg("partition", 1, Integer.class, args);
int offset = getArg("offset", 2, Integer.class, args);
// optional - property overrides provided by the user
Map<String, String> overrides = new HashMap<>();
if(args.size() > 3) {
overrides = getArg("overrides", 3, Map.class, args);
}
Properties properties = buildKafkaProperties(overrides, context);
return seek(topic, partition, offset, properties);
}
/**
 * Seeks to a specific partition/offset and returns the message found there.
 *
 * @param topic The kafka topic.
 * @param partition The partition identifier.
 * @param offset The offset within the given partition.
 * @param properties Function configuration values.
 * @return The rendered message at the given offset, or null if it was not
 *         found within the max wait time.
 */
private Object seek(String topic, int partition, int offset, Properties properties) {
final int pollTimeout = getPollTimeout(properties);
final int maxWait = getMaxWait(properties);
Object message = null;
try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {
// continue until we have the message or exceeded the max wait time
long wait = 0L;
final long start = clock.currentTimeMillis();
while(message == null && wait < maxWait) {
// re-assign and re-seek on every attempt so each poll starts at the requested offset
TopicPartition topar = new TopicPartition(topic, partition);
consumer.assign(Collections.singletonList(topar));
consumer.seek(topar, offset);
// poll kafka for messages
for(ConsumerRecord<String, String> record : consumer.poll(pollTimeout)) {
// kafka will attempt to be helpful and return a message, even if the actual offset does not exist
if(record.offset() == offset && record.partition() == partition) {
LOG.debug("KAFKA_SEEK found message; topic={}, partition={}, offset={}", topic, partition, offset);
message = render(record, properties);
}
}
// how long have we waited?
wait = clock.currentTimeMillis() - start;
if(LOG.isDebugEnabled() && message == null) {
LOG.debug("KAFKA_SEEK no message yet; topic={}, partition={}, offset={}, waitTime={} ms",
topic, partition, offset, wait);
}
}
}
return message;
}
@Override
public void initialize(Context context) {
// no initialization required
}
@Override
public boolean isInitialized() {
// no initialization required
return true;
}
}
/**
 * Renders a consumed Kafka record into the user-facing view.
 *
 * <p>The "stellar.kafka.message.view" property selects the shape: 'rich'
 * produces a map of value plus Kafka metadata; anything else produces just
 * the message value.
 *
 * @param record The Kafka record to render.
 * @param properties The function configuration properties.
 * @return The rendered view of the record.
 */
private static Object render(ConsumerRecord<String, String> record, Properties properties) {
  LOG.debug("Render message; topic={}, partition={}, offset={}",
      record.topic(), record.partition(), record.offset());
  if (!MESSAGE_VIEW_RICH.equals(getMessageView(properties))) {
    // simple view; just the message value
    return record.value();
  }
  // rich view; the value along with the record's metadata
  Map<String, Object> view = new HashMap<>();
  view.put("value", record.value());
  view.put("topic", record.topic());
  view.put("partition", record.partition());
  view.put("offset", record.offset());
  view.put("timestamp", record.timestamp());
  view.put("key", record.key());
  return view;
}
/**
 * Manually assigns all partitions in a topic to a consumer.
 *
 * @param topic The topic whose partitions will be assigned.
 * @param consumer The consumer to assign partitions to.
 * @return A set of topic-partitions that were manually assigned to the consumer.
 * @throws IllegalStateException If the topic has no partitions available,
 *         including when the topic does not exist.
 */
private static Set<TopicPartition> manualPartitionAssignment(String topic, KafkaConsumer<String, String> consumer) {
  // find all partitions for the topic; partitionsFor can return null when the
  // topic does not exist and auto-creation is disabled, so guard against NPE
  Set<TopicPartition> partitions = new HashSet<>();
  List<PartitionInfo> partitionInfo = consumer.partitionsFor(topic);
  if(partitionInfo != null) {
    for(PartitionInfo partition : partitionInfo) {
      partitions.add(new TopicPartition(topic, partition.partition()));
    }
  }
  if(partitions.isEmpty()) {
    throw new IllegalStateException(format("No partitions available for consumer assignment; topic=%s", topic));
  }
  // manually assign this consumer to each partition in the topic
  consumer.assign(partitions);
  return partitions;
}
/**
 * Assembles the set of Properties required by the Kafka client.
 *
 * Three layers are combined, each overriding the one before it: the built-in
 * defaults, any values from the global configuration, and finally any
 * user-defined overrides.
 *
 * @param overrides Property overrides provided by the user.
 * @param context The Stellar context.
 * @return The effective Kafka client properties.
 */
private static Properties buildKafkaProperties(Map<String, String> overrides, Context context) {
  // layer 1: the minimal built-in defaults
  Properties properties = new Properties();
  properties.putAll(defaultProperties);

  // layer 2: values from the global configuration, when available
  context.getCapability(GLOBAL_CONFIG, false)
      .ifPresent(g -> properties.putAll((Map<String, Object>) g));

  // layer 3: user-defined overrides win over everything else
  properties.putAll(overrides);
  return properties;
}
/**
 * Return the max wait time setting.
 *
 * @param properties The function configuration properties.
 * @return The max wait time in milliseconds.
 */
private static int getMaxWait(Properties properties) {
  Object value = properties.get(MAX_WAIT_PROPERTY);
  return (value == null) ? DEFAULT_MAX_WAIT : ConversionUtils.convert(value, Integer.class);
}
/**
 * Returns the poll timeout setting.
 *
 * <p>The maximum amount of time waited each time that Kafka is polled
 * for messages.
 *
 * @param properties The function configuration properties.
 * @return The poll timeout in milliseconds.
 */
private static int getPollTimeout(Properties properties) {
  Object value = properties.get(POLL_TIMEOUT_PROPERTY);
  return (value == null) ? DEFAULT_POLL_TIMEOUT : ConversionUtils.convert(value, Integer.class);
}
/**
 * Determines how Kafka messages should be rendered for the user.
 *
 * @param properties The function configuration properties.
 * @return How the Kafka messages should be rendered; defaults to the simple view.
 */
private static String getMessageView(Properties properties) {
  // Properties cannot hold null values, so a null get() means the key is absent
  Object value = properties.get(MESSAGE_VIEW_PROPERTY);
  return (value == null) ? MESSAGE_VIEW_SIMPLE : ConversionUtils.convert(value, String.class);
}
/**
 * Builds the minimal set of default Kafka parameters. Every one of these may
 * be overridden via the global configuration or per-call overrides.
 *
 * @return The default Kafka client properties.
 */
private static Properties defaultKafkaProperties() {
  Properties props = new Properties();
  props.put("bootstrap.servers", "localhost:9092");
  props.put("group.id", "kafka-functions-stellar");
  /*
   * What to do when there is no initial offset in Kafka or if the current
   * offset does not exist any more on the server (e.g. because that data has been deleted):
   *
   * "earliest": automatically reset the offset to the earliest offset
   * "latest": automatically reset the offset to the latest offset
   * "none": throw exception to the consumer if no previous offset is found or the consumer's group
   * anything else: throw exception to the consumer.
   */
  props.put("auto.offset.reset", "latest");
  // limits the number of messages read in a single poll request
  props.put("max.poll.records", 1);
  // string (de)serialization for both the consumer and the producer
  props.put("key.deserializer", StringDeserializer.class.getName());
  props.put("value.deserializer", StringDeserializer.class.getName());
  props.put("key.serializer", StringSerializer.class.getName());
  props.put("value.serializer", StringSerializer.class.getName());
  // defaults for this library's own settings: max wait, poll timeout, message view
  props.put(MAX_WAIT_PROPERTY, DEFAULT_MAX_WAIT);
  props.put(POLL_TIMEOUT_PROPERTY, DEFAULT_POLL_TIMEOUT);
  props.put(MESSAGE_VIEW_PROPERTY, MESSAGE_VIEW_SIMPLE);
  return props;
}
/**
 * Get an argument from a list of arguments.
 *
 * @param argName The name of the argument; used only in the error message.
 * @param index The index within the list of arguments.
 * @param clazz The type expected.
 * @param args All of the arguments.
 * @param <T> The type of the argument expected.
 * @return The argument at {@code index}, converted to {@code clazz}.
 */
public static <T> T getArg(String argName, int index, Class<T> clazz, List<Object> args) {
  if (args.size() <= index) {
    String msg = format("missing '%s'; expected at least %d argument(s), found %d",
        argName, index + 1, args.size());
    throw new IllegalArgumentException(msg);
  }
  return ConversionUtils.convert(args.get(index), clazz);
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.