gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.web;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileSystemTestHelper;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.MiniDFSNNTopology;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil;
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
import org.apache.hadoop.hdfs.web.resources.ExceptionHandler;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.ipc.StandbyException;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.Token;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.internal.util.reflection.Whitebox;
import org.mortbay.util.ajax.JSON;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.Response;
/**
 * Tests {@code WebHdfsFileSystem} against an HA-enabled {@code MiniDFSCluster}
 * with two NameNodes: basic failover, delegation-token renew/cancel across
 * failover, error propagation from a standby NameNode holding stale
 * credentials (HDFS-6475), write/read across failover, multi-namespace
 * configuration, and client retry while a NameNode is still starting up.
 */
public class TestWebHDFSForHA {
  // Logical nameservice name shared by both NameNodes in the topology below.
  private static final String LOGICAL_NAME = "minidfs";
  // webhdfs://minidfs — the logical URI that clients resolve to the active
  // NameNode via the failover configuration set up in each test.
  private static final URI WEBHDFS_URI = URI.create(WebHdfsConstants.WEBHDFS_SCHEME +
      "://" + LOGICAL_NAME);
  // One nameservice with two NameNodes (nn1, nn2), reused by every test.
  private static final MiniDFSNNTopology topo = new MiniDFSNNTopology()
      .addNameservice(new MiniDFSNNTopology.NSConf(LOGICAL_NAME).addNN(
          new MiniDFSNNTopology.NNConf("nn1")).addNN(
          new MiniDFSNNTopology.NNConf("nn2")));

  /**
   * A mkdirs issued through webhdfs must keep working after the active
   * NameNode is shut down and the standby takes over.
   */
  @Test
  public void testHA() throws IOException {
    Configuration conf = DFSTestUtil.newHAConfiguration(LOGICAL_NAME);
    MiniDFSCluster cluster = null;
    FileSystem fs = null;
    try {
      cluster = new MiniDFSCluster.Builder(conf).nnTopology(topo)
          .numDataNodes(0).build();

      HATestUtil.setFailoverConfigurations(cluster, conf, LOGICAL_NAME);
      cluster.waitActive();

      fs = FileSystem.get(WEBHDFS_URI, conf);
      cluster.transitionToActive(0);

      final Path dir = new Path("/test");
      Assert.assertTrue(fs.mkdirs(dir));

      // Fail over: kill nn0 and promote nn1; the same client instance must
      // transparently retry against the new active NameNode.
      cluster.shutdownNameNode(0);
      cluster.transitionToActive(1);

      final Path dir2 = new Path("/test2");
      Assert.assertTrue(fs.mkdirs(dir2));
    } finally {
      IOUtils.cleanup(null, fs);
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }

  /**
   * A delegation token obtained from nn0 must remain renewable and
   * cancelable through the logical URI after failover to nn1.
   */
  @Test
  public void testSecureHAToken() throws IOException, InterruptedException {
    Configuration conf = DFSTestUtil.newHAConfiguration(LOGICAL_NAME);
    // Force delegation tokens even without Kerberos, so the token code path
    // is exercised in this non-secure test environment.
    conf.setBoolean(DFSConfigKeys
        .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);

    MiniDFSCluster cluster = null;
    WebHdfsFileSystem fs = null;
    try {
      cluster = new MiniDFSCluster.Builder(conf).nnTopology(topo)
          .numDataNodes(0).build();

      HATestUtil.setFailoverConfigurations(cluster, conf, LOGICAL_NAME);
      cluster.waitActive();

      // Spy so we can verify renew/cancel are routed through this instance;
      // register it so Token.renew/cancel resolve to the same spy.
      fs = spy((WebHdfsFileSystem) FileSystem.get(WEBHDFS_URI, conf));
      FileSystemTestHelper.addFileSystemForTesting(WEBHDFS_URI, conf, fs);

      cluster.transitionToActive(0);
      Token<?> token = fs.getDelegationToken(null);

      // Fail over before renew/cancel; both must succeed against nn1.
      cluster.shutdownNameNode(0);
      cluster.transitionToActive(1);
      token.renew(conf);
      token.cancel(conf);
      verify(fs).renewDelegationToken(token);
      verify(fs).cancelDelegationToken(token);
    } finally {
      IOUtils.cleanup(null, fs);
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }

  /**
   * Regression test for HDFS-6475: when a standby NameNode holds stale
   * delegation-token credentials, the server-side ExceptionHandler must
   * unwrap the nested StandbyException so the client can fail over instead
   * of surfacing an opaque SecurityException.
   */
  @Test
  public void testClientFailoverWhenStandbyNNHasStaleCredentials()
      throws IOException {
    Configuration conf = DFSTestUtil.newHAConfiguration(LOGICAL_NAME);
    conf.setBoolean(DFSConfigKeys
        .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);

    MiniDFSCluster cluster = null;
    WebHdfsFileSystem fs = null;
    try {
      cluster = new MiniDFSCluster.Builder(conf).nnTopology(topo).numDataNodes(
          0).build();

      HATestUtil.setFailoverConfigurations(cluster, conf, LOGICAL_NAME);
      cluster.waitActive();
      fs = (WebHdfsFileSystem) FileSystem.get(WEBHDFS_URI, conf);

      cluster.transitionToActive(0);
      Token<?> token = fs.getDelegationToken(null);
      // Decode the token identifier so we can present it directly to the
      // (now standby) NameNode's secret manager below.
      final DelegationTokenIdentifier identifier = new DelegationTokenIdentifier();
      identifier.readFields(
          new DataInputStream(new ByteArrayInputStream(token.getIdentifier())));
      cluster.transitionToStandby(0);
      cluster.transitionToActive(1);
      // Deliberately use nn0's secret manager: nn0 is standby now, so its
      // credentials for the token are stale.
      final DelegationTokenSecretManager secretManager = NameNodeAdapter.getDtSecretManager(
          cluster.getNamesystem(0));
      ExceptionHandler eh = new ExceptionHandler();
      eh.initResponse(mock(HttpServletResponse.class));
      Response resp = null;
      try {
        secretManager.retrievePassword(identifier);
      } catch (IOException e) {
        // Mimic the UserProvider class logic (server side) by throwing
        // SecurityException here
        Assert.assertTrue(e instanceof SecretManager.InvalidToken);
        resp = eh.toResponse(new SecurityException(e));
      }
      // The Response (resp) below is what the server will send to client
      //
      // BEFORE HDFS-6475 fix, the resp.entity is
      //   {"RemoteException":{"exception":"SecurityException",
      //      "javaClassName":"java.lang.SecurityException",
      //      "message":"Failed to obtain user group information:
      //      org.apache.hadoop.security.token.SecretManager$InvalidToken:
      //        StandbyException"}}
      // AFTER the fix, the resp.entity is
      //   {"RemoteException":{"exception":"StandbyException",
      //      "javaClassName":"org.apache.hadoop.ipc.StandbyException",
      //      "message":"Operation category READ is not supported in
      //         state standby"}}
      //
      // Mimic the client side logic by parsing the response from server
      //
      Map<?, ?> m = (Map<?, ?>) JSON.parse(resp.getEntity().toString());
      RemoteException re = JsonUtilClient.toRemoteException(m);
      Exception unwrapped = re.unwrapRemoteException(StandbyException.class);
      Assert.assertTrue(unwrapped instanceof StandbyException);
    } finally {
      IOUtils.cleanup(null, fs);
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }

  /**
   * An output stream created before a failover must still be writable and
   * its data readable after the failover completes.
   */
  @Test
  public void testFailoverAfterOpen() throws IOException {
    Configuration conf = DFSTestUtil.newHAConfiguration(LOGICAL_NAME);
    // Point the default FS at the hdfs:// logical URI so datanode-side
    // operations resolve through the HA configuration as well.
    conf.set(FS_DEFAULT_NAME_KEY, HdfsConstants.HDFS_URI_SCHEME +
        "://" + LOGICAL_NAME);
    MiniDFSCluster cluster = null;
    FileSystem fs = null;
    final Path p = new Path("/test");
    final byte[] data = "Hello".getBytes();

    try {
      // One datanode is needed here because the test actually writes data.
      cluster = new MiniDFSCluster.Builder(conf).nnTopology(topo)
          .numDataNodes(1).build();

      HATestUtil.setFailoverConfigurations(cluster, conf, LOGICAL_NAME);
      cluster.waitActive();

      fs = FileSystem.get(WEBHDFS_URI, conf);
      cluster.transitionToActive(1);

      FSDataOutputStream out = fs.create(p);
      // Fail over from nn1 to nn0 while the stream is open.
      cluster.shutdownNameNode(1);
      cluster.transitionToActive(0);

      out.write(data);
      out.close();
      FSDataInputStream in = fs.open(p);
      byte[] buf = new byte[data.length];
      IOUtils.readFully(in, buf, 0, buf.length);
      Assert.assertArrayEquals(data, buf);
    } finally {
      IOUtils.cleanup(null, fs);
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }

  /**
   * With two HA namespaces configured (one real, one fake), the client must
   * resolve exactly the two NameNode addresses of its own namespace.
   */
  @Test
  public void testMultipleNamespacesConfigured() throws Exception {
    Configuration conf = DFSTestUtil.newHAConfiguration(LOGICAL_NAME);
    MiniDFSCluster cluster = null;
    WebHdfsFileSystem fs = null;

    try {
      cluster = new MiniDFSCluster.Builder(conf).nnTopology(topo)
              .numDataNodes(1).build();

      HATestUtil.setFailoverConfigurations(cluster, conf, LOGICAL_NAME);
      cluster.waitActive();
      // Add a second, fake HA namespace to the configuration; it must not
      // leak into the address resolution of the real one.
      DFSTestUtil.addHAConfiguration(conf, LOGICAL_NAME + "remote");
      DFSTestUtil.setFakeHttpAddresses(conf, LOGICAL_NAME + "remote");

      fs = (WebHdfsFileSystem)FileSystem.get(WEBHDFS_URI, conf);
      Assert.assertEquals(2, fs.getResolvedNNAddr().length);
    } finally {
      IOUtils.cleanup(null, fs);
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }

  /**
   * Make sure the WebHdfsFileSystem will retry based on RetriableException when
   * rpcServer is null in NamenodeWebHdfsMethods while NameNode starts up.
   */
  @Test (timeout=120000)
  public void testRetryWhileNNStartup() throws Exception {
    final Configuration conf = DFSTestUtil.newHAConfiguration(LOGICAL_NAME);
    MiniDFSCluster cluster = null;
    // Result handoff between the worker thread and this test thread,
    // guarded by synchronized(this) plus wait/notifyAll below.
    final Map<String, Boolean> resultMap = new HashMap<String, Boolean>();

    try {
      cluster = new MiniDFSCluster.Builder(conf).nnTopology(topo)
          .numDataNodes(0).build();

      HATestUtil.setFailoverConfigurations(cluster, conf, LOGICAL_NAME);
      cluster.waitActive();
      cluster.transitionToActive(0);

      final NameNode namenode = cluster.getNameNode(0);
      final NamenodeProtocols rpcServer = namenode.getRpcServer();
      // Simulate a NameNode that is still starting up: webhdfs requests see
      // a null rpcServer and must respond with a retriable error.
      Whitebox.setInternalState(namenode, "rpcServer", null);

      new Thread() {
        @Override
        public void run() {
          boolean result = false;
          FileSystem fs = null;
          try {
            fs = FileSystem.get(WEBHDFS_URI, conf);
            final Path dir = new Path("/test");
            result = fs.mkdirs(dir);
          } catch (IOException e) {
            result = false;
          } finally {
            IOUtils.cleanup(null, fs);
          }
          synchronized (TestWebHDFSForHA.this) {
            resultMap.put("mkdirs", result);
            TestWebHDFSForHA.this.notifyAll();
          }
        }
      }.start();

      // Give the worker a chance to hit the null rpcServer and start
      // retrying, then restore it so the retry can succeed.
      Thread.sleep(1000);
      Whitebox.setInternalState(namenode, "rpcServer", rpcServer);

      synchronized (this) {
        while (!resultMap.containsKey("mkdirs")) {
          this.wait();
        }
        Assert.assertTrue(resultMap.get("mkdirs"));
      }
    } finally {
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }
}
| |
package com.siu.android.volleyball;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
import com.android.volley.NetworkResponse;
import com.android.volley.Request;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.VolleyLog;
import com.siu.android.volleyball.exception.BallException;
import com.siu.android.volleyball.local.LocalRequestProcessor;
import com.siu.android.volleyball.network.NetworkRequestProcessor;
import com.siu.android.volleyball.response.ResponseListener;
/**
* Created by lukas on 8/29/13.
*/
/**
 * Base request adding "ball" semantics on top of Volley's {@link Request}:
 * a request may be processed locally, over the network, or both, and may
 * deliver an intermediate response (e.g. from local storage) followed by a
 * final one. Subclasses opt in by overriding {@link #shouldProcessLocal()} /
 * {@link #shouldProcessNetwork()} together with the matching processor
 * factory methods.
 *
 * @param <T> the parsed response type
 */
public abstract class BallRequest<T> extends Request<T> {

    /* Fields mirrored from the Request class (they are private there). */

    /** Requests taking longer than this are logged when marker logging is off. */
    protected static final long SLOW_REQUEST_THRESHOLD_MS = 3000;

    /** Per-request event log; null when marker logging is disabled. */
    protected final BallMarkerLog mEventLog = BallMarkerLog.ENABLED ? new BallMarkerLog() : null;

    /** Creation time (elapsedRealtime) used to detect slow requests when marker logging is off. */
    protected long mRequestBirthTime = 0;

    /** Queue this request belongs to; notified on {@link #finish(String)}. */
    protected BallRequestQueue mRequestQueue;

    /* Additional logic from Ball */

    /** Processor used when the request handles a local data source; set iff {@link #shouldProcessLocal()}. */
    protected LocalRequestProcessor<T> mLocalRequestProcessor;

    /** Processor used when the request goes to the network; set iff {@link #shouldProcessNetwork()}. */
    protected NetworkRequestProcessor<T> mNetworkRequestProcessor;

    /** Listener receiving intermediate and final responses; may be null if delivery methods are overridden. */
    protected ResponseListener mResponseListener;

    /**
     * Error from final response, stored and used later if intermediate response is still to be delivered:
     * If intermediate response is delivered after with no response, deliver the final error
     */
    private VolleyError mFinalResponseError;

    /** Whether the final response has already been delivered. */
    protected boolean mFinalResponseDelivered = false;

    /**
     * Intermediate response of the request has been delivered.
     * <p/>
     * Used in the executor delivery to consider only the 1st intermediate
     * response and ignore a 2nd one.
     */
    protected boolean mIntermediateResponseDelivered = false;

    /**
     * Request is finished and no more response should be delivered.
     * Volatile because it is used to determine if a marker should be added to the log,
     * and the value needs to be synchronized between all worker threads.
     */
    protected volatile boolean mFinished = false;

    /**
     * Creates a request without a response listener. Subclasses relying on
     * this constructor must override the deliver* methods.
     *
     * <p>NOTE: this constructor calls the overridable methods
     * {@link #shouldProcessLocal()} / {@link #shouldProcessNetwork()}; their
     * implementations must not depend on subclass constructor state.
     *
     * @throws BallException if a processing mode is enabled but its processor factory returns null
     */
    protected BallRequest(int method, String url, Response.ErrorListener errorListener) {
        super(method, url, errorListener);

        // Fail fast: a request that claims a processing mode must supply the processor.
        if (shouldProcessLocal()) {
            mLocalRequestProcessor = createLocalRequestProcessor();
            if (mLocalRequestProcessor == null) {
                throw new BallException("Request should process local but local request processor is not provided");
            }
        }

        if (shouldProcessNetwork()) {
            mNetworkRequestProcessor = createNetworkRequestProcessor();
            if (mNetworkRequestProcessor == null) {
                throw new BallException("Request should process network but network request processor is not provided");
            }
        }
    }

    /**
     * Creates a request with a response listener that receives intermediate
     * and final responses.
     */
    protected BallRequest(int method, String url, ResponseListener<T> responseListener, Response.ErrorListener errorListener) {
        this(method, url, errorListener);
        mResponseListener = responseListener;
    }

    /**
     * Not supported: ball requests distinguish intermediate and final delivery.
     *
     * @throws BallException always
     */
    @Override
    public void deliverResponse(T response) {
        throw new BallException("Illegal call to #deliverResponse(), you need to call the new #deliverIntermediate and #deliverFinal methods");
    }

    /* Override from parent because of return type or protected scope */

    /**
     * Not supported: use the ball-specific network parsing instead.
     *
     * @throws BallException always
     */
    @Override
    protected final Response<T> parseNetworkResponse(NetworkResponse response) {
        throw new BallException("Illegal call to #parseBallNetworkResponse, you need to call the new #parseBallNetworkResponse() method");
    }

    /** Re-exposed with widened visibility for the ball dispatchers. */
    @Override
    protected VolleyError parseNetworkError(VolleyError volleyError) {
        return super.parseNetworkError(volleyError);
    }

    /**
     * Adds a marker to the event log, or records the birth time when marker
     * logging is disabled. Safe to call from any worker thread.
     */
    public void addMarker(String tag) {
        // ignore adding marker to finished log because it can happen when markers are added from several parallel threads
        if (mFinished) {
            return;
        }

        if (BallMarkerLog.ENABLED) {
            try {
                mEventLog.add(tag, Thread.currentThread().getId());
            } catch (IllegalStateException e) {
                // ignore exception from adding marker to finished log because it can happen when
                // markers are added from several parallel threads
            }
        } else if (mRequestBirthTime == 0) {
            mRequestBirthTime = SystemClock.elapsedRealtime();
        }
    }

    /**
     * Marks this request as finished, notifies the queue and flushes the
     * event log (on the main thread if necessary).
     *
     * @throws BallException if the request is already finished
     */
    public void finish(final String tag) {
        if (mFinished) {
            throw new BallException("Trying to finish an already finished request");
        }

        mFinished = true;

        if (mRequestQueue != null) {
            mRequestQueue.finish(this);
        }

        if (BallMarkerLog.ENABLED) {
            final long threadId = Thread.currentThread().getId();

            if (Looper.myLooper() != Looper.getMainLooper()) {
                // If we finish marking off of the main thread, we need to
                // actually do it on the main thread to ensure correct ordering.
                Handler mainThread = new Handler(Looper.getMainLooper());
                mainThread.post(new Runnable() {
                    @Override
                    public void run() {
                        mEventLog.add(tag, threadId);
                        // Bug fix: a plain this.toString() here referred to the
                        // Runnable, not the request, so the log was finished
                        // with the wrong identity.
                        mEventLog.finish(BallRequest.this.toString());
                    }
                });
                return;
            }

            mEventLog.add(tag, threadId);
            mEventLog.finish(this.toString());
        } else {
            long requestTime = SystemClock.elapsedRealtime() - mRequestBirthTime;
            if (requestTime >= SLOW_REQUEST_THRESHOLD_MS) {
                VolleyLog.d("%d ms: %s", requestTime, this.toString());
            }
        }
    }

    /** Associates this request with its queue so {@link #finish(String)} can notify it. */
    public void setRequestQueue(BallRequestQueue requestQueue) {
        mRequestQueue = requestQueue;
    }

    /* Override to get local processing */

    /** @return true if this request processes a local data source; default false */
    public boolean shouldProcessLocal() {
        return false;
    }

    /** @return the local processor; must be non-null when {@link #shouldProcessLocal()} is true */
    protected LocalRequestProcessor<T> createLocalRequestProcessor() {
        return null;
    }

    /* Override to get network processing */

    /** @return true if this request goes to the network; default false */
    public boolean shouldProcessNetwork() {
        return false;
    }

    /**
     * @return the network processor; must be non-null when {@link #shouldProcessNetwork()} is true
     */
    // Parameterized return type (was raw) to match createLocalRequestProcessor().
    protected NetworkRequestProcessor<T> createNetworkRequestProcessor() {
        return null;
    }

    /* Complete request */

    /**
     * Delivers an intermediate (non-final) response to the listener.
     *
     * @throws BallException if no listener is set
     */
    public void deliverIntermediateResponse(T response, BallResponse.ResponseSource responseSource) {
        assertListenerExists();
        mResponseListener.onIntermediateResponse(response, responseSource);
    }

    /**
     * Delivers the final response to the listener.
     *
     * @throws BallException if no listener is set
     */
    public void deliverFinalResponse(T response, BallResponse.ResponseSource responseSource) {
        assertListenerExists();
        mResponseListener.onFinalResponse(response, responseSource);
    }

    /**
     * Signals that the final response is identical to the intermediate one
     * already delivered.
     *
     * @throws BallException if no listener is set
     */
    public void deliverIdenticalFinalResponse(BallResponse.ResponseSource responseSource) {
        assertListenerExists();
        mResponseListener.onFinalResponseIdenticalToIntermediate(responseSource);
    }

    /** @throws BallException if no response listener has been provided */
    protected void assertListenerExists() {
        if (mResponseListener == null) {
            throw new BallException("Listener is null, you need to provide one or override deliverIntermediateResponse and deliverFinalResponse");
        }
    }

    /** @return true if this request processes both locally and over the network */
    public boolean isCompleteRequest() {
        return shouldProcessLocal() && shouldProcessNetwork();
    }

    /**
     * Only request with both local and networking processing have intermediate response aspect
     *
     * @return true if the request can have intermediate response, false otherwise
     */
    public boolean canHaveIntermediateResponse() {
        return shouldProcessLocal() && shouldProcessNetwork();
    }

    /* Gets and sets */

    public LocalRequestProcessor<T> getLocalRequestProcessor() {
        return mLocalRequestProcessor;
    }

    public NetworkRequestProcessor<T> getNetworkRequestProcessor() {
        return mNetworkRequestProcessor;
    }

    public boolean isFinalResponseDelivered() {
        return mFinalResponseDelivered;
    }

    public void setFinalResponseDelivered(boolean finalResponseDelivered) {
        this.mFinalResponseDelivered = finalResponseDelivered;
    }

    public boolean isIntermediateResponseDelivered() {
        return mIntermediateResponseDelivered;
    }

    public void setIntermediateResponseDelivered(boolean intermediateResponseDelivered) {
        this.mIntermediateResponseDelivered = intermediateResponseDelivered;
    }

    public boolean isFinished() {
        return mFinished;
    }

    public void setFinished(boolean finished) {
        this.mFinished = finished;
    }

    public VolleyError getFinalResponseError() {
        return mFinalResponseError;
    }

    public void setFinalResponseError(VolleyError finalResponseError) {
        mFinalResponseError = finalResponseError;
    }
}
| |
/*
Copyright 2010-2021 BusinessCode GmbH, Germany
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package de.businesscode.sqlengine;
import java.io.StringWriter;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.AbstractMap;
import java.util.Set;
import java.util.Vector;
import java.util.stream.Collectors;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import org.apache.velocity.runtime.log.NullLogChute;
import de.businesscode.bcdui.binding.BindingItem;
import de.businesscode.bcdui.binding.Bindings;
import de.businesscode.bcdui.binding.StandardBindingSet;
import de.businesscode.sqlengine.context.BindingSetContextObject;
import de.businesscode.sqlengine.context.BindingsContextObject;
import de.businesscode.sqlengine.context.BindingsLookupContextObject;
import de.businesscode.sqlengine.context.ParamsContextObject;
/**
 * The SQL Engine can transform sql fragments containing references to BindingSets into pure sql,
 * resolving table and column names.
 *
 * To resolve the given binding set/group names to the right concrete BindingSets, it parses the given sql twice:
 * - In phase 1 it will just collect the binding set name - binding item names combination,
 * using BindingsLookupContextObject and BindingSetLookupContextObject
 * - In phase 2 it does then know the concrete BindingSet and BindingItem and can output the appropriate table and column names,
 * using BindingsContextObject and BindingSetContextObject
 */
public class SQLEngine {
  // Lazily created, reused for every evaluation (see getVelocityEngine()).
  private VelocityEngine engine;

  // Accumulated across transform() calls on this instance.
  private final Set<String> requestedBindingSets = new HashSet<>();
  private final Set<StandardBindingSet> resultingBindingSets = new HashSet<>();
  private final List<BindingItem> selectedBindigItemsInOrder = new LinkedList<>();
  private final List<BindingItem> allBindigItemsInOrder = new LinkedList<>();

  /**
   * Getter for a list of all BindingItems used in this sql and mentioned before the table name.
   */
  public List<BindingItem> getSelectedBindigItemsInOrder() {
    return selectedBindigItemsInOrder;
  }

  /**
   * Getter for a list of all BindingItems used in this sql.
   */
  public List<BindingItem> getAllBindigItemsInOrder() {
    return allBindigItemsInOrder;
  }

  /**
   * Returns the 1-based position of the first BindingItem with the given id,
   * or -1 if it does not occur.
   */
  public int getIndex(String bindingItem) {
    int position = 1;
    for (BindingItem candidate : allBindigItemsInOrder) {
      if (candidate.getId().equals(bindingItem)) {
        return position;
      }
      position++;
    }
    return -1;
  }

  /**
   * Getter for the BindingSets which were chosen based on the BindingSet name and BindingItems.
   */
  public Set<StandardBindingSet> getResultingBindingSets() {
    return resultingBindingSets;
  }

  /**
   * Getter for the BindingSet names requested by this sql, each paired with an empty string.
   */
  public Set<Map.Entry<String, String>> getRequestedBindingSetNames() {
    Set<Map.Entry<String, String>> names = new HashSet<>();
    for (String bsName : requestedBindingSets) {
      names.add(new AbstractMap.SimpleImmutableEntry<>(bsName, ""));
    }
    return names;
  }

  public SQLEngine() {
  }

  /**
   * Transforms the sql using the default Bindings instance and no extra properties.
   *
   * @param sql the sql fragment with BindingSet references
   * @return the transformed sql
   */
  public String transform(String sql) {
    return transform(sql, (Map<String, Object>) null);
  }

  /**
   * Transforms the sql using the default Bindings instance.
   *
   * @param sql the sql fragment with BindingSet references
   * @param additionalProps extra values made available to the template, may be null
   * @return the transformed sql
   */
  public String transform(String sql, Map<String, Object> additionalProps) {
    final Bindings bindings;
    try {
      bindings = Bindings.getInstance();
    } catch (Exception e) {
      throw new RuntimeException("Unable to get the Bindings instance", e);
    }
    return transform(sql, bindings, additionalProps);
  }

  /**
   * Transforms the sql against the given Bindings with no extra properties.
   *
   * @param sql the sql fragment with BindingSet references
   * @param bindings the Bindings to resolve against
   * @return the transformed sql
   */
  public String transform(String sql, Bindings bindings) {
    return transform(sql, bindings, null);
  }

  /**
   * Transforms the sql against the given Bindings, exposing any additional
   * properties to the template, and records which BindingSets/BindingItems
   * were used (see the getters of this class).
   *
   * @param sql the sql fragment with BindingSet references
   * @param bindings the Bindings to resolve against
   * @param additionalProps extra values made available to the template, may be null
   * @return the transformed sql
   */
  public String transform(String sql, Bindings bindings, Map<String, Object> additionalProps)
  {
    // Phase 1: only record which binding set / binding item combinations occur.
    BindingsLookupContextObject lookup = lookupBindingsReferences(sql);

    VelocityContext context = new VelocityContext();
    BindingsContextObject bindingsContextObject = new BindingsContextObject(bindings, lookup);
    context.put("bindings", bindingsContextObject);

    // Phase 2: evaluate again, now emitting the real table and column names.
    if (additionalProps != null) {
      for (Map.Entry<String, Object> prop : additionalProps.entrySet()) {
        context.put(prop.getKey(), prop.getValue());
      }
    }

    StringWriter out = new StringWriter();
    getVelocityEngine().evaluate(context, out, "sql", sql);

    // Expose what was used to callers interested in the resolution result.
    Map<String, BindingSetContextObject> used = bindingsContextObject.getUsedBindings();
    requestedBindingSets.addAll(used.keySet());
    for (BindingSetContextObject bsContext : used.values()) {
      resultingBindingSets.add(bsContext.getBindingSet());
    }
    for (BindingSetContextObject bsContext : used.values()) {
      selectedBindigItemsInOrder.addAll(bsContext.getSelectedBindingItemsInOrder());
    }
    for (BindingSetContextObject bsContext : used.values()) {
      allBindigItemsInOrder.addAll(bsContext.getAllBindingItemsInOrder());
    }

    return out.toString();
  }

  /**
   * simple transform parameters into multiplicities of actual question marks ('?')
   * and store the substituted unqualified names
   *
   * @param sql input string
   * @param substitutes List that will contain the substitutions. Unchecked for null.
   * @param multiplicities map the keywords to number of substitutions needed. If null,
   *        number of substitutions is consistently 1
   * @param keyword pretext that is used for look up of values to transform. If null, 'params' is used.
   * @param separator currently unused by this implementation -- TODO confirm intent
   * @return the input string with occurrences like '$params.some_name_or_other' substituted by '?'
   *         (or by '?,?,?' depending on the multiplicity map) and the List of the 'some_name_or_other'
   *         in the substitutes List.
   */
  public String transformParams(
      String sql, final Map<String, Integer> multiplicities,
      String keyword, String separator,
      Vector<String> substitutes) {
    // Fall back to the default lookup keyword when none was given.
    String effectiveKeyword = (keyword == null || keyword.isEmpty()) ? "params" : keyword;

    VelocityContext context = new VelocityContext();
    ParamsContextObject questionMarks = new ParamsContextObject(multiplicities);
    context.put(effectiveKeyword, questionMarks);

    StringWriter out = new StringWriter();
    getVelocityEngine().evaluate(context, out, "sql", sql);

    substitutes.addAll(questionMarks.getRequestedKeys());
    return out.toString();
  }

  /**
   * Phase 1: evaluate the sql once purely to discover which binding set name /
   * binding item combinations it references. The textual output is discarded.
   *
   * @param sql the sql fragment with BindingSet references
   * @return the lookup context object holding the discovered combinations
   */
  private BindingsLookupContextObject lookupBindingsReferences(String sql) {
    VelocityContext context = new VelocityContext();
    BindingsLookupContextObject bindingsObject = new BindingsLookupContextObject();
    context.put("bindings", bindingsObject);

    StringWriter discarded = new StringWriter();
    getVelocityEngine().evaluate(context, discarded, "sql", sql);

    return bindingsObject;
  }

  /**
   * @return the lazily initialized VelocityEngine, configured without logging
   */
  private VelocityEngine getVelocityEngine() {
    if (engine == null) {
      engine = new VelocityEngine();
      // Suppress Velocity's own log output.
      engine.setProperty(VelocityEngine.RUNTIME_LOG_LOGSYSTEM, new NullLogChute());
      engine.init();
    }
    return engine;
  }
}
| |
/*
* Copyright (c) 2014 Spotify AB.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.docker.client;
import com.google.common.io.CharStreams;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.spotify.docker.client.messages.Container;
import com.spotify.docker.client.messages.ContainerConfig;
import com.spotify.docker.client.messages.ContainerCreation;
import com.spotify.docker.client.messages.ContainerExit;
import com.spotify.docker.client.messages.ContainerInfo;
import com.spotify.docker.client.messages.HostConfig;
import com.spotify.docker.client.messages.Image;
import com.spotify.docker.client.messages.ImageInfo;
import com.spotify.docker.client.messages.Info;
import com.spotify.docker.client.messages.ProgressMessage;
import com.spotify.docker.client.messages.RemovedImage;
import com.spotify.docker.client.messages.Version;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.http.conn.socket.ConnectionSocketFactory;
import org.apache.http.conn.socket.PlainConnectionSocketFactory;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.glassfish.hk2.api.MultiException;
import org.glassfish.jersey.apache.connector.ApacheClientProperties;
import org.glassfish.jersey.apache.connector.ApacheConnectorProvider;
import org.glassfish.jersey.client.ClientConfig;
import org.glassfish.jersey.client.ClientProperties;
import org.glassfish.jersey.client.RequestEntityProcessing;
import org.glassfish.jersey.jackson.JacksonFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.InterruptedIOException;
import java.io.StringWriter;
import java.net.SocketTimeoutException;
import java.net.URI;
import java.net.URLEncoder;
import java.nio.file.Path;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
import javax.ws.rs.ProcessingException;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.ResponseProcessingException;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.GenericType;
import javax.ws.rs.core.Response;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Maps.newHashMap;
import static com.spotify.docker.client.CompressedDirectory.delete;
import static com.spotify.docker.client.ObjectMapperProvider.objectMapper;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.concurrent.TimeUnit.SECONDS;
import static javax.ws.rs.HttpMethod.DELETE;
import static javax.ws.rs.HttpMethod.GET;
import static javax.ws.rs.HttpMethod.POST;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE;
import static javax.ws.rs.core.MediaType.APPLICATION_OCTET_STREAM_TYPE;
/**
 * A {@link DockerClient} implementation backed by a Jersey/Apache-HttpClient
 * JAX-RS client talking to the Docker remote API ({@value #VERSION}).
 *
 * <p>Supports http, https (optionally with client certificates) and unix
 * socket URIs. Instances hold two JAX-RS clients and a thread pool and must be
 * {@link #close() closed} when no longer needed.
 */
public class DefaultDockerClient implements DockerClient, Closeable {

  /** Remote API version prefix prepended to every request path. */
  private static final String VERSION = "v1.12";

  private static final Logger log = LoggerFactory.getLogger(DefaultDockerClient.class);

  /** Read-timeout value meaning "wait forever". */
  public static final long NO_TIMEOUT = 0;

  private static final long DEFAULT_CONNECT_TIMEOUT_MILLIS = SECONDS.toMillis(5);
  private static final long DEFAULT_READ_TIMEOUT_MILLIS = SECONDS.toMillis(30);

  // Docker container names: an optional leading '/' followed by word chars or '-'.
  private static final Pattern CONTAINER_NAME_PATTERN = Pattern.compile("/?[a-zA-Z0-9_-]+");

  private static final GenericType<List<Container>> CONTAINER_LIST =
      new GenericType<List<Container>>() {};
  private static final GenericType<List<Image>> IMAGE_LIST =
      new GenericType<List<Image>>() {};
  private static final GenericType<List<RemovedImage>> REMOVED_IMAGE_LIST =
      new GenericType<List<RemovedImage>>() {};

  // Used only to give each client's threads a distinguishable name prefix.
  private static final AtomicInteger CLIENT_COUNTER = new AtomicInteger();

  // Daemon threads + exiting executor so pending requests never block JVM shutdown.
  private final ExecutorService executor = MoreExecutors.getExitingExecutorService(
      (ThreadPoolExecutor) Executors.newCachedThreadPool(
          new ThreadFactoryBuilder()
              .setDaemon(true)
              .setNameFormat("docker-client-" + CLIENT_COUNTER.incrementAndGet() + "-%d")
              .build()));

  private final Client client;
  private final Client noTimeoutClient;
  private final URI uri;

  /**
   * Create a new client with default configuration.
   *
   * @param uri The docker rest api uri.
   */
  public DefaultDockerClient(final String uri) {
    // "unix:///var/run/docker.sock" is not a valid hierarchical URI; rewrite it
    // to have a host so URI.create() accepts it.
    this(URI.create(uri.replaceAll("^unix:///", "unix://localhost/")));
  }

  /**
   * Create a new client with default configuration.
   *
   * @param uri The docker rest api uri.
   */
  public DefaultDockerClient(final URI uri) {
    this(new Builder().uri(uri));
  }

  /**
   * Create a new client with default configuration.
   *
   * @param uri The docker rest api uri.
   * @param dockerCertificates The certificates to use for HTTPS.
   */
  public DefaultDockerClient(final URI uri, final DockerCertificates dockerCertificates) {
    this(new Builder().uri(uri).dockerCertificates(dockerCertificates));
  }

  /**
   * Create a new client using the configuration of the builder.
   */
  private DefaultDockerClient(final Builder builder) {
    URI originalUri = checkNotNull(builder.uri, "uri");
    if ((builder.dockerCertificates != null) && !originalUri.getScheme().equals("https")) {
      throw new IllegalArgumentException("https URI must be provided to use certificates");
    }

    if (originalUri.getScheme().equals("unix")) {
      this.uri = UnixConnectionSocketFactory.sanitizeUri(originalUri);
    } else {
      this.uri = originalUri;
    }

    // BUG FIX: build a fresh ClientConfig per instance instead of mutating a
    // shared static one. Jersey's ClientConfig is mutable and its fluent
    // methods modify the receiver, so configuring a shared constant here would
    // leak this builder's timeouts and connection manager into every other
    // DefaultDockerClient.
    final ClientConfig config = new ClientConfig(
        ObjectMapperProvider.class,
        JacksonFeature.class,
        LogsResponseReader.class,
        ProgressResponseReader.class)
        .connectorProvider(new ApacheConnectorProvider())
        .property(ClientProperties.CONNECT_TIMEOUT, (int) builder.connectTimeoutMillis)
        .property(ClientProperties.READ_TIMEOUT, (int) builder.readTimeoutMillis)
        .property(ApacheClientProperties.CONNECTION_MANAGER,
            new PoolingHttpClientConnectionManager(getSchemeRegistry(builder)));

    this.client = ClientBuilder.newClient(config);

    // ApacheConnector doesn't respect per-request timeout settings.
    // Workaround: create this client with infinite read timeout,
    // and use it for waitContainer and stopContainer.
    this.noTimeoutClient = ClientBuilder.newBuilder()
        .withConfig(config)
        .property(ClientProperties.READ_TIMEOUT, (int) NO_TIMEOUT)
        .build();
  }

  /** Builds the scheme→socket-factory registry (https/http/unix) for the pool. */
  private Registry<ConnectionSocketFactory> getSchemeRegistry(final Builder builder) {
    final SSLConnectionSocketFactory https;
    if (builder.dockerCertificates == null) {
      https = SSLConnectionSocketFactory.getSocketFactory();
    } else {
      https = new SSLConnectionSocketFactory(builder.dockerCertificates.sslContext(),
          builder.dockerCertificates.hostnameVerifier());
    }
    return RegistryBuilder
        .<ConnectionSocketFactory>create()
        .register("https", https)
        .register("http", PlainConnectionSocketFactory.getSocketFactory())
        .register("unix", new UnixConnectionSocketFactory(builder.uri))
        .build();
  }

  @Override
  public void close() {
    executor.shutdownNow();
    client.close();
    // BUG FIX: the infinite-read-timeout client owns its own resources and was
    // previously never closed, leaking connections on every client shutdown.
    noTimeoutClient.close();
  }

  @Override
  public String ping() throws DockerException, InterruptedException {
    // _ping is served at the server root, not under the versioned prefix.
    final WebTarget resource = client.target(uri).path("_ping");
    return request(GET, String.class, resource, resource.request());
  }

  @Override
  public Version version() throws DockerException, InterruptedException {
    final WebTarget resource = resource().path("version");
    return request(GET, Version.class, resource, resource.request(APPLICATION_JSON_TYPE));
  }

  @Override
  public Info info() throws DockerException, InterruptedException {
    final WebTarget resource = resource().path("info");
    return request(GET, Info.class, resource, resource.request(APPLICATION_JSON_TYPE));
  }

  @Override
  public List<Container> listContainers(final ListContainersParam... params)
      throws DockerException, InterruptedException {
    WebTarget resource = resource()
        .path("containers").path("json");
    for (ListContainersParam param : params) {
      resource = resource.queryParam(param.name(), param.value());
    }
    return request(GET, CONTAINER_LIST, resource, resource.request(APPLICATION_JSON_TYPE));
  }

  @Override
  public List<Image> listImages(ListImagesParam... params)
      throws DockerException, InterruptedException {
    WebTarget resource = resource()
        .path("images").path("json");
    final Map<String, String> filters = newHashMap();
    for (ListImagesParam param : params) {
      if (param instanceof ListImagesFilterParam) {
        filters.put(param.name(), param.value());
      } else {
        resource = resource.queryParam(param.name(), param.value());
      }
    }

    // If filters were specified, we must put them in a JSON object and pass them using the
    // 'filters' query param like this: filters={"dangling":["true"]}
    try {
      if (!filters.isEmpty()) {
        final StringWriter writer = new StringWriter();
        final JsonGenerator generator = objectMapper().getFactory().createGenerator(writer);
        generator.writeStartObject();
        for (Map.Entry<String, String> entry : filters.entrySet()) {
          generator.writeArrayFieldStart(entry.getKey());
          generator.writeString(entry.getValue());
          generator.writeEndArray();
        }
        generator.writeEndObject();
        generator.close();
        // We must URL encode the string, otherwise Jersey chokes on the double-quotes in the json.
        final String encoded = URLEncoder.encode(writer.toString(), UTF_8.name());
        resource = resource.queryParam("filters", encoded);
      }
    } catch (IOException e) {
      throw new DockerException(e);
    }
    return request(GET, IMAGE_LIST, resource, resource.request(APPLICATION_JSON_TYPE));
  }

  @Override
  public ContainerCreation createContainer(final ContainerConfig config)
      throws DockerException, InterruptedException {
    return createContainer(config, null);
  }

  @Override
  public ContainerCreation createContainer(final ContainerConfig config,
                                           final String name)
      throws DockerException, InterruptedException {
    WebTarget resource = resource()
        .path("containers").path("create");

    if (name != null) {
      checkArgument(CONTAINER_NAME_PATTERN.matcher(name).matches(),
                    "Invalid container name: \"%s\"", name);
      resource = resource.queryParam("name", name);
    }

    log.info("Creating container with ContainerConfig: {}", config);

    try {
      return request(POST, ContainerCreation.class, resource, resource
          .request(APPLICATION_JSON_TYPE), Entity.json(config));
    } catch (DockerRequestException e) {
      switch (e.status()) {
        case 404:
          // A 404 here means the requested image doesn't exist.
          throw new ImageNotFoundException(config.image(), e);
        default:
          throw e;
      }
    }
  }

  @Override
  public void startContainer(final String containerId)
      throws DockerException, InterruptedException {
    startContainer(containerId, HostConfig.builder().build());
  }

  @Override
  public void startContainer(final String containerId, final HostConfig hostConfig)
      throws DockerException, InterruptedException {
    checkNotNull(containerId, "containerId");
    checkNotNull(hostConfig, "hostConfig");

    log.info("Starting container with HostConfig: {}", hostConfig);

    try {
      final WebTarget resource = resource()
          .path("containers").path(containerId).path("start");
      request(POST, resource, resource
          .request(APPLICATION_JSON_TYPE)
          // Buffer the entity so the connector can set Content-Length instead
          // of chunking, which some daemons mishandle on this endpoint.
          .property(ClientProperties.REQUEST_ENTITY_PROCESSING,
                    RequestEntityProcessing.BUFFERED),
          Entity.json(hostConfig));
    } catch (DockerRequestException e) {
      switch (e.status()) {
        case 404:
          throw new ContainerNotFoundException(containerId, e);
        default:
          throw e;
      }
    }
  }

  @Override
  public void restartContainer(String containerId) throws DockerException, InterruptedException {
    restartContainer(containerId, 10);
  }

  @Override
  public void restartContainer(String containerId, int secondsToWaitBeforeRestart)
      throws DockerException, InterruptedException {
    checkNotNull(containerId, "containerId");
    // NOTE: the previous checkNotNull on the primitive int parameter was a
    // no-op (it autoboxes and can never be null) and has been removed.
    try {
      final WebTarget resource = resource().path("containers").path(containerId)
          .path("restart")
          .queryParam("t", String.valueOf(secondsToWaitBeforeRestart));
      request(POST, resource, resource.request());
    } catch (WebApplicationException e) {
      switch (e.getResponse().getStatus()) {
        case 404:
          throw new ContainerNotFoundException(containerId, e);
        default:
          throw new DockerException(e);
      }
    }
  }

  @Override
  public void killContainer(final String containerId) throws DockerException, InterruptedException {
    try {
      final WebTarget resource = resource().path("containers").path(containerId).path("kill");
      request(POST, resource, resource.request());
    } catch (WebApplicationException e) {
      switch (e.getResponse().getStatus()) {
        case 404:
          throw new ContainerNotFoundException(containerId, e);
        default:
          throw new DockerException(e);
      }
    }
  }

  @Override
  public void stopContainer(final String containerId, final int secondsToWaitBeforeKilling)
      throws DockerException, InterruptedException {
    try {
      // Uses the no-timeout client: the daemon blocks up to 't' seconds.
      final WebTarget resource = noTimeoutResource()
          .path("containers").path(containerId).path("stop")
          .queryParam("t", String.valueOf(secondsToWaitBeforeKilling));
      request(POST, resource, resource.request());
    } catch (WebApplicationException e) {
      switch (e.getResponse().getStatus()) {
        case 304: // already stopped, so we're cool
          return;
        case 404:
          throw new ContainerNotFoundException(containerId, e);
        default:
          throw new DockerException(e);
      }
    }
  }

  @Override
  public ContainerExit waitContainer(final String containerId)
      throws DockerException, InterruptedException {
    try {
      // Wait forever — hence the no-timeout client.
      final WebTarget resource = noTimeoutResource()
          .path("containers").path(containerId).path("wait");
      return request(POST, ContainerExit.class, resource,
                     resource.request(APPLICATION_JSON_TYPE));
    } catch (DockerRequestException e) {
      switch (e.status()) {
        case 404:
          throw new ContainerNotFoundException(containerId, e);
        default:
          throw e;
      }
    }
  }

  @Override
  public void removeContainer(final String containerId)
      throws DockerException, InterruptedException {
    removeContainer(containerId, false);
  }

  @Override
  public void removeContainer(final String containerId, final boolean removeVolumes)
      throws DockerException, InterruptedException {
    try {
      final WebTarget resource = resource()
          .path("containers").path(containerId);
      request(DELETE, resource, resource
          .queryParam("v", String.valueOf(removeVolumes))
          .request(APPLICATION_JSON_TYPE));
    } catch (WebApplicationException e) {
      switch (e.getResponse().getStatus()) {
        case 404:
          throw new ContainerNotFoundException(containerId);
        default:
          throw new DockerException(e);
      }
    }
  }

  @Override
  public InputStream exportContainer(String containerId)
      throws DockerException, InterruptedException {
    final WebTarget resource = resource()
        .path("containers").path(containerId).path("export");
    return request(GET, InputStream.class, resource,
                   resource.request(APPLICATION_OCTET_STREAM_TYPE));
  }

  @Override
  public InputStream copyContainer(String containerId, String path)
      throws DockerException, InterruptedException {
    final WebTarget resource = resource()
        .path("containers").path(containerId).path("copy");

    // Internal JSON object; not worth it to create class for this
    JsonNodeFactory nf = JsonNodeFactory.instance;
    final JsonNode params = nf.objectNode().set("Resource", nf.textNode(path));

    return request(POST, InputStream.class, resource,
                   resource.request(APPLICATION_OCTET_STREAM_TYPE),
                   Entity.json(params));
  }

  @Override
  public ContainerInfo inspectContainer(final String containerId)
      throws DockerException, InterruptedException {
    try {
      final WebTarget resource = resource().path("containers").path(containerId).path("json");
      return request(GET, ContainerInfo.class, resource, resource.request(APPLICATION_JSON_TYPE));
    } catch (DockerRequestException e) {
      switch (e.status()) {
        case 404:
          throw new ContainerNotFoundException(containerId, e);
        default:
          throw e;
      }
    }
  }

  @Override
  public void pull(final String image) throws DockerException, InterruptedException {
    pull(image, new LoggingPullHandler(image));
  }

  @Override
  public void pull(final String image, final ProgressHandler handler)
      throws DockerException, InterruptedException {
    final ImageRef imageRef = new ImageRef(image);

    WebTarget resource = resource().path("images").path("create");

    resource = resource.queryParam("fromImage", imageRef.getImage());
    if (imageRef.getTag() != null) {
      resource = resource.queryParam("tag", imageRef.getTag());
    }

    // try-with-resources guarantees the progress stream is closed even if the
    // handler throws mid-pull.
    try (ProgressStream pull = request(POST, ProgressStream.class, resource,
                                       resource.request(APPLICATION_JSON_TYPE))) {
      pull.tail(handler, POST, resource.getUri());
    }
  }

  @Override
  public void push(final String image) throws DockerException, InterruptedException {
    push(image, new LoggingPushHandler(image));
  }

  @Override
  public void push(final String image, final ProgressHandler handler)
      throws DockerException, InterruptedException {
    final ImageRef imageRef = new ImageRef(image);

    WebTarget resource =
        resource().path("images").path(imageRef.getImage()).path("push");
    if (imageRef.getTag() != null) {
      resource = resource.queryParam("tag", imageRef.getTag());
    }

    // the docker daemon requires that the X-Registry-Auth header is specified
    // with a non-empty string even if your registry doesn't use authentication
    try (ProgressStream push =
             request(POST, ProgressStream.class, resource,
                     resource.request(APPLICATION_JSON_TYPE).header("X-Registry-Auth", "null"))) {
      push.tail(handler, POST, resource.getUri());
    }
  }

  @Override
  public void tag(final String image, final String name)
      throws DockerException, InterruptedException {
    final ImageRef imageRef = new ImageRef(name);

    WebTarget resource =
        resource().path("images").path(image).path("tag");

    resource = resource.queryParam("repo", imageRef.getImage());
    if (imageRef.getTag() != null) {
      resource = resource.queryParam("tag", imageRef.getTag());
    }

    try {
      request(POST, resource, resource.request());
    } catch (DockerRequestException e) {
      switch (e.status()) {
        case 404:
          throw new ImageNotFoundException(image, e);
        default:
          throw e;
      }
    }
  }

  @Override
  public String build(final Path directory, final BuildParameter... params)
      throws DockerException, InterruptedException, IOException {
    return build(directory, null, new LoggingBuildHandler(), params);
  }

  @Override
  public String build(final Path directory, final String name, final BuildParameter... params)
      throws DockerException, InterruptedException, IOException {
    return build(directory, name, new LoggingBuildHandler(), params);
  }

  @Override
  public String build(final Path directory, final ProgressHandler handler,
                      final BuildParameter... params)
      throws DockerException, InterruptedException, IOException {
    return build(directory, null, handler, params);
  }

  /**
   * Builds an image from the given directory, streaming progress to
   * {@code handler}, and returns the id of the built image (or {@code null}
   * if the daemon never reported one).
   */
  @Override
  public String build(final Path directory, final String name, final ProgressHandler handler,
                      final BuildParameter... params)
      throws DockerException, InterruptedException, IOException {
    checkNotNull(handler, "handler");

    WebTarget resource = resource().path("build");

    for (final BuildParameter param : params) {
      resource = resource.queryParam(param.queryParam, String.valueOf(param.value));
    }
    if (name != null) {
      resource = resource.queryParam("t", name);
    }

    // The build context is uploaded as a tarball of the directory; the
    // temporary archive is always removed in the finally block below.
    final File compressedDirectory = CompressedDirectory.create(directory);

    try (ProgressStream build = request(POST, ProgressStream.class, resource,
                                        resource.request(APPLICATION_JSON_TYPE),
                                        Entity.entity(compressedDirectory, "application/tar"))) {
      String imageId = null;
      while (build.hasNextMessage(POST, resource.getUri())) {
        final ProgressMessage message = build.nextMessage(POST, resource.getUri());
        final String id = message.buildImageId();
        if (id != null) {
          // Keep the latest reported id; the final one wins.
          imageId = id;
        }
        handler.progress(message);
      }
      return imageId;
    } finally {
      delete(compressedDirectory);
    }
  }

  @Override
  public ImageInfo inspectImage(final String image) throws DockerException, InterruptedException {
    try {
      final WebTarget resource = resource().path("images").path(image).path("json");
      return request(GET, ImageInfo.class, resource, resource.request(APPLICATION_JSON_TYPE));
    } catch (DockerRequestException e) {
      switch (e.status()) {
        case 404:
          throw new ImageNotFoundException(image, e);
        default:
          throw e;
      }
    }
  }

  @Override
  public List<RemovedImage> removeImage(String image)
      throws DockerException, InterruptedException {
    return removeImage(image, false, false);
  }

  @Override
  public List<RemovedImage> removeImage(String image, boolean force, boolean noPrune)
      throws DockerException, InterruptedException {
    try {
      final WebTarget resource = resource().path("images").path(image)
          .queryParam("force", String.valueOf(force))
          .queryParam("noprune", String.valueOf(noPrune));
      return request(DELETE, REMOVED_IMAGE_LIST, resource, resource.request(APPLICATION_JSON_TYPE));
    } catch (WebApplicationException e) {
      switch (e.getResponse().getStatus()) {
        case 404:
          throw new ImageNotFoundException(image);
        default:
          throw new DockerException(e);
      }
    }
  }

  @Override
  public LogStream logs(final String containerId, final LogsParameter... params)
      throws DockerException, InterruptedException {
    WebTarget resource = resource()
        .path("containers").path(containerId).path("logs");

    for (final LogsParameter param : params) {
      // Each LogsParameter is a boolean flag keyed by its lower-cased name.
      resource = resource.queryParam(param.name().toLowerCase(Locale.ROOT), String.valueOf(true));
    }

    try {
      return request(GET, LogStream.class, resource,
                     resource.request("application/vnd.docker.raw-stream"));
    } catch (DockerRequestException e) {
      switch (e.status()) {
        case 404:
          throw new ContainerNotFoundException(containerId);
        default:
          throw e;
      }
    }
  }

  /** Root web target for normal (timeout-bounded) requests. */
  private WebTarget resource() {
    return client.target(uri).path(VERSION);
  }

  /** Root web target for requests that may legitimately block forever. */
  private WebTarget noTimeoutResource() {
    return noTimeoutClient.target(uri).path(VERSION);
  }

  private <T> T request(final String method, final GenericType<T> type,
                        final WebTarget resource, final Invocation.Builder request)
      throws DockerException, InterruptedException {
    try {
      return request.async().method(method, type).get();
    } catch (ExecutionException | MultiException e) {
      throw propagate(method, resource, e);
    }
  }

  private <T> T request(final String method, final Class<T> clazz,
                        final WebTarget resource, final Invocation.Builder request)
      throws DockerException, InterruptedException {
    try {
      return request.async().method(method, clazz).get();
    } catch (ExecutionException | MultiException e) {
      throw propagate(method, resource, e);
    }
  }

  private <T> T request(final String method, final Class<T> clazz,
                        final WebTarget resource, final Invocation.Builder request,
                        final Entity<?> entity)
      throws DockerException, InterruptedException {
    try {
      return request.async().method(method, entity, clazz).get();
    } catch (ExecutionException | MultiException e) {
      throw propagate(method, resource, e);
    }
  }

  private void request(final String method,
                       final WebTarget resource,
                       final Invocation.Builder request)
      throws DockerException, InterruptedException {
    try {
      request.async().method(method).get();
    } catch (ExecutionException | MultiException e) {
      throw propagate(method, resource, e);
    }
  }

  private void request(final String method,
                       final WebTarget resource,
                       final Invocation.Builder request,
                       final Entity<?> entity)
      throws DockerException, InterruptedException {
    try {
      request.async().method(method, entity).get();
    } catch (ExecutionException | MultiException e) {
      throw propagate(method, resource, e);
    }
  }

  /**
   * Translates a transport-level failure into the appropriate typed exception.
   * Always throws; the declared return type only lets callers write
   * {@code throw propagate(...)} so the compiler sees the branch terminate.
   */
  private RuntimeException propagate(final String method, final WebTarget resource,
                                     final Exception e)
      throws DockerException, InterruptedException {
    Throwable cause = e.getCause();

    // Sometimes e is a org.glassfish.hk2.api.MultiException
    // which contains the cause we're actually interested in.
    // So we unpack it here.
    // BUG FIX: guard against a null cause before unwrapping a further level,
    // which previously threw NullPointerException and masked the real error.
    if ((e instanceof MultiException) && (cause != null)) {
      cause = cause.getCause();
    }

    Response response = null;
    if (cause instanceof ResponseProcessingException) {
      response = ((ResponseProcessingException) cause).getResponse();
    } else if (cause instanceof WebApplicationException) {
      response = ((WebApplicationException) cause).getResponse();
    } else if ((cause instanceof ProcessingException) && (cause.getCause() != null)) {
      // For a ProcessingException, The exception message or nested Throwable cause SHOULD contain
      // additional information about the reason of the processing failure.
      cause = cause.getCause();
    }

    if (response != null) {
      throw new DockerRequestException(method, resource.getUri(), response.getStatus(),
                                       message(response), cause);
    } else if ((cause instanceof SocketTimeoutException) ||
               (cause instanceof ConnectTimeoutException)) {
      throw new DockerTimeoutException(method, resource.getUri(), e);
    } else if ((cause instanceof InterruptedIOException)
               || (cause instanceof InterruptedException)) {
      throw new InterruptedException("Interrupted: " + method + " " + resource);
    } else {
      throw new DockerException(e);
    }
  }

  /**
   * Best-effort read of an error response body; returns {@code null} if the
   * body cannot be read.
   */
  private String message(final Response response) {
    // BUG FIX: close the reader (and with it the underlying entity stream) so
    // the pooled HTTP connection is released instead of leaking.
    try (InputStreamReader reader =
             new InputStreamReader(response.readEntity(InputStream.class), UTF_8)) {
      return CharStreams.toString(reader);
    } catch (IOException ignore) {
      // Deliberately best-effort: a missing message is acceptable here.
      return null;
    }
  }

  /**
   * Create a new {@link DefaultDockerClient} builder.
   */
  public static Builder builder() {
    return new Builder();
  }

  /** Fluent builder for {@link DefaultDockerClient} instances. */
  public static class Builder {

    private URI uri;
    private long connectTimeoutMillis = DEFAULT_CONNECT_TIMEOUT_MILLIS;
    private long readTimeoutMillis = DEFAULT_READ_TIMEOUT_MILLIS;
    private DockerCertificates dockerCertificates;

    public URI uri() {
      return uri;
    }

    public Builder uri(final URI uri) {
      this.uri = uri;
      return this;
    }

    public Builder uri(final String uri) {
      return uri(URI.create(uri));
    }

    public long connectTimeoutMillis() {
      return connectTimeoutMillis;
    }

    public Builder connectTimeoutMillis(final long connectTimeoutMillis) {
      this.connectTimeoutMillis = connectTimeoutMillis;
      return this;
    }

    public long readTimeoutMillis() {
      return readTimeoutMillis;
    }

    public Builder readTimeoutMillis(final long readTimeoutMillis) {
      this.readTimeoutMillis = readTimeoutMillis;
      return this;
    }

    public DockerCertificates dockerCertificates() {
      return dockerCertificates;
    }

    public Builder dockerCertificates(final DockerCertificates dockerCertificates) {
      this.dockerCertificates = dockerCertificates;
      return this;
    }

    public DefaultDockerClient build() {
      return new DefaultDockerClient(this);
    }
  }
}
| |
/*
* Copyright 2013 Gunnar Kappei.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.lidoSchema;
/**
* A document containing one recordWrap(@http://www.lido-schema.org) element.
*
* This is a complex type.
*/
public interface RecordWrapDocument extends org.apache.xmlbeans.XmlObject
{
public static final org.apache.xmlbeans.SchemaType type = (org.apache.xmlbeans.SchemaType)
org.apache.xmlbeans.XmlBeans.typeSystemForClassLoader(RecordWrapDocument.class.getClassLoader(), "schemaorg_apache_xmlbeans.system.s6E28D279B6C224D74769DB8B98AF1665").resolveHandle("recordwrapcddfdoctype");
/**
* Gets the "recordWrap" element
*/
org.lidoSchema.RecordWrapDocument.RecordWrap getRecordWrap();
/**
* Sets the "recordWrap" element
*/
void setRecordWrap(org.lidoSchema.RecordWrapDocument.RecordWrap recordWrap);
/**
* Appends and returns a new empty "recordWrap" element
*/
org.lidoSchema.RecordWrapDocument.RecordWrap addNewRecordWrap();
/**
* An XML recordWrap(@http://www.lido-schema.org).
*
* This is a complex type.
*/
public interface RecordWrap extends org.apache.xmlbeans.XmlObject
{
public static final org.apache.xmlbeans.SchemaType type = (org.apache.xmlbeans.SchemaType)
org.apache.xmlbeans.XmlBeans.typeSystemForClassLoader(RecordWrap.class.getClassLoader(), "schemaorg_apache_xmlbeans.system.s6E28D279B6C224D74769DB8B98AF1665").resolveHandle("recordwrap35b6elemtype");
/**
* Gets a List of "recordID" elements
*/
java.util.List<org.lidoSchema.IdentifierComplexType> getRecordIDList();
/**
* Gets array of all "recordID" elements
* @deprecated
*/
@Deprecated
org.lidoSchema.IdentifierComplexType[] getRecordIDArray();
/**
* Gets ith "recordID" element
*/
org.lidoSchema.IdentifierComplexType getRecordIDArray(int i);
/**
* Returns number of "recordID" element
*/
int sizeOfRecordIDArray();
/**
* Sets array of all "recordID" element
*/
void setRecordIDArray(org.lidoSchema.IdentifierComplexType[] recordIDArray);
/**
* Sets ith "recordID" element
*/
void setRecordIDArray(int i, org.lidoSchema.IdentifierComplexType recordID);
/**
* Inserts and returns a new empty value (as xml) as the ith "recordID" element
*/
org.lidoSchema.IdentifierComplexType insertNewRecordID(int i);
/**
* Appends and returns a new empty value (as xml) as the last "recordID" element
*/
org.lidoSchema.IdentifierComplexType addNewRecordID();
/**
* Removes the ith "recordID" element
*/
void removeRecordID(int i);
/**
* Gets the "recordType" element
*/
org.lidoSchema.ConceptComplexType getRecordType();
/**
* Sets the "recordType" element
*/
void setRecordType(org.lidoSchema.ConceptComplexType recordType);
/**
* Appends and returns a new empty "recordType" element
*/
org.lidoSchema.ConceptComplexType addNewRecordType();
/**
* Gets a List of "recordSource" elements
*/
java.util.List<org.lidoSchema.RecordWrapDocument.RecordWrap.RecordSource> getRecordSourceList();
/**
* Gets array of all "recordSource" elements
* @deprecated
*/
@Deprecated
org.lidoSchema.RecordWrapDocument.RecordWrap.RecordSource[] getRecordSourceArray();
/**
* Gets ith "recordSource" element
*/
org.lidoSchema.RecordWrapDocument.RecordWrap.RecordSource getRecordSourceArray(int i);
/**
* Returns number of "recordSource" element
*/
int sizeOfRecordSourceArray();
/**
* Sets array of all "recordSource" element
*/
void setRecordSourceArray(org.lidoSchema.RecordWrapDocument.RecordWrap.RecordSource[] recordSourceArray);
/**
* Sets ith "recordSource" element
*/
void setRecordSourceArray(int i, org.lidoSchema.RecordWrapDocument.RecordWrap.RecordSource recordSource);
/**
* Inserts and returns a new empty value (as xml) as the ith "recordSource" element
*/
org.lidoSchema.RecordWrapDocument.RecordWrap.RecordSource insertNewRecordSource(int i);
/**
* Appends and returns a new empty value (as xml) as the last "recordSource" element
*/
org.lidoSchema.RecordWrapDocument.RecordWrap.RecordSource addNewRecordSource();
/**
* Removes the ith "recordSource" element
*/
void removeRecordSource(int i);
/**
* Gets a List of "recordRights" elements
*/
java.util.List<org.lidoSchema.RecordWrapDocument.RecordWrap.RecordRights> getRecordRightsList();
/**
* Gets array of all "recordRights" elements
* @deprecated
*/
@Deprecated
org.lidoSchema.RecordWrapDocument.RecordWrap.RecordRights[] getRecordRightsArray();
/**
* Gets ith "recordRights" element
*/
org.lidoSchema.RecordWrapDocument.RecordWrap.RecordRights getRecordRightsArray(int i);
/**
* Returns number of "recordRights" element
*/
int sizeOfRecordRightsArray();
/**
* Sets array of all "recordRights" element
*/
void setRecordRightsArray(org.lidoSchema.RecordWrapDocument.RecordWrap.RecordRights[] recordRightsArray);
/**
* Sets ith "recordRights" element
*/
void setRecordRightsArray(int i, org.lidoSchema.RecordWrapDocument.RecordWrap.RecordRights recordRights);
/**
* Inserts and returns a new empty value (as xml) as the ith "recordRights" element
*/
org.lidoSchema.RecordWrapDocument.RecordWrap.RecordRights insertNewRecordRights(int i);
/**
* Appends and returns a new empty value (as xml) as the last "recordRights" element
*/
org.lidoSchema.RecordWrapDocument.RecordWrap.RecordRights addNewRecordRights();
/**
* Removes the ith "recordRights" element
*/
void removeRecordRights(int i);
/**
* Gets a List of "recordInfoSet" elements
*/
java.util.List<org.lidoSchema.RecordInfoSetComplexType> getRecordInfoSetList();
/**
* Gets array of all "recordInfoSet" elements
* @deprecated
*/
@Deprecated
org.lidoSchema.RecordInfoSetComplexType[] getRecordInfoSetArray();
/**
* Gets ith "recordInfoSet" element
*/
org.lidoSchema.RecordInfoSetComplexType getRecordInfoSetArray(int i);
/**
* Returns number of "recordInfoSet" element
*/
int sizeOfRecordInfoSetArray();
/**
* Sets array of all "recordInfoSet" element
*/
void setRecordInfoSetArray(org.lidoSchema.RecordInfoSetComplexType[] recordInfoSetArray);
/**
* Sets ith "recordInfoSet" element
*/
void setRecordInfoSetArray(int i, org.lidoSchema.RecordInfoSetComplexType recordInfoSet);
/**
* Inserts and returns a new empty value (as xml) as the ith "recordInfoSet" element
*/
org.lidoSchema.RecordInfoSetComplexType insertNewRecordInfoSet(int i);
/**
* Appends and returns a new empty value (as xml) as the last "recordInfoSet" element
*/
org.lidoSchema.RecordInfoSetComplexType addNewRecordInfoSet();
/**
* Removes the ith "recordInfoSet" element
*/
void removeRecordInfoSet(int i);
/**
* An XML recordSource(@http://www.lido-schema.org).
*
* This is a complex type.
*/
public interface RecordSource extends org.lidoSchema.LegalBodyRefComplexType
{
public static final org.apache.xmlbeans.SchemaType type = (org.apache.xmlbeans.SchemaType)
org.apache.xmlbeans.XmlBeans.typeSystemForClassLoader(RecordSource.class.getClassLoader(), "schemaorg_apache_xmlbeans.system.s6E28D279B6C224D74769DB8B98AF1665").resolveHandle("recordsourcec24eelemtype");
/**
* Gets the "type" attribute
*/
java.lang.String getType();
/**
* Gets (as xml) the "type" attribute
*/
org.apache.xmlbeans.XmlString xgetType();
/**
* True if has "type" attribute
*/
boolean isSetType();
/**
* Sets the "type" attribute
*/
void setType(java.lang.String type);
/**
* Sets (as xml) the "type" attribute
*/
void xsetType(org.apache.xmlbeans.XmlString type);
/**
* Unsets the "type" attribute
*/
void unsetType();
/**
* Gets the "sortorder" attribute
*/
java.math.BigInteger getSortorder();
/**
* Gets (as xml) the "sortorder" attribute
*/
org.apache.xmlbeans.XmlInteger xgetSortorder();
/**
* True if has "sortorder" attribute
*/
boolean isSetSortorder();
/**
* Sets the "sortorder" attribute
*/
void setSortorder(java.math.BigInteger sortorder);
/**
* Sets (as xml) the "sortorder" attribute
*/
void xsetSortorder(org.apache.xmlbeans.XmlInteger sortorder);
/**
* Unsets the "sortorder" attribute
*/
void unsetSortorder();
/**
* A factory class with static methods for creating instances
* of this type.
*/
public static final class Factory
{
public static org.lidoSchema.RecordWrapDocument.RecordWrap.RecordSource newInstance() {
return (org.lidoSchema.RecordWrapDocument.RecordWrap.RecordSource) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, null ); }
public static org.lidoSchema.RecordWrapDocument.RecordWrap.RecordSource newInstance(org.apache.xmlbeans.XmlOptions options) {
return (org.lidoSchema.RecordWrapDocument.RecordWrap.RecordSource) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, options ); }
private Factory() { } // No instance of this class allowed
}
}
/**
 * An XML recordRights(@http://www.lido-schema.org).
 *
 * This is a complex type.
 * NOTE: generated by Apache XMLBeans from the LIDO schema — do not hand-edit;
 * regenerate from the schema instead.
 */
public interface RecordRights extends org.lidoSchema.RightsComplexType
{
    // Schema type handle resolved from the compiled-in XMLBeans type system.
    public static final org.apache.xmlbeans.SchemaType type = (org.apache.xmlbeans.SchemaType)
        org.apache.xmlbeans.XmlBeans.typeSystemForClassLoader(RecordRights.class.getClassLoader(), "schemaorg_apache_xmlbeans.system.s6E28D279B6C224D74769DB8B98AF1665").resolveHandle("recordrights4612elemtype");
    /**
     * Gets the "sortorder" attribute
     */
    java.math.BigInteger getSortorder();
    /**
     * Gets (as xml) the "sortorder" attribute
     */
    org.apache.xmlbeans.XmlInteger xgetSortorder();
    /**
     * True if has "sortorder" attribute
     */
    boolean isSetSortorder();
    /**
     * Sets the "sortorder" attribute
     */
    void setSortorder(java.math.BigInteger sortorder);
    /**
     * Sets (as xml) the "sortorder" attribute
     */
    void xsetSortorder(org.apache.xmlbeans.XmlInteger sortorder);
    /**
     * Unsets the "sortorder" attribute
     */
    void unsetSortorder();
    /**
     * A factory class with static methods for creating instances
     * of this type.
     */
    public static final class Factory
    {
        public static org.lidoSchema.RecordWrapDocument.RecordWrap.RecordRights newInstance() {
            return (org.lidoSchema.RecordWrapDocument.RecordWrap.RecordRights) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, null ); }
        public static org.lidoSchema.RecordWrapDocument.RecordWrap.RecordRights newInstance(org.apache.xmlbeans.XmlOptions options) {
            return (org.lidoSchema.RecordWrapDocument.RecordWrap.RecordRights) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, options ); }
        private Factory() { } // No instance of this class allowed
    }
}
/**
 * A factory class with static methods for creating instances
 * of this type (generated by Apache XMLBeans).
 */
public static final class Factory
{
    public static org.lidoSchema.RecordWrapDocument.RecordWrap newInstance() {
        // Delegate to the context type loader, which knows how to build this schema type.
        org.apache.xmlbeans.SchemaTypeLoader loader = org.apache.xmlbeans.XmlBeans.getContextTypeLoader();
        return (org.lidoSchema.RecordWrapDocument.RecordWrap) loader.newInstance(type, null);
    }

    public static org.lidoSchema.RecordWrapDocument.RecordWrap newInstance(org.apache.xmlbeans.XmlOptions options) {
        org.apache.xmlbeans.SchemaTypeLoader loader = org.apache.xmlbeans.XmlBeans.getContextTypeLoader();
        return (org.lidoSchema.RecordWrapDocument.RecordWrap) loader.newInstance(type, options);
    }

    // No instances of this class allowed: static factory methods only.
    private Factory() { }
}
}
/**
 * A factory class with static methods for creating instances
 * of this type.
 * NOTE: generated by Apache XMLBeans — do not hand-edit; regenerate from the schema.
 * All methods delegate to the context type loader with this document's schema {@code type}.
 */
public static final class Factory
{
    public static org.lidoSchema.RecordWrapDocument newInstance() {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, null ); }

    public static org.lidoSchema.RecordWrapDocument newInstance(org.apache.xmlbeans.XmlOptions options) {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, options ); }

    /** @param xmlAsString the string value to parse */
    public static org.lidoSchema.RecordWrapDocument parse(java.lang.String xmlAsString) throws org.apache.xmlbeans.XmlException {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xmlAsString, type, null ); }

    public static org.lidoSchema.RecordWrapDocument parse(java.lang.String xmlAsString, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xmlAsString, type, options ); }

    /** @param file the file from which to load an xml document */
    public static org.lidoSchema.RecordWrapDocument parse(java.io.File file) throws org.apache.xmlbeans.XmlException, java.io.IOException {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( file, type, null ); }

    public static org.lidoSchema.RecordWrapDocument parse(java.io.File file, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( file, type, options ); }

    /** @param u the URL from which to load an xml document */
    public static org.lidoSchema.RecordWrapDocument parse(java.net.URL u) throws org.apache.xmlbeans.XmlException, java.io.IOException {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( u, type, null ); }

    public static org.lidoSchema.RecordWrapDocument parse(java.net.URL u, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( u, type, options ); }

    /** @param is the input stream from which to load an xml document */
    public static org.lidoSchema.RecordWrapDocument parse(java.io.InputStream is) throws org.apache.xmlbeans.XmlException, java.io.IOException {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( is, type, null ); }

    public static org.lidoSchema.RecordWrapDocument parse(java.io.InputStream is, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( is, type, options ); }

    /** @param r the reader from which to load an xml document */
    public static org.lidoSchema.RecordWrapDocument parse(java.io.Reader r) throws org.apache.xmlbeans.XmlException, java.io.IOException {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( r, type, null ); }

    public static org.lidoSchema.RecordWrapDocument parse(java.io.Reader r, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( r, type, options ); }

    /** @param sr the StAX stream reader positioned at the document to parse */
    public static org.lidoSchema.RecordWrapDocument parse(javax.xml.stream.XMLStreamReader sr) throws org.apache.xmlbeans.XmlException {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( sr, type, null ); }

    public static org.lidoSchema.RecordWrapDocument parse(javax.xml.stream.XMLStreamReader sr, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( sr, type, options ); }

    /** @param node the DOM node to parse as an xml document */
    public static org.lidoSchema.RecordWrapDocument parse(org.w3c.dom.Node node) throws org.apache.xmlbeans.XmlException {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( node, type, null ); }

    public static org.lidoSchema.RecordWrapDocument parse(org.w3c.dom.Node node, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( node, type, options ); }

    /** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
    @Deprecated
    public static org.lidoSchema.RecordWrapDocument parse(org.apache.xmlbeans.xml.stream.XMLInputStream xis) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xis, type, null ); }

    /** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
    @Deprecated
    public static org.lidoSchema.RecordWrapDocument parse(org.apache.xmlbeans.xml.stream.XMLInputStream xis, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
        return (org.lidoSchema.RecordWrapDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xis, type, options ); }

    /** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
    @Deprecated
    public static org.apache.xmlbeans.xml.stream.XMLInputStream newValidatingXMLInputStream(org.apache.xmlbeans.xml.stream.XMLInputStream xis) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
        return org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newValidatingXMLInputStream( xis, type, null ); }

    /** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
    @Deprecated
    public static org.apache.xmlbeans.xml.stream.XMLInputStream newValidatingXMLInputStream(org.apache.xmlbeans.xml.stream.XMLInputStream xis, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
        return org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newValidatingXMLInputStream( xis, type, options ); }

    private Factory() { } // No instance of this class allowed
}
}
| |
/**
* This file is part of Erjang - A JVM-based Erlang VM
*
* Copyright (c) 2009 by Trifork
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package erjang.driver;
import static erjang.EPort.am_arg0;
import static erjang.EPort.am_args;
import static erjang.EPort.am_binary;
import static erjang.EPort.am_cd;
import static erjang.EPort.am_close;
import static erjang.EPort.am_env;
import static erjang.EPort.am_eof;
import static erjang.EPort.am_exit_status;
import static erjang.EPort.am_hide;
import static erjang.EPort.am_in;
import static erjang.EPort.am_line;
import static erjang.EPort.am_nouse_stdio;
import static erjang.EPort.am_out;
import static erjang.EPort.am_packet;
import static erjang.EPort.am_stream;
import static erjang.EPort.am_use_stdio;
import erjang.ETask.STATE;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.SelectableChannel;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import kilim.Pausable;
import kilim.Task;
import erjang.EAtom;
import erjang.EBinary;
import erjang.ECons;
import erjang.EHandle;
import erjang.EInternalPID;
import erjang.EInternalPort;
import erjang.EObject;
import erjang.EPID;
import erjang.EPeer;
import erjang.EPort;
import erjang.EProc;
import erjang.ERT;
import erjang.ERef;
import erjang.ESeq;
import erjang.EString;
import erjang.ETask;
import erjang.ETuple;
import erjang.ETuple2;
import erjang.ETuple3;
import erjang.ETuple4;
import erjang.ErjangConfig;
import erjang.ErlangError;
import erjang.ErlangException;
import erjang.ErlangExit;
import erjang.ErlangExitSignal;
import erjang.NotImplemented;
/**
 * Base class for the two kinds of driver tasks: drivers, and "exec"s.
 *
 * <p>An EDriverTask is the Erjang-side representation of an Erlang port: it
 * owns an {@link EInternalPort} handle, wraps an {@link EDriverControl}
 * instance (always behind a {@link LockingDriverInstance}), and runs a
 * message loop ({@link #main_loop()}) that dispatches port commands,
 * ownership changes, close requests, exit signals and NIO readiness events.
 * Methods declared {@code throws Pausable} are kilim coroutine entry points.
 */
public abstract class EDriverTask extends ETask<EInternalPort> implements
        NIOHandler {

    static Logger log = Logger.getLogger("erjang.driver");

    /** Name of this port, e.g. as shown by erlang:port_info(P, name). */
    public abstract EObject getName();

    @Override
    public String toString() {
        return "<driver_task:" + super.id + ">";
    }

    // Atoms used in messages/replies produced by this class.
    private static final EAtom am_name = EAtom.intern("name");
    private static final EAtom am_data = EAtom.intern("data");
    private static final EAtom am_connected = EAtom.intern("connected");
    private static final EAtom am_closed = EAtom.intern("closed");

    /** The port handle that identifies this task. */
    private final EInternalPort port;
    /** The process currently connected to (owning) this port. */
    protected EPID owner;
    /** The wrapped driver instance; all driver callbacks go through this. */
    private final EDriverControl instance;

    public EPID owner() {
        return owner;
    }

    public void owner(EInternalPID ipid) {
        this.owner = ipid;
    }

    // Registry of all live ports, keyed by task id; used by all_ports().
    private static ConcurrentHashMap<Integer,EDriverTask> all_ports
        = new ConcurrentHashMap<Integer,EDriverTask> ();

    /**
     * Create a driver task owned by {@code owner}, wrapping {@code driver}.
     * The driver instance is always wrapped in a LockingDriverInstance:
     * with the driver-level (shared) lock when the driver requests it,
     * otherwise with a fresh per-port lock.
     */
    public EDriverTask(EPID owner, EDriverControl driver) {
        EDriverControl drv_control = null; // (unused placeholder removed in review) -- see below
        EDriver drv = driver.getDriver();
        if (drv.useDriverLevelLocking() == true) {
            log.fine("DRIVER_LEVEL_LOCK: "+driver);
            driver = new LockingDriverInstance(driver, drv.getLock());
        } else {
            driver = new LockingDriverInstance(driver, new kilim.ReentrantLock());
        }
        this.owner = owner;
        this.instance = driver;
        this.port = new EInternalPort(this);
        driver.setTask(this);
        // Register in the global port table; removed again in do_proc_termination().
        all_ports.put(id, this);
    }

    public void setupInstance() {instance.setup();}

    @Override
    public EInternalPort self_handle() {
        return port;
    }

    // How incoming/outgoing data is framed on this port.
    static enum Mode {
        STREAM, PACKET, LINE
    };

    // --- option state, populated by parseOptions() --------------------------
    protected boolean send_binary_data;   // deliver data as binaries rather than lists
    protected boolean is_out_only;        // port opened with the 'out' option
    protected boolean is_in_only;         // port opened with the 'in' option
    protected boolean send_eof;           // deliver {Port, eof} on end-of-stream
    protected boolean hide;               // 'hide' option (window hiding on Windows)
    protected int port_out_fd;
    protected int port_in_fd;
    protected boolean send_exit_status;   // deliver {Port, {exit_status, N}}
    protected int packet;                 // packet-header size when mode == PACKET; -1 = unset
    protected int line_length;            // max line length when mode == LINE; -1 = unset
    protected Mode mode = Mode.STREAM;
    protected String[] cmd;               // command + arguments for exec ports
    protected String cwd;                 // working directory for exec ports
    protected HashMap<String, String> env;// environment for exec ports
    /** Absolute deadline (ms since epoch) of the driver timer; 0 = no timer set. */
    private long abs_timeout;

    /** state controlled from elsewhere... erlang:port_set_data/2 */
    public EObject port_data;

    // Port status flag bits, mirroring ERTS's erl_port.h definitions.
    public static final int ERTS_PORT_SFLG_CONNECTED = 1<<0;
    public static final int ERTS_PORT_SFLG_EXITING = 1<<1;
    public static final int ERTS_PORT_SFLG_DISTRIBUTION = 1<<2;
    public static final int ERTS_PORT_SFLG_BINARY_IO = 1<<3;
    public static final int ERTS_PORT_SFLG_SOFT_EOF = 1<<4;
    public static final int ERTS_PORT_SFLG_PORT_BUSY = 1<<5;
    public static final int ERTS_PORT_SFLG_CLOSING = 1<<6;
    public static final int ERTS_PORT_SFLG_SEND_CLOSED = 1<<7;
    public static final int ERTS_PORT_SFLG_LINEBUF_IO = 1<<8;
    public static final int ERTS_PORT_SFLG_IMMORTAL = 1<<9;
    public static final int ERTS_PORT_SFLG_FREE = 1<<10;
    public static final int ERTS_PORT_SFLG_FREE_SCHEDULED = 1<<11;
    public static final int ERTS_PORT_SFLG_INITIALIZING = 1<<12;
    public static final int ERTS_PORT_SFLG_PORT_SPECIFIC_LOCK = 1<<13;
    public static final int ERTS_PORT_SFLG_INVALID = 1<<14;
    public static final int ERTS_PORT_SFLG_DEBUG = 1<<31;

    public int status;

    /** Remote node, when this port carries a distribution channel. */
    private EPeer peer;
    boolean stderr_to_stdout;
    /** PID to send {Port, closed} to after an explicit close request. */
    private EPID reply_closed_to;

    /**
     * Parse the option list given to erlang:open_port/2 into the protected
     * option fields of this task.
     *
     * @param cmd command + arguments (element 0 is the program)
     * @param portSetting the option list; must be a (possibly empty) list,
     *        otherwise badarg is thrown
     */
    protected void parseOptions(String[] cmd, EObject portSetting) {
        // TODO: most of this is way too expensive for non-exec ports

        // defaults, overridden by options below
        this.cmd = cmd;
        this.cwd = System.getProperty("user.dir");
        this.env = new HashMap<String, String>(ErjangConfig.getenv());
        this.packet = -1; // not set
        this.line_length = -1;
        this.send_exit_status = false;
        this.stderr_to_stdout = false;
        this.port_in_fd = 1;
        this.port_out_fd = 2;
        this.hide = false;
        this.send_eof = false;
        this.is_in_only = false;
        this.is_out_only = false;
        this.send_binary_data = false;

        ECons settings = portSetting.testCons();
        if (settings == null)
            throw ERT.badarg();

        for (; settings != null && !settings.isNil(); settings = settings
                .tail().testCons()) {
            EObject val = settings.head();
            ETuple tup;
            if ((tup = val.testTuple()) != null) {
                ETuple2 tup2;
                if ((tup2 = ETuple2.cast(tup)) != null) {
                    if (tup2.elem1 == am_args) {
                        // {args, [Arg]}: append string args after the program name
                        ESeq list = tup2.elem2.testSeq();
                        EObject[] nargs = list.toArray();

                        String[] new_cmd = new String[nargs.length + 1];
                        new_cmd[0] = cmd[0];
                        for (int i = 0; i < nargs.length; i++) {
                            new_cmd[i + 1] = EString.make(nargs[i])
                                    .stringValue();
                        }
                        cmd = new_cmd;
                    } else if (tup2.elem1 == am_arg0) {
                        // NOTE(review): new_cmd is built but never assigned to
                        // cmd (dead store) -- the {arg0, S} option is silently
                        // ignored. Also, as written it would drop all other
                        // arguments. Intended semantics need confirming against
                        // erlang:open_port/2 before fixing.
                        String[] new_cmd = new String[2];
                        new_cmd[0] = cmd[0];
                        new_cmd[1] = EString.make(tup2.elem2).stringValue();
                    } else if (tup2.elem1 == am_packet) {
                        packet = tup2.elem2.asInt();
                        mode = Mode.PACKET;
                    } else if (tup2.elem1 == am_cd) {
                        cwd = EString.make(tup2.elem2).stringValue();
                    } else if (tup2.elem1 == am_env) {
                        // {env, [{Name, Val | false}]}: false removes the variable
                        ESeq ee;
                        if ((ee = tup2.elem2.testSeq()) == null) {
                            throw ERT.badarg();
                        }

                        EObject[] envs = ee.toArray();
                        for (int i = 0; i < envs.length; i++) {
                            ETuple2 e = ETuple2.cast(envs[i].testTuple());
                            if (e.elem2 == ERT.FALSE) {
                                env.remove(EString.make(e.elem1).stringValue());
                            } else {
                                env.put(EString.make(e.elem1).stringValue(),
                                        EString.make(e.elem2).stringValue());
                            }
                        }

                    } else if (tup2.elem1 == am_line) {
                        line_length = tup2.elem2.asInt();
                        mode = Mode.LINE;
                    } else {
                        throw ERT.badarg();
                    }
                }
            } else if (val == am_stream) {
                mode = Mode.STREAM;
            } else if (val == am_use_stdio) {
                port_in_fd = 1;
                port_out_fd = 2;
            } else if (val == am_nouse_stdio) {
                port_in_fd = 3;
                port_out_fd = 4;
            } else if (val == am_hide) {
                hide = true;
            } else if (val == am_exit_status) {
                send_exit_status = true;
            } else if (val == EPort.am_stderr_to_stdout) {
                stderr_to_stdout = true;
            } else if (val == am_eof) {
                send_eof = true;
            } else if (val == am_in) {
                is_in_only = true;
            } else if (val == am_out) {
                is_out_only = true;
            } else if (val == am_binary) {
                send_binary_data = true;
            }
            // NOTE(review): unrecognized non-tuple options fall through silently.
        }
    }

    @Override
    public Task start() {
        Task result = super.start();
        // The task is immediately runnable; there is no spawn/init handshake.
        set_state(STATE.RUNNING);
        return result;
    }

    /**
     * Task body: run the driver main loop and translate its outcome
     * (normal return, Erlang exceptions, exit signals, or unexpected Java
     * errors) into the exit reason used for process termination.
     */
    @Override
    public void execute() throws Pausable {
        try {
            EObject result = null;
            try {
                // driver main loop
                main_loop();

                result = am_normal;

            } catch (NotImplemented e) {
                log.log(Level.SEVERE, "exiting "+self_handle(), e);
                result = e.reason();

            } catch (ErlangException e) {
                // e.printStackTrace();
                result = e.reason();

            } catch (ErlangExitSignal e) {
                // e.printStackTrace();
                result = e.reason();

            } catch (Throwable e) {
                // Unexpected Java exception: surface it as
                // {{java_exception, Descr}, Trace} so the owner sees it.
                e.printStackTrace();

                ESeq erl_trace = ErlangError.decodeTrace(e.getStackTrace());
                ETuple java_ex = ETuple.make(am_java_exception, EString
                        .fromString(ERT.describe_exception(e)));

                result = ETuple.make(java_ex, erl_trace);

                if (log.isLoggable(Level.FINER)) {
                    log.finer("EXITING "+result);
                }

            } finally {
                // Must reach DONE before we notify links/owner.
                set_state_to_done_and_wait_for_stability();
            }

            // System.err.println("task "+this+" exited with "+result);
            do_proc_termination(result);

        } catch (ThreadDeath e) {
            throw e;

        } catch (Throwable e) {
            e.printStackTrace();
        }
    }

    /**
     * The port's message loop. Blocks on the mailbox (honoring the driver
     * timer when one is set), and dispatches:
     * <ul>
     *   <li>{Sender, {command, IOData}} -&gt; instance.outputv(...)</li>
     *   <li>{Sender, {connect, Pid}} -&gt; change owner, ack old owner</li>
     *   <li>{Sender, close} -&gt; remember who to reply to, return (stop)</li>
     *   <li>EPortControl -&gt; run it</li>
     *   <li>{'EXIT', From, Reason} -&gt; return (stop)</li>
     *   <li>{'DOWN', Ref, process, Pid, Reason} -&gt; instance.processExit(Ref)</li>
     * </ul>
     * Any unrecognized message breaks the loop and raises badsig.
     *
     * @throws Pausable
     */
    protected void main_loop() throws Exception, Pausable {

        /** out is used locally later, but we allocate it once and for all. */
        List<ByteBuffer> out = new ArrayList<ByteBuffer>();
        EObject msg;

        next_message: while (true) {

            /** if the driver has a registered timeout ... handle that */
            if (abs_timeout == 0) {
                msg = mbox.get();
            } else {
                msg = null;
                long timeout = abs_timeout - System.currentTimeMillis();
                if (timeout > 0) {
                    msg = mbox.get(timeout);
                }
                if (msg == null) {
                    // deadline passed with no message: fire the driver timer once
                    abs_timeout = 0;
                    this.instance.timeout();
                    continue next_message;
                }
            }

            ETuple2 t2;
            EPortControl ctrl;
            ETuple3 t3;
            ETuple4 t4; // NOTE(review): unused local, kept for byte-compatibility
            ETuple tup;

            if ((t2 = ETuple2.cast(msg)) != null) {

                EObject sender = t2.elem1;

                ETuple2 cmd;
                if ((cmd = ETuple2.cast(t2.elem2)) != null) {
                    // cmd must be one of
                    // {command, iodata()}
                    // {connect, PID}

                    if (cmd.elem1 == EPort.am_command) {
                        if (cmd.elem2.collectIOList(out)) {
                            EHandle caller = sender.testHandle();

                            if (caller == null) {
                                log.warning("*** sender is null? "+sender);
                            }

                            if (out.size() == 0) {
                                instance.outputv(caller, ERT.EMPTY_BYTEBUFFER_ARR);
                            } else {
                                instance.outputv(caller, out.toArray(new ByteBuffer[out
                                        .size()]));
                            }
                            // if collectIOList fails, do the port task die?
                            // and how?
                        }

                        out.clear();
                        continue next_message;

                    } else if (cmd.elem1 == EPort.am_connect) {
                        EPID new_owner;
                        if ((new_owner = cmd.elem2.testPID()) == null)
                            break; // malformed connect -> badsig below

                        EPID old_owner = this.owner;
                        this.owner = new_owner;

                        // ack the previous owner: {Port, connected}
                        old_owner.send(this.port, ETuple.make(this.self_handle(),
                                EPort.am_connected));
                        continue next_message;

                    }

                } else if (t2.elem2 == am_close) {
                    this.reply_closed_to = t2.elem1.testPID();
                    // will call instance.stop()
                    return;
                }

            } else if ((ctrl = msg.testPortControl()) != null) {

                // port control messages are simply run
                ctrl.execute();
                continue next_message;

            } else if ((t3 = ETuple3.cast(msg)) != null) {

                // {'EXIT', From, Reason} comes in this way
                if (t3.elem1 == ERT.am_EXIT) {
                    // close is handled by exception handling code
                    return;
                }

            } else if ((tup = msg.testTuple()) != null && tup.arity() == 5) {

                // {'DOWN', ref, process, pid, reason}
                if (tup.elm(1) == ERT.am_DOWN) {

                    ERef ref = tup.elm(2).testReference();
                    instance.processExit(ref);
                }

            }

            break;
        }

        // fell out of the loop on an unrecognized message
        throw new ErlangError(ERT.am_badsig, msg);
    }

    /**
     * implementation of port_control
     * @param caller the calling process
     * @param op driver-defined operation code
     * @param cmd2 argument bytes for the operation
     * @return the driver's reply as a string/list, or [] for no/empty reply
     * @throws Pausable
     */
    public EObject control(EProc caller, int op, ByteBuffer cmd2) throws Pausable {

        int pstate = get_state_dirtyread();
        if (pstate == STATE.RUNNING.ordinal() || pstate == STATE.INIT.ordinal()) {
            // ok
        } else {
            log.warning("port "+this.self_handle()+" in state: "+pstate);
            throw ERT.badarg();
        }

        if (log.isLoggable(Level.FINE))
            log.fine("ctrl: cmd="+op+"; arg="+EBinary.make(cmd2));

        // Let the port drain already-queued messages before the synchronous
        // control call, so control observes them in order.
        while (mbox.hasMessage()) {
            Task.yield();
        }

        long old_to = abs_timeout;
        ByteBuffer bb = instance.control(caller.self_handle(), op, cmd2);
        long new_to = abs_timeout;

        if (old_to != new_to) {
            // The control call (re)set the driver timer; poke the main loop
            // with a no-op so it recomputes its mbox.get() deadline.
            mbox.put(new EPortControl() {
                @Override
                public void execute() throws Pausable, IOException {
                    // do nothing, just trigger main loop
                }
            });
        }

        if (bb == null || bb.position() == 0) {
            return ERT.NIL;
        } else {
            bb.flip();
            return EString.make(bb.array(), bb.arrayOffset() + bb.position(), bb.remaining());
        }
    }

    /**
     * Concatenate an array of buffers into one buffer (positioned at 0).
     * @param out the buffers to flatten; each is consumed up to its limit
     * @return a single buffer with the combined contents
     */
    public static ByteBuffer flatten(ByteBuffer[] out) {

        if (out.length == 0) {
            return ERT.EMPTY_BYTEBUFFER;
        } else if (out.length == 1) {
            return out[0];
        } else {
            long size = 0;
            for (int i = 0; i < out.length; i++) {
                size += out[i].limit();
            }
            if (size > Integer.MAX_VALUE)
                throw new IllegalArgumentException("buffer too large to flatten "+size);
            ByteBuffer res = ByteBuffer.allocate((int)size);
            for (int i = 0; i < out.length; i++) {
                res.put(out[i]);
            }
            res.flip();
            return res;
        }
    }

    /**
     * implementation of erlang:port_call/3
     * @param caller the calling process
     * @param op driver-defined operation code
     * @param data argument term
     * @return the driver's reply term, or [] when the driver returns null
     * @throws Pausable
     */
    public EObject call(EProc caller, int op, EObject data) throws Pausable {
        int pstate = get_state_dirtyread();
        if (pstate != STATE.RUNNING.ordinal()) {
            throw ERT.badarg();
        }

        EObject result = instance.call(caller.self_handle(), op, data);

        if (result == null) {
            return ERT.NIL;
        } else {
            return result;
        }
    }

    /**
     * erlang:port_command uses this, since error handling happens in the BIF.
     * Queues the output as a port-control action, executed by the main loop.
     * @param caller the sending handle, passed through to outputv
     * @param out data to write
     * @throws Pausable
     */
    public void command(final EHandle caller, final ByteBuffer[] out) throws Pausable {
        mbox.put(new EPortControl() {
            @Override
            public void execute() throws Pausable, IOException {
                instance.outputv(caller, out);
            }
        });
    }

    /**
     * Request an orderly close: the queued action raises a 'normal' exit
     * signal inside the port task. The Throwable captures the caller's
     * stack for diagnostics.
     */
    public void close() throws Pausable {
        final Throwable t = new Throwable();
        mbox.put(new EPortControl() {
            @Override
            public void execute() throws Exception, Pausable {
                throw new ErlangExitSignal(am_normal, t);
            }
        });
    }

    /** our owner died, do something! */
    @Override
    protected void process_incoming_exit(EHandle from, EObject reason, boolean exitToSender) throws Pausable
    {
        // TODO: do special things for reason=kill ?

        // System.err.println("sending exit msg to self "+this);
        // Re-inject as {'EXIT', From, Reason}; main_loop() returns on it.
        mbox.put(ETuple.make(ERT.am_EXIT, from, reason));
    }

    /* (non-Javadoc)
     * @see erjang.ETask#send_exit_to_all_linked(erjang.EObject)
     *
     * Termination order matters here: reply {Port, closed} to an explicit
     * closer (if any), notify links via super, tell the owner about abnormal
     * exits, unregister from the port table, and finally stop the driver.
     */
    @Override
    protected void do_proc_termination(EObject result) throws Pausable {
        if (this.reply_closed_to != null) {
            this.reply_closed_to.send(self_handle(), ETuple.make(self_handle(), am_closed));
        }
        super.do_proc_termination(result);
        if (result != am_normal) {
            owner.send(self_handle(), ETuple.make(ERT.am_EXIT, self_handle(), result));
        }
        //this.port.done();
        all_ports.remove(this.id);
        EDriverControl i = instance;
        if (i != null)
            i.stop(result);
    }

    /*
     * (non-Javadoc)
     *
     * @see erjang.driver.IOHandler#ready(java.nio.channels.SelectableChannel,
     * int)
     *
     * Called from the NIO selector thread; forwards readiness to the
     * driver callbacks on the port task via a (blocking-put) control action.
     */
    @Override
    public void ready(final SelectableChannel ch, final int readyOps) {
        mbox.putb(new EPortControl() {
            @Override
            public void execute() throws Pausable {
                if ((readyOps & EDriverInstance.ERL_DRV_READ) == EDriverInstance.ERL_DRV_READ) {
                    instance.readyInput(ch);
                }
                if ((readyOps & EDriverInstance.ERL_DRV_WRITE) == EDriverInstance.ERL_DRV_WRITE) {
                    instance.readyOutput(ch);
                }
                if ((readyOps & EDriverInstance.ERL_DRV_CONNECT) == EDriverInstance.ERL_DRV_CONNECT) {
                    instance.readyConnect(ch);
                }
                if ((readyOps & EDriverInstance.ERL_DRV_ACCEPT) == EDriverInstance.ERL_DRV_ACCEPT) {
                    instance.readyAccept(ch);
                }
            }
        });
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * erjang.driver.IOHandler#released(java.nio.channels.SelectableChannel)
     */
    @Override
    public void released(final SelectableChannel ch) {
        mbox.putb(new EPortControl() {
            @Override
            public void execute() throws Pausable {
                instance.stopSelect(ch);
            }
        });
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * erjang.driver.IOHandler#exception(java.nio.channels.SelectableChannel,
     * java.io.IOException)
     */
    @Override
    public void exception(SelectableChannel ch, IOException e) {
        // TODO Auto-generated method stub
    }

    /**
     * Arm the driver timer to fire {@code howlong} ms from now.
     * @param howlong relative timeout in milliseconds
     */
    public void set_timer(long howlong) {
        this.abs_timeout = System.currentTimeMillis() + howlong;
    }

    /** Milliseconds until the driver timer fires (negative if none/past). */
    public long read_timer() {
        return this.abs_timeout - System.currentTimeMillis();
    }

    /**
     * Cancel the driver timer.
     * @param port2 must be this port's own handle
     */
    public void cancel_timer(EPort port2) {
        assert port2 == this.port : "can only cancel timer on self" ;
        this.abs_timeout = 0;
    }

    /**
     * Deliver a completed async job back to the port task.
     * @param job the finished async job
     */
    public void async_done(final EAsync job) {
        mbox.putb(new EPortControl() {
            @Override
            public void execute() throws Pausable {
                instance.readyAsync(job);
            }
        });
    }

    /**
     * Send {Port, {data, Out}} to the owner (pausable variant).
     * @param out the payload
     * @throws Pausable
     */
    public void output_from_driver(EObject out) throws Pausable {
        output_term_from_driver(new ETuple2(port, new ETuple2(am_data, out)));
    }

    /** Non-pausable (blocking) variant of {@link #output_from_driver}. */
    public void output_from_driver_b(EObject out) {
        output_term_from_driver_b(new ETuple2(port, new ETuple2(am_data, out)));
    }

    public void output_term_from_driver(EObject out) throws Pausable {
        if (log.isLoggable(Level.FINE)) log.fine(""+owner+" ! "+out);
        owner.send(port, out);
    }

    public void output_term_from_driver_b(EObject out) {
        if (log.isLoggable(Level.FINE)) log.fine(""+owner+" ! "+out);
        owner.sendb(out);
    }

    /**
     * Send {Port, eof} to the owner (non-pausable variant).
     * @throws Pausable
     */
    public void eof_from_driver_b() {
        output_term_from_driver_b(new ETuple2(port, am_eof));
    }

    public void eof_from_driver() throws Pausable {
        output_term_from_driver(new ETuple2(port, am_eof));
    }

    /** Send {Port, {exit_status, Code}} to the owner. */
    public void exit_status_from_driver(int code) throws Pausable {
        output_term_from_driver(new ETuple2(port, new ETuple2(am_exit_status, ERT.box(code))));
    }

    public void exit_status_from_driver_b(int code) {
        output_term_from_driver_b(new ETuple2(port, new ETuple2(am_exit_status, ERT.box(code))));
    }

    /** List of all live port handles (skips tasks already done). */
    public static ESeq all_ports() {

        ESeq res = ERT.NIL;

        for (EDriverTask dt : all_ports.values()) {
            if (dt.isDone()) continue;
            res = res.cons(dt.self_handle());
        }

        return res;
    }

    /** Implements (part of) erlang:port_info/2 for this port. */
    public EObject port_info(EAtom spec) {

        if (spec == am_connected) {
            return new ETuple2(am_connected, owner);
        }

        if (spec == am_name) {
            return new ETuple2(am_name, getName());
        }

        if (spec == EProc.am_links) {
            return new ETuple2(EProc.am_links, links());
        }

        throw new NotImplemented("port_info(" + spec + ")");
    }

    /** Queue an exit with the given reason into the port task. */
    public void exit(final EObject reason) {
        mbox.putb(new EPortControl() {
            @Override
            public void execute() throws Pausable, IOException {
                throw new ErlangExit(reason);
            }
        });
    }

    public EPeer node() {
        return this.peer;
    }

    /** Mark this port as carrying a distribution channel to {@code peer}. */
    public void node(EPeer peer) {
        this.peer = peer;
        this.status |= ERTS_PORT_SFLG_DISTRIBUTION;
    }

    /** magic direct call ! */
    public void outputv(EHandle sender, ByteBuffer[] ev) throws IOException, Pausable {
        this.command(sender, ev);
    }

    public boolean send_binary_data() {
        return send_binary_data;
    }

}
| |
/*
* Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.remoting.httpinvoker;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Locale;
import java.util.zip.GZIPInputStream;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.NoHttpResponseException;
import org.apache.http.StatusLine;
import org.apache.http.client.HttpClient;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.Configurable;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.socket.ConnectionSocketFactory;
import org.apache.http.conn.socket.PlainConnectionSocketFactory;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.springframework.context.i18n.LocaleContext;
import org.springframework.context.i18n.LocaleContextHolder;
import org.springframework.remoting.support.RemoteInvocationResult;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
/**
* {@link org.springframework.remoting.httpinvoker.HttpInvokerRequestExecutor} implementation that uses
* <a href="http://hc.apache.org/httpcomponents-client-ga/httpclient/">Apache HttpComponents HttpClient</a>
* to execute POST requests.
*
* <p>Allows to use a pre-configured {@link org.apache.http.client.HttpClient}
* instance, potentially with authentication, HTTP connection pooling, etc.
* Also designed for easy subclassing, providing specific template methods.
*
* <p>As of Spring 4.1, this request executor requires Apache HttpComponents 4.3 or higher.
*
* @author Juergen Hoeller
* @author Stephane Nicoll
* @since 3.1
* @see org.springframework.remoting.httpinvoker.SimpleHttpInvokerRequestExecutor
*/
public class HttpComponentsHttpInvokerRequestExecutor extends AbstractHttpInvokerRequestExecutor {
// Pool-wide connection cap for the default HttpClient.
private static final int DEFAULT_MAX_TOTAL_CONNECTIONS = 100;

// Per-route connection cap for the default HttpClient.
private static final int DEFAULT_MAX_CONNECTIONS_PER_ROUTE = 5;

// Default socket read timeout: 60 seconds.
private static final int DEFAULT_READ_TIMEOUT_MILLISECONDS = (60 * 1000);

// The client used to execute invoker POST requests.
private HttpClient httpClient;

// Per-executor request configuration; null means "use the client's own config".
private RequestConfig requestConfig;
/**
 * Create a new instance of the HttpComponentsHttpInvokerRequestExecutor with a default
 * {@link HttpClient} that uses a default {@link org.apache.http.impl.conn.PoolingHttpClientConnectionManager}.
 * The default read (socket) timeout is {@link #DEFAULT_READ_TIMEOUT_MILLISECONDS}.
 */
public HttpComponentsHttpInvokerRequestExecutor() {
	this(createDefaultHttpClient(), RequestConfig.custom()
			.setSocketTimeout(DEFAULT_READ_TIMEOUT_MILLISECONDS).build());
}
/**
 * Build the fallback {@link HttpClient}: plain and SSL socket factories
 * behind a pooling connection manager with the default pool limits.
 */
private static HttpClient createDefaultHttpClient() {
	RegistryBuilder<ConnectionSocketFactory> registryBuilder = RegistryBuilder.create();
	registryBuilder.register("http", PlainConnectionSocketFactory.getSocketFactory());
	registryBuilder.register("https", SSLConnectionSocketFactory.getSocketFactory());
	Registry<ConnectionSocketFactory> socketFactories = registryBuilder.build();

	PoolingHttpClientConnectionManager pool = new PoolingHttpClientConnectionManager(socketFactories);
	pool.setMaxTotal(DEFAULT_MAX_TOTAL_CONNECTIONS);
	pool.setDefaultMaxPerRoute(DEFAULT_MAX_CONNECTIONS_PER_ROUTE);

	return HttpClientBuilder.create().setConnectionManager(pool).build();
}
/**
 * Create a new instance of the HttpComponentsClientHttpRequestFactory
 * with the given {@link HttpClient} instance.
 * <p>No executor-level {@link RequestConfig} is set; the client's own
 * configuration applies.
 * @param httpClient the HttpClient instance to use for this request executor
 */
public HttpComponentsHttpInvokerRequestExecutor(HttpClient httpClient) {
	this(httpClient, null);
}
/**
 * Internal constructor taking both the client and an optional request config.
 * @param httpClient the HttpClient instance to use
 * @param requestConfig executor-level request config, or {@code null} for none
 */
private HttpComponentsHttpInvokerRequestExecutor(HttpClient httpClient, RequestConfig requestConfig) {
	this.httpClient = httpClient;
	this.requestConfig = requestConfig;
}
/**
 * Set the {@link HttpClient} instance to use for this request executor.
 * <p>Note: does not reset any executor-level {@link RequestConfig} already built.
 */
public void setHttpClient(HttpClient httpClient) {
	this.httpClient = httpClient;
}
/**
 * Return the {@link HttpClient} instance that this request executor uses.
 */
public HttpClient getHttpClient() {
	return this.httpClient;
}
/**
 * Set the connection timeout for the underlying HttpClient.
 * A timeout value of 0 specifies an infinite timeout.
 * <p>Additional properties can be configured by specifying a
 * {@link RequestConfig} instance on a custom {@link HttpClient}.
 * @param timeout the timeout value in milliseconds
 * @throws IllegalArgumentException if {@code timeout} is negative
 * @see RequestConfig#getConnectTimeout()
 */
public void setConnectTimeout(int timeout) {
	Assert.isTrue(timeout >= 0, "Timeout must be a non-negative value");
	// Rebuild the executor-level config with the new connect timeout ...
	RequestConfig.Builder configBuilder = cloneRequestConfig();
	configBuilder.setConnectTimeout(timeout);
	this.requestConfig = configBuilder.build();
	// ... and mirror it onto pre-4.3 clients that ignore RequestConfig.
	setLegacyConnectionTimeout(getHttpClient(), timeout);
}
/**
 * Apply the specified connection timeout to deprecated {@link HttpClient}
 * implementations.
 * <p>As of HttpClient 4.3, default parameters have to be exposed through a
 * {@link RequestConfig} instance instead of setting the parameters on the
 * client. Unfortunately, this behavior is not backward-compatible and older
 * {@link HttpClient} implementations will ignore the {@link RequestConfig}
 * object set in the context.
 * <p>If the specified client is an older implementation, we set the custom
 * connection timeout through the deprecated API. Otherwise, we just return
 * as it is set through {@link RequestConfig} with newer clients.
 * @param client the client to configure
 * @param timeout the custom connection timeout
 */
@SuppressWarnings("deprecation")
private void setLegacyConnectionTimeout(HttpClient client, int timeout) {
	// Only pre-4.3 AbstractHttpClient subclasses need the parameter-based API.
	if (client instanceof org.apache.http.impl.client.AbstractHttpClient) {
		client.getParams().setIntParameter(
				org.apache.http.params.CoreConnectionPNames.CONNECTION_TIMEOUT, timeout);
	}
}
/**
* Set the timeout in milliseconds used when requesting a connection from the
* connection manager using the underlying HttpClient.
* A timeout value of 0 specifies an infinite timeout.
* <p>Additional properties can be configured by specifying a
* {@link RequestConfig} instance on a custom {@link HttpClient}.
* <p>NOTE(review): unlike {@link #setConnectTimeout} and {@link #setReadTimeout},
* this setter performs no non-negativity check — confirm whether negative values
* (the builder's "undefined" semantics) are intentionally allowed here.
* @param connectionRequestTimeout the timeout value to request a connection in milliseconds
* @see RequestConfig#getConnectionRequestTimeout()
*/
public void setConnectionRequestTimeout(int connectionRequestTimeout) {
RequestConfig.Builder builder = cloneRequestConfig();
builder.setConnectionRequestTimeout(connectionRequestTimeout);
this.requestConfig = builder.build();
}
/**
* Set the socket read timeout for the underlying HttpClient.
* A timeout value of 0 specifies an infinite timeout.
* <p>Additional properties can be configured by specifying a
* {@link RequestConfig} instance on a custom {@link HttpClient}.
* @param timeout the timeout value in milliseconds
* @see #DEFAULT_READ_TIMEOUT_MILLISECONDS
* @see RequestConfig#getSocketTimeout()
*/
public void setReadTimeout(int timeout) {
Assert.isTrue(timeout >= 0, "Timeout must be a non-negative value");
// Rebuild the local RequestConfig with the new socket timeout.
RequestConfig.Builder builder = cloneRequestConfig();
builder.setSocketTimeout(timeout);
this.requestConfig = builder.build();
// Also push the value onto pre-4.3 clients that ignore RequestConfig.
setLegacySocketTimeout(getHttpClient(), timeout);
}
/**
* Apply the specified socket timeout to deprecated {@link HttpClient}
* implementations. See {@link #setLegacyConnectionTimeout}.
* @param client the client to configure
* @param timeout the custom socket timeout
* @see #setLegacyConnectionTimeout
*/
@SuppressWarnings("deprecation")
private void setLegacySocketTimeout(HttpClient client, int timeout) {
// Only pre-4.3 AbstractHttpClient subclasses need the deprecated parameter API.
if (client instanceof org.apache.http.impl.client.AbstractHttpClient) {
client.getParams().setIntParameter(
org.apache.http.params.CoreConnectionPNames.SO_TIMEOUT, timeout);
}
}
/**
* Return a builder seeded from the current local {@link RequestConfig},
* or a fresh builder when no local configuration exists yet.
*/
private RequestConfig.Builder cloneRequestConfig() {
if (this.requestConfig != null) {
return RequestConfig.copy(this.requestConfig);
}
return RequestConfig.custom();
}
/**
* Execute the given request through the HttpClient.
* <p>This method implements the basic processing workflow:
* The actual work happens in this class's template methods.
* @see #createHttpPost
* @see #setRequestBody
* @see #executeHttpPost
* @see #validateResponse
* @see #getResponseBody
*/
@Override
protected RemoteInvocationResult doExecuteRequest(
HttpInvokerClientConfiguration config, ByteArrayOutputStream baos)
throws IOException, ClassNotFoundException {
HttpPost post = createHttpPost(config);
setRequestBody(config, post, baos);
try {
HttpResponse response = executeHttpPost(config, getHttpClient(), post);
validateResponse(config, response);
// Deserialize the invocation result from the (possibly gzip-wrapped) body.
return readRemoteInvocationResult(getResponseBody(config, response), config.getCodebaseUrl());
}
finally {
// Always return the connection to the pool, even on failure.
post.releaseConnection();
}
}
/**
* Create a HttpPost for the given configuration.
* <p>The default implementation creates a standard HttpPost with
* "application/x-java-serialized-object" as "Content-Type" header.
* @param config the HTTP invoker configuration that specifies the
* target service
* @return the HttpPost instance
* @throws java.io.IOException if thrown by I/O methods
*/
protected HttpPost createHttpPost(HttpInvokerClientConfiguration config) throws IOException {
HttpPost post = new HttpPost(config.getServiceUrl());
RequestConfig requestConfigToUse = createRequestConfig(config);
if (requestConfigToUse != null) {
post.setConfig(requestConfigToUse);
}
// Propagate the caller's locale, if one is bound to the current thread.
LocaleContext localeContext = LocaleContextHolder.getLocaleContext();
Locale locale = (localeContext != null ? localeContext.getLocale() : null);
if (locale != null) {
post.addHeader(HTTP_HEADER_ACCEPT_LANGUAGE, StringUtils.toLanguageTag(locale));
}
if (isAcceptGzipEncoding()) {
post.addHeader(HTTP_HEADER_ACCEPT_ENCODING, ENCODING_GZIP);
}
return post;
}
/**
* Create a {@link RequestConfig} for the given configuration. Can return {@code null}
* to indicate that no custom request config should be set and the defaults of the
* {@link HttpClient} should be used.
* <p>The default implementation tries to merge the defaults of the client with the
* local customizations of the instance, if any.
* @param config the HTTP invoker configuration that specifies the
* target service
* @return the RequestConfig to use
*/
protected RequestConfig createRequestConfig(HttpInvokerClientConfiguration config) {
HttpClient client = getHttpClient();
if (!(client instanceof Configurable)) {
// Client exposes no defaults to merge with: use the local config (may be null).
return this.requestConfig;
}
return mergeRequestConfig(((Configurable) client).getConfig());
}
/**
* Merge the client's default {@link RequestConfig} with this executor's local
* customizations: any locally-set (non-negative) timeout overrides the default.
* @param defaultRequestConfig the client's own defaults
* @return the merged RequestConfig
*/
private RequestConfig mergeRequestConfig(RequestConfig defaultRequestConfig) {
RequestConfig localConfig = this.requestConfig;
if (localConfig == null) {
// Nothing customized locally: use the client defaults as-is.
return defaultRequestConfig;
}
RequestConfig.Builder merged = RequestConfig.copy(defaultRequestConfig);
if (localConfig.getConnectTimeout() >= 0) {
merged.setConnectTimeout(localConfig.getConnectTimeout());
}
if (localConfig.getConnectionRequestTimeout() >= 0) {
merged.setConnectionRequestTimeout(localConfig.getConnectionRequestTimeout());
}
if (localConfig.getSocketTimeout() >= 0) {
merged.setSocketTimeout(localConfig.getSocketTimeout());
}
return merged.build();
}
/**
* Set the given serialized remote invocation as request body.
* <p>The default implementation simply sets the serialized invocation as the
* HttpPost's request body. This can be overridden, for example, to write a
* specific encoding and to potentially set appropriate HTTP request headers.
* @param config the HTTP invoker configuration that specifies the target service
* @param httpPost the HttpPost to set the request body on
* @param baos the ByteArrayOutputStream that contains the serialized
* RemoteInvocation object
* @throws java.io.IOException if thrown by I/O methods
*/
protected void setRequestBody(
HttpInvokerClientConfiguration config, HttpPost httpPost, ByteArrayOutputStream baos)
throws IOException {
// Wrap the serialized invocation bytes in an entity tagged with our content type.
ByteArrayEntity requestEntity = new ByteArrayEntity(baos.toByteArray());
requestEntity.setContentType(getContentType());
httpPost.setEntity(requestEntity);
}
/**
* Execute the given HttpPost instance.
* <p>The default implementation is a plain delegation to the client;
* override e.g. to add retry or instrumentation around the call.
* @param config the HTTP invoker configuration that specifies the target service
* @param httpClient the HttpClient to execute on
* @param httpPost the HttpPost to execute
* @return the resulting HttpResponse
* @throws java.io.IOException if thrown by I/O methods
*/
protected HttpResponse executeHttpPost(
HttpInvokerClientConfiguration config, HttpClient httpClient, HttpPost httpPost)
throws IOException {
return httpClient.execute(httpPost);
}
/**
* Validate the given response as contained in the HttpPost object,
* throwing an exception if it does not correspond to a successful HTTP response.
* <p>Default implementation rejects any HTTP status code beyond 2xx, to avoid
* parsing the response body and trying to deserialize from a corrupted stream.
* @param config the HTTP invoker configuration that specifies the target service
* @param response the resulting HttpResponse to validate
* @throws java.io.IOException if validation failed
*/
protected void validateResponse(HttpInvokerClientConfiguration config, HttpResponse response)
throws IOException {
StatusLine status = response.getStatusLine();
int code = status.getStatusCode();
// Anything at or above 300 (redirects included) is treated as a failure.
if (code >= 300) {
throw new NoHttpResponseException(
"Did not receive successful HTTP response: status code = " + code +
", status message = [" + status.getReasonPhrase() + "]");
}
}
/**
* Extract the response body from the given executed remote invocation request.
* <p>The default implementation simply fetches the HttpPost's response body stream.
* If the response is recognized as GZIP response, the InputStream will get wrapped
* in a GZIPInputStream.
* @param config the HTTP invoker configuration that specifies the target service
* @param httpResponse the resulting HttpResponse to read the response body from
* @return an InputStream for the response body
* @throws java.io.IOException if thrown by I/O methods
* @see #isGzipResponse
* @see java.util.zip.GZIPInputStream
*/
protected InputStream getResponseBody(HttpInvokerClientConfiguration config, HttpResponse httpResponse)
throws IOException {
InputStream content = httpResponse.getEntity().getContent();
// Transparently decompress gzip-encoded bodies.
return (isGzipResponse(httpResponse) ? new GZIPInputStream(content) : content);
}
/**
* Determine whether the given response indicates a GZIP response.
* <p>The default implementation checks whether the HTTP "Content-Encoding"
* header contains "gzip" (in any casing).
* @param httpResponse the resulting HttpResponse to check
* @return whether the given response indicates a GZIP response
*/
protected boolean isGzipResponse(HttpResponse httpResponse) {
Header encodingHeader = httpResponse.getFirstHeader(HTTP_HEADER_CONTENT_ENCODING);
// Use an explicit Locale: the default-locale toLowerCase() mis-handles
// "GZIP" under e.g. the Turkish locale ("I" -> dotless "ı"), which would
// make an upper-cased header value fail the contains() check.
return (encodingHeader != null && encodingHeader.getValue() != null &&
encodingHeader.getValue().toLowerCase(Locale.ENGLISH).contains(ENCODING_GZIP));
}
}
| |
/*
* Copyright 2016 Jeremy Schiemann, Jean-Pierre Hotz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.jeanpierrehotz.messaging.javaclient.androidcompat;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
/**
* This class may be used to easily store low-level data (meaning primitive
* typed data including Strings).<br>
* Each value saved is assigned to a key of the type String, with which you can get the
* value.<br>
* A key does not have to be unique within the whole preference, even though it has to be
* within a datatype and it is good practice to keep your keys unique.<br>
* A preference may be edited by obtaining a {@link Editor}-object by
* calling the {@link #edit()}-method on the preference you wish to edit.
* @author Jean-Pierre Hotz 10. May 2016
*/
public class Preference {
/*
* The file in which this preference's values were or are supposed to be saved in
*/
private final File mXMLFile;
/*
* The name of this preference (must equal the filename of mXMLFile)
*/
private String mName;
/*
* A HashMap for each datatype to make types easier to keep apart, thus
* preventing complicated typeof-code in each get[...]-method —
* though it makes it possible to use the same key for several datatypes.
* Little fun-fact: I tested the class SharedPreferences in Android
* (doing the same thing as this class; this one is inspired by SharedPreferences)
* and apparently they saved all values in one HashMap but still didn't include any
* typeof-testing, which makes it give you a ClassCastException when using a
* key which has a value of another datatype than the method's return-datatype.
* TODO: Prevent a key from being assigned multiple times
*/
private final HashMap<String, Boolean> mBooleanValues;
private final HashMap<String, Character> mCharValues;
private final HashMap<String, String> mStringValues;
private final HashMap<String, Byte> mByteValues;
private final HashMap<String, Short> mShortValues;
private final HashMap<String, Integer> mIntValues;
private final HashMap<String, Long> mLongValues;
private final HashMap<String, Float> mFloatValues;
private final HashMap<String, Double> mDoubleValues;
/**
* This constructor gives you the Preference with the given name in the
* default-folder, which is: "&lt;first root directory&gt;\preferences\"
* (or as example in Windows "C:\preferences\").<br>
* The name of the preference only differentiates which dataset to use.<br>
* You may create a subdirectory in the default-directory for your application
* by doing following (please be aware that the subdirectories are then part of
* the preferences name):<br>
* <pre><code>
* name of the preference = "bar"
* first folders name = "foo"
* second folders name = "lol"
* => prefName = "foo" + File.separatorChar + "lol" + File.separatorChar + "bar"
* => (under windows) "foo\\lol\\bar"
* </code></pre>
* If the directories are not existent yet they will be created.<br>
* If the preference itself doesn't yet exist it will be created without any values.
* @param prefName the name of the preference you want
*/
public Preference(String prefName) {
// Start from empty maps; initializeValues() fills them from the file (if any).
mBooleanValues = new HashMap<>();
mCharValues = new HashMap<>();
mStringValues = new HashMap<>();
mByteValues = new HashMap<>();
mShortValues = new HashMap<>();
mIntValues = new HashMap<>();
mLongValues = new HashMap<>();
mFloatValues = new HashMap<>();
mDoubleValues = new HashMap<>();
// Backing file lives under "<first filesystem root>/preferences/<prefName>.xml".
mXMLFile = new File(File.listRoots()[0].getAbsolutePath()
+ File.separatorChar
+ "preferences"
+ File.separatorChar
+ prefName + ".xml");
initializeValues(prefName);
}
/**
* This constructor gives you the preference with the given name in the folder
* whose path is given.<br>
* Please note:<br>
* <pre><code>
* The folders path has to be absolute:
* e.g.: "C:\Users\Admin\Documents\MyApplication\preferences\"
*
* It might be good practice to keep your applications preference-folder-path
* as a constant somewhere and if you need a subdirectory just add the directories
* to the beginning of the preferences name (see {@link #Preference(java.lang.String)}):
*
* name to the preferences name:
* name of the preference = "bar"
* first folders name = "foo"
* second folders name = "lol"
* => prefName = "foo" + File.separatorChar + "lol" + File.separatorChar + "bar"
* => (under windows) "foo\\lol\\bar"
* </code></pre>
* The name of the preference only differentiates which dataset to use.<br>
* If the directories are not existent yet they will be created.<br>
* If the preference itself doesn't yet exist it will be created without any values.
* @param folder the folder in which the preference you want lies
* @param prefName the name of the preference you want
*/
public Preference(String folder, String prefName) {
mBooleanValues = new HashMap<>();
mCharValues = new HashMap<>();
mStringValues = new HashMap<>();
mByteValues = new HashMap<>();
mShortValues = new HashMap<>();
mIntValues = new HashMap<>();
mLongValues = new HashMap<>();
mFloatValues = new HashMap<>();
mDoubleValues = new HashMap<>();
if(!folder.endsWith(File.separator)){
folder += File.separator;
}
mXMLFile = new File(folder + prefName + ".xml");
initializeValues(prefName);
}
/**
* This method checks whether the preference already exists.<br>
* If it does, it loads the file content (as text) and extracts all values out
* of that text.<br>
* If it doesn't, it simply writes the empty XML-file for the preference.
* @param prefName the name of the preference (checked against the name
* stored inside the file)
* @throws RuntimeException if the content of the file has been detected to have been changed
*/
private void initializeValues(String prefName){
if(mXMLFile.exists()){
// Read the whole file; StringBuilder avoids the O(n^2) cost of
// repeated String concatenation, and try-with-resources replaces
// the manual close-in-finally dance.
StringBuilder fileContent = new StringBuilder();
try (BufferedReader read = new BufferedReader(new FileReader(mXMLFile))) {
String temp;
while ((temp = read.readLine()) != null) {
fileContent.append(temp).append("\n");
}
} catch (IOException ex) {
// Best-effort: continue with whatever was read; if the content is
// unusable, updateContent() fails and we throw below.
ex.printStackTrace();
}
// Extract the actual values and verify the stored name matches the filename.
if (!updateContent(fileContent.toString()) || !prefName.equals(mName)) {
throw new RuntimeException("The file which contains the data was illegally changed!");
}
}else{
// Preference doesn't exist yet: create the directories (best-effort)
// and persist an empty preference file.
mXMLFile.getParentFile().mkdirs();
mName = prefName;
this.new Editor().commit();
}
}
/**
* This method updates the values of the preference, by extracting the single values
* out of the given text, which should be the content of the XML-file.
* <p>The section order is fixed by {@code Editor.writeToFile}:
* 0=boolean, 1=char, 2=string, 3=byte, 4=short, 5=int, 6=long, 7=float, 8=double.
* Any parse failure (malformed XML, bad number, empty char value, ...) is caught
* and reported via the {@code false} return value.
* @param content the content of the XML-file
* @return whether the update was successful
*/
private boolean updateContent(String content) {
try {
Token prefToken = Token.getTokensInside(content)[0];// we get the global parent
mName = prefToken.getAttribute("name");// whose name-attribute should equal the filename
Token[] valueranges = prefToken.getTokensInside();// load the single datatype-trees
Token[][] values = new Token[valueranges.length][];
for(int i = 0; i < valueranges.length; i++){
values[i] = valueranges[i].getTokensInside();// load all the values as Token-objects
}
// Drop all previously-loaded values before re-populating from the file.
mBooleanValues.clear();
mCharValues.clear();
mStringValues.clear();
mByteValues.clear();
mShortValues.clear();
mIntValues.clear();
mLongValues.clear();
mFloatValues.clear();
mDoubleValues.clear();
for(int valrange = 0; valrange < values.length; valrange++){// assign every key with its value in its datatype
for(Token t : values[valrange]){
// The section index selects the target map / parse routine.
switch(valrange){
case 0:
mBooleanValues.put(t.getAttribute("key"), Boolean.parseBoolean(t.getInterior()))
;
break;
case 1:
mCharValues.put(t.getAttribute("key"), t.getInterior().charAt(0));
break;
case 2:
mStringValues.put(t.getAttribute("key"), t.getInterior());
break;
case 3:
mByteValues.put(t.getAttribute("key"), Byte.parseByte(t.getInterior()));
break;
case 4:
mShortValues.put(t.getAttribute("key"), Short.parseShort(t.getInterior()));
break;
case 5:
mIntValues.put(t.getAttribute("key"), Integer.parseInt(t.getInterior()));
break;
case 6:
mLongValues.put(t.getAttribute("key"), Long.parseLong(t.getInterior()));
break;
case 7:
mFloatValues.put(t.getAttribute("key"), Float.parseFloat(t.getInterior()));
break;
case 8:
mDoubleValues.put(t.getAttribute("key"), Double.parseDouble(t.getInterior()));
break;
default:
break;
}
}
}
} catch (Exception ex) {
ex.printStackTrace();
return false;
}
return true;
}
/**
* Returns the boolean value stored under the given key, or {@code def} if no
* boolean value is assigned to that key.
* @param key the key of the wanted value
* @param def the fallback returned when the key has no boolean value
* @return the stored value, or {@code def}
*/
public boolean getBoolean(String key, boolean def) {
return mBooleanValues.getOrDefault(key, def);
}
/**
* Returns the char value stored under the given key, or {@code def} if no
* char value is assigned to that key.
* @param key the key of the wanted value
* @param def the fallback returned when the key has no char value
* @return the stored value, or {@code def}
*/
public char getChar(String key, char def) {
return mCharValues.getOrDefault(key, def);
}
/**
* Returns the String value stored under the given key, or {@code def} if no
* String value is assigned to that key.
* @param key the key of the wanted value
* @param def the fallback returned when the key has no String value
* @return the stored value, or {@code def}
*/
public String getString(String key, String def) {
return mStringValues.getOrDefault(key, def);
}
/**
* Returns the byte value stored under the given key, or {@code def} if no
* byte value is assigned to that key.
* @param key the key of the wanted value
* @param def the fallback returned when the key has no byte value
* @return the stored value, or {@code def}
*/
public byte getByte(String key, byte def) {
return mByteValues.getOrDefault(key, def);
}
/**
* Returns the short value stored under the given key, or {@code def} if no
* short value is assigned to that key.
* @param key the key of the wanted value
* @param def the fallback returned when the key has no short value
* @return the stored value, or {@code def}
*/
public short getShort(String key, short def) {
return mShortValues.getOrDefault(key, def);
}
/**
* Returns the int value stored under the given key, or {@code def} if no
* int value is assigned to that key.
* @param key the key of the wanted value
* @param def the fallback returned when the key has no int value
* @return the stored value, or {@code def}
*/
public int getInt(String key, int def) {
return mIntValues.getOrDefault(key, def);
}
/**
* Returns the long value stored under the given key, or {@code def} if no
* long value is assigned to that key.
* @param key the key of the wanted value
* @param def the fallback returned when the key has no long value
* @return the stored value, or {@code def}
*/
public long getLong(String key, long def) {
return mLongValues.getOrDefault(key, def);
}
/**
* Returns the float value stored under the given key, or {@code def} if no
* float value is assigned to that key.
* @param key the key of the wanted value
* @param def the fallback returned when the key has no float value
* @return the stored value, or {@code def}
*/
public float getFloat(String key, float def) {
return mFloatValues.getOrDefault(key, def);
}
/**
* Returns the double value stored under the given key, or {@code def} if no
* double value is assigned to that key.
* @param key the key of the wanted value
* @param def the fallback returned when the key has no double value
* @return the stored value, or {@code def}
*/
public double getDouble(String key, double def) {
return mDoubleValues.getOrDefault(key, def);
}
/**
* This method shows you whether there is a value of any datatype assigned to
* the given key.
* @param key the key which is to be checked, whether it has a value assigned to it
* @return whether there is a value assigned to given key
*/
public boolean contains(String key) {
// Check each per-datatype map in turn; any hit means the key is in use.
if (mBooleanValues.containsKey(key)) return true;
if (mCharValues.containsKey(key)) return true;
if (mStringValues.containsKey(key)) return true;
if (mByteValues.containsKey(key)) return true;
if (mShortValues.containsKey(key)) return true;
if (mIntValues.containsKey(key)) return true;
if (mLongValues.containsKey(key)) return true;
if (mFloatValues.containsKey(key)) return true;
return mDoubleValues.containsKey(key);
}
/**
* This method gives you a {@link Editor}-object to edit the Preference
* you called this method on.
* @return a new {@link Editor} pre-loaded with this preference's current values
*/
public Editor edit() {
return new Editor();
}
/**
* This class is used to edit a Preference without it losing its consistency.<br>
* As long as the changes of this object have not yet been applied or committed,
* the old values of the Preference remain valid.
*/
public class Editor {
/** Warning-comment written to the top of every preference file. */
private static final String HEADER = "<!--\r\n" +
" DO NOT MODIFY THIS FILE OR ITS FILENAME!\r\n" +
" IT WILL NOT BE GUARANTEED TO BE USABLE WITH THE FOR THIS FILE AWAITED ALGORITHM ANYMORE!\r\n" +
" \r\n" +
" Copyright 2016 Jean-Pierre Hotz\r\n" +
"-->\r\n";
/*
* Working copies of the preference's values; changes stay here until
* apply() or commit() writes them back to the file and the Preference.
* TODO: Prevent a key from being assigned multiple times
*/
private final HashMap<String, Boolean> mTempBooleanValues;
private final HashMap<String, Character> mTempCharValues;
private final HashMap<String, String> mTempStringValues;
private final HashMap<String, Byte> mTempByteValues;
private final HashMap<String, Short> mTempShortValues;
private final HashMap<String, Integer> mTempIntValues;
private final HashMap<String, Long> mTempLongValues;
private final HashMap<String, Float> mTempFloatValues;
private final HashMap<String, Double> mTempDoubleValues;
/** Snapshots all values currently stored in the enclosing Preference. */
private Editor() {
mTempBooleanValues = new HashMap<>(mBooleanValues);
mTempCharValues = new HashMap<>(mCharValues);
mTempStringValues = new HashMap<>(mStringValues);
mTempByteValues = new HashMap<>(mByteValues);
mTempShortValues = new HashMap<>(mShortValues);
mTempIntValues = new HashMap<>(mIntValues);
mTempLongValues = new HashMap<>(mLongValues);
mTempFloatValues = new HashMap<>(mFloatValues);
mTempDoubleValues = new HashMap<>(mDoubleValues);
}
/**
* This method deletes all the values in this Editor.
* @return the modified Editor
*/
public Preference.Editor clear(){
mTempBooleanValues.clear();
mTempCharValues.clear();
mTempStringValues.clear();
mTempByteValues.clear();
mTempShortValues.clear();
mTempIntValues.clear();
mTempLongValues.clear();
mTempFloatValues.clear();
mTempDoubleValues.clear();
return this;
}
/**
* This method removes <u>all</u> values with given key from the Editor.
* @param key the key whose values are supposed to be deleted
* @return the modified Editor
*/
public Preference.Editor remove(String key){
mTempBooleanValues.remove(key);
mTempCharValues.remove(key);
mTempStringValues.remove(key);
mTempByteValues.remove(key);
mTempShortValues.remove(key);
mTempIntValues.remove(key);
mTempLongValues.remove(key);
mTempFloatValues.remove(key);
mTempDoubleValues.remove(key);
return this;
}
/**
* This method puts a given boolean value with given key to the Editor.
* @param key the key to assign the value to
* @param val the value to be assigned to the key
* @return the modified Editor
*/
public Preference.Editor putBoolean(String key, boolean val){
mTempBooleanValues.put(key, val);
return this;
}
/**
* This method puts a given char value with given key to the Editor.
* @param key the key to assign the value to
* @param val the value to be assigned to the key
* @return the modified Editor
*/
public Preference.Editor putChar(String key, char val){
mTempCharValues.put(key, val);
return this;
}
/**
* This method puts a given String value with given key to the Editor.
* @param key the key to assign the value to
* @param val the value to be assigned to the key
* @return the modified Editor
*/
public Preference.Editor putString(String key, String val){
mTempStringValues.put(key, val);
return this;
}
/**
* This method puts a given byte value with given key to the Editor.
* @param key the key to assign the value to
* @param val the value to be assigned to the key
* @return the modified Editor
*/
public Preference.Editor putByte(String key, byte val){
mTempByteValues.put(key, val);
return this;
}
/**
* This method puts a given short value with given key to the Editor.
* @param key the key to assign the value to
* @param val the value to be assigned to the key
* @return the modified Editor
*/
public Preference.Editor putShort(String key, short val){
mTempShortValues.put(key, val);
return this;
}
/**
* This method puts a given int value with given key to the Editor.
* @param key the key to assign the value to
* @param val the value to be assigned to the key
* @return the modified Editor
*/
public Preference.Editor putInt(String key, int val){
mTempIntValues.put(key, val);
return this;
}
/**
* This method puts a given long value with given key to the Editor.
* @param key the key to assign the value to
* @param val the value to be assigned to the key
* @return the modified Editor
*/
public Preference.Editor putLong(String key, long val){
mTempLongValues.put(key, val);
return this;
}
/**
* This method puts a given float value with given key to the Editor.
* @param key the key to assign the value to
* @param val the value to be assigned to the key
* @return the modified Editor
*/
public Preference.Editor putFloat(String key, float val){
mTempFloatValues.put(key, val);
return this;
}
/**
* This method puts a given double value with given key to the Editor.
* @param key the key to assign the value to
* @param val the value to be assigned to the key
* @return the modified Editor
*/
public Preference.Editor putDouble(String key, double val){
mTempDoubleValues.put(key, val);
return this;
}
/**
* This method starts a thread to apply the changes you did to this Editor
* to the XML-file and the Preference itself.<br>
* You should definitely consider using this method in case you have many values to save
* and don't need to access them immediately after, since it will relieve
* the strain on your current thread.
*/
public void apply(){
new Thread(() -> {
try{
writeToFile();
initializeValues(mName);
}catch(Exception exc){
// Background save is best-effort, but failures should not be
// swallowed silently — at least report them (matches the
// error-reporting style used elsewhere in this class).
exc.printStackTrace();
}
}).start();
}
/**
* This method immediately commits the changes you did to this Editor to the XML-file
* and the Preference itself.<br>
* In the end it shows you whether the values were successfully saved.<br>
* You should only use this method if you either need to access the changed values immediately
* after being changed or if you need feedback whether the values really were
* successfully saved.
* @return whether the values were successfully saved
*/
public boolean commit(){
try{
writeToFile();
initializeValues(mName);
}catch(Exception exc){
return false;
}
return true;
}
/**
* This method saves the changed values to the XML-file in following form
* (the order of datatypes is of importance; the order of the values is not):
* <pre><code>
* &lt;preference name="preferenceName"&gt;
* &lt;boolean_vals&gt;
* &lt;-- template for every value --&gt;
* &lt;value key="key"&gt;value&lt;/value&gt;
* &lt;/boolean_vals&gt;
* ... char_vals, string_vals, byte_vals, short_vals,
* int_vals, long_vals, float_vals, double_vals ...
* &lt;/preference&gt;
* </code></pre>
* Uses a StringBuilder instead of repeated String concatenation to avoid
* O(n^2) behavior for large preferences.
* @throws IOException see {@link FileWriter#FileWriter(File)}, {@link BufferedWriter#write(String)}
* for possible reasons for this Exception being thrown
*/
private void writeToFile() throws IOException{
StringBuilder content = new StringBuilder(HEADER)
.append("<preference name=\"").append(mName).append("\">\n\n");
// Section order must match the index-based parsing in updateContent().
appendSection(content, "boolean_vals", mTempBooleanValues);
appendSection(content, "char_vals", mTempCharValues);
appendSection(content, "string_vals", mTempStringValues);
appendSection(content, "byte_vals", mTempByteValues);
appendSection(content, "short_vals", mTempShortValues);
appendSection(content, "int_vals", mTempIntValues);
appendSection(content, "long_vals", mTempLongValues);
appendSection(content, "float_vals", mTempFloatValues);
appendSection(content, "double_vals", mTempDoubleValues);
content.append("</preference>");
// try-with-resources closes the writer; no explicit close() needed.
try (BufferedWriter writer = new BufferedWriter(new FileWriter(mXMLFile))) {
writer.write(content.toString());
}
}
/**
* Appends one datatype section ("&lt;tag&gt;...&lt;/tag&gt;" with one
* &lt;value&gt; element per entry) to the output.
*/
private void appendSection(StringBuilder out, String tag, HashMap<String, ?> values) {
out.append("\t<").append(tag).append(">\n\n");
for (String key : values.keySet()) {
out.append("\t\t<value key=\"").append(key).append("\">")
.append(values.get(key)).append("</value>\n");
}
out.append("\n\t</").append(tag).append(">\n\n");
}
}
/**
 * This class represents an xml-token with attributes and something written between
 * the opening- and closing-tags.
 */
private static class Token {
    /** Attribute key/value pairs parsed from the opening tag. */
    private final HashMap<String, String> mAttributes;
    /** The raw text between the opening and closing tags. */
    private final String mInterior;
    /**
     * This constructor constructs a Token with given attributes and the given text
     * between its tags.
     * @param attr a HashMap containing the attributes
     * @param inter the text between the tags
     */
    private Token(HashMap<String, String> attr, String inter){
        mAttributes = new HashMap<>(attr);
        mInterior = inter;
    }
    /**
     * This method gives you the tokens inside the token you called this method on.
     * @return the tokens inside the token you called this method on
     */
    public Token[] getTokensInside(){
        return Token.getTokensInside(mInterior);
    }
    /**
     * This method gives you only (but all) tokens on top-level of the given text.<br>
     * Though you must assert that there is no token with the same name inside of a token.
     * @param tokenize the text you want the tokens from
     * @return all tokens on the top-level of the given text
     */
    public static Token[] getTokensInside(String tokenize){
        // Remove all the comments inside the xml-files. Using indexOf instead of a
        // char-by-char scan so an unterminated "<!--" cannot push the index past the
        // end of the string (the previous scan threw StringIndexOutOfBoundsException).
        int com_beg;
        while((com_beg = tokenize.indexOf("<!--")) >= 0){
            int com_close = tokenize.indexOf("-->", com_beg);
            if(com_close < 0){
                // unterminated comment: drop everything from its start
                tokenize = tokenize.substring(0, com_beg);
                break;
            }
            tokenize = tokenize.substring(0, com_beg) + tokenize.substring(com_close + 3);
        }
        ArrayList<Token> tokens = new ArrayList<>();
        int beg = 0;
        int name_end;
        int end = 0;
        while(beg < tokenize.length()){// we repeat until the beginning is at the end of the text
            while(beg < tokenize.length() && tokenize.charAt(beg) != '<'){// we look for the beginning-tag
                beg++;
            }
            name_end = beg;
            while(name_end < tokenize.length() && tokenize.charAt(name_end) != ' ' && tokenize.charAt(name_end) != '>'){
                name_end++;
            }
            // if there is no more token before the end of the text we end the searching
            if(beg >= tokenize.length() || name_end >= tokenize.length())
                break;
            // we obtain the name of the beginning-tag
            String name = tokenize.substring(beg + 1, name_end);
            // with the name of the beginning-tag we search for the end-tag
            while(end < tokenize.length() && !tokenize.substring(end).startsWith("</" + name)){
                end++;
            }
            while(end < tokenize.length() && tokenize.charAt(end) != '>'){
                end++;
            }
            // if we found the end of the token before the end of the text
            if(end > beg && end < tokenize.length()){
                // we try to obtain the actual token in this text section
                Token tempToken = getTokenIn(tokenize.substring(beg, end + 1));
                // if it was successfully obtained we add it to the list
                if(tempToken != null)
                    tokens.add(tempToken);
            }
            // guarantee forward progress: on malformed input `end` may lag behind
            // `beg`, and `beg = end` alone would loop forever re-finding the same '<'
            beg = (end > beg) ? end : beg + 1;
        }
        // then we convert the ArrayList into an Array
        Token[] toRet = new Token[tokens.size()];
        for(int i = 0; i < toRet.length; i++){
            toRet[i] = tokens.get(i);
        }
        return toRet;
    }
    /**
     * This method gives you the top-level token in given string assuming that there only is one.
     * @param tokenize the string with the token
     * @return the token in the string
     * @throws RuntimeException if the tag layout is inconsistent (tampered data file)
     */
    private static Token getTokenIn(String tokenize){
        int beg_beg = 0;
        int name_end = 0;
        int beg_end = 0;
        int end_beg = tokenize.length() - 1;
        int end_end = tokenize.length() - 1;
        // we search for the beginning-tag
        // NOTE: bounds are checked BEFORE charAt so a string without the expected
        // tag characters cannot throw StringIndexOutOfBoundsException
        while (beg_beg < tokenize.length() && tokenize.charAt(beg_beg) != '<') {
            beg_beg++;
        }
        while (beg_end < tokenize.length() && tokenize.charAt(beg_end) != '>') {
            beg_end++;
        }
        // we search for the end-tag
        while (end_beg >= 0 && tokenize.charAt(end_beg) != '<') {
            end_beg--;
        }
        while (end_end >= 0 && tokenize.charAt(end_end) != '>') {
            end_end--;
        }
        // and we search for the name of the beginning-tag
        while (name_end < tokenize.length() && tokenize.charAt(name_end) != ' ' && tokenize.charAt(name_end) != '>') {
            name_end++;
        }
        // sanity-check the tag layout before slicing; anything inconsistent means
        // the data file was illegally changed
        if (beg_beg >= tokenize.length() || beg_end >= tokenize.length()
                || end_beg < 0 || end_end < 0
                || name_end < beg_beg + 1 || end_end < end_beg + 2 || end_beg < beg_end + 1) {
            throw new RuntimeException("The file which contains the data was illegally changed!");
        }
        // we obtain the names of the tags
        String beginningTag = tokenize.substring(beg_beg + 1, name_end);
        String endingTag = tokenize.substring(end_beg + 2, end_end);
        HashMap<String, String> attributes = new HashMap<>();
        if (name_end != beg_end) {// if there are attributes
            String attributeString = tokenize.substring(name_end + 1, beg_end);
            String[] attr = splitAttributes(attributeString);
            for (int i = 0; i < attr.length; i++) {
                attr[i] = attr[i].trim();
                int key_beg = 0;
                int key_end = 0;
                int val_beg = 0;
                int val_end = attr[i].length() - 1;
                // search for the key (bounds checked before charAt)
                while (key_end < attr[i].length() && attr[i].charAt(key_end) != '=') {
                    key_end++;
                }
                // search for its value
                while (val_beg < attr[i].length() && attr[i].charAt(val_beg) != '\"') {
                    val_beg++;
                }
                while (val_end >= 0 && attr[i].charAt(val_end) != '\"') {
                    val_end--;
                }
                // an attribute without '=' or without a quoted value is malformed
                if (key_end >= attr[i].length() || val_beg + 1 > val_end) {
                    throw new RuntimeException("The file which contains the data was illegally changed!");
                }
                // obtain key and value
                String key = attr[i].substring(key_beg, key_end);
                String val = attr[i].substring(val_beg + 1, val_end);
                // and add those to the attributes
                attributes.put(key, val);
            }
        }
        // then we obtain the interior
        String interior = tokenize.substring(beg_end + 1, end_beg);
        // if there's something weird going on we throw a RuntimeException
        if (tokenize.charAt(end_beg + 1) != '/' || !beginningTag.equals(endingTag)) {
            throw new RuntimeException("The file which contains the data was illegally changed!");
        }
        // otherwise we return a token with the determined attributes and interior
        return new Token(attributes, interior);
    }
    /**
     * Splits an attribute string into single {@code key="value"} chunks. A chunk
     * ends right after the second double quote seen.
     * @param attrString the raw text between the tag name and the closing '>'
     * @return the individual attribute strings (untrimmed chunks, blanks dropped)
     */
    private static String[] splitAttributes(String attrString) {
        ArrayList<String> attrs = new ArrayList<>();
        while(attrString.length() > 0){
            int i = 0;
            int quotes = 0;
            // advance just past the attribute's closing quote; the index is always
            // validated before charAt, so trailing whitespace after the last
            // attribute no longer causes StringIndexOutOfBoundsException
            while(i < attrString.length()){
                char c = attrString.charAt(i);
                i++;
                if(c == '\"'){
                    quotes++;
                    if(quotes == 2){
                        break;
                    }
                }
            }
            String attr = attrString.substring(0, i);
            attrString = attrString.substring(i);
            if(!attr.trim().equals("")){
                attrs.add(attr);
            }
        }
        String[] attrsToReturn = new String[attrs.size()];
        for(int i = 0; i < attrsToReturn.length; i++) {
            attrsToReturn[i] = attrs.get(i);
        }
        return attrsToReturn;
    }
    /**
     * This method gives you the attribute of the token with given key.
     * @param key the key whose value you want
     * @return the attribute with given key, or null if absent
     */
    public String getAttribute(String key) {
        return mAttributes.get(key);
    }
    /**
     * This method gives you what was written inside the tag.
     * @return the interior
     */
    public String getInterior(){
        return mInterior;
    }
}
}
| |
/*
* Copyright (C) 2016 Jared Rummler <jared.rummler@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.jaredrummler.fontreader.complexscripts.util;
import com.jaredrummler.fontreader.util.CharUtilities;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
/**
* <p>Script related utilities.</p>
*
* <p>This work was originally authored by Glenn Adams (gadams@apache.org).</p>
*/
public final class CharScript {
// CSOFF: LineLength
//
// The following script codes are based on ISO 15924. Codes less than 1000 are
// official assignments from 15924; those equal to or greater than 1000 are FOP
// implementation specific.
//
/** hebrew script constant */
public static final int SCRIPT_HEBREW = 125; // 'hebr'
/** mongolian script constant */
public static final int SCRIPT_MONGOLIAN = 145; // 'mong'
/** arabic script constant */
public static final int SCRIPT_ARABIC = 160; // 'arab'
/** greek script constant */
public static final int SCRIPT_GREEK = 200; // 'grek'
/** latin script constant */
public static final int SCRIPT_LATIN = 215; // 'latn'
/** cyrillic script constant */
public static final int SCRIPT_CYRILLIC = 220; // 'cyrl'
/** georgian script constant */
public static final int SCRIPT_GEORGIAN = 240; // 'geor'
/** bopomofo script constant */
public static final int SCRIPT_BOPOMOFO = 285; // 'bopo'
/** hangul script constant */
public static final int SCRIPT_HANGUL = 286; // 'hang'
/** gurmukhi script constant */
public static final int SCRIPT_GURMUKHI = 310; // 'guru'
/** gurmukhi 2 script constant */
public static final int SCRIPT_GURMUKHI_2 = 1310;
// 'gur2' -- MSFT (pseudo) script tag for variant shaping semantics
/** devanagari script constant */
public static final int SCRIPT_DEVANAGARI = 315; // 'deva'
/** devanagari 2 script constant */
public static final int SCRIPT_DEVANAGARI_2 = 1315;
// 'dev2' -- MSFT (pseudo) script tag for variant shaping semantics
/** gujarati script constant */
public static final int SCRIPT_GUJARATI = 320; // 'gujr'
/** gujarati 2 script constant */
public static final int SCRIPT_GUJARATI_2 = 1320;
// 'gjr2' -- MSFT (pseudo) script tag for variant shaping semantics
/** bengali script constant */
public static final int SCRIPT_BENGALI = 326; // 'beng'
/** bengali 2 script constant */
public static final int SCRIPT_BENGALI_2 = 1326;
// 'bng2' -- MSFT (pseudo) script tag for variant shaping semantics
/** oriya script constant */
public static final int SCRIPT_ORIYA = 327; // 'orya'
/** oriya 2 script constant */
public static final int SCRIPT_ORIYA_2 = 1327;
// 'ory2' -- MSFT (pseudo) script tag for variant shaping semantics
/** tibetan script constant */
public static final int SCRIPT_TIBETAN = 330; // 'tibt'
/** telugu script constant */
public static final int SCRIPT_TELUGU = 340; // 'telu'
/** telugu 2 script constant */
public static final int SCRIPT_TELUGU_2 = 1340;
// 'tel2' -- MSFT (pseudo) script tag for variant shaping semantics
/** kannada script constant */
public static final int SCRIPT_KANNADA = 345; // 'knda'
/** kannada 2 script constant */
public static final int SCRIPT_KANNADA_2 = 1345;
// 'knd2' -- MSFT (pseudo) script tag for variant shaping semantics
/** tamil script constant */
public static final int SCRIPT_TAMIL = 346; // 'taml'
/** tamil 2 script constant */
public static final int SCRIPT_TAMIL_2 = 1346;
// 'tml2' -- MSFT (pseudo) script tag for variant shaping semantics
/** malayalam script constant */
public static final int SCRIPT_MALAYALAM = 347; // 'mlym'
/** malayalam 2 script constant */
public static final int SCRIPT_MALAYALAM_2 = 1347;
// 'mlm2' -- MSFT (pseudo) script tag for variant shaping semantics
/** sinhalese script constant */
public static final int SCRIPT_SINHALESE = 348; // 'sinh'
/** burmese script constant */
public static final int SCRIPT_BURMESE = 350; // 'mymr'
/** thai script constant */
public static final int SCRIPT_THAI = 352; // 'thai'
/** khmer script constant */
public static final int SCRIPT_KHMER = 355; // 'khmr'
/** lao script constant */
public static final int SCRIPT_LAO = 356; // 'laoo'
/** hiragana script constant */
public static final int SCRIPT_HIRAGANA = 410; // 'hira'
/** ethiopic script constant */
public static final int SCRIPT_ETHIOPIC = 430; // 'ethi'
/** han script constant */
public static final int SCRIPT_HAN = 500; // 'hani'
/** katakana script constant */
public static final int SCRIPT_KATAKANA = 410; // 'kana'
/** math script constant */
public static final int SCRIPT_MATH = 995; // 'zmth'
/** symbol script constant */
public static final int SCRIPT_SYMBOL = 996; // 'zsym'
/** undetermined script constant */
public static final int SCRIPT_UNDETERMINED = 998; // 'zyyy'
/** uncoded script constant */
public static final int SCRIPT_UNCODED = 999; // 'zzzz'
/**
* A static (class) parameter indicating whether V2 indic shaping
* rules apply or not, with default being <code>true</code>.
*/
private static final boolean USE_V2_INDIC = true;
private CharScript() {
}
/**
* Determine if character c is punctuation.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character is punctuation
*/
public static boolean isPunctuation(int c) {
if ((c >= 0x0021) && (c <= 0x002F)) { // basic latin punctuation
return true;
} else if ((c >= 0x003A) && (c <= 0x0040)) { // basic latin punctuation
return true;
} else if ((c >= 0x005F) && (c <= 0x0060)) { // basic latin punctuation
return true;
} else if ((c >= 0x007E) && (c <= 0x007E)) { // basic latin punctuation
return true;
} else if ((c >= 0x00A1) && (c <= 0x00BF)) { // latin supplement punctuation
return true;
} else if ((c >= 0x00D7) && (c <= 0x00D7)) { // latin supplement punctuation
return true;
} else if ((c >= 0x00F7) && (c <= 0x00F7)) { // latin supplement punctuation
return true;
} else // general punctuation
// [TBD] - not complete
return (c >= 0x2000) && (c <= 0x206F);
}
/**
* Determine if character c is a digit.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character is a digit
*/
public static boolean isDigit(int c) {
// basic latin digits
// [TBD] - not complete
return (c >= 0x0030) && (c <= 0x0039);
}
/**
* Determine if character c belong to the hebrew script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to hebrew script
*/
public static boolean isHebrew(int c) {
if ((c >= 0x0590) && (c <= 0x05FF)) { // hebrew block
return true;
} else // hebrew presentation forms block
return (c >= 0xFB00) && (c <= 0xFB4F);
}
/**
* Determine if character c belong to the mongolian script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to mongolian script
*/
public static boolean isMongolian(int c) {
// mongolian block
return (c >= 0x1800) && (c <= 0x18AF);
}
/**
* Determine if character c belong to the arabic script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to arabic script
*/
public static boolean isArabic(int c) {
if ((c >= 0x0600) && (c <= 0x06FF)) { // arabic block
return true;
} else if ((c >= 0x0750) && (c <= 0x077F)) { // arabic supplement block
return true;
} else if ((c >= 0xFB50) && (c <= 0xFDFF)) { // arabic presentation forms a block
return true;
} else // arabic presentation forms b block
return (c >= 0xFE70) && (c <= 0xFEFF);
}
/**
* Determine if character c belong to the greek script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to greek script
*/
public static boolean isGreek(int c) {
if ((c >= 0x0370) && (c <= 0x03FF)) { // greek (and coptic) block
return true;
} else // greek extended block
return (c >= 0x1F00) && (c <= 0x1FFF);
}
/**
* Determine if character c belong to the latin script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to latin script
*/
public static boolean isLatin(int c) {
if ((c >= 0x0041) && (c <= 0x005A)) { // basic latin upper case
return true;
} else if ((c >= 0x0061) && (c <= 0x007A)) { // basic latin lower case
return true;
} else if ((c >= 0x00C0) && (c <= 0x00D6)) { // latin supplement upper case
return true;
} else if ((c >= 0x00D8) && (c <= 0x00DF)) { // latin supplement upper case
return true;
} else if ((c >= 0x00E0) && (c <= 0x00F6)) { // latin supplement lower case
return true;
} else if ((c >= 0x00F8) && (c <= 0x00FF)) { // latin supplement lower case
return true;
} else if ((c >= 0x0100) && (c <= 0x017F)) { // latin extended a
return true;
} else if ((c >= 0x0180) && (c <= 0x024F)) { // latin extended b
return true;
} else if ((c >= 0x1E00) && (c <= 0x1EFF)) { // latin extended additional
return true;
} else if ((c >= 0x2C60) && (c <= 0x2C7F)) { // latin extended c
return true;
} else if ((c >= 0xA720) && (c <= 0xA7FF)) { // latin extended d
return true;
} else // latin ligatures
return (c >= 0xFB00) && (c <= 0xFB0F);
}
/**
* Determine if character c belong to the cyrillic script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to cyrillic script
*/
public static boolean isCyrillic(int c) {
if ((c >= 0x0400) && (c <= 0x04FF)) { // cyrillic block
return true;
} else if ((c >= 0x0500) && (c <= 0x052F)) { // cyrillic supplement block
return true;
} else if ((c >= 0x2DE0) && (c <= 0x2DFF)) { // cyrillic extended-a block
return true;
} else // cyrillic extended-b block
return (c >= 0xA640) && (c <= 0xA69F);
}
/**
* Determine if character c belong to the georgian script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to georgian script
*/
public static boolean isGeorgian(int c) {
if ((c >= 0x10A0) && (c <= 0x10FF)) { // georgian block
return true;
} else // georgian supplement block
return (c >= 0x2D00) && (c <= 0x2D2F);
}
/**
* Determine if character c belong to the hangul script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to hangul script
*/
public static boolean isHangul(int c) {
if ((c >= 0x1100) && (c <= 0x11FF)) { // hangul jamo
return true;
} else if ((c >= 0x3130) && (c <= 0x318F)) { // hangul compatibility jamo
return true;
} else if ((c >= 0xA960) && (c <= 0xA97F)) { // hangul jamo extended a
return true;
} else if ((c >= 0xAC00) && (c <= 0xD7A3)) { // hangul syllables
return true;
} else // hangul jamo extended a
return (c >= 0xD7B0) && (c <= 0xD7FF);
}
/**
* Determine if character c belong to the gurmukhi script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to gurmukhi script
*/
public static boolean isGurmukhi(int c) {
// gurmukhi block
return (c >= 0x0A00) && (c <= 0x0A7F);
}
/**
* Determine if character c belong to the devanagari script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to devanagari script
*/
public static boolean isDevanagari(int c) {
if ((c >= 0x0900) && (c <= 0x097F)) { // devangari block
return true;
} else // devangari extended block
return (c >= 0xA8E0) && (c <= 0xA8FF);
}
/**
* Determine if character c belong to the gujarati script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to gujarati script
*/
public static boolean isGujarati(int c) {
// gujarati block
return (c >= 0x0A80) && (c <= 0x0AFF);
}
/**
* Determine if character c belong to the bengali script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to bengali script
*/
public static boolean isBengali(int c) {
// bengali block
return (c >= 0x0980) && (c <= 0x09FF);
}
/**
* Determine if character c belong to the oriya script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to oriya script
*/
public static boolean isOriya(int c) {
// oriya block
return (c >= 0x0B00) && (c <= 0x0B7F);
}
/**
* Determine if character c belong to the tibetan script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to tibetan script
*/
public static boolean isTibetan(int c) {
// tibetan block
return (c >= 0x0F00) && (c <= 0x0FFF);
}
/**
* Determine if character c belong to the telugu script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to telugu script
*/
public static boolean isTelugu(int c) {
// telugu block
return (c >= 0x0C00) && (c <= 0x0C7F);
}
/**
* Determine if character c belong to the kannada script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to kannada script
*/
public static boolean isKannada(int c) {
// kannada block
return (c >= 0x0C00) && (c <= 0x0C7F);
}
/**
* Determine if character c belong to the tamil script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to tamil script
*/
public static boolean isTamil(int c) {
// tamil block
return (c >= 0x0B80) && (c <= 0x0BFF);
}
/**
* Determine if character c belong to the malayalam script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to malayalam script
*/
public static boolean isMalayalam(int c) {
// malayalam block
return (c >= 0x0D00) && (c <= 0x0D7F);
}
/**
* Determine if character c belong to the sinhalese script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to sinhalese script
*/
public static boolean isSinhalese(int c) {
// sinhala block
return (c >= 0x0D80) && (c <= 0x0DFF);
}
/**
* Determine if character c belong to the burmese script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to burmese script
*/
public static boolean isBurmese(int c) {
if ((c >= 0x1000) && (c <= 0x109F)) { // burmese (myanmar) block
return true;
} else // burmese (myanmar) extended block
return (c >= 0xAA60) && (c <= 0xAA7F);
}
/**
* Determine if character c belong to the thai script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to thai script
*/
public static boolean isThai(int c) {
// thai block
return (c >= 0x0E00) && (c <= 0x0E7F);
}
/**
* Determine if character c belong to the khmer script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to khmer script
*/
public static boolean isKhmer(int c) {
if ((c >= 0x1780) && (c <= 0x17FF)) { // khmer block
return true;
} else // khmer symbols block
return (c >= 0x19E0) && (c <= 0x19FF);
}
/**
* Determine if character c belong to the lao script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to lao script
*/
public static boolean isLao(int c) {
// lao block
return (c >= 0x0E80) && (c <= 0x0EFF);
}
/**
* Determine if character c belong to the ethiopic (amharic) script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to ethiopic (amharic) script
*/
public static boolean isEthiopic(int c) {
if ((c >= 0x1200) && (c <= 0x137F)) { // ethiopic block
return true;
} else if ((c >= 0x1380) && (c <= 0x139F)) { // ethoipic supplement block
return true;
} else if ((c >= 0x2D80) && (c <= 0x2DDF)) { // ethoipic extended block
return true;
} else // ethoipic extended-a block
return (c >= 0xAB00) && (c <= 0xAB2F);
}
/**
* Determine if character c belong to the han (unified cjk) script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to han (unified cjk) script
*/
public static boolean isHan(int c) {
if ((c >= 0x3400) && (c <= 0x4DBF)) {
return true; // cjk unified ideographs extension a
} else if ((c >= 0x4E00) && (c <= 0x9FFF)) {
return true; // cjk unified ideographs
} else if ((c >= 0xF900) && (c <= 0xFAFF)) {
return true; // cjk compatibility ideographs
} else if ((c >= 0x20000) && (c <= 0x2A6DF)) {
return true; // cjk unified ideographs extension b
} else if ((c >= 0x2A700) && (c <= 0x2B73F)) {
return true; // cjk unified ideographs extension c
} else // cjk compatibility ideographs supplement
return (c >= 0x2F800) && (c <= 0x2FA1F);
}
/**
* Determine if character c belong to the bopomofo script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to bopomofo script
*/
public static boolean isBopomofo(int c) {
return (c >= 0x3100) && (c <= 0x312F);
}
/**
* Determine if character c belong to the hiragana script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to hiragana script
*/
public static boolean isHiragana(int c) {
return (c >= 0x3040) && (c <= 0x309F);
}
/**
* Determine if character c belong to the katakana script.
*
* @param c
* a character represented as a unicode scalar value
* @return true if character belongs to katakana script
*/
public static boolean isKatakana(int c) {
if ((c >= 0x30A0) && (c <= 0x30FF)) {
return true;
} else return (c >= 0x31F0) && (c <= 0x31FF);
}
/**
* Obtain ISO15924 numeric script code of character. If script is not or cannot be determined,
* then the script code 998 ('zyyy') is returned.
*
* @param c
* the character to obtain script
* @return an ISO15924 script code
*/
public static int scriptOf(int c) { // [TBD] - needs optimization!!!
if (CharUtilities.isAnySpace(c)) {
return SCRIPT_UNDETERMINED;
} else if (isPunctuation(c)) {
return SCRIPT_UNDETERMINED;
} else if (isDigit(c)) {
return SCRIPT_UNDETERMINED;
} else if (isLatin(c)) {
return SCRIPT_LATIN;
} else if (isCyrillic(c)) {
return SCRIPT_CYRILLIC;
} else if (isGreek(c)) {
return SCRIPT_GREEK;
} else if (isHan(c)) {
return SCRIPT_HAN;
} else if (isBopomofo(c)) {
return SCRIPT_BOPOMOFO;
} else if (isKatakana(c)) {
return SCRIPT_KATAKANA;
} else if (isHiragana(c)) {
return SCRIPT_HIRAGANA;
} else if (isHangul(c)) {
return SCRIPT_HANGUL;
} else if (isArabic(c)) {
return SCRIPT_ARABIC;
} else if (isHebrew(c)) {
return SCRIPT_HEBREW;
} else if (isMongolian(c)) {
return SCRIPT_MONGOLIAN;
} else if (isGeorgian(c)) {
return SCRIPT_GEORGIAN;
} else if (isGurmukhi(c)) {
return useV2IndicRules(SCRIPT_GURMUKHI);
} else if (isDevanagari(c)) {
return useV2IndicRules(SCRIPT_DEVANAGARI);
} else if (isGujarati(c)) {
return useV2IndicRules(SCRIPT_GUJARATI);
} else if (isBengali(c)) {
return useV2IndicRules(SCRIPT_BENGALI);
} else if (isOriya(c)) {
return useV2IndicRules(SCRIPT_ORIYA);
} else if (isTibetan(c)) {
return SCRIPT_TIBETAN;
} else if (isTelugu(c)) {
return useV2IndicRules(SCRIPT_TELUGU);
} else if (isKannada(c)) {
return useV2IndicRules(SCRIPT_KANNADA);
} else if (isTamil(c)) {
return useV2IndicRules(SCRIPT_TAMIL);
} else if (isMalayalam(c)) {
return useV2IndicRules(SCRIPT_MALAYALAM);
} else if (isSinhalese(c)) {
return SCRIPT_SINHALESE;
} else if (isBurmese(c)) {
return SCRIPT_BURMESE;
} else if (isThai(c)) {
return SCRIPT_THAI;
} else if (isKhmer(c)) {
return SCRIPT_KHMER;
} else if (isLao(c)) {
return SCRIPT_LAO;
} else if (isEthiopic(c)) {
return SCRIPT_ETHIOPIC;
} else {
return SCRIPT_UNDETERMINED;
}
}
/**
* Obtain the V2 indic script code corresponding to V1 indic script code SC if
* and only iff V2 indic rules apply; otherwise return SC.
*
* @param sc
* a V1 indic script code
* @return either SC or the V2 flavor of SC if V2 indic rules apply
*/
public static int useV2IndicRules(int sc) {
if (USE_V2_INDIC) {
return (sc < 1000) ? (sc + 1000) : sc;
} else {
return sc;
}
}
/**
* Obtain the script codes of each character in a character sequence. If script
* is not or cannot be determined for some character, then the script code 998
* ('zyyy') is returned.
*
* @param cs
* the character sequence
* @return a (possibly empty) array of script codes
*/
public static int[] scriptsOf(CharSequence cs) {
Set s = new HashSet();
for (int i = 0, n = cs.length(); i < n; i++) {
s.add(Integer.valueOf(scriptOf(cs.charAt(i))));
}
int[] sa = new int[s.size()];
int ns = 0;
for (Iterator it = s.iterator(); it.hasNext(); ) {
sa[ns++] = ((Integer) it.next()).intValue();
}
Arrays.sort(sa);
return sa;
}
/**
* Determine the dominant script of a character sequence.
*
* @param cs
* the character sequence
* @return the dominant script or SCRIPT_UNDETERMINED
*/
public static int dominantScript(CharSequence cs) {
Map m = new HashMap();
for (int i = 0, n = cs.length(); i < n; i++) {
int c = cs.charAt(i);
int s = scriptOf(c);
Integer k = Integer.valueOf(s);
Integer v = (Integer) m.get(k);
if (v != null) {
m.put(k, Integer.valueOf(v.intValue() + 1));
} else {
m.put(k, Integer.valueOf(0));
}
}
int sMax = -1;
int cMax = -1;
for (Iterator it = m.entrySet().iterator(); it.hasNext(); ) {
Map.Entry e = (Map.Entry) it.next();
Integer k = (Integer) e.getKey();
int s = k.intValue();
switch (s) {
case SCRIPT_UNDETERMINED:
case SCRIPT_UNCODED:
break;
default:
Integer v = (Integer) e.getValue();
assert v != null;
int c = v.intValue();
if (c > cMax) {
cMax = c;
sMax = s;
}
break;
}
}
if (sMax < 0) {
sMax = SCRIPT_UNDETERMINED;
}
return sMax;
}
/**
* Determine if script tag denotes an 'Indic' script, where a
* script is an 'Indic' script if it is intended to be processed by
* the generic 'Indic' Script Processor.
*
* @param script
* a script tag
* @return true if script tag is a designated 'Indic' script
*/
public static boolean isIndicScript(String script) {
return isIndicScript(scriptCodeFromTag(script));
}
/**
* Determine if script tag denotes an 'Indic' script, where a
* script is an 'Indic' script if it is intended to be processed by
* the generic 'Indic' Script Processor.
*
* @param script
* a script code
* @return true if script code is a designated 'Indic' script
*/
public static boolean isIndicScript(int script) {
switch (script) {
case SCRIPT_BENGALI:
case SCRIPT_BENGALI_2:
case SCRIPT_BURMESE:
case SCRIPT_DEVANAGARI:
case SCRIPT_DEVANAGARI_2:
case SCRIPT_GUJARATI:
case SCRIPT_GUJARATI_2:
case SCRIPT_GURMUKHI:
case SCRIPT_GURMUKHI_2:
case SCRIPT_KANNADA:
case SCRIPT_KANNADA_2:
case SCRIPT_MALAYALAM:
case SCRIPT_MALAYALAM_2:
case SCRIPT_ORIYA:
case SCRIPT_ORIYA_2:
case SCRIPT_TAMIL:
case SCRIPT_TAMIL_2:
case SCRIPT_TELUGU:
case SCRIPT_TELUGU_2:
return true;
default:
return false;
}
}
/**
* Determine the script tag associated with an internal script code.
*
* @param code
* the script code
* @return a script tag
*/
public static String scriptTagFromCode(int code) {
Map<Integer, String> m = getScriptTagsMap();
if (m != null) {
String tag;
if ((tag = m.get(Integer.valueOf(code))) != null) {
return tag;
} else {
return "";
}
} else {
return "";
}
}
/**
* Determine the internal script code associated with a script tag.
*
* @param tag
* the script tag
* @return a script code
*/
public static int scriptCodeFromTag(String tag) {
Map<String, Integer> m = getScriptCodeMap();
if (m != null) {
Integer c;
if ((c = m.get(tag)) != null) {
return c;
} else {
return SCRIPT_UNDETERMINED;
}
} else {
return SCRIPT_UNDETERMINED;
}
}
private static Map<Integer, String> scriptTagsMap;
private static Map<String, Integer> scriptCodeMap;
private static void putScriptTag(Map tm, Map cm, int code, String tag) {
assert tag != null;
assert tag.length() != 0;
assert code >= 0;
assert code < 2000;
tm.put(Integer.valueOf(code), tag);
cm.put(tag, Integer.valueOf(code));
}
private static void makeScriptMaps() {
HashMap<Integer, String> tm = new HashMap<Integer, String>();
HashMap<String, Integer> cm = new HashMap<String, Integer>();
putScriptTag(tm, cm, SCRIPT_HEBREW, "hebr");
putScriptTag(tm, cm, SCRIPT_MONGOLIAN, "mong");
putScriptTag(tm, cm, SCRIPT_ARABIC, "arab");
putScriptTag(tm, cm, SCRIPT_GREEK, "grek");
putScriptTag(tm, cm, SCRIPT_LATIN, "latn");
putScriptTag(tm, cm, SCRIPT_CYRILLIC, "cyrl");
putScriptTag(tm, cm, SCRIPT_GEORGIAN, "geor");
putScriptTag(tm, cm, SCRIPT_BOPOMOFO, "bopo");
putScriptTag(tm, cm, SCRIPT_HANGUL, "hang");
putScriptTag(tm, cm, SCRIPT_GURMUKHI, "guru");
putScriptTag(tm, cm, SCRIPT_GURMUKHI_2, "gur2");
putScriptTag(tm, cm, SCRIPT_DEVANAGARI, "deva");
putScriptTag(tm, cm, SCRIPT_DEVANAGARI_2, "dev2");
putScriptTag(tm, cm, SCRIPT_GUJARATI, "gujr");
putScriptTag(tm, cm, SCRIPT_GUJARATI_2, "gjr2");
putScriptTag(tm, cm, SCRIPT_BENGALI, "beng");
putScriptTag(tm, cm, SCRIPT_BENGALI_2, "bng2");
putScriptTag(tm, cm, SCRIPT_ORIYA, "orya");
putScriptTag(tm, cm, SCRIPT_ORIYA_2, "ory2");
putScriptTag(tm, cm, SCRIPT_TIBETAN, "tibt");
putScriptTag(tm, cm, SCRIPT_TELUGU, "telu");
putScriptTag(tm, cm, SCRIPT_TELUGU_2, "tel2");
putScriptTag(tm, cm, SCRIPT_KANNADA, "knda");
putScriptTag(tm, cm, SCRIPT_KANNADA_2, "knd2");
putScriptTag(tm, cm, SCRIPT_TAMIL, "taml");
putScriptTag(tm, cm, SCRIPT_TAMIL_2, "tml2");
putScriptTag(tm, cm, SCRIPT_MALAYALAM, "mlym");
putScriptTag(tm, cm, SCRIPT_MALAYALAM_2, "mlm2");
putScriptTag(tm, cm, SCRIPT_SINHALESE, "sinh");
putScriptTag(tm, cm, SCRIPT_BURMESE, "mymr");
putScriptTag(tm, cm, SCRIPT_THAI, "thai");
putScriptTag(tm, cm, SCRIPT_KHMER, "khmr");
putScriptTag(tm, cm, SCRIPT_LAO, "laoo");
putScriptTag(tm, cm, SCRIPT_HIRAGANA, "hira");
putScriptTag(tm, cm, SCRIPT_ETHIOPIC, "ethi");
putScriptTag(tm, cm, SCRIPT_HAN, "hani");
putScriptTag(tm, cm, SCRIPT_KATAKANA, "kana");
putScriptTag(tm, cm, SCRIPT_MATH, "zmth");
putScriptTag(tm, cm, SCRIPT_SYMBOL, "zsym");
putScriptTag(tm, cm, SCRIPT_UNDETERMINED, "zyyy");
putScriptTag(tm, cm, SCRIPT_UNCODED, "zzzz");
scriptTagsMap = tm;
scriptCodeMap = cm;
}
/**
 * Returns the lazily built map from numeric script constant to its 4-letter
 * script tag (e.g. "latn", "hebr"), building both script maps on first use.
 *
 * NOTE(review): the null-check lazy initialization is not synchronized, so
 * concurrent first calls could run makeScriptMaps() more than once — confirm
 * callers are single-threaded or make initialization thread-safe.
 */
private static Map<Integer, String> getScriptTagsMap() {
// Build on demand; makeScriptMaps() populates both scriptTagsMap and scriptCodeMap.
if (scriptTagsMap == null) {
makeScriptMaps();
}
return scriptTagsMap;
}
/**
 * Returns the lazily built reverse map from 4-letter script tag back to its
 * numeric script constant, building both script maps on first use.
 *
 * NOTE(review): same unsynchronized lazy initialization as getScriptTagsMap();
 * confirm single-threaded use or add synchronization.
 */
private static Map<String, Integer> getScriptCodeMap() {
// Build on demand; makeScriptMaps() populates both scriptTagsMap and scriptCodeMap.
if (scriptCodeMap == null) {
makeScriptMaps();
}
return scriptCodeMap;
}
}
| |
package apoc.export.cypher.formatter;
import apoc.export.util.ExportConfig;
import apoc.export.util.ExportFormat;
import apoc.export.util.Reporter;
import apoc.util.Util;
import org.apache.commons.lang3.StringUtils;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Label;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Relationship;
import org.neo4j.graphdb.RelationshipType;
import org.neo4j.graphdb.Transaction;
import org.neo4j.internal.helpers.collection.Iterables;
import java.io.PrintWriter;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import static apoc.export.cypher.formatter.CypherFormatterUtils.Q_UNIQUE_ID_LABEL;
import static apoc.export.cypher.formatter.CypherFormatterUtils.UNIQUE_ID_PROP;
import static apoc.export.cypher.formatter.CypherFormatterUtils.quote;
/**
* @author AgileLARUS
*
* @since 16-06-2017
*/
abstract class AbstractCypherFormatter implements CypherFormatter {
private static final String STATEMENT_CONSTRAINTS = "CREATE CONSTRAINT%s ON (node:%s) ASSERT (%s) %s;";
private static final String STATEMENT_NODE_FULLTEXT_IDX = "CREATE FULLTEXT INDEX %s FOR (n:%s) ON EACH [%s];";
private static final String STATEMENT_REL_FULLTEXT_IDX = "CREATE FULLTEXT INDEX %s FOR ()-[rel:%s]-() ON EACH [%s];";
/**
 * Builds the Cypher statement that removes the synthetic unique-id label and
 * property from exported nodes, {@code batchSize} nodes at a time.
 *
 * @param batchSize maximum number of nodes touched per statement execution
 * @return the clean-up Cypher statement, terminated with a semicolon
 */
@Override
public String statementForCleanUp(int batchSize) {
    StringBuilder statement = new StringBuilder();
    statement.append("MATCH (n:").append(Q_UNIQUE_ID_LABEL).append(") ");
    statement.append(" WITH n LIMIT ").append(batchSize);
    statement.append(" REMOVE n:").append(Q_UNIQUE_ID_LABEL);
    statement.append(" REMOVE n.").append(quote(UNIQUE_ID_PROP)).append(";");
    return statement.toString();
}
/**
 * Builds a {@code CREATE INDEX} statement for a node label over the given
 * property keys.
 *
 * @param label       label to index (quoted for Cypher)
 * @param keys        property keys the index covers
 * @param ifNotExists when true, adds the {@code IF NOT EXISTS} clause
 * @return the index-creation Cypher statement
 */
@Override
public String statementForNodeIndex(String label, Iterable<String> keys, boolean ifNotExists) {
    String existsClause = getIfNotExists(ifNotExists);
    String quotedLabel = Util.quote(label);
    String properties = getPropertiesQuoted(keys, "node.");
    return String.format("CREATE INDEX%s FOR (node:%s) ON (%s);", existsClause, quotedLabel, properties);
}
/**
 * Builds a {@code CREATE INDEX} statement for a relationship type over the
 * given property keys.
 *
 * @param type        relationship type to index (quoted for Cypher)
 * @param keys        property keys the index covers
 * @param ifNotExists when true, adds the {@code IF NOT EXISTS} clause
 * @return the index-creation Cypher statement
 */
@Override
public String statementForIndexRelationship(String type, Iterable<String> keys, boolean ifNotExists) {
    String existsClause = getIfNotExists(ifNotExists);
    String quotedType = Util.quote(type);
    String properties = getPropertiesQuoted(keys, "rel.");
    return String.format("CREATE INDEX%s FOR ()-[rel:%s]-() ON (%s);", existsClause, quotedType, properties);
}
/**
 * Builds a {@code CREATE FULLTEXT INDEX} statement for nodes. Multiple labels
 * are combined with {@code |}, each quoted individually.
 *
 * @param name   index name
 * @param labels labels the full-text index spans
 * @param keys   property keys indexed (prefixed with {@code n.})
 * @return the full-text index creation statement
 */
@Override
public String statementForNodeFullTextIndex(String name, Iterable<Label> labels, Iterable<String> keys) {
    StringJoiner labelJoiner = new StringJoiner("|");
    for (Label l : labels) {
        labelJoiner.add(Util.quote(l.name()));
    }
    String key = getPropertiesQuoted(keys, "n.");
    return String.format(STATEMENT_NODE_FULLTEXT_IDX, name, labelJoiner.toString(), key);
}
/**
 * Builds a {@code CREATE FULLTEXT INDEX} statement for relationships. Multiple
 * types are combined with {@code |}, each quoted individually.
 *
 * @param name  index name
 * @param types relationship types the full-text index spans
 * @param keys  property keys indexed (prefixed with {@code rel.})
 * @return the full-text index creation statement
 */
@Override
public String statementForRelationshipFullTextIndex(String name, Iterable<RelationshipType> types, Iterable<String> keys) {
    StringJoiner typeJoiner = new StringJoiner("|");
    for (RelationshipType t : types) {
        typeJoiner.add(Util.quote(t.name()));
    }
    String key = getPropertiesQuoted(keys, "rel.");
    return String.format(STATEMENT_REL_FULLTEXT_IDX, name, typeJoiner.toString(), key);
}
/**
 * Builds a {@code CREATE CONSTRAINT} statement for the given label. A single
 * key yields a uniqueness constraint; multiple keys yield a node-key
 * constraint.
 *
 * @param label       label the constraint applies to
 * @param keys        constrained property keys
 * @param ifNotExists when true, adds the {@code IF NOT EXISTS} clause
 * @return the constraint-creation Cypher statement
 */
@Override
public String statementForConstraint(String label, Iterable<String> keys, boolean ifNotExists) {
    String keysString = getPropertiesQuoted(keys, "node.");
    // Composite keys become a NODE KEY constraint instead of a plain UNIQUE one.
    String assertion = Iterables.count(keys) > 1 ? "IS NODE KEY" : "IS UNIQUE";
    return String.format(STATEMENT_CONSTRAINTS, getIfNotExists(ifNotExists), Util.quote(label), keysString, assertion);
}
/** Returns the {@code " IF NOT EXISTS"} clause (leading space included) or an empty string. */
private String getIfNotExists(boolean ifNotExists) {
    if (ifNotExists) {
        return " IF NOT EXISTS";
    }
    return "";
}
/**
 * Joins the given property keys as {@code prefix`key`} items separated by
 * {@code ", "}, quoting each key for Cypher.
 *
 * @param keys   property keys to render
 * @param prefix variable prefix such as {@code "node."} or {@code "rel."}
 * @return the comma-separated, quoted property list
 */
private String getPropertiesQuoted(Iterable<String> keys, String prefix) {
    StringJoiner joiner = new StringJoiner(", ");
    for (String key : keys) {
        joiner.add(prefix + CypherFormatterUtils.quote(key));
    }
    return joiner.toString();
}
/**
 * Builds a {@code MERGE} statement for a single node: the lookup part uses the
 * node's unique-constrained/indexed identity, and any remaining (non-unique)
 * properties and labels are applied via SET.
 *
 * @param cypherFormat      export format; ADD_STRUCTURE uses {@code ON CREATE SET}
 *                          so existing nodes keep their current properties
 * @param node              node to render
 * @param uniqueConstraints label → unique-constrained property keys
 * @param indexedProperties indexed property keys (excluded from the SET part)
 * @param indexNames        names used to pick the lookup label/properties
 * @return the complete MERGE statement, terminated with a semicolon
 */
protected String mergeStatementForNode(CypherFormat cypherFormat, Node node, Map<String, Set<String>> uniqueConstraints, Set<String> indexedProperties, Set<String> indexNames) {
StringBuilder result = new StringBuilder(1000);
result.append("MERGE ");
result.append(CypherFormatterUtils.formatNodeLookup("n", node, uniqueConstraints, indexNames));
// Only emit a SET clause when the node has properties beyond its lookup identity.
if (node.getPropertyKeys().iterator().hasNext()) {
String notUniqueProperties = CypherFormatterUtils.formatNotUniqueProperties("n", node, uniqueConstraints, indexedProperties, false);
String notUniqueLabels = CypherFormatterUtils.formatNotUniqueLabels("n", node, uniqueConstraints);
if (!"".equals(notUniqueProperties) || !"".equals(notUniqueLabels)) {
result.append(cypherFormat.equals(CypherFormat.ADD_STRUCTURE) ? " ON CREATE SET " : " SET ");
result.append(notUniqueProperties);
// Separator only when both property and label fragments are present.
result.append(!"".equals(notUniqueProperties) && !"".equals(notUniqueLabels) ? ", " : "");
result.append(notUniqueLabels);
}
}
result.append(";");
return result.toString();
}
/**
 * Builds a MATCH + MERGE statement for a single relationship: both endpoints
 * are matched by their unique identity, the relationship is merged between
 * them, and its properties are applied via SET.
 *
 * NOTE(review): here {@code ON CREATE SET} is chosen for UPDATE_STRUCTURE,
 * while the node variant keys off ADD_STRUCTURE — presumably intentional
 * (each format preserves the part it does not own), but worth confirming.
 *
 * @param cypherFormat      export format controlling SET vs ON CREATE SET
 * @param relationship      relationship to render
 * @param uniqueConstraints label → unique-constrained property keys
 * @param indexedProperties indexed property keys used for endpoint lookup
 * @return the complete statement, terminated with a semicolon
 */
public String mergeStatementForRelationship(CypherFormat cypherFormat, Relationship relationship, Map<String, Set<String>> uniqueConstraints, Set<String> indexedProperties) {
StringBuilder result = new StringBuilder(1000);
result.append("MATCH ");
result.append(CypherFormatterUtils.formatNodeLookup("n1", relationship.getStartNode(), uniqueConstraints, indexedProperties));
result.append(", ");
result.append(CypherFormatterUtils.formatNodeLookup("n2", relationship.getEndNode(), uniqueConstraints, indexedProperties));
result.append(" MERGE (n1)-[r:" + CypherFormatterUtils.quote(relationship.getType().name()) + "]->(n2)");
if (relationship.getPropertyKeys().iterator().hasNext()) {
result.append(cypherFormat.equals(CypherFormat.UPDATE_STRUCTURE) ? " ON CREATE SET " : " SET ");
result.append(CypherFormatterUtils.formatRelationshipProperties("r", relationship, false));
}
result.append(";");
return result.toString();
}
/**
 * Streams all nodes as UNWIND-style batched statements. Nodes are grouped by
 * (labels, id-property-keys) so each group shares one MERGE/CREATE template;
 * within a group, rows are emitted as {@code {idProps, properties:{...}}}
 * maps, and a closing clause is written whenever the group ends or a batch /
 * unwind-batch boundary is reached.
 *
 * @param nodeClause        clause opening each statement (e.g. MERGE/CREATE)
 * @param setClause         clause applying the row properties
 * @param nodes             nodes to export
 * @param uniqueConstraints label → unique-constrained property keys
 * @param exportConfig      batch sizes, format and optimization options
 * @param out               destination writer
 * @param reporter          receives node/property counters when done
 * @param db                database used to re-read each node in its own tx
 */
public void buildStatementForNodes(String nodeClause, String setClause,
Iterable<Node> nodes, Map<String, Set<String>> uniqueConstraints,
ExportConfig exportConfig,
PrintWriter out, Reporter reporter,
GraphDatabaseService db) {
AtomicInteger nodeCount = new AtomicInteger(0);
// Grouping key: (labels, id property keys). Each node is re-fetched in a
// fresh transaction because grouping runs on a parallel stream.
Function<Node, Map.Entry<Set<String>, Set<String>>> keyMapper = (node) -> {
try (Transaction tx = db.beginTx()) {
node = tx.getNodeById(node.getId());
Set<String> idProperties = CypherFormatterUtils.getNodeIdProperties(node, uniqueConstraints).keySet();
Set<String> labels = getLabels(node);
tx.commit();
return new AbstractMap.SimpleImmutableEntry<>(labels, idProperties);
}
};
Map<Map.Entry<Set<String>, Set<String>>, List<Node>> groupedData = StreamSupport.stream(nodes.spliterator(), true)
.collect(Collectors.groupingByConcurrent(keyMapper));
AtomicInteger propertiesCount = new AtomicInteger(0);
AtomicInteger batchCount = new AtomicInteger(0);
groupedData.forEach((key, nodeList) -> {
AtomicInteger unwindCount = new AtomicInteger(0);
final int nodeListSize = nodeList.size();
final Node last = nodeList.get(nodeListSize - 1);
nodeCount.addAndGet(nodeListSize);
for (int index = 0; index < nodeList.size(); index++) {
Node node = nodeList.get(index);
// Open batch/unwind headers only at their respective boundaries.
writeBatchBegin(exportConfig, out, batchCount);
writeUnwindStart(exportConfig, out, unwindCount);
batchCount.incrementAndGet();
unwindCount.incrementAndGet();
Map<String, Object> props = node.getAllProperties();
// start element
out.append("{");
// id
Map<String, Object> idMap = CypherFormatterUtils.getNodeIdProperties(node, uniqueConstraints);
writeNodeIds(out, idMap);
// properties
out.append(", ");
out.append("properties:");
propertiesCount.addAndGet(props.size());
// Id properties are already emitted above, so drop them from the map.
props.keySet().removeAll(idMap.keySet());
writeProperties(out, props);
// end element
out.append("}");
// Close the unwind when the group ends or a batch boundary is hit.
if (last.equals(node) || isBatchMatch(exportConfig, batchCount) || isUnwindBatchMatch(exportConfig, unwindCount)) {
closeUnwindNodes(nodeClause, setClause, uniqueConstraints, exportConfig, out, key, last);
writeBatchEnd(exportConfig, out, batchCount);
unwindCount.set(0);
} else {
out.append(", ");
}
}
});
// Flush a trailing commit if the last batch was left open.
addCommitToEnd(exportConfig, out, batchCount);
reporter.update(nodeCount.get(), 0, propertiesCount.longValue());
}
/**
 * Closes an open node UNWIND block: ends the row list, then writes the
 * node clause that matches/merges on the unique-constrained label and id
 * properties, applies {@code row.properties}, and adds any remaining labels.
 *
 * @param key entry of (all labels, id property keys) for this node group
 * @param last representative node of the group, used to pick the constrained label
 */
private void closeUnwindNodes(String nodeClause, String setClause, Map<String, Set<String>> uniqueConstraints, ExportConfig exportConfig, PrintWriter out, Map.Entry<Set<String>, Set<String>> key, Node last) {
writeUnwindEnd(exportConfig, out);
out.append(StringUtils.LF);
out.append(nodeClause);
String label = getUniqueConstrainedLabel(last, uniqueConstraints);
out.append("(n:");
out.append(Util.quote(label));
out.append("{");
// Bind the id properties from the current row.
writeSetProperties(out, key.getValue());
out.append("}) ");
out.append(setClause);
out.append("n += row.properties");
// Re-attach every label except the one already used in the lookup.
String addLabels = key.getKey().stream()
.filter(l -> !l.equals(label))
.map(Util::quote)
.collect(Collectors.joining(":"));
if (!addLabels.isEmpty()) {
out.append(" SET n:");
out.append(addLabels);
}
out.append(";");
out.append(StringUtils.LF);
}
/** Writes {@code `key`: row.key} pairs with no extra row-path prefix. */
private void writeSetProperties(PrintWriter out, Set<String> value) {
    // Delegate to the prefixed variant; an empty prefix is equivalent to null there.
    writeSetProperties(out, value, "");
}
/**
 * Writes {@code `key`: row.<prefix><key>} pairs separated by {@code ", "}.
 *
 * @param prefix extra path under {@code row.} (e.g. {@code "start."}); null
 *               is treated as empty
 */
private void writeSetProperties(PrintWriter out, Set<String> value, String prefix) {
    String rowPrefix = (prefix == null) ? "" : prefix;
    int remaining = value.size();
    for (String key : value) {
        remaining--;
        out.append(Util.quote(key)).append(": row.").append(rowPrefix).append(formatNodeId(key));
        if (remaining > 0) {
            out.append(", ");
        }
    }
}
/** True when the running element count sits exactly on a batch boundary. */
private boolean isBatchMatch(ExportConfig exportConfig, AtomicInteger batchCount) {
    int batchSize = exportConfig.getBatchSize();
    return (batchCount.get() % batchSize) == 0;
}
/**
 * Streams all relationships as UNWIND-style batched statements. Relationships
 * are grouped by (type, start identity, end identity) so each group shares one
 * statement template; within a group, rows carry the start/end id properties
 * plus the relationship properties, and a closing clause is written at group
 * end or on batch / unwind-batch boundaries.
 *
 * @param relationshipClause clause creating/merging the relationship
 * @param setClause          clause applying the row properties
 * @param relationship       relationships to export
 * @param uniqueConstraints  label → unique-constrained property keys
 * @param exportConfig       batch sizes, format and optimization options
 * @param out                destination writer
 * @param reporter           receives relationship/property counters when done
 * @param db                 database used to re-read each relationship in its own tx
 */
public void buildStatementForRelationships(String relationshipClause,
String setClause, Iterable<Relationship> relationship,
Map<String, Set<String>> uniqueConstraints, ExportConfig exportConfig,
PrintWriter out, Reporter reporter,
GraphDatabaseService db) {
AtomicInteger relCount = new AtomicInteger(0);
// Grouping key: type + identity (labels, id keys) of both endpoints.
// Re-fetched in a fresh transaction because grouping runs on a parallel stream.
Function<Relationship, Map<String, Object>> keyMapper = (rel) -> {
try (Transaction tx = db.beginTx()) {
rel = tx.getRelationshipById(rel.getId());
Node start = rel.getStartNode();
Set<String> startLabels = getLabels(start);
// define the end labels
Node end = rel.getEndNode();
Set<String> endLabels = getLabels(end);
// define the type
String type = rel.getType().name();
// create the path
Map<String, Object> key = Util.map("type", type,
"start", new AbstractMap.SimpleImmutableEntry<>(startLabels, CypherFormatterUtils.getNodeIdProperties(start, uniqueConstraints).keySet()),
"end", new AbstractMap.SimpleImmutableEntry<>(endLabels, CypherFormatterUtils.getNodeIdProperties(end, uniqueConstraints).keySet()));
tx.commit();
return key;
}
};
Map<Map<String, Object>, List<Relationship>> groupedData = StreamSupport.stream(relationship.spliterator(), true)
.collect(Collectors.groupingByConcurrent(keyMapper));
AtomicInteger propertiesCount = new AtomicInteger(0);
AtomicInteger batchCount = new AtomicInteger(0);
String start = "start";
String end = "end";
groupedData.forEach((path, relationshipList) -> {
AtomicInteger unwindCount = new AtomicInteger(0);
final int relSize = relationshipList.size();
relCount.addAndGet(relSize);
final Relationship last = relationshipList.get(relSize - 1);
for (int index = 0; index < relationshipList.size(); index++) {
Relationship rel = relationshipList.get(index);
// Open batch/unwind headers only at their respective boundaries.
writeBatchBegin(exportConfig, out, batchCount);
writeUnwindStart(exportConfig, out, unwindCount);
batchCount.incrementAndGet();
unwindCount.incrementAndGet();
Map<String, Object> props = rel.getAllProperties();
// start element
out.append("{");
// start node
Node startNode = rel.getStartNode();
writeRelationshipNodeIds(uniqueConstraints, out, start, startNode);
out.append(", ");
// end node
Node endNode = rel.getEndNode();
writeRelationshipNodeIds(uniqueConstraints, out, end, endNode);
// properties
out.append(", ");
out.append("properties:");
writeProperties(out, props);
propertiesCount.addAndGet(props.size());
// end element
out.append("}");
// Close the unwind when the group ends or a batch boundary is hit.
if (last.equals(rel) || isBatchMatch(exportConfig, batchCount) || isUnwindBatchMatch(exportConfig, unwindCount)) {
closeUnwindRelationships(relationshipClause, setClause, uniqueConstraints, exportConfig, out, start, end, path, last);
writeBatchEnd(exportConfig, out, batchCount);
unwindCount.set(0);
} else {
out.append(", ");
}
}
});
// Flush a trailing commit if the last batch was left open.
addCommitToEnd(exportConfig, out, batchCount);
reporter.update(0, relCount.get(), propertiesCount.longValue());
}
/**
 * Closes an open relationship UNWIND block: ends the row list, matches both
 * endpoints by their unique identity, then writes the relationship clause for
 * the group's type and applies {@code row.properties}.
 *
 * @param path grouping key; {@code path.get("type")} holds the relationship type
 * @param last representative relationship of the group, used for endpoint lookup
 */
private void closeUnwindRelationships(String relationshipClause, String setClause, Map<String, Set<String>> uniqueConstraints, ExportConfig exportConfig, PrintWriter out, String start, String end, Map<String, Object> path, Relationship last) {
writeUnwindEnd(exportConfig, out);
// match start node
writeRelationshipMatchAsciiNode(last.getStartNode(), out, start, uniqueConstraints);
// match end node
writeRelationshipMatchAsciiNode(last.getEndNode(), out, end, uniqueConstraints);
out.append(StringUtils.LF);
// create the relationship (depends on the strategy)
out.append(relationshipClause);
out.append("(start)-[r:" + Util.quote(path.get("type").toString()) + "]->(end) ");
out.append(setClause);
out.append("r += row.properties;");
out.append(StringUtils.LF);
}
/** True when the running element count sits exactly on an unwind-batch boundary. */
private boolean isUnwindBatchMatch(ExportConfig exportConfig, AtomicInteger batchCount) {
    int unwindBatchSize = exportConfig.getUnwindBatchSize();
    return (batchCount.get() % unwindBatchSize) == 0;
}
/** Emits the format's commit terminator when the count is on a batch boundary. */
private void writeBatchEnd(ExportConfig exportConfig, PrintWriter out, AtomicInteger batchCount) {
    if (!isBatchMatch(exportConfig, batchCount)) {
        return;
    }
    out.append(exportConfig.getFormat().commit());
}
/**
 * Writes a property map as a Cypher map literal
 * {@code {`key`:value, ...}}; an empty map renders as {@code {}}.
 *
 * @param out   destination writer
 * @param props properties to render (values formatted as Cypher literals)
 */
public void writeProperties(PrintWriter out, Map<String, Object> props) {
    StringJoiner body = new StringJoiner(", ");
    for (Map.Entry<String, Object> entry : props.entrySet()) {
        body.add(Util.quote(entry.getKey()) + ":" + CypherFormatterUtils.toString(entry.getValue()));
    }
    out.append("{").append(body.toString()).append("}");
}
/**
 * Quotes a property key for use as a row-map key, mapping the synthetic
 * unique-id property to {@code _id}.
 */
private String formatNodeId(String key) {
    String effectiveKey = CypherFormatterUtils.UNIQUE_ID_PROP.equals(key) ? "_id" : key;
    return Util.quote(effectiveKey);
}
/** Emits a final commit when the last batch did not end exactly on a boundary. */
private void addCommitToEnd(ExportConfig exportConfig, PrintWriter out, AtomicInteger batchCount) {
    boolean endsOnBoundary = batchCount.get() % exportConfig.getBatchSize() == 0;
    if (!endsOnBoundary) {
        out.append(exportConfig.getFormat().commit());
    }
}
/** Emits the format's begin marker when the count is on a batch boundary. */
private void writeBatchBegin(ExportConfig exportConfig, PrintWriter out, AtomicInteger batchCount) {
    if (!isBatchMatch(exportConfig, batchCount)) {
        return;
    }
    out.append(exportConfig.getFormat().begin());
}
/**
 * Opens a new row list at unwind-batch boundaries: either a cypher-shell
 * {@code :param rows => [} declaration (when UNWIND_BATCH_PARAMS optimization
 * is active) or a plain {@code UNWIND [}.
 */
private void writeUnwindStart(ExportConfig exportConfig, PrintWriter out, AtomicInteger batchCount) {
    if (!isUnwindBatchMatch(exportConfig, batchCount)) {
        return;
    }
    boolean paramMode = exportConfig.getFormat() == ExportFormat.CYPHER_SHELL
            && exportConfig.getOptimizationType() == ExportConfig.OptimizationType.UNWIND_BATCH_PARAMS;
    out.append(paramMode ? ":param rows => [" : "UNWIND [");
}
/**
 * Closes the current row list. In cypher-shell param mode the list was a
 * {@code :param} declaration, so an explicit {@code UNWIND $rows} is emitted
 * before the {@code AS row} alias.
 */
private void writeUnwindEnd(ExportConfig exportConfig, PrintWriter out) {
    out.append("]");
    boolean paramMode = exportConfig.getFormat() == ExportFormat.CYPHER_SHELL
            && exportConfig.getOptimizationType() == ExportConfig.OptimizationType.UNWIND_BATCH_PARAMS;
    if (paramMode) {
        out.append(StringUtils.LF);
        out.append("UNWIND $rows");
    }
    out.append(" AS row");
}
/**
 * Picks the first unique-constrained label the node carries for which it also
 * has at least one of the constrained properties; falls back to the synthetic
 * unique-id label when none matches.
 */
private String getUniqueConstrainedLabel(Node node, Map<String, Set<String>> uniqueConstraints) {
    for (Map.Entry<String, Set<String>> constraint : uniqueConstraints.entrySet()) {
        boolean hasLabel = node.hasLabel(Label.label(constraint.getKey()));
        boolean hasAnyConstrainedProp = constraint.getValue().stream().anyMatch(node::hasProperty);
        if (hasLabel && hasAnyConstrainedProp) {
            return constraint.getKey();
        }
    }
    return CypherFormatterUtils.UNIQUE_ID_LABEL;
}
/**
 * Returns the unique-constrained property keys for the given label, or the
 * singleton synthetic unique-id property when the label has none.
 */
private Set<String> getUniqueConstrainedProperties(Map<String, Set<String>> uniqueConstraints, String uniqueConstrainedLabel) {
    Set<String> props = uniqueConstraints.get(uniqueConstrainedLabel);
    if (props != null && !props.isEmpty()) {
        return props;
    }
    return Collections.singleton(UNIQUE_ID_PROP);
}
/**
 * Collects the node's label names into a set; label-less nodes get the
 * synthetic unique-id label so they can still be grouped and matched.
 */
private Set<String> getLabels(Node node) {
    Set<String> labels = new HashSet<>();
    for (Label label : node.getLabels()) {
        labels.add(label.name());
    }
    if (labels.isEmpty()) {
        labels.add(CypherFormatterUtils.UNIQUE_ID_LABEL);
    }
    return labels;
}
/**
 * Writes a MATCH clause for one relationship endpoint, e.g.
 * {@code MATCH (start:`Label`{`id`: row.start.`id`})}, using the node's
 * unique-constrained label and properties bound from the current row.
 *
 * @param key row sub-path and variable name, {@code "start"} or {@code "end"}
 */
private void writeRelationshipMatchAsciiNode(Node node, PrintWriter out, String key, Map<String, Set<String>> uniqueConstraints) {
String uniqueConstrainedLabel = getUniqueConstrainedLabel(node, uniqueConstraints);
Set<String> uniqueConstrainedProps = getUniqueConstrainedProperties(uniqueConstraints, uniqueConstrainedLabel);
out.append(StringUtils.LF);
out.append("MATCH ");
out.append("(");
out.append(key);
out.append(":");
out.append(Util.quote(uniqueConstrainedLabel));
out.append("{");
// Bind each id property from row.<key>.<prop>.
writeSetProperties(out, uniqueConstrainedProps, key + ".");
out.append("})");
}
/**
 * Writes the identifying properties of one relationship endpoint as
 * {@code <key>: {prop:value, ...}} onto {@code out}. The identity is either
 * the node's unique-constrained properties, or — when only the synthetic
 * unique-id property applies — the node's internal graph id.
 *
 * @param uniqueConstraints label → unique-constrained property keys
 * @param out               destination writer
 * @param key               row map key, {@code "start"} or {@code "end"}
 * @param node              the endpoint node
 */
private void writeRelationshipNodeIds(Map<String, Set<String>> uniqueConstraints, PrintWriter out, String key, Node node) {
    String uniqueConstrainedLabel = getUniqueConstrainedLabel(node, uniqueConstraints);
    Set<String> props = getUniqueConstrainedProperties(uniqueConstraints, uniqueConstrainedLabel);
    Map<String, Object> properties;
    if (!props.contains(UNIQUE_ID_PROP)) {
        // Idiomatic toArray: pass a zero-length array and let the collection size it.
        properties = node.getProperties(props.toArray(new String[0]));
    } else {
        // UNIQUE_ID_PROP is always the only member of the Set
        properties = Util.map(UNIQUE_ID_PROP, node.getId());
    }
    out.append(key + ": ");
    out.append("{");
    writeNodeIds(out, properties);
    out.append("}");
}
/**
 * Writes the id properties as {@code `key`:value} pairs separated by
 * {@code ", "}; the synthetic unique-id key is rendered as {@code _id}.
 */
private void writeNodeIds(PrintWriter out, Map<String, Object> properties) {
    StringJoiner joiner = new StringJoiner(", ");
    for (Map.Entry<String, Object> entry : properties.entrySet()) {
        joiner.add(formatNodeId(entry.getKey()) + ":" + CypherFormatterUtils.toString(entry.getValue()));
    }
    out.append(joiner.toString());
}
}
| |
/* Copyright 2016 Google Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.api.codegen.discovery.transformer.csharp;
import com.google.api.codegen.discovery.config.AuthType;
import com.google.api.codegen.discovery.config.FieldInfo;
import com.google.api.codegen.discovery.config.MethodInfo;
import com.google.api.codegen.discovery.config.SampleConfig;
import com.google.api.codegen.discovery.config.TypeInfo;
import com.google.api.codegen.discovery.transformer.SampleMethodToViewTransformer;
import com.google.api.codegen.discovery.transformer.SampleNamer;
import com.google.api.codegen.discovery.transformer.SampleTransformerContext;
import com.google.api.codegen.discovery.transformer.SampleTypeTable;
import com.google.api.codegen.discovery.viewmodel.SampleAuthView;
import com.google.api.codegen.discovery.viewmodel.SampleFieldView;
import com.google.api.codegen.discovery.viewmodel.SamplePageStreamingView;
import com.google.api.codegen.discovery.viewmodel.SampleView;
import com.google.api.codegen.transformer.StandardImportSectionTransformer;
import com.google.api.codegen.util.Name;
import com.google.api.codegen.util.SymbolTable;
import com.google.api.codegen.util.TypedValue;
import com.google.api.codegen.util.csharp.CSharpNameFormatter;
import com.google.api.codegen.util.csharp.CSharpTypeTable;
import com.google.api.codegen.viewmodel.ViewModel;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import com.google.protobuf.Field;
import com.google.protobuf.Field.Cardinality;
import com.google.protobuf.Method;
import java.util.ArrayList;
import java.util.List;
/*
* Transforms a Method and SampleConfig into the standard discovery surface for
* C#.
*/
public class CSharpSampleMethodToViewTransformer implements SampleMethodToViewTransformer {
private static final String TEMPLATE_FILENAME = "csharp/sample.snip";
private final StandardImportSectionTransformer importSectionTransformer =
new StandardImportSectionTransformer();
/**
 * Entry point: builds the C#-specific type table, namer and transformer
 * context for the given discovery method, then delegates to
 * {@link #createSampleView} to produce the view model.
 *
 * @param method       the discovery method to generate a sample for
 * @param sampleConfig configuration describing the API and its methods
 * @return the populated sample view model
 */
@Override
public ViewModel transform(Method method, SampleConfig sampleConfig) {
SampleTypeTable sampleTypeTable =
new SampleTypeTable(
new CSharpTypeTable(""),
new CSharpSampleTypeNameConverter(
sampleConfig.packagePrefix(),
sampleConfig.methods().get(method.getName()).nameComponents()));
CSharpSampleNamer csharpSampleNamer = new CSharpSampleNamer();
SampleTransformerContext context =
SampleTransformerContext.create(
sampleConfig, sampleTypeTable, csharpSampleNamer, method.getName());
return createSampleView(context);
}
/**
 * Assembles the full {@link SampleView} for one method: service and request
 * names, request body fields, required/optional query fields, page streaming,
 * response handling and the import section. Variable names are allocated
 * through the symbol table, so the order of allocation below determines
 * naming precedence.
 *
 * @param context carries the sample config, type table and namer
 * @return the completed sample view model
 */
private SampleView createSampleView(SampleTransformerContext context) {
addStaticImports(context);
SampleConfig config = context.getSampleConfig();
MethodInfo methodInfo = config.methods().get(context.getMethodName());
SampleNamer namer = context.getSampleNamer();
SampleTypeTable typeTable = context.getSampleTypeTable();
// Seed with C# reserved words so generated names never collide with keywords.
SymbolTable symbolTable = SymbolTable.fromSeed(CSharpNameFormatter.RESERVED_IDENTIFIER_SET);
SampleView.Builder builder = SampleView.newBuilder();
String serviceVarName = symbolTable.getNewSymbol(namer.getServiceVarName(config.apiTypeName()));
String serviceTypeName = typeTable.getAndSaveNicknameForServiceType(config.apiTypeName());
String requestVarName = symbolTable.getNewSymbol(namer.getRequestVarName());
String requestTypeName =
typeTable.getAndSaveNicknameForRequestType(config.apiTypeName(), methodInfo.requestType());
List<String> methodCallFieldVarNames = new ArrayList<>();
boolean hasRequestBody = methodInfo.requestBodyType() != null;
List<SampleFieldView> requestBodyFields = new ArrayList<>();
if (hasRequestBody) {
String requestBodyVarName = symbolTable.getNewSymbol(namer.getRequestBodyVarName());
builder.requestBodyVarName(requestBodyVarName);
builder.requestBodyTypeName(typeTable.getAndSaveNicknameFor(methodInfo.requestBodyType()));
// The request body variable is passed first in the method call.
methodCallFieldVarNames.add(requestBodyVarName);
for (FieldInfo fieldInfo : methodInfo.requestBodyType().message().fields().values()) {
requestBodyFields.add(createSampleFieldView(methodInfo, fieldInfo, context, symbolTable));
}
}
// Split the method's fields into required (passed in the call) and optional.
List<SampleFieldView> requiredFields = new ArrayList<>();
List<SampleFieldView> optionalFields = new ArrayList<>();
for (FieldInfo field : methodInfo.fields().values()) {
SampleFieldView sampleFieldView =
createSampleFieldView(methodInfo, field, context, symbolTable);
if (sampleFieldView.required()) {
requiredFields.add(sampleFieldView);
methodCallFieldVarNames.add(sampleFieldView.name());
} else {
optionalFields.add(sampleFieldView);
}
}
// The page streaming view model is generated close to last to avoid taking naming precedence in
// the symbol table.
if (methodInfo.isPageStreaming()) {
builder.pageStreaming(createSamplePageStreamingView(context, symbolTable));
}
boolean hasResponse = methodInfo.responseType() != null;
if (hasResponse) {
builder.responseVarName(symbolTable.getNewSymbol(namer.getResponseVarName()));
builder.responseTypeName(typeTable.getAndSaveNicknameFor(methodInfo.responseType()));
// The sample prints the response, so Console must be imported.
typeTable.saveNicknameFor("System.Console");
}
String dataNamespace = "";
if (hasRequestBody || hasResponse) {
dataNamespace = Joiner.on('.').join(config.packagePrefix(), "Data");
}
return builder
.templateFileName(TEMPLATE_FILENAME)
.outputPath(context.getMethodName() + ".frag.cs")
.apiTitle(config.apiTitle())
.apiName(config.apiName())
.apiVersion(config.apiVersion())
.appName(namer.getSampleApplicationName(config.apiCanonicalName()))
.className(namer.getSampleClassName(config.apiCanonicalName()))
.auth(createSampleAuthView(context))
.serviceVarName(serviceVarName)
.serviceTypeName(serviceTypeName)
.methodVerb(methodInfo.verb())
.methodNameComponents(methodInfo.nameComponents())
.requestVarName(requestVarName)
.requestTypeName(requestTypeName)
.hasRequestBody(hasRequestBody)
.requestBodyFields(requestBodyFields)
.hasResponse(hasResponse)
.requiredFields(requiredFields)
.optionalFields(optionalFields)
.methodCallFieldVarNames(methodCallFieldVarNames)
.isPageStreaming(methodInfo.isPageStreaming())
.hasMediaUpload(methodInfo.hasMediaUpload())
.hasMediaDownload(methodInfo.hasMediaDownload())
.dataNamespace(dataNamespace)
.namespaceName(CSharpSampleNamer.getNamespaceName(config.apiCanonicalName()))
.importSection(importSectionTransformer.generateImportSection(typeTable.getImports()))
.build();
}
/**
 * Builds the auth portion of the sample view and registers the credential
 * imports the chosen auth flow requires.
 *
 * @param context carries the sample config and type table
 * @return the populated auth view (type, instructions URL, scopes)
 */
private SampleAuthView createSampleAuthView(SampleTransformerContext context) {
SampleConfig config = context.getSampleConfig();
MethodInfo methodInfo = config.methods().get(context.getMethodName());
// Register imports needed by the selected credential flow.
if (config.authType() == AuthType.APPLICATION_DEFAULT_CREDENTIALS) {
context.getSampleTypeTable().saveNicknameFor("Google.Apis.Auth.OAuth2.GoogleCredential");
context.getSampleTypeTable().saveNicknameFor("System.Threading.Tasks.Task");
}
if (config.authType() == AuthType.OAUTH_3L) {
context.getSampleTypeTable().saveNicknameFor("Google.Apis.Auth.OAuth2.UserCredential");
}
return SampleAuthView.newBuilder()
.type(config.authType())
.instructionsUrl(config.authInstructionsUrl())
.scopes(methodInfo.authScopes())
.isScopesSingular(methodInfo.authScopes().size() == 1)
.build();
}
/**
 * Builds the page-streaming portion of the sample view from the method's
 * page-streaming resource field: element type, iteration variable, and the
 * request/response page-token property names (upper-camel for C#).
 *
 * @param context     carries the sample config, type table and namer
 * @param symbolTable shared symbol table so the resource variable name does
 *                    not collide with earlier allocations
 * @return the populated page-streaming view
 */
private SamplePageStreamingView createSamplePageStreamingView(
SampleTransformerContext context, SymbolTable symbolTable) {
MethodInfo methodInfo = context.getSampleConfig().methods().get(context.getMethodName());
FieldInfo fieldInfo = methodInfo.pageStreamingResourceField();
SampleNamer namer = context.getSampleNamer();
SampleTypeTable typeTable = context.getSampleTypeTable();
SamplePageStreamingView.Builder builder = SamplePageStreamingView.newBuilder();
builder.resourceFieldName(Name.lowerCamel(fieldInfo.name()).toUpperCamel());
String resourceTypeName = typeTable.getAndSaveNickNameForElementType(fieldInfo.type());
builder.resourceElementTypeName(resourceTypeName);
// Only message-typed resources carry a meaningful type-derived name.
String resourceVarName =
namer.getResourceVarName(fieldInfo.type().isMessage() ? resourceTypeName : "");
builder.resourceVarName(symbolTable.getNewSymbol(resourceVarName));
builder.isResourceRepeated(fieldInfo.cardinality() == Cardinality.CARDINALITY_REPEATED);
builder.isResourceMap(fieldInfo.type().isMap());
builder.isResourceSetterInRequestBody(methodInfo.isPageStreamingResourceSetterInRequestBody());
builder.pageTokenName(Name.lowerCamel(methodInfo.requestPageTokenName()).toUpperCamel());
builder.nextPageTokenName(Name.lowerCamel(methodInfo.responsePageTokenName()).toUpperCamel());
return builder.build();
}
/**
 * Builds the view for a single field: its C# type name, a default value
 * (explicit default, zero value, or — for enums — the enum's zero member via
 * the workaround below), plus name, example, description and setter name.
 *
 * @param methodInfo  owning method, used to resolve the enum's request type
 * @param field       field metadata from the discovery config
 * @param context     carries the type table and namer
 * @param symbolTable shared symbol table so the field variable name is unique
 * @return the populated field view
 */
private SampleFieldView createSampleFieldView(
MethodInfo methodInfo,
FieldInfo field,
SampleTransformerContext context,
SymbolTable symbolTable) {
SampleNamer namer = context.getSampleNamer();
SampleTypeTable typeTable = context.getSampleTypeTable();
TypeInfo typeInfo = field.type();
String defaultValue;
String typeName;
// TODO(saicheems): Ugly hack to get around enum naming in C# for the time being.
// Longer explanation in CSharpSampleTypeNameConverter.
if (typeInfo.kind() == Field.Kind.TYPE_ENUM) {
TypedValue typedValue =
CSharpSampleTypeNameConverter.getEnumZeroValue(
typeTable.getAndSaveNicknameForRequestType("", methodInfo.requestType()),
field.name());
typeName = typedValue.getTypeName().getNickname();
defaultValue = String.format(typedValue.getValuePattern(), typeName);
} else {
// Prefer an explicit default from the discovery doc over the type's zero value.
if (!Strings.isNullOrEmpty(field.defaultValue())) {
defaultValue = field.defaultValue();
} else {
defaultValue = typeTable.getZeroValueAndSaveNicknameFor(typeInfo);
}
typeName = typeTable.getAndSaveNicknameFor(typeInfo);
}
return SampleFieldView.newBuilder()
.name(symbolTable.getNewSymbol(field.name()))
.typeName(typeName)
.defaultValue(defaultValue)
.example(field.example())
.description(field.description())
.setterFuncName(namer.getRequestBodyFieldSetterName(field.name()))
.required(field.required())
.build();
}
/** Registers the imports every generated C# sample needs, regardless of method. */
private void addStaticImports(SampleTransformerContext context) {
    SampleTypeTable typeTable = context.getSampleTypeTable();
    typeTable.saveNicknameFor("Google.Apis.Services.BaseClientService");
    typeTable.saveNicknameFor("Newtonsoft.Json.JsonConvert");
}
}
| |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.wing;
import java.util.Stack;
import org.apache.cocoon.transformation.AbstractTransformer;
import org.dspace.app.xmlui.wing.element.Body;
import org.dspace.app.xmlui.wing.element.Options;
import org.dspace.app.xmlui.wing.element.PageMeta;
import org.dspace.app.xmlui.wing.element.UserMeta;
import org.dspace.app.xmlui.wing.element.WingDocument;
import org.dspace.app.xmlui.wing.element.WingMergeableElement;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.NamespaceSupport;
/**
* This class handles receiving SAX events and translating them into DRI events.
* These DRI events are then routed to the individual implementing components
* where they fill in and construct the DRI document. The document they
* construct is known as the feeder document; this is merged into the main
* document that was generated from the previous component in the Cocoon
* pipeline. The merge takes place in accordance with the DRI schema's rules for
* merging two DRI documents.
*
*
* @author Scott Phillips
*/
public abstract class AbstractWingTransformer extends AbstractTransformer
implements WingTransformer
{
/**
* Simple variable to indicate whether a new namespace context is needed. If
* several namespaces are declared on the same attribute then they are
* considered in the same 'context'. Each time an element is opened this
* flag is reset to true, and each time a new namespace is declared it is
* set to false. Using this information new contexts are opened
* conservatively.
*/
private boolean needNewNamespaceContext = true;
/**
* The namespace support object keeps track of registered URI prefixes. This
* is used by the WingElements so that they may attach the correct prefix
* when assigning elements to namespaces.
*/
private NamespaceSupport namespaces;
/**
* The feeder document is the document being merged into the main,
* pre-existing document, that is the result of the previous Cocoon
* component in the pipeline.
*/
private WingDocument feederDocument;
/**
* The wing context is where the namespace support is stored along with the
* content and lexical handlers so that the wing elements can have access to
* them when they perform their toSAX() method.
*/
private WingContext wingContext;
/**
* This is a stack to the current location in the merge while it is in
* progress.
*/
private Stack<WingMergeableElement> stack;
/**
 * Set up the transformer so that it can build a feeder Wing document and
 * merge it into the main document.
 *
 * Creates a fresh WingContext (wired with this component's logger, name and
 * object manager), a new feeder document, and an empty merge stack.
 *
 * FIXME: Update document: - this method must be called to initialize the
 * framework. It must be called after the component's setup has been called
 * and the implementing object setup.
 *
 * @throws WingException if the feeder document cannot be created
 */
public void setupWing() throws WingException
{
this.wingContext = new WingContext();
this.wingContext.setLogger(this.getLogger());
this.wingContext.setComponentName(this.getComponentName());
this.wingContext.setObjectManager(this.getObjectManager());
feederDocument = this.createWingDocument(wingContext);
// Merge starts with an empty stack; startElement pushes the document first.
this.stack = new Stack<WingMergeableElement>();
}
/**
 * Receive notification of the beginning of a document.
 *
 * Resets the namespace tracking state before forwarding the event to the
 * next handler in the pipeline.
 */
public void startDocument() throws SAXException
{
needNewNamespaceContext = true;
namespaces = new NamespaceSupport();
super.startDocument();
}
/**
 * Receive notification of the end of a document.
 *
 * Disposes the wing context (releasing its resources) before forwarding the
 * event to the next handler in the pipeline.
 */
public void endDocument() throws SAXException
{
wingContext.dispose();
super.endDocument();
}
/**
 * Begin the scope of a prefix-URI Namespace mapping.
 *
 * Consecutive prefix declarations arriving before the next element share one
 * namespace context: a context is pushed only for the first declaration in
 * the run (the flag is reset by {@code startElement}).
 *
 * @param prefix
 *            The Namespace prefix being declared.
 * @param uri
 *            The Namespace URI the prefix is mapped to.
 */
public void startPrefixMapping(String prefix, String uri)
throws SAXException
{
if (needNewNamespaceContext)
{
namespaces.pushContext();
needNewNamespaceContext = false;
}
namespaces.declarePrefix(prefix, uri);
super.startPrefixMapping(prefix, uri);
}
/**
 * End the scope of a prefix-URI mapping.
 *
 * Pops the shared namespace context on the first end-mapping of a run; the
 * flag then suppresses further pops until a new context is opened.
 * NOTE(review): this assumes end-mapping events for one context arrive
 * consecutively, mirroring startPrefixMapping — confirm against the SAX
 * event order Cocoon produces.
 *
 * @param prefix
 *            The prefix that was being mapping.
 */
public void endPrefixMapping(String prefix) throws SAXException
{
if (!needNewNamespaceContext)
{
namespaces.popContext();
needNewNamespaceContext = true;
}
super.endPrefixMapping(prefix);
}
/**
 * Receive notification of the beginning of an element.
 *
 * The element is checked against the feeder Wing document: the document
 * root must merge with the feeder document itself, while deeper elements
 * are offered to the element currently on top of the merge stack. A merged
 * child may contribute attributes and, for the document's major sections
 * (user meta, page meta, body, options), triggers the corresponding
 * subclass hook so it can populate that section.
 *
 * @param namespaceURI
 *            The Namespace URI, or the empty string if the element has no
 *            Namespace URI or if Namespace processing is not being
 *            performed.
 * @param localName
 *            The local name (without prefix), or the empty string if
 *            Namespace processing is not being performed.
 * @param qName
 *            The raw XML 1.0 name (with prefix), or the empty string if raw
 *            names are not available.
 * @param attributes
 *            The attributes attached to the element. If there are no
 *            attributes, it shall be an empty Attributes object.
 */
public void startElement(String namespaceURI, String localName,
        String qName, Attributes attributes) throws SAXException
{
    // Any prefix mappings seen after this element belong to a new context.
    needNewNamespaceContext = true;
    try
    {
        if (stack == null)
        {
            throw new WingException("Stack not initialized.");
        }

        if (stack.isEmpty())
        {
            // Jump-start the merge: the document root must be mergeable
            // with the feeder document before anything else can proceed.
            if (feederDocument.mergeEqual(namespaceURI, localName, qName,
                    attributes))
            {
                attributes = feederDocument.merge(attributes);
                stack.push(feederDocument);
            }
            else
            {
                // Fixed typo in the message: "compatable" -> "compatible".
                throw new WingException(
                        "Attempting to merge DRI documents but the source document is not compatible with the feeder document.");
            }
        }
        else
        {
            WingMergeableElement peek = stack.peek();
            WingMergeableElement child = null;
            if (peek != null)
            {
                child = peek.mergeChild(namespaceURI, localName, qName,
                        attributes);
            }

            // Give subclasses a chance to construct the newly merged
            // portion of the document.
            if (child instanceof UserMeta)
            {
                this.addUserMeta((UserMeta) child);
            }
            else if (child instanceof PageMeta)
            {
                this.addPageMeta((PageMeta) child);
            }
            else if (child instanceof Body)
            {
                this.addBody((Body) child);
            }
            else if (child instanceof Options)
            {
                this.addOptions((Options) child);
            }

            // Update any attributes of this merged element.
            if (child != null)
            {
                attributes = child.merge(attributes);
            }

            // A null child is still pushed so endElement stays balanced.
            stack.push(child);
        }

        // Send off the event with nothing modified except, possibly, the
        // attributes.
        super.startElement(namespaceURI, localName, qName, attributes);
    }
    catch (SAXException saxe)
    {
        throw saxe;
    }
    catch (Exception e)
    {
        handleException(e);
    }
}
/**
 * Receive notification of the end of an element.
 *
 * @param namespaceURI
 *            The Namespace URI, or the empty string if the element has no
 *            Namespace URI or if Namespace processing is not being
 *            performed.
 * @param localName
 *            The local name (without prefix), or the empty string if
 *            Namespace processing is not being performed.
 * @param qName
 *            The raw XML 1.0 name (with prefix), or the empty string if raw
 *            names are not available.
 */
public void endElement(String namespaceURI, String localName, String qName)
        throws SAXException
{
    try
    {
        if (!stack.isEmpty())
        {
            // Whatever startElement pushed for this element is complete:
            // serialize it and release its resources. A null entry means
            // the element was never merged and there is nothing to emit.
            WingMergeableElement popped = stack.pop();
            if (popped != null)
            {
                popped.toSAX(contentHandler, lexicalHandler, namespaces);
                popped.dispose();
            }
        }
        // Forward the event unmodified.
        super.endElement(namespaceURI, localName, qName);
    }
    catch (SAXException saxe)
    {
        throw saxe;
    }
    catch (Exception e)
    {
        handleException(e);
    }
}
/**
 * Handle exceptions that occurred during the document's creation. When an
 * error occurs while a SAX event is being processed it is routed through
 * this method; subclasses may override it to install specific
 * error-handling hooks.
 *
 * @param e
 *            The thrown exception
 * @throws SAXException
 *             always, wrapping the original exception as its cause
 */
protected void handleException(Exception e) throws SAXException
{
    // Fixed typo in the message: "incountered" -> "encountered".
    throw new SAXException(
            "An error was encountered while processing the Wing based component: "
                    + this.getClass().getName(), e);
}
/**
 * Construct a new WingDocument. Subclasses may override this factory to
 * supply a specialized document implementation.
 *
 * @param wingContext
 *            The current wing context this transformer is operating under.
 * @return a fresh feeder document bound to the given context.
 */
protected WingDocument createWingDocument(WingContext wingContext)
        throws WingException
{
    return new WingDocument(wingContext);
}
/**
 * Abstract implementations of WingTransformer: no-op hooks invoked from
 * startElement when the corresponding document section is merged.
 * Subclasses override the ones they need to populate.
 */
public void addBody(Body body) throws Exception
{
    // Do nothing
}

public void addOptions(Options options) throws Exception
{
    // do nothing
}

public void addUserMeta(UserMeta userMeta) throws Exception
{
    // Do nothing
}

public void addPageMeta(PageMeta pageMeta) throws Exception
{
    // Do nothing
}
/**
 * Return the ObjectManager associated with this component. If no
 * objectManager is needed then return null (the default).
 */
public ObjectManager getObjectManager()
{
    return null;
}

/**
 * Return the name of this component. Typically the name is just
 * the class name of the component.
 */
public String getComponentName()
{
    return this.getClass().getName();
}
/**
 * Return the default i18n message catalogue that should be used
 * when no others are specified.
 */
public static String getDefaultMessageCatalogue()
{
    return "default";
}

/**
 * This is a shortcut method for creating a new message object; it allows
 * messages to be created with one simple method call that uses
 * the default catalogue.
 *
 * @param key
 *            The catalogue key used to look up a message.
 * @return A new message object.
 */
public static Message message(String key)
{
    return message(getDefaultMessageCatalogue(), key);
}

/**
 * This is a shortcut method for creating a new message object. This
 * version allows the caller to specify a particular catalogue overriding
 * the default catalogue supplied.
 *
 * @param catalogue
 *            The catalogue where translations will be located.
 * @param key
 *            The catalogue key used to look up a translation within the
 *            catalogue.
 * @return A new message object.
 */
public static Message message(String catalogue, String key)
{
    return new Message(catalogue, key);
}
/**
 * Recycle: clear per-request state so this pooled component can be reused.
 */
public void recycle()
{
    this.namespaces = null;
    this.feederDocument = null;
    this.wingContext=null;
    this.stack =null;
    super.recycle();
}

/**
 * Dispose: release all state when the component is being destroyed.
 */
public void dispose() {
    this.namespaces = null;
    this.feederDocument = null;
    this.wingContext=null;
    this.stack =null;
    //super.dispose(); super doesn't dispose.
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.dag.app.dag.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.event.DrainDispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.util.SystemClock;
import org.apache.tez.common.counters.TezCounters;
import org.apache.tez.dag.api.TezConfiguration;
import org.apache.tez.dag.api.TezUncheckedException;
import org.apache.tez.dag.api.client.VertexStatus.State;
import org.apache.tez.dag.api.oldrecords.TaskAttemptState;
import org.apache.tez.dag.api.oldrecords.TaskState;
import org.apache.tez.dag.app.AppContext;
import org.apache.tez.dag.app.ContainerContext;
import org.apache.tez.dag.app.TaskAttemptListener;
import org.apache.tez.dag.app.TaskHeartbeatHandler;
import org.apache.tez.dag.app.dag.StateChangeNotifier;
import org.apache.tez.dag.app.dag.TaskAttemptStateInternal;
import org.apache.tez.dag.app.dag.TaskStateInternal;
import org.apache.tez.dag.app.dag.Vertex;
import org.apache.tez.dag.app.dag.event.DAGEventType;
import org.apache.tez.dag.app.dag.event.TaskAttemptEvent;
import org.apache.tez.dag.app.dag.event.TaskAttemptEventType;
import org.apache.tez.dag.app.dag.event.TaskEvent;
import org.apache.tez.dag.app.dag.event.TaskEventRecoverTask;
import org.apache.tez.dag.app.dag.event.TaskEventType;
import org.apache.tez.dag.app.dag.event.VertexEventType;
import org.apache.tez.dag.app.dag.impl.TestTaskAttemptRecovery.MockHistoryEventHandler;
import org.apache.tez.dag.history.events.TaskAttemptFinishedEvent;
import org.apache.tez.dag.history.events.TaskAttemptStartedEvent;
import org.apache.tez.dag.history.events.TaskFinishedEvent;
import org.apache.tez.dag.history.events.TaskStartedEvent;
import org.apache.tez.dag.records.TaskAttemptTerminationCause;
import org.apache.tez.dag.records.TezDAGID;
import org.apache.tez.dag.records.TezTaskAttemptID;
import org.apache.tez.dag.records.TezTaskID;
import org.apache.tez.dag.records.TezVertexID;
import org.apache.tez.runtime.api.OutputCommitter;
import org.apache.tez.runtime.api.OutputCommitterContext;
import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.Lists;
public class TestTaskRecovery {
// Task instance under test; rebuilt for every test in setUp().
private TaskImpl task;
// Drainable dispatcher so tests can wait for queued events to be processed.
private DrainDispatcher dispatcher;
// Monotonic counter, presumably used to mint sequential attempt ids — see
// getNewTaskAttemptID (defined later in this class); TODO confirm.
private int taskAttemptCounter = 0;
private Configuration conf = new Configuration();
private AppContext mockAppContext;
private MockHistoryEventHandler mockHistoryEventHandler;
// Fixed id chain: one application -> one DAG -> one vertex containing the
// single task under test.
private ApplicationId appId = ApplicationId.newInstance(
    System.currentTimeMillis(), 1);
private TezDAGID dagId = TezDAGID.getInstance(appId, 1);
private TezVertexID vertexId = TezVertexID.getInstance(dagId, 1);
private Vertex vertex;
private String vertexName = "v1";
// Canonical timeline shared by the tests: scheduled -> started -> finished,
// each step 100ms apart.
private long taskScheduledTime = 100L;
private long taskStartTime = taskScheduledTime + 100L;
private long taskFinishTime = taskStartTime + 100L;
// Records every TaskAttemptEvent dispatched so tests can assert on them.
private TaskAttemptEventHandler taEventHandler =
    new TaskAttemptEventHandler();
/** Routes TaskEvents from the dispatcher straight into the task under test. */
private class TaskEventHandler implements EventHandler<TaskEvent> {
  @Override
  public void handle(TaskEvent event) {
    task.handle(event);
  }
}

/**
 * Forwards TaskAttemptEvents to the targeted attempt and records each event
 * so tests can assert on exactly what was dispatched.
 */
private class TaskAttemptEventHandler implements
    EventHandler<TaskAttemptEvent> {

  private List<TaskAttemptEvent> events = Lists.newArrayList();

  @Override
  public void handle(TaskAttemptEvent event) {
    events.add(event);
    ((TaskAttemptImpl) task.getAttempt(event.getTaskAttemptID()))
        .handle(event);
  }

  // Events captured so far, in dispatch order.
  public List<TaskAttemptEvent> getEvents() {
    return events;
  }
}
/**
 * Minimal OutputCommitter whose recovery behavior is controlled by two
 * flags, letting tests exercise the "recovery unsupported" and "recovery
 * throws" paths of task recovery.
 */
private class TestOutputCommitter extends OutputCommitter {

  // Whether isTaskRecoverySupported() reports recovery as possible.
  boolean recoverySupported = false;
  // Whether recoverTask() should fail with an exception.
  boolean throwExceptionWhenRecovery = false;

  public TestOutputCommitter(OutputCommitterContext committerContext,
      boolean recoverySupported, boolean throwExceptionWhenRecovery) {
    super(committerContext);
    this.recoverySupported = recoverySupported;
    this.throwExceptionWhenRecovery = throwExceptionWhenRecovery;
  }

  @Override
  public void recoverTask(int taskIndex, int previousDAGAttempt)
      throws Exception {
    if (throwExceptionWhenRecovery) {
      throw new Exception("fail recovery Task");
    }
  }

  @Override
  public boolean isTaskRecoverySupported() {
    return recoverySupported;
  }

  // Remaining lifecycle methods are intentionally no-ops for these tests.
  @Override
  public void initialize() throws Exception {
  }

  @Override
  public void setupOutput() throws Exception {
  }

  @Override
  public void commitOutput() throws Exception {
  }

  @Override
  public void abortOutput(State finalState) throws Exception {
  }
}
@Before
public void setUp() {
  dispatcher = new DrainDispatcher();
  // DAG/Vertex events are irrelevant to these tests; swallow them in mocks.
  dispatcher.register(DAGEventType.class, mock(EventHandler.class));
  dispatcher.register(VertexEventType.class, mock(EventHandler.class));
  dispatcher.register(TaskEventType.class, new TaskEventHandler());
  dispatcher.register(TaskAttemptEventType.class, taEventHandler);
  dispatcher.init(new Configuration());
  dispatcher.start();
  // Deep stubs allow stubbing the chained getters below in a single call.
  vertex = mock(Vertex.class, RETURNS_DEEP_STUBS);
  when(vertex.getProcessorDescriptor().getClassName()).thenReturn("");
  mockAppContext = mock(AppContext.class, RETURNS_DEEP_STUBS);
  when(mockAppContext.getCurrentDAG().getVertex(any(TezVertexID.class)))
      .thenReturn(vertex);
  mockHistoryEventHandler = new MockHistoryEventHandler(mockAppContext);
  when(mockAppContext.getHistoryHandler()).thenReturn(mockHistoryEventHandler);
  task =
      new TaskImpl(vertexId, 0, dispatcher.getEventHandler(),
          new Configuration(), mock(TaskAttemptListener.class),
          new SystemClock(), mock(TaskHeartbeatHandler.class),
          mockAppContext, false, Resource.newInstance(1, 1),
          mock(ContainerContext.class), mock(StateChangeNotifier.class), vertex);
  // Default committer: recovery supported and succeeds. Individual tests
  // replace this map to exercise the failure paths.
  Map<String, OutputCommitter> committers =
      new HashMap<String, OutputCommitter>();
  committers.put("out1", new TestOutputCommitter(
      mock(OutputCommitterContext.class), true, false));
  when(task.getVertex().getOutputCommitters()).thenReturn(committers);
}
/**
 * Replays a TaskStartedEvent into the task and asserts it is recovered to
 * SCHEDULED with no attempts yet.
 */
private void restoreFromTaskStartEvent() {
  TaskState recoveredState =
      task.restoreFromEvent(new TaskStartedEvent(task.getTaskId(),
          vertexName, taskScheduledTime, taskStartTime));
  assertEquals(TaskState.SCHEDULED, recoveredState);
  assertEquals(0, task.getFinishedAttemptsCount());
  assertEquals(taskScheduledTime, task.scheduledTime);
  assertEquals(0, task.getAttempts().size());
}

/**
 * Replays a TaskAttemptStartedEvent for the given attempt id and asserts
 * the task is RUNNING with exactly one (uncompleted, internally NEW)
 * attempt.
 */
private void restoreFromFirstTaskAttemptStartEvent(TezTaskAttemptID taId) {
  long taStartTime = taskStartTime + 100L;
  TaskState recoveredState =
      task.restoreFromEvent(new TaskAttemptStartedEvent(taId, vertexName,
          taStartTime, mock(ContainerId.class), mock(NodeId.class), "", "", "", 0, null));
  assertEquals(TaskState.RUNNING, recoveredState);
  assertEquals(0, task.getFinishedAttemptsCount());
  assertEquals(taskScheduledTime, task.scheduledTime);
  assertEquals(1, task.getAttempts().size());
  // The attempt exists but has not been started internally yet.
  assertEquals(TaskAttemptStateInternal.NEW,
      ((TaskAttemptImpl) task.getAttempt(taId)).getInternalState());
  assertEquals(1, task.getUncompletedAttemptsCount());
}
/**
 * New -> RecoverTransition
 */
@Test(timeout = 5000)
public void testRecovery_New() {
  // With no recovery events restored, recovery leaves the task in NEW.
  task.handle(new TaskEventRecoverTask(task.getTaskId()));
  assertEquals(TaskStateInternal.NEW, task.getInternalState());
}

/**
 * -> restoreFromTaskFinishEvent ( no TaskStartEvent )
 *
 * A SUCCEEDED finish without a preceding start event is an invalid
 * recovery log and must be rejected.
 */
@Test(timeout = 5000)
public void testRecovery_NoStartEvent() {
  try {
    task.restoreFromEvent(new TaskFinishedEvent(task.getTaskId(), vertexName,
        taskStartTime, taskFinishTime, null, TaskState.SUCCEEDED, "",
        new TezCounters(), 0));
    fail("Should fail due to no TaskStartEvent before TaskFinishEvent");
  } catch (Throwable e) {
    assertTrue(e.getMessage().contains(
        "Finished Event seen but"
            + " no Started Event was encountered earlier"));
  }
}

/**
 * -> restoreFromTaskFinishEvent ( no TaskStartEvent )
 *
 * Unlike the SUCCEEDED case above, a KILLED finish without a start event
 * is tolerated (no exception expected).
 */
@Test(timeout = 5000)
public void testRecoveryNewToKilled_NoStartEvent() {
  task.restoreFromEvent(new TaskFinishedEvent(task.getTaskId(), vertexName,
      taskStartTime, taskFinishTime, null, TaskState.KILLED, "",
      new TezCounters(), 0));
}
/**
 * restoreFromTaskStartedEvent -> RecoverTransition
 */
@Test(timeout = 5000)
public void testRecovery_Started() {
  restoreFromTaskStartEvent();
  task.handle(new TaskEventRecoverTask(task.getTaskId()));
  assertEquals(TaskStateInternal.RUNNING, task.getInternalState());
  // new task attempt is scheduled
  assertEquals(1, task.getAttempts().size());
  assertEquals(0, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(null, task.successfulAttempt);
}

/**
 * restoreFromTaskStartedEvent -> restoreFromTaskAttemptFinishedEvent (KILLED) ->
 * RecoverTransition
 *
 * The attempt has a finish event but no start event; KILLED is accepted.
 */
@Test(timeout = 5000)
public void testRecovery_OnlyTAFinishedEvent_KILLED() {
  restoreFromTaskStartEvent();
  TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
  task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName,
      0L, 0L, TaskAttemptState.KILLED, TaskAttemptTerminationCause.TERMINATED_BY_CLIENT,"", new TezCounters(), 0, null));
  task.handle(new TaskEventRecoverTask(task.getTaskId()));
  // wait for the second task attempt is scheduled
  dispatcher.await();
  assertEquals(TaskStateInternal.RUNNING, task.getInternalState());
  // taskAttempt_1 is recovered to KILLED, and new task attempt is scheduled
  assertEquals(2, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(null, task.successfulAttempt);
}
/**
 * restoreFromTaskStartedEvent -> restoreFromTaskAttemptFinishedEvent (FAILED) ->
 * RecoverTransition
 *
 * Finish-without-start with FAILED is accepted and counts as a failure.
 */
@Test(timeout = 5000)
public void testRecovery_OnlyTAFinishedEvent_FAILED() {
  restoreFromTaskStartEvent();
  TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
  task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName,
      0L, 0L, TaskAttemptState.FAILED, TaskAttemptTerminationCause.CONTAINER_LAUNCH_FAILED,"", new TezCounters(), 0, null));
  task.handle(new TaskEventRecoverTask(task.getTaskId()));
  // wait for the second task attempt is scheduled
  dispatcher.await();
  assertEquals(TaskStateInternal.RUNNING, task.getInternalState());
  // taskAttempt_1 is recovered to FAILED, and new task attempt is scheduled
  assertEquals(2, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(1, task.failedAttempts);
  assertEquals(null, task.successfulAttempt);
}

/**
 * restoreFromTaskStartedEvent -> restoreFromTaskAttemptFinishedEvent (SUCCEEDED) ->
 * RecoverTransition
 *
 * A SUCCEEDED attempt-finish without a matching attempt-start is invalid
 * and must be rejected.
 */
@Test(timeout = 5000)
public void testRecovery_OnlyTAFinishedEvent_SUCCEEDED() {
  restoreFromTaskStartEvent();
  TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
  try {
    task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName,
        0L, 0L, TaskAttemptState.SUCCEEDED, null ,"", new TezCounters(), 0, null));
    fail("Should fail due to no TaskAttemptStartedEvent but with TaskAttemptFinishedEvent(Succeeded)");
  } catch (TezUncheckedException e) {
    assertTrue(e.getMessage().contains("Could not find task attempt when trying to recover"));
  }
}
/**
 * restoreFromTaskStartedEvent -> restoreFromTaskAttemptStartedEvent ->
 * RecoverTransition
 */
@Test(timeout = 5000)
public void testRecovery_OneTAStarted() {
  restoreFromTaskStartEvent();
  TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
  restoreFromFirstTaskAttemptStartEvent(taId);
  task.handle(new TaskEventRecoverTask(task.getTaskId()));
  // wait for the second task attempt is scheduled
  dispatcher.await();
  assertEquals(TaskStateInternal.RUNNING, task.getInternalState());
  // taskAttempt_1 is recovered to KILLED, and new task attempt is scheduled
  assertEquals(2, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(null, task.successfulAttempt);
}

/**
 * restoreFromTaskStartedEvent -> restoreFromTaskAttemptStartedEvent ->
 * restoreFromTaskAttemptFinishedEvent (SUCCEEDED) -> RecoverTransition
 */
@Test(timeout = 5000)
public void testRecovery_OneTAStarted_SUCCEEDED() {
  restoreFromTaskStartEvent();
  TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
  restoreFromFirstTaskAttemptStartEvent(taId);
  long taStartTime = taskStartTime + 100L;
  long taFinishTime = taStartTime + 100L;
  TaskState recoveredState =
      task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName,
          taStartTime, taFinishTime, TaskAttemptState.SUCCEEDED, null,
          "", new TezCounters(), 0, null));
  // The successful attempt is fully restored from the log alone.
  assertEquals(TaskState.SUCCEEDED, recoveredState);
  assertEquals(1, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(0, task.getUncompletedAttemptsCount());
  assertEquals(taId, task.successfulAttempt);
  // Recovery keeps the succeeded attempt; nothing new is scheduled.
  task.handle(new TaskEventRecoverTask(task.getTaskId()));
  assertEquals(TaskStateInternal.SUCCEEDED, task.getInternalState());
  assertEquals(1, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(0, task.getUncompletedAttemptsCount());
  assertEquals(taId, task.successfulAttempt);
  mockHistoryEventHandler.verifyTaskFinishedEvent(task.getTaskId(), TaskState.SUCCEEDED, 1);
}
/**
 * restoreFromTaskStartedEvent -> restoreFromTaskAttemptStartedEvent ->
 * restoreFromTaskAttemptFinishedEvent (FAILED) -> RecoverTransition
 */
@Test(timeout = 5000)
public void testRecovery_OneTAStarted_FAILED() {
  restoreFromTaskStartEvent();
  TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
  restoreFromFirstTaskAttemptStartEvent(taId);
  long taStartTime = taskStartTime + 100L;
  long taFinishTime = taStartTime + 100L;
  TaskState recoveredState =
      task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName,
          taStartTime, taFinishTime, TaskAttemptState.FAILED, null,
          "", new TezCounters(), 0, null));
  // The failed attempt is complete; task stays RUNNING awaiting a retry.
  assertEquals(TaskState.RUNNING, recoveredState);
  assertEquals(1, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(1, task.failedAttempts);
  assertEquals(0, task.getUncompletedAttemptsCount());
  assertEquals(null, task.successfulAttempt);
  task.handle(new TaskEventRecoverTask(task.getTaskId()));
  assertEquals(TaskStateInternal.RUNNING, task.getInternalState());
  // new task attempt is scheduled
  assertEquals(2, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(1, task.failedAttempts);
  assertEquals(1, task.getUncompletedAttemptsCount());
  assertEquals(null, task.successfulAttempt);
}

/**
 * restoreFromTaskStartedEvent -> restoreFromTaskAttemptStartedEvent ->
 * restoreFromTaskAttemptFinishedEvent (KILLED) -> RecoverTransition
 */
@Test(timeout = 5000)
public void testRecovery_OneTAStarted_KILLED() {
  restoreFromTaskStartEvent();
  TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
  restoreFromFirstTaskAttemptStartEvent(taId);
  long taStartTime = taskStartTime + 100L;
  long taFinishTime = taStartTime + 100L;
  TaskState recoveredState =
      task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName,
          taStartTime, taFinishTime, TaskAttemptState.KILLED, null,
          "", new TezCounters(), 0, null));
  // KILLED, unlike FAILED, does not count against failedAttempts.
  assertEquals(TaskState.RUNNING, recoveredState);
  assertEquals(1, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(0, task.getUncompletedAttemptsCount());
  assertEquals(null, task.successfulAttempt);
  task.handle(new TaskEventRecoverTask(task.getTaskId()));
  assertEquals(TaskStateInternal.RUNNING, task.getInternalState());
  // new task attempt is scheduled
  assertEquals(2, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(1, task.getUncompletedAttemptsCount());
  assertEquals(null, task.successfulAttempt);
}
/**
 * restoreFromTaskStartedEvent -> restoreFromTaskAttemptStartedEvent ->
 * restoreFromTaskAttemptFinishedEvent (SUCCEEDED) ->
 * restoreFromTaskFinishedEvent -> RecoverTransition
 */
@Test(timeout = 5000)
public void testRecovery_OneTAStarted_SUCCEEDED_Finished() {
  restoreFromTaskStartEvent();
  TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
  restoreFromFirstTaskAttemptStartEvent(taId);
  long taStartTime = taskStartTime + 100L;
  long taFinishTime = taStartTime + 100L;
  TaskState recoveredState =
      task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName,
          taStartTime, taFinishTime, TaskAttemptState.SUCCEEDED, null,
          "", new TezCounters(), 0, null));
  assertEquals(TaskState.SUCCEEDED, recoveredState);
  assertEquals(1, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(0, task.getUncompletedAttemptsCount());
  assertEquals(taId, task.successfulAttempt);
  // The task-level finish event confirms the same successful attempt.
  recoveredState =
      task.restoreFromEvent(new TaskFinishedEvent(task.getTaskId(),
          vertexName, taskStartTime, taskFinishTime, taId,
          TaskState.SUCCEEDED, "", new TezCounters(), 0));
  assertEquals(TaskState.SUCCEEDED, recoveredState);
  assertEquals(taId, task.successfulAttempt);
  task.handle(new TaskEventRecoverTask(task.getTaskId()));
  assertEquals(TaskStateInternal.SUCCEEDED, task.getInternalState());
  assertEquals(1, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(0, task.getUncompletedAttemptsCount());
  assertEquals(taId, task.successfulAttempt);
  mockHistoryEventHandler.verifyTaskFinishedEvent(task.getTaskId(), TaskState.SUCCEEDED, 1);
}
/**
 * restoreFromTaskStartedEvent -> restoreFromTaskAttemptStartedEvent ->
 * restoreFromTaskAttemptFinishedEvent (SUCCEEDED) ->
 * restoreFromTaskAttemptFinishedEvent (Failed due to output_failure)
 * restoreFromTaskFinishedEvent -> RecoverTransition
 */
@Test(timeout = 5000)
public void testRecovery_OneTAStarted_SUCCEEDED_FAILED() {
  restoreFromTaskStartEvent();
  TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
  restoreFromFirstTaskAttemptStartEvent(taId);
  long taStartTime = taskStartTime + 100L;
  long taFinishTime = taStartTime + 100L;
  TaskState recoveredState =
      task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName,
          taStartTime, taFinishTime, TaskAttemptState.SUCCEEDED, null,
          "", new TezCounters(), 0, null));
  assertEquals(TaskState.SUCCEEDED, recoveredState);
  assertEquals(1, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(0, task.getUncompletedAttemptsCount());
  assertEquals(taId, task.successfulAttempt);
  // it is possible for TaskAttempt transit from SUCCEEDED to FAILURE due to output failure.
  recoveredState =
      task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName,
          taStartTime, taFinishTime, TaskAttemptState.FAILED, null,
          "", new TezCounters(), 0, null));
  // The later FAILED event demotes the task back to RUNNING and clears
  // the successful attempt.
  assertEquals(TaskState.RUNNING, recoveredState);
  assertEquals(1, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(1, task.failedAttempts);
  assertEquals(0, task.getUncompletedAttemptsCount());
  assertEquals(null, task.successfulAttempt);
  task.handle(new TaskEventRecoverTask(task.getTaskId()));
  assertEquals(TaskStateInternal.RUNNING, task.getInternalState());
  assertEquals(2, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(1, task.failedAttempts);
  assertEquals(1, task.getUncompletedAttemptsCount());
  assertEquals(null, task.successfulAttempt);
}

/**
 * restoreFromTaskStartedEvent -> restoreFromTaskAttemptStartedEvent ->
 * restoreFromTaskAttemptFinishedEvent (SUCCEEDED) ->
 * restoreFromTaskAttemptFinishedEvent (KILLED due to node failed )
 * restoreFromTaskFinishedEvent -> RecoverTransition
 */
@Test(timeout = 5000)
public void testRecovery_OneTAStarted_SUCCEEDED_KILLED() {
  restoreFromTaskStartEvent();
  TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
  restoreFromFirstTaskAttemptStartEvent(taId);
  long taStartTime = taskStartTime + 100L;
  long taFinishTime = taStartTime + 100L;
  TaskState recoveredState =
      task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName,
          taStartTime, taFinishTime, TaskAttemptState.SUCCEEDED, null,
          "", new TezCounters(), 0, null));
  assertEquals(TaskState.SUCCEEDED, recoveredState);
  assertEquals(1, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(0, task.getUncompletedAttemptsCount());
  assertEquals(taId, task.successfulAttempt);
  // it is possible for TaskAttempt transit from SUCCEEDED to KILLED due to node failure.
  recoveredState =
      task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName,
          taStartTime, taFinishTime, TaskAttemptState.KILLED, null,
          "", new TezCounters(), 0, null));
  // KILLED demotes the task to RUNNING but does not count as a failure.
  assertEquals(TaskState.RUNNING, recoveredState);
  assertEquals(1, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(0, task.getUncompletedAttemptsCount());
  assertEquals(null, task.successfulAttempt);
  task.handle(new TaskEventRecoverTask(task.getTaskId()));
  assertEquals(TaskStateInternal.RUNNING, task.getInternalState());
  assertEquals(2, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(1, task.getUncompletedAttemptsCount());
  assertEquals(null, task.successfulAttempt);
}
/**
 * restoreFromTaskStartedEvent -> restoreFromTaskAttemptStartedEvent ->
 * restoreFromTaskAttemptFinishedEvent (SUCCEEDED) -> RecoverTransition
 *
 * The committer does not support task recovery, so the recovered success
 * cannot be trusted and the attempt must be re-run.
 */
@Test(timeout = 5000)
public void testRecovery_Commit_Failed_Recovery_Not_Supported() {
  // Override the default committer with one that reports recovery
  // unsupported.
  Map<String, OutputCommitter> committers =
      new HashMap<String, OutputCommitter>();
  committers.put("out1", new TestOutputCommitter(
      mock(OutputCommitterContext.class), false, false));
  when(task.getVertex().getOutputCommitters()).thenReturn(committers);
  restoreFromTaskStartEvent();
  TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
  restoreFromFirstTaskAttemptStartEvent(taId);
  // restoreFromTaskAttemptFinishedEvent (SUCCEEDED)
  long taStartTime = taskStartTime + 100L;
  long taFinishTime = taStartTime + 100L;
  TaskState recoveredState =
      task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName,
          taStartTime, taFinishTime, TaskAttemptState.SUCCEEDED, null,
          "", new TezCounters(), 0, null));
  assertEquals(TaskState.SUCCEEDED, recoveredState);
  assertEquals(1, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(0, task.getUncompletedAttemptsCount());
  assertEquals(taId, task.successfulAttempt);
  task.handle(new TaskEventRecoverTask(task.getTaskId()));
  assertEquals(TaskStateInternal.RUNNING, task.getInternalState());
  // new task attempt is scheduled
  assertEquals(2, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(1, task.getUncompletedAttemptsCount());
  assertEquals(null, task.successfulAttempt);
}

/**
 * restoreFromTaskStartedEvent -> restoreFromTaskAttemptStartedEvent ->
 * restoreFromTaskAttemptFinishedEvent (SUCCEEDED) -> RecoverTransition
 *
 * The committer supports recovery but throws while recovering, so the
 * attempt must be re-run.
 */
@Test(timeout = 5000)
public void testRecovery_Commit_Failed_recover_fail() {
  // Override the default committer with one whose recoverTask() throws.
  Map<String, OutputCommitter> committers =
      new HashMap<String, OutputCommitter>();
  committers.put("out1", new TestOutputCommitter(
      mock(OutputCommitterContext.class), true, true));
  when(task.getVertex().getOutputCommitters()).thenReturn(committers);
  restoreFromTaskStartEvent();
  TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
  restoreFromFirstTaskAttemptStartEvent(taId);
  // restoreFromTaskAttemptFinishedEvent (SUCCEEDED)
  long taStartTime = taskStartTime + 100L;
  long taFinishTime = taStartTime + 100L;
  TaskState recoveredState =
      task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName,
          taStartTime, taFinishTime, TaskAttemptState.SUCCEEDED, null,
          "", new TezCounters(), 0, null));
  assertEquals(TaskState.SUCCEEDED, recoveredState);
  assertEquals(1, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(0, task.getUncompletedAttemptsCount());
  assertEquals(taId, task.successfulAttempt);
  task.handle(new TaskEventRecoverTask(task.getTaskId()));
  assertEquals(TaskStateInternal.RUNNING, task.getInternalState());
  // new task attempt is scheduled
  assertEquals(2, task.getAttempts().size());
  assertEquals(1, task.getFinishedAttemptsCount());
  assertEquals(0, task.failedAttempts);
  assertEquals(1, task.getUncompletedAttemptsCount());
  assertEquals(null, task.successfulAttempt);
}
@Test(timeout = 5000)
public void testRecovery_WithDesired_SUCCEEDED() {
restoreFromTaskStartEvent();
TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
restoreFromFirstTaskAttemptStartEvent(taId);
task.handle(new TaskEventRecoverTask(task.getTaskId(), TaskState.SUCCEEDED,
false));
assertEquals(TaskStateInternal.SUCCEEDED, task.getInternalState());
// no TA_Recovery event sent
assertEquals(0, taEventHandler.getEvents().size());
}
@Test(timeout = 5000)
public void testRecovery_WithDesired_FAILED() {
  restoreFromTaskStartEvent();
  TezTaskAttemptID attemptId = getNewTaskAttemptID(task.getTaskId());
  restoreFromFirstTaskAttemptStartEvent(attemptId);
  // Recover with an explicitly desired end state: the task must settle
  // on FAILED without re-running any attempt.
  TaskEventRecoverTask recoverEvent =
      new TaskEventRecoverTask(task.getTaskId(), TaskState.FAILED, false);
  task.handle(recoverEvent);
  assertEquals(TaskStateInternal.FAILED, task.getInternalState());
  // No TA_RECOVERY event may have been dispatched to the attempt.
  assertEquals(0, taEventHandler.getEvents().size());
}
@Test(timeout = 5000)
public void testRecovery_WithDesired_KILLED() {
  restoreFromTaskStartEvent();
  TezTaskAttemptID attemptId = getNewTaskAttemptID(task.getTaskId());
  restoreFromFirstTaskAttemptStartEvent(attemptId);
  // Recover with an explicitly desired end state: the task must settle
  // on KILLED without re-running any attempt.
  TaskEventRecoverTask recoverEvent =
      new TaskEventRecoverTask(task.getTaskId(), TaskState.KILLED, false);
  task.handle(recoverEvent);
  assertEquals(TaskStateInternal.KILLED, task.getInternalState());
  // No TA_RECOVERY event may have been dispatched to the attempt.
  assertEquals(0, taEventHandler.getEvents().size());
}
/**
 * restoreFromTaskStartedEvent -> restoreFromTaskAttemptStartedEvent ->
 * restoreFromTaskAttemptFinishedEvent (KILLED) -> RecoverTransition.
 * A killed attempt must not count against the task; recovery keeps the
 * task RUNNING and schedules a replacement attempt.
 */
@Test(timeout = 5000)
public void testRecovery_OneTAStarted_Killed() {
restoreFromTaskStartEvent();
long taStartTime = taskStartTime + 100L;
TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
// Replay the attempt-started event: task is RUNNING again, the recovered
// attempt stays in NEW until the recover transition reaches it.
TaskState recoveredState =
task.restoreFromEvent(new TaskAttemptStartedEvent(taId, vertexName,
taStartTime, mock(ContainerId.class), mock(NodeId.class), "", "", "", 0, null));
assertEquals(TaskState.RUNNING, recoveredState);
assertEquals(TaskAttemptStateInternal.NEW,
((TaskAttemptImpl) task.getAttempt(taId)).getInternalState());
assertEquals(1, task.getAttempts().size());
assertEquals(0, task.getFinishedAttemptsCount());
assertEquals(0, task.failedAttempts);
assertEquals(1, task.getUncompletedAttemptsCount());
assertEquals(null, task.successfulAttempt);
long taFinishTime = taStartTime + 100L;
// Replay the attempt-finished (KILLED) event: the finished count goes up
// but failedAttempts stays at 0 -- kills are not failures.
recoveredState =
task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName,
taStartTime, taFinishTime, TaskAttemptState.KILLED, null,
"", new TezCounters(), 0, null));
assertEquals(TaskState.RUNNING, recoveredState);
assertEquals(TaskAttemptStateInternal.NEW,
((TaskAttemptImpl) task.getAttempt(taId)).getInternalState());
assertEquals(1, task.getAttempts().size());
assertEquals(1, task.getFinishedAttemptsCount());
assertEquals(0, task.failedAttempts);
assertEquals(0, task.getUncompletedAttemptsCount());
assertEquals(null, task.successfulAttempt);
task.handle(new TaskEventRecoverTask(task.getTaskId()));
// wait for Task send TA_RECOVER to TA and TA complete the RecoverTransition
dispatcher.await();
assertEquals(TaskStateInternal.RUNNING, task.getInternalState());
assertEquals(TaskAttemptStateInternal.KILLED,
((TaskAttemptImpl) task.getAttempt(taId)).getInternalState());
// new task attempt is scheduled
assertEquals(2, task.getAttempts().size());
assertEquals(1, task.getFinishedAttemptsCount());
assertEquals(0, task.failedAttempts);
assertEquals(1, task.getUncompletedAttemptsCount());
assertEquals(null, task.successfulAttempt);
}
/**
 * n = maxFailedAttempts, in the previous AM attempt, n task attempts are
 * killed. When recovering, it should continue to be in running state and
 * schedule a new task attempt.
 */
@Test(timeout = 5000)
public void testTaskRecovery_MultipleAttempts1() {
int maxFailedAttempts =
conf.getInt(TezConfiguration.TEZ_AM_TASK_MAX_FAILED_ATTEMPTS,
TezConfiguration.TEZ_AM_TASK_MAX_FAILED_ATTEMPTS_DEFAULT);
restoreFromTaskStartEvent();
// Replay maxFailedAttempts started+finished(KILLED) pairs from the
// previous AM attempt.
for (int i = 0; i < maxFailedAttempts; ++i) {
TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
task.restoreFromEvent(new TaskAttemptStartedEvent(taId, vertexName, 0L,
mock(ContainerId.class), mock(NodeId.class), "", "", "", 0, null));
task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName, 0,
0, TaskAttemptState.KILLED, null, "", null, 0, null));
}
assertEquals(maxFailedAttempts, task.getAttempts().size());
// KILLED attempts must not be counted as failures.
assertEquals(0, task.failedAttempts);
task.handle(new TaskEventRecoverTask(task.getTaskId()));
// Killed attempts from the previous AM attempt must not be taken into
// account when checking whether the max-failed-attempts limit is exceeded.
assertEquals(TaskStateInternal.RUNNING, task.getInternalState());
// schedule a new task attempt
assertEquals(maxFailedAttempts + 1, task.getAttempts().size());
}
/**
 * n = maxFailedAttempts, in the previous AM attempt, n task attempts are
 * failed. When recovering, it should transit to failed because # of
 * failed_attempt is exceeded.
 */
@Test(timeout = 5000)
public void testTaskRecovery_MultipleAttempts2() {
int maxFailedAttempts =
conf.getInt(TezConfiguration.TEZ_AM_TASK_MAX_FAILED_ATTEMPTS,
TezConfiguration.TEZ_AM_TASK_MAX_FAILED_ATTEMPTS_DEFAULT);
restoreFromTaskStartEvent();
// Replay maxFailedAttempts started+finished(FAILED) pairs from the
// previous AM attempt -- each one increments failedAttempts.
for (int i = 0; i < maxFailedAttempts; ++i) {
TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
task.restoreFromEvent(new TaskAttemptStartedEvent(taId, vertexName, 0L,
mock(ContainerId.class), mock(NodeId.class), "", "", "", 0, null));
task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName, 0,
0, TaskAttemptState.FAILED, null, "", null, 0, null));
}
assertEquals(maxFailedAttempts, task.getAttempts().size());
assertEquals(maxFailedAttempts, task.failedAttempts);
task.handle(new TaskEventRecoverTask(task.getTaskId()));
// it should transit to failed because of the failed task attempt in the
// last application attempt.
assertEquals(TaskStateInternal.FAILED, task.getInternalState());
// No replacement attempt is scheduled once the limit is reached.
assertEquals(maxFailedAttempts, task.getAttempts().size());
}
/**
 * n = maxFailedAttempts, in the previous AM attempt, n-1 task attempts are
 * killed. And last task attempt is still in running state. When recovering,
 * the last attempt should transit to killed and task is still in running
 * state and new task attempt is scheduled.
 */
@Test(timeout = 5000)
public void testTaskRecovery_MultipleAttempts3() throws InterruptedException {
int maxFailedAttempts =
conf.getInt(TezConfiguration.TEZ_AM_TASK_MAX_FAILED_ATTEMPTS,
TezConfiguration.TEZ_AM_TASK_MAX_FAILED_ATTEMPTS_DEFAULT);
restoreFromTaskStartEvent();
// Replay maxFailedAttempts-1 started+finished(FAILED) pairs.
for (int i = 0; i < maxFailedAttempts - 1; ++i) {
TezTaskAttemptID taId = getNewTaskAttemptID(task.getTaskId());
task.restoreFromEvent(new TaskAttemptStartedEvent(taId, vertexName, 0L,
mock(ContainerId.class), mock(NodeId.class), "", "", "", 0, null));
task.restoreFromEvent(new TaskAttemptFinishedEvent(taId, vertexName, 0,
0, TaskAttemptState.FAILED, null, "", null, 0, null));
}
assertEquals(maxFailedAttempts - 1, task.getAttempts().size());
assertEquals(maxFailedAttempts - 1, task.failedAttempts);
// The last attempt has only a started event -- it was still running when
// the previous AM died.
TezTaskAttemptID newTaskAttemptId = getNewTaskAttemptID(task.getTaskId());
TaskState recoveredState =
task.restoreFromEvent(new TaskAttemptStartedEvent(newTaskAttemptId,
vertexName, 0, mock(ContainerId.class), mock(NodeId.class), "", "", "", 0, null));
assertEquals(TaskState.RUNNING, recoveredState);
assertEquals(TaskAttemptStateInternal.NEW,
((TaskAttemptImpl) task.getAttempt(newTaskAttemptId))
.getInternalState());
assertEquals(maxFailedAttempts, task.getAttempts().size());
task.handle(new TaskEventRecoverTask(task.getTaskId()));
// wait until task attempt receive the Recover event from task
dispatcher.await();
// The in-flight attempt is killed (not failed), so the task stays RUNNING.
assertEquals(TaskStateInternal.RUNNING, task.getInternalState());
assertEquals(TaskAttemptStateInternal.KILLED,
((TaskAttemptImpl) (task.getAttempt(newTaskAttemptId)))
.getInternalState());
assertEquals(maxFailedAttempts - 1, task.failedAttempts);
// new task attempt is added
assertEquals(maxFailedAttempts + 1, task.getAttempts().size());
}
/** Builds the next attempt id for {@code taskId} from the monotonically increasing counter. */
private TezTaskAttemptID getNewTaskAttemptID(TezTaskID taskId) {
  int attemptIndex = taskAttemptCounter++;
  return TezTaskAttemptID.getInstance(taskId, attemptIndex);
}
}
| |
package com.hadassah.azrieli.lev_isha.core;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.DatePickerDialog;
import android.app.TimePickerDialog;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.provider.CalendarContract;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.text.Editable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.widget.AbsListView;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.DatePicker;
import android.widget.EditText;
import android.widget.ListAdapter;
import android.widget.TimePicker;
import com.hadassah.azrieli.lev_isha.R;
import com.hadassah.azrieli.lev_isha.utility.OverallNotificationManager;
import com.hadassah.azrieli.lev_isha.utility.PersonalProfile;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
/**
 * Form shown before a doctor's appointment. The user records the doctor's
 * name, address, appointment date/time, a checklist of documents to bring and
 * free-text notes. Every field is persisted immediately into the default
 * {@link SharedPreferences} under the key stored in the view's tag, and a
 * valid date+time pair can be exported to the device calendar and used to
 * schedule reminder notifications.
 */
public class BeforeDoctorFragment extends Fragment {

    private SharedPreferences prefs;
    // Guards reloadSavedFields() so the form is repopulated at most once
    // until the fragment is paused again.
    private boolean alreadyUpdated = false;
    private EditText doctorName;
    private EditText address;
    private EditText date;
    private EditText time;
    private Button saveAppointment;
    private CheckBox references;
    private CheckBox prescription;
    private CheckBox previousDiagnoses;
    private CheckBox testsResults;
    private EditText subjectsIWantToTalkAbout;
    private EditText newSymptoms;
    private EditText changesInLife;
    private EditText personalMedicalHistory;
    private EditText familyHealthBackground;
    private EditText drugsITake;
    private EditText additionalQuestionsToTheDoctor;

    /** Factory method; the fragment takes no arguments. */
    public static BeforeDoctorFragment newInstance() {
        return new BeforeDoctorFragment();
    }

    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }

    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        return inflater.inflate(R.layout.fragment_before_doctor, container, false);
    }

    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        prefs = PreferenceManager.getDefaultSharedPreferences(getActivity());
        bindViews(getView());
        setTags();
        bindListeners();
    }

    /** Persists every checkbox change under the preference key stored in the view's tag. */
    private class CheckBoxChanged implements CompoundButton.OnCheckedChangeListener {
        public void onCheckedChanged(CompoundButton compoundButton, boolean b) {
            prefs.edit().putBoolean((String) compoundButton.getTag(), b).apply();
        }
    }

    /**
     * Persists every text edit under the preference key stored in the field's tag.
     * Extends AbsListView only because AbsListView implements TextWatcher (the
     * instances are handed to addTextChangedListener); the adapter/selection
     * methods are stubbed and the view is never attached to the hierarchy.
     */
    private class TextChangedListener extends AbsListView {
        private EditText textField;

        public void afterTextChanged(Editable s) {
            super.afterTextChanged(s);
            prefs.edit().putString((String) textField.getTag(), s.toString()).apply();
        }

        public TextChangedListener(Context context, EditText field) {
            super(context);
            textField = field;
        }

        public ListAdapter getAdapter() { return null; }

        public void setSelection(int i) {}
    }

    /** Opens the date or time picker when the matching field is clicked or gains focus. */
    private class ChooserListener implements View.OnFocusChangeListener, View.OnClickListener {
        public void onFocusChange(View view, boolean b) {
            if (!b)
                return;
            if (view.equals(date))
                openDateChooser();
            if (view.equals(time))
                openTimeChooser();
        }

        public void onClick(View view) {
            if (view.equals(date))
                openDateChooser();
            if (view.equals(time))
                openTimeChooser();
        }
    }

    public void onAttach(Context context) {
        super.onAttach(context);
    }

    public void onDetach() {
        super.onDetach();
    }

    public void onPause() {
        super.onPause();
        // Allow saved values to be reloaded the next time the form is shown.
        alreadyUpdated = false;
    }

    /** Looks up all form widgets; the save button starts disabled until date+time are valid. */
    private void bindViews(View parent) {
        doctorName = parent.findViewById(R.id.edit_text_doctor_name);
        address = parent.findViewById(R.id.edit_text_address);
        date = parent.findViewById(R.id.appointment_date);
        time = parent.findViewById(R.id.appointment_time);
        saveAppointment = parent.findViewById(R.id.add_appointment_to_calendar);
        saveAppointment.setEnabled(false);
        references = parent.findViewById(R.id.check_box_references);
        prescription = parent.findViewById(R.id.check_box_prescription);
        previousDiagnoses = parent.findViewById(R.id.check_box_Previous_diagnoses);
        testsResults = parent.findViewById(R.id.check_box_tests_results);
        subjectsIWantToTalkAbout = parent.findViewById(R.id.text_box_subjects_i_want_to_talk_about);
        newSymptoms = parent.findViewById(R.id.text_box_new_symptoms);
        changesInLife = parent.findViewById(R.id.text_box_changes_in_life);
        personalMedicalHistory = parent.findViewById(R.id.text_box_personal_medical_history);
        familyHealthBackground = parent.findViewById(R.id.text_box_family_health_background);
        drugsITake = parent.findViewById(R.id.text_box_drugs_i_take);
        additionalQuestionsToTheDoctor = parent.findViewById(R.id.text_box_additional_questions_to_the_doctor);
    }

    /** Tags each view with the SharedPreferences key used to persist its value. */
    private void setTags() {
        doctorName.setTag("edit_text_doctor_name");
        address.setTag("edit_text_address");
        date.setTag("appointment_date");
        time.setTag("appointment_time");
        saveAppointment.setTag("add_appointment_to_calendar");
        references.setTag("check_box_references");
        prescription.setTag("check_box_prescription");
        previousDiagnoses.setTag("check_box_Previous_diagnoses");
        testsResults.setTag("check_box_tests_results");
        subjectsIWantToTalkAbout.setTag("text_box_subjects_i_want_to_talk_about");
        newSymptoms.setTag("text_box_new_symptoms");
        changesInLife.setTag("text_box_changes_in_life");
        personalMedicalHistory.setTag("text_box_personal_medical_history");
        familyHealthBackground.setTag("text_box_family_health_background");
        drugsITake.setTag("text_box_drugs_i_take");
        additionalQuestionsToTheDoctor.setTag("text_box_additional_questions_to_the_doctor");
    }

    /** Wires the persistence and picker listeners to every widget. */
    private void bindListeners() {
        Activity parentActivity = getActivity();
        CheckBoxChanged cbcLis = new CheckBoxChanged();
        ChooserListener chooserLis = new ChooserListener();
        doctorName.addTextChangedListener(new TextChangedListener(parentActivity, doctorName));
        address.addTextChangedListener(new TextChangedListener(parentActivity, address));
        // date/time fields open a picker instead of accepting keyboard input
        date.setOnFocusChangeListener(chooserLis);
        date.setOnClickListener(chooserLis);
        time.setOnFocusChangeListener(chooserLis);
        time.setOnClickListener(chooserLis);
        references.setOnCheckedChangeListener(cbcLis);
        prescription.setOnCheckedChangeListener(cbcLis);
        previousDiagnoses.setOnCheckedChangeListener(cbcLis);
        testsResults.setOnCheckedChangeListener(cbcLis);
        subjectsIWantToTalkAbout.addTextChangedListener(new TextChangedListener(parentActivity, subjectsIWantToTalkAbout));
        newSymptoms.addTextChangedListener(new TextChangedListener(parentActivity, newSymptoms));
        changesInLife.addTextChangedListener(new TextChangedListener(parentActivity, changesInLife));
        personalMedicalHistory.addTextChangedListener(new TextChangedListener(parentActivity, personalMedicalHistory));
        familyHealthBackground.addTextChangedListener(new TextChangedListener(parentActivity, familyHealthBackground));
        drugsITake.addTextChangedListener(new TextChangedListener(parentActivity, drugsITake));
        additionalQuestionsToTheDoctor.addTextChangedListener(new TextChangedListener(parentActivity, additionalQuestionsToTheDoctor));
        saveAppointment.setOnClickListener(new View.OnClickListener()
            {public void onClick(View view) {addEventToCalendar();}});
    }

    /** Fires an ACTION_INSERT calendar intent for a one-hour event at the chosen date/time. */
    public void addEventToCalendar() {
        Calendar dueDate = getCalendarObjectFromFields();
        if (dueDate == null)
            return; // date or time field does not parse -- nothing to export
        Intent calendarIntent = new Intent(Intent.ACTION_INSERT);
        calendarIntent.setData(CalendarContract.Events.CONTENT_URI);
        calendarIntent.setType("vnd.android.cursor.item/event");
        if (doctorName.getText() != null && doctorName.getText().length() > 0)
            calendarIntent.putExtra(CalendarContract.Events.TITLE, getResources().getString(R.string.calendar_reminder_header_doc) + doctorName.getText());
        else
            calendarIntent.putExtra(CalendarContract.Events.TITLE, getResources().getString(R.string.calendar_reminder_header));
        calendarIntent.putExtra(CalendarContract.EXTRA_EVENT_ALL_DAY, false);
        calendarIntent.putExtra(CalendarContract.EXTRA_EVENT_BEGIN_TIME, dueDate.getTimeInMillis());
        dueDate.add(Calendar.HOUR, 1);
        calendarIntent.putExtra(CalendarContract.EXTRA_EVENT_END_TIME, dueDate.getTimeInMillis());
        startActivity(calendarIntent);
    }

    /** Opens the date picker, pre-set to the already-chosen date or to today. */
    public void openDateChooser() {
        int year;
        int month;
        int day;
        Calendar currentTime = Calendar.getInstance();
        final DateFormat df = DateFormat.getDateInstance(DateFormat.DEFAULT, PersonalProfile.getCurrentLocale());
        Calendar inMemory = Calendar.getInstance();
        // Parse the current field text; a parse failure means "no date chosen yet".
        try {inMemory.setTime(df.parse(date.getText().toString()));}catch(Exception ignore){inMemory = null;}
        year = (inMemory == null) ? currentTime.get(Calendar.YEAR) : inMemory.get(Calendar.YEAR);
        month = (inMemory == null) ? currentTime.get(Calendar.MONTH) : inMemory.get(Calendar.MONTH);
        day = (inMemory == null) ? currentTime.get(Calendar.DAY_OF_MONTH) : inMemory.get(Calendar.DAY_OF_MONTH);
        DatePickerDialog dateDialog = new DatePickerDialog(getActivity(),
            android.R.style.Theme_Holo_Light_Dialog,
            new DatePickerDialog.OnDateSetListener() {
                public void onDateSet(DatePicker datePicker, int i, int i1, int i2) {
                    Calendar calendar = Calendar.getInstance();
                    calendar.set(i, i1, i2);
                    String display = df.format(calendar.getTime());
                    date.setText(display);
                    prefs.edit().putString((String) date.getTag(), display).apply();
                    shouldEnableAppointmentButton();
                }
            }, year, month, day);
        dateDialog.setButton(DatePickerDialog.BUTTON_POSITIVE, getString(R.string.accept), dateDialog);
        dateDialog.setButton(DatePickerDialog.BUTTON_NEGATIVE, getString(R.string.cancel), dateDialog);
        dateDialog.show();
        Window window = dateDialog.getWindow();
        if (window != null)
            window.setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT));
    }

    /** Opens the 24h time picker, pre-set to the already-chosen time or to now. */
    @SuppressLint("SimpleDateFormat")
    public void openTimeChooser() {
        int hour;
        int min;
        final Calendar currentTime = Calendar.getInstance();
        // Fixed 24h pattern so the stored text round-trips regardless of locale.
        final DateFormat df = new SimpleDateFormat("HH:mm");
        Calendar inMemory = Calendar.getInstance();
        // Parse the current field text; a parse failure means "no time chosen yet".
        try {inMemory.setTime(df.parse(time.getText().toString()));} catch(Exception ignore){inMemory = null;}
        hour = (inMemory == null) ? currentTime.get(Calendar.HOUR_OF_DAY) : inMemory.get(Calendar.HOUR_OF_DAY);
        min = (inMemory == null) ? currentTime.get(Calendar.MINUTE) : inMemory.get(Calendar.MINUTE);
        TimePickerDialog timePicker = new TimePickerDialog(getActivity(),
            android.R.style.Theme_Holo_Light_Dialog,
            new TimePickerDialog.OnTimeSetListener() {
                public void onTimeSet(TimePicker timePicker, int i, int i1) {
                    Calendar calendar = Calendar.getInstance();
                    int year = currentTime.get(Calendar.YEAR);
                    int month = currentTime.get(Calendar.MONTH);
                    int day = currentTime.get(Calendar.DAY_OF_MONTH);
                    calendar.set(year, month, day, i, i1);
                    String display = df.format(calendar.getTime());
                    time.setText(display);
                    prefs.edit().putString((String) time.getTag(), display).apply();
                    shouldEnableAppointmentButton();
                }
            }, hour, min, true);
        timePicker.setButton(DatePickerDialog.BUTTON_POSITIVE, getString(R.string.accept), timePicker);
        timePicker.setButton(DatePickerDialog.BUTTON_NEGATIVE, getString(R.string.cancel), timePicker);
        timePicker.show();
        Window window = timePicker.getWindow();
        if (window != null)
            window.setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT));
    }

    /** Repopulates every widget from SharedPreferences (at most once until the next pause). */
    public void reloadSavedFields() {
        if (alreadyUpdated)
            return;
        alreadyUpdated = true;
        doctorName.setText(prefs.getString((String) doctorName.getTag(), ""));
        address.setText(prefs.getString((String) address.getTag(), ""));
        date.setText(prefs.getString((String) date.getTag(), ""));
        time.setText(prefs.getString((String) time.getTag(), ""));
        references.setChecked(prefs.getBoolean((String) references.getTag(), false));
        prescription.setChecked(prefs.getBoolean((String) prescription.getTag(), false));
        previousDiagnoses.setChecked(prefs.getBoolean((String) previousDiagnoses.getTag(), false));
        testsResults.setChecked(prefs.getBoolean((String) testsResults.getTag(), false));
        subjectsIWantToTalkAbout.setText(prefs.getString((String) subjectsIWantToTalkAbout.getTag(), ""));
        newSymptoms.setText(prefs.getString((String) newSymptoms.getTag(), ""));
        changesInLife.setText(prefs.getString((String) changesInLife.getTag(), ""));
        personalMedicalHistory.setText(prefs.getString((String) personalMedicalHistory.getTag(), ""));
        familyHealthBackground.setText(prefs.getString((String) familyHealthBackground.getTag(), ""));
        drugsITake.setText(prefs.getString((String) drugsITake.getTag(), ""));
        additionalQuestionsToTheDoctor.setText(prefs.getString((String) additionalQuestionsToTheDoctor.getTag(), ""));
        shouldEnableAppointmentButton();
    }

    /**
     * Enables the save button when both date and time parse, and keeps the
     * persisted appointment timestamp plus its reminder notifications in sync
     * with the form.
     */
    @SuppressLint("ApplySharedPref")
    public void shouldEnableAppointmentButton() {
        if (date.getText().length() != 0 && time.getText().length() != 0) {
            Calendar dueDate = getCalendarObjectFromFields();
            if (dueDate == null)
                return; // fields non-empty but unparsable -- leave state as-is
            saveAppointment.setEnabled(true);
            long inMemory = prefs.getLong("next_doctor_appointment_date_in_ms", -1);
            // Appointment unchanged: keep the existing reminders.
            if (inMemory == dueDate.getTimeInMillis())
                return;
            if (inMemory != -1)
                OverallNotificationManager.cancelDoctorAppointment(getActivity());
            // commit() (not apply()) so the timestamp is on disk before the
            // notification timers read it back.
            prefs.edit().putLong("next_doctor_appointment_date_in_ms", dueDate.getTimeInMillis()).commit();
            OverallNotificationManager.setUpNotificationTimers(getActivity(), OverallNotificationManager.NOTIFICATION_10_MIN_BEFORE_DOCTOR_ID);
            OverallNotificationManager.setUpNotificationTimers(getActivity(), OverallNotificationManager.NOTIFICATION_DAY_BEFORE_DOCTOR_ID);
        } else {
            saveAppointment.setEnabled(false);
            // A previously scheduled appointment was cleared: drop its reminders.
            if (prefs.getLong("next_doctor_appointment_date_in_ms", -1) != -1) {
                OverallNotificationManager.cancelDoctorAppointment(getActivity());
                prefs.edit().putLong("next_doctor_appointment_date_in_ms", -1).commit();
            }
        }
    }

    /**
     * Combines the date and time fields into a single Calendar (seconds and
     * millis zeroed), or returns null when either field fails to parse.
     */
    public Calendar getCalendarObjectFromFields() {
        DateFormat dfDate = DateFormat.getDateInstance(DateFormat.DEFAULT, PersonalProfile.getCurrentLocale());
        @SuppressLint("SimpleDateFormat")
        DateFormat dfTime = new SimpleDateFormat("HH:mm");
        Calendar timeCal = Calendar.getInstance();
        Calendar dateCal = Calendar.getInstance();
        Calendar dueDate = Calendar.getInstance();
        try{timeCal.setTime(dfTime.parse(time.getText().toString()));} catch(Exception ignore){return null;}
        try{dateCal.setTime(dfDate.parse(date.getText().toString()));} catch(Exception ignore){return null;}
        int year = dateCal.get(Calendar.YEAR);
        int month = dateCal.get(Calendar.MONTH);
        int day = dateCal.get(Calendar.DAY_OF_MONTH);
        int hour = timeCal.get(Calendar.HOUR_OF_DAY);
        int min = timeCal.get(Calendar.MINUTE);
        dueDate.set(year, month, day, hour, min);
        dueDate.set(Calendar.SECOND, dueDate.getActualMinimum(Calendar.SECOND));
        dueDate.set(Calendar.MILLISECOND, dueDate.getActualMinimum(Calendar.MILLISECOND));
        return dueDate;
    }

    /** Clears every persisted value and widget, then re-runs the reload pass. */
    public void clearForm() {
        prefs.edit().putString((String) doctorName.getTag(), "").apply();
        prefs.edit().putString((String) address.getTag(), "").apply();
        prefs.edit().putString((String) date.getTag(), "").apply();
        prefs.edit().putString((String) time.getTag(), "").apply();
        prefs.edit().putBoolean((String) references.getTag(), false).apply();
        prefs.edit().putBoolean((String) prescription.getTag(), false).apply();
        prefs.edit().putBoolean((String) previousDiagnoses.getTag(), false).apply();
        prefs.edit().putBoolean((String) testsResults.getTag(), false).apply();
        prefs.edit().putString((String) subjectsIWantToTalkAbout.getTag(), "").apply();
        prefs.edit().putString((String) newSymptoms.getTag(), "").apply();
        prefs.edit().putString((String) changesInLife.getTag(), "").apply();
        prefs.edit().putString((String) personalMedicalHistory.getTag(), "").apply();
        prefs.edit().putString((String) familyHealthBackground.getTag(), "").apply();
        prefs.edit().putString((String) drugsITake.getTag(), "").apply();
        prefs.edit().putString((String) additionalQuestionsToTheDoctor.getTag(), "").apply();
        doctorName.setText("");
        address.setText("");
        date.setText("");
        time.setText("");
        // BUG FIX: the original called setSelected(false), which changes the
        // "selected" view state but does NOT uncheck a CompoundButton;
        // setChecked(false) is required to actually clear the checkboxes.
        references.setChecked(false);
        prescription.setChecked(false);
        previousDiagnoses.setChecked(false);
        testsResults.setChecked(false);
        subjectsIWantToTalkAbout.setText("");
        newSymptoms.setText("");
        changesInLife.setText("");
        personalMedicalHistory.setText("");
        familyHealthBackground.setText("");
        drugsITake.setText("");
        additionalQuestionsToTheDoctor.setText("");
        alreadyUpdated = false;
        reloadSavedFields();
    }
}
| |
/*Copyright (C) 2017 Roland Hauser, <sourcepond@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.*/
package ch.sourcepond.io.fileobserver.impl;
import ch.sourcepond.commons.smartswitch.api.SmartSwitchBuilder;
import ch.sourcepond.commons.smartswitch.api.SmartSwitchBuilderFactory;
import ch.sourcepond.io.checksum.api.ResourcesFactory;
import ch.sourcepond.io.fileobserver.api.KeyDeliveryHook;
import ch.sourcepond.io.fileobserver.api.PathChangeListener;
import ch.sourcepond.io.fileobserver.impl.fs.DedicatedFileSystem;
import ch.sourcepond.io.fileobserver.impl.fs.DedicatedFileSystemFactory;
import ch.sourcepond.io.fileobserver.impl.listener.EventDispatcher;
import ch.sourcepond.io.fileobserver.impl.listener.ListenerManager;
import ch.sourcepond.io.fileobserver.spi.WatchedDirectory;
import org.junit.Before;
import org.junit.Test;
import org.mockito.stubbing.Answer;
import java.io.IOException;
import java.nio.file.FileSystem;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.spi.FileSystemProvider;
import java.util.concurrent.ExecutorService;
import java.util.function.Supplier;
import static org.junit.Assert.assertNotNull;
import static org.mockito.ArgumentMatchers.same;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.notNull;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
/**
*
*/
public class VirtualRootTest {
// Keys used to register watched directories in these tests.
private static final Object ROOT_KEY = new Object();
private static final Object OTHER_KEY = new Object();
// All collaborators are mocked so VirtualRoot is exercised in isolation.
private final Config config = mock(Config.class);
private final WatchedDirectory watchedDir = mock(WatchedDirectory.class);
private final FileSystem fs = mock(FileSystem.class);
private final FileSystemProvider provider = mock(FileSystemProvider.class);
private final BasicFileAttributes attrs = mock(BasicFileAttributes.class);
private final Path directory = mock(Path.class);
private final BasicFileAttributes modifiedPathAttrs = mock(BasicFileAttributes.class);
private final Path modifiedPath = mock(Path.class);
private final PathChangeListener listener = mock(PathChangeListener.class);
private final KeyDeliveryHook hook = mock(KeyDeliveryHook.class);
private final ResourcesFactory resourcesFactory = mock(ResourcesFactory.class);
private final DedicatedFileSystem dedicatedFs = mock(DedicatedFileSystem.class);
private final DedicatedFileSystemFactory dedicatedFsFactory = mock(DedicatedFileSystemFactory.class);
private final SmartSwitchBuilderFactory ssbFactory = mock(SmartSwitchBuilderFactory.class);
private final SmartSwitchBuilder<ExecutorService> executorBuilder = mock(SmartSwitchBuilder.class);
private final ListenerManager manager = mock(ListenerManager.class);
private final EventDispatcher dispatcher = mock(EventDispatcher.class);
// Captured lazily by initExecutors() via the SmartSwitch builder answers.
private ExecutorService dispatcherExecutor;
private ExecutorService listenerExecutor;
private ExecutorService directoryWalkerExecutor;
// System under test; re-created by tests that need a fresh, unconfigured instance.
private VirtualRoot virtualRoot = new VirtualRoot(dedicatedFsFactory, manager);
@Before
public void setup() throws IOException {
// Stub the mock file system so that `directory` reads as an existing
// directory and `modifiedPath` resolves through the same provider.
when(manager.addListener(listener)).thenReturn(dispatcher);
when(modifiedPath.getFileSystem()).thenReturn(fs);
when(provider.readAttributes(modifiedPath, BasicFileAttributes.class)).thenReturn(modifiedPathAttrs);
when(directory.getFileSystem()).thenReturn(fs);
when(fs.provider()).thenReturn(provider);
when(provider.readAttributes(directory, BasicFileAttributes.class)).thenReturn(attrs);
when(attrs.isDirectory()).thenReturn(true);
when(watchedDir.getKey()).thenReturn(ROOT_KEY);
when(watchedDir.getDirectory()).thenReturn(directory);
when(dedicatedFsFactory.openFileSystem(virtualRoot, fs)).thenReturn(dedicatedFs);
// Bring the SUT into its standard configured state: one registered root,
// one listener, activated with the mock config.
virtualRoot.addRoot(watchedDir);
virtualRoot.addListener(listener);
virtualRoot.activate(config);
}
@Test
public void setResourcesFactory() {
// The resources factory must be forwarded to the file-system factory.
virtualRoot.setResourcesFactory(resourcesFactory);
verify(dedicatedFsFactory).setResourcesFactory(resourcesFactory);
}
@Test
public void setConfig() throws Exception {
// activate(config) in setup() must have propagated the config to both
// the file-system factory and the listener manager.
verify(dedicatedFsFactory).setConfig(config);
verify(manager).setConfig(config);
}
// Stubs the SmartSwitch builder chain for one executor service: the OSGi
// target filter pFilter selects the service, and pAnswer supplies/captures
// the default executor created when no service is available.
private void setupDefaultExecutor(final String pFilter, final SmartSwitchBuilder<ExecutorService> pBuilder, final Answer<ExecutorService> pAnswer) {
when(ssbFactory.newBuilder(ExecutorService.class)).thenReturn(executorBuilder);
when(executorBuilder.setFilter(pFilter)).thenReturn(pBuilder);
when(pBuilder.setShutdownHook(notNull())).thenReturn(pBuilder);
when(pBuilder.build(notNull())).thenAnswer(pAnswer);
}
@Test
public void initExecutors() {
// Prepare three SmartSwitch builder chains, one per executor; each answer
// invokes the default-supplier passed to build() and captures the result.
final SmartSwitchBuilder<ExecutorService> dispatcherExecutorBuilder = mock(SmartSwitchBuilder.class);
setupDefaultExecutor("(sourcepond.io.fileobserver.dispatcherexecutor=*)", dispatcherExecutorBuilder, inv -> {
final Supplier<ExecutorService> s = inv.getArgument(0);
dispatcherExecutor = s.get();
assertNotNull(dispatcherExecutor);
return dispatcherExecutor;
});
final SmartSwitchBuilder<ExecutorService> listenerExecutorBuilder = mock(SmartSwitchBuilder.class);
setupDefaultExecutor("(sourcepond.io.fileobserver.listenerexecutor=*)", listenerExecutorBuilder, inv -> {
final Supplier<ExecutorService> s = inv.getArgument(0);
listenerExecutor = s.get();
assertNotNull(listenerExecutor);
return listenerExecutor;
});
final SmartSwitchBuilder<ExecutorService> directoryWalkerExecutorBuilder = mock(SmartSwitchBuilder.class);
setupDefaultExecutor("(sourcepond.io.fileobserver.directorywalkerexecutor=*)", directoryWalkerExecutorBuilder, inv -> {
final Supplier<ExecutorService> s = inv.getArgument(0);
directoryWalkerExecutor = s.get();
assertNotNull(directoryWalkerExecutor);
return directoryWalkerExecutor;
});
virtualRoot.initExecutors(ssbFactory);
// The created executors must be distributed to the collaborators.
verify(manager).setExecutors(dispatcherExecutor, listenerExecutor);
verify(dedicatedFsFactory).setExecutors(directoryWalkerExecutor, dispatcherExecutor);
}
@Test
public void verifyActivatorConstructor() {
// The no-arg constructor (used by the OSGi activator) must be instantiable.
new VirtualRoot();
}
@Test(expected = NullPointerException.class)
public void addRootWatchedDirectoryIsNull() throws IOException {
virtualRoot.addRoot(null);
}
@Test(expected = NullPointerException.class)
public void addRootKeyIsNull() throws IOException {
when(watchedDir.getKey()).thenReturn(null);
virtualRoot.addRoot(watchedDir);
}
@Test(expected = NullPointerException.class)
public void addRootDirectoryIsNull() throws IOException {
when(watchedDir.getDirectory()).thenReturn(null);
virtualRoot.addRoot(watchedDir);
}
@Test
public void addRoot() throws IOException {
// setup() already added the root; verify the expected side effects.
verify(dedicatedFs).registerRootDirectory(same(watchedDir));
verify(watchedDir).addObserver(virtualRoot);
}
@Test
public void addRootDirectoriesCouldNotBeCreated() throws IOException {
// Fresh instance so openFileSystem is invoked again, this time failing.
virtualRoot = new VirtualRoot(dedicatedFsFactory, manager);
doThrow(IOException.class).when(dedicatedFsFactory).openFileSystem(virtualRoot, fs);
// This should not cause an exception
virtualRoot.addRoot(watchedDir);
}
@Test(expected = NullPointerException.class)
public void addListenerIsNull() {
virtualRoot.addListener(null);
}
@Test
public void addHook() {
// Hook registration is delegated to the listener manager.
virtualRoot.addHook(hook);
verify(manager).addHook(hook);
}
@Test
public void removeHook() {
// Hook removal is delegated to the listener manager.
virtualRoot.removeHook(hook);
verify(manager).removeHook(hook);
}
@Test
public void addListener() {
// NOTE(review): this test is named addListener but invokes removeListener;
// it appears to verify that forceInform(dispatcher) happened for the
// listener added during setup() -- confirm intent against VirtualRoot.
virtualRoot.removeListener(listener);
verify(dedicatedFs).forceInform(dispatcher);
}
@Test
public void removeListener() throws IOException {
    // Forget all interactions recorded against the dedicated file system so far,
    // so only the addRoot below is verified.
    reset(dedicatedFs);
    when(watchedDir.getKey()).thenReturn(OTHER_KEY);
    virtualRoot.removeListener(listener);
    // Adding a root after listener removal must still register the directory.
    virtualRoot.addRoot(watchedDir);
    verify(dedicatedFs).registerRootDirectory(same(watchedDir));
}
@Test(expected = IllegalArgumentException.class)
public void addRootPathIsNotADirectory() throws IOException {
    // A watched path whose attributes report a non-directory must be rejected.
    when(watchedDir.getKey()).thenReturn(OTHER_KEY);
    when(attrs.isDirectory()).thenReturn(false);
    virtualRoot.addRoot(watchedDir);
}
@Test(expected = IllegalArgumentException.class)
public void addWatchedDirectoryWithSameKeyTwiceIsNotAllowed() throws IOException {
    // NOTE(review): assumes watchedDir was already registered during set-up, so
    // this second registration with the same key must fail. TODO confirm the
    // @Before method performs the first addRoot.
    virtualRoot.addRoot(watchedDir);
}
@Test(expected = NullPointerException.class)
public void removeRootWatchedDirectoryIsNull() throws IOException {
    // removeRoot must reject a null watched directory with a NullPointerException.
    virtualRoot.removeRoot(null);
}
@Test(expected = NullPointerException.class)
public void removeRootKeyIsNull() throws IOException {
    // A watched directory reporting a null key must be rejected by removeRoot.
    when(watchedDir.getKey()).thenReturn(null);
    virtualRoot.removeRoot(watchedDir);
}
@Test(expected = NullPointerException.class)
public void removeRootDirectoryIsNull() throws IOException {
    // A watched directory reporting a null path must be rejected by removeRoot.
    when(watchedDir.getDirectory()).thenReturn(null);
    virtualRoot.removeRoot(watchedDir);
}
@Test
public void removeRootNoSuchDirectoryRegistered() throws IOException {
    // A fresh instance has no roots registered; removing an unknown one is a no-op.
    virtualRoot = new VirtualRoot(dedicatedFsFactory, manager);
    // This should not cause an exception
    virtualRoot.removeRoot(watchedDir);
}
@Test
public void removeRoot() throws IOException {
    virtualRoot.removeRoot(watchedDir);
    // Removal must unregister the directory and detach this observer.
    verify(dedicatedFs).unregisterRootDirectory(directory, watchedDir);
    verify(watchedDir).removeObserver(virtualRoot);
    // After removal the same key may be registered again.
    // This should not cause an exception
    virtualRoot.addRoot(watchedDir);
}
@Test
public void deactivate() {
    virtualRoot.deactivate();
    // Deactivation must close the dedicated file system and shut down the factory.
    verify(dedicatedFs).close();
    verify(dedicatedFsFactory).shutdown();
    // A listener added after deactivation must never be informed.
    final PathChangeListener otherListener = mock(PathChangeListener.class);
    virtualRoot.addListener(otherListener);
    verifyZeroInteractions(otherListener);
}
@Test
public void removeFileSystem() {
    virtualRoot.removeFileSystem(dedicatedFs);
    final PathChangeListener otherListener = mock(PathChangeListener.class);
    // Removal must be propagated to the manager with the underlying file system.
    verify(manager).removeFileSystem(fs);
    // A listener added afterwards receives nothing from the removed file system.
    virtualRoot.addListener(otherListener);
    verifyZeroInteractions(otherListener);
}
@Test(expected = NullPointerException.class)
public void destintationChangeWatchedDirectoryIsNull() throws IOException {
    // destinationChanged must reject a null watched directory.
    virtualRoot.destinationChanged(null, directory);
}
@Test(expected = NullPointerException.class)
public void destintationChangePreviousDirectoryIsNull() throws IOException {
    // destinationChanged must reject a null previous directory.
    virtualRoot.destinationChanged(watchedDir, null);
}
@Test(expected = NullPointerException.class)
public void destintationChangeKeyIsNull() throws IOException {
    // A watched directory reporting a null key must be rejected.
    when(watchedDir.getKey()).thenReturn(null);
    virtualRoot.destinationChanged(watchedDir, directory);
}
@Test(expected = NullPointerException.class)
public void destintationChangeCurrentDirIsNull() throws IOException {
    // A watched directory reporting a null current path must be rejected.
    when(watchedDir.getDirectory()).thenReturn(null);
    virtualRoot.destinationChanged(watchedDir, directory);
}
@Test
public void destintationChangeDirectoryNotMapped() throws IOException {
    // Once the root is removed, the directory is no longer mapped, so a
    // destination change for it must be silently ignored.
    virtualRoot.removeRoot(watchedDir);
    // This should not cause an exception
    virtualRoot.destinationChanged(watchedDir, directory);
    verify(dedicatedFs, never()).destinationChanged(any(), any());
}
@Test
public void destintationChangePreviousAndCurrentDirAreEqual() throws IOException {
    // No-op when the "new" destination equals the currently mapped directory.
    virtualRoot.destinationChanged(watchedDir, directory);
    verify(dedicatedFs, never()).destinationChanged(any(), any());
}
@Test
public void destintationChange() throws IOException {
    // A genuinely different destination must be propagated to the file system.
    final Path newDirectory = mock(Path.class);
    virtualRoot.destinationChanged(watchedDir, newDirectory);
    verify(dedicatedFs).destinationChanged(watchedDir, newDirectory);
}
}
| |
package curves;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.geom.Ellipse2D;
import java.awt.geom.Line2D;
import java.awt.geom.Point2D;
import java.util.ArrayList;
import java.util.Random;
// This class generates a path to populate
public class Generator {

    /** Base spline the whole figure is built around. */
    private Spline s;

    /** Number of control points on the base spline. */
    public static final int NUM_POINTS = 3;
    // Accepted range for the (sqrt-biased) spacing factor between consecutive
    // points, as a fraction of max(width, height).
    public static final float SPACING_FACTOR_MIN = 0.33f;
    public static final float SPACING_FACTOR_MAX = 0.55f;
    // Magnitude range for the tangent (derivative) vector at each point.
    public static final float DERIVATIVE_MIN = 150.0f;
    public static final float DERIVATIVE_MAX = 200.0f;
    public static final float ANGLE_DELTA_POINT = 60; // Change in angle per point in degrees
    public static final float ANGLE_DELTA_DERIVATIVE = 60; // Change in derivative angle per point in degrees

    /** Number of candidate spots along the base spline considered for splits. */
    public static final int PARTITIONS = 6;

    /** Secondary splines branching off the base spline. */
    private ArrayList<Spline> splits;
    public static final float SPLIT_CHANCE = 0.33f;
    public static final float SPLIT_SCALE = 0.5f;
    public static final int NUM_POINTS_SPLIT = 3;

    /** Shared randomness source; unseeded, so every Generator run differs. */
    public static final Random random = new Random();

    /** Euclidean distance between (x, y) and p. */
    public static float dist(float x, float y, Point p) {
        float dx = x - p.x;
        float dy = y - p.y;
        return (float) Math.sqrt(dx * dx + dy * dy);
    }

    /**
     * Generates the next point for {@code spline}. If {@code spline} is null, a
     * completely random starting point (position inside width x height, random
     * tangent) is returned; otherwise the new point is placed at a bounded
     * random distance and angle relative to the spline's last point.
     *
     * @param width  horizontal extent for the start point
     * @param height vertical extent for the start point
     * @param spline spline to extend, or null to create a fresh start point
     * @param scale  scales both spacing and tangent magnitude (splits use < 1)
     */
    public Point generatePoint(int width, int height, Spline spline, float scale) {
        if (spline == null) { // If no points already exist just find a random one
            // Fix: previously width()/height() were used here, silently ignoring
            // the parameters; all in-file callers pass width()/height() anyway.
            float x = random.nextFloat() * width;
            float y = random.nextFloat() * height;
            float theta = (float) (random.nextFloat() * Math.PI * 2);
            float r = random.nextFloat() * (DERIVATIVE_MAX - DERIVATIVE_MIN) + DERIVATIVE_MIN;
            float dx = (float) (Math.cos(theta) * r);
            float dy = (float) (Math.sin(theta) * r);
            return new Point(x, y, dx, dy);
        }
        int max = Math.max(width, height);
        // Rejection-sample a spacing factor; sqrt biases toward larger radii.
        float r;
        do {
            r = (float) Math.sqrt(random.nextFloat());
        } while (r <= SPACING_FACTOR_MIN || r >= SPACING_FACTOR_MAX);
        r *= max;
        // Scale r
        r *= scale;
        // Generate the point at an angle within the allowed delta of the last
        // point's tangent direction.
        Point last = spline.points.get(spline.points.size() - 1);
        float theta = (random.nextFloat() * 2 - 1) * ANGLE_DELTA_POINT;
        float thetaRad = (float) ((theta * Math.PI / 180) + Math.atan2(last.dy, last.dx));
        float x = (float) (Math.cos(thetaRad) * r + last.x);
        float y = (float) (Math.sin(thetaRad) * r + last.y);
        // Generate a derivative at the point, again within the allowed delta.
        theta = (random.nextFloat() * 2 - 1) * ANGLE_DELTA_DERIVATIVE;
        thetaRad = (float) ((theta * Math.PI / 180) + Math.atan2(last.dy, last.dx));
        r = random.nextFloat() * (DERIVATIVE_MAX - DERIVATIVE_MIN) + DERIVATIVE_MIN;
        float dx = (float) (Math.cos(thetaRad) * r) * scale;
        float dy = (float) (Math.sin(thetaRad) * r) * scale;
        return new Point(x, y, dx, dy);
    }

    /** Builds a spline from a random start point, using width/height constraints. */
    public Spline buildSpline(int width, int height, float scale, int numPoints) {
        return buildSpline(width, height, generatePoint(width, height, null, scale), scale, numPoints);
    }

    /** Builds a spline of {@code numPoints} points beginning at {@code start}. */
    public Spline buildSpline(int width, int height, Point start, float scale, int numPoints) {
        Spline spline = new Spline();
        spline.addPoint(start);
        // Create the remaining reasonably-spaced points.
        for (int i = 1; i < numPoints; i++) {
            spline.addPoint(generatePoint(width, height, spline, scale));
        }
        return spline;
    }

    // Translation applied when drawing, so the generated figure is centered.
    private int xmod, ymod;

    public Generator() {
        // Create basic spline
        s = buildSpline(width(), height(), 1.0f, NUM_POINTS);
        // Decide where to branch splits off the base spline: each of the
        // PARTITIONS candidate spots spawns a split with probability SPLIT_CHANCE.
        splits = new ArrayList<Spline>((int) (PARTITIONS * SPLIT_CHANCE));
        for (int i = 0; i < PARTITIONS; i++) { // Check each partition
            if (random.nextFloat() <= SPLIT_CHANCE) {
                // Parameter position of this partition along the whole spline.
                float t = ((float) i) / PARTITIONS * (NUM_POINTS - 1);
                int curveNum = (int) t;
                Curve c;
                if (curveNum >= NUM_POINTS - 1) {
                    c = s.connects.get(s.connects.size() - 1);
                } else {
                    c = s.connects.get(curveNum);
                }
                // Sample position and derivative at the middle of the partition.
                float[] pos = c.pointAt(t - curveNum + (0.5f / PARTITIONS * NUM_POINTS));
                float[] deriv = c.derivativeAt(t - curveNum + (0.5f / PARTITIONS * NUM_POINTS));
                // Branch perpendicular to the base curve, in a random direction
                // ((dx, dy) rotated +/-90 degrees).
                if (random.nextBoolean()) {
                    splits.add(buildSpline(width(), height(), new Point(pos[0], pos[1], 0 - deriv[1], deriv[0]), SPLIT_SCALE, NUM_POINTS_SPLIT));
                } else {
                    splits.add(buildSpline(width(), height(), new Point(pos[0], pos[1], deriv[1], 0 - deriv[0]), SPLIT_SCALE, NUM_POINTS_SPLIT));
                }
            }
        }
        // Determine minimum and maximum x and y coordinates to center the image.
        // Start the bounds outside the drawable area so any point tightens them.
        float minx = (width() * 3 / 2) * 2, miny = (height() * 3 / 2) * 2, maxx = 0 - (width() * 3 / 2), maxy = 0 - (height() * 3 / 2);
        for (Point p : s.points) {
            if (p.x < minx) {
                minx = p.x;
            }
            if (p.x > maxx) {
                maxx = p.x;
            }
            if (p.y < miny) {
                miny = p.y;
            }
            if (p.y > maxy) {
                maxy = p.y;
            }
        }
        for (Spline spline : splits) {
            for (Point p : spline.points) {
                if (p.x < minx) {
                    minx = p.x;
                }
                if (p.x > maxx) {
                    maxx = p.x;
                }
                if (p.y < miny) {
                    miny = p.y;
                }
                if (p.y > maxy) {
                    maxy = p.y;
                }
            }
        }
        xmod = (int) (((width() * 3 / 2) - maxx - minx) / 2);
        ymod = (int) (((height() * 3 / 2) - maxy - miny) / 2);
    }

    /**
     * TODO(review): unfinished stub — currently always returns null regardless
     * of {@code numCircles}. Callers must null-check until implemented.
     */
    public ArrayList<Circle> populate(int numCircles) {
        return null;
    }

    /** Drawable width in pixels (currently fixed). */
    public int width() {
        return 600;
    }

    /** Drawable height in pixels (currently fixed). */
    public int height() {
        return 600;
    }

    public static final float DS = 2; // 2px change in length per line segment drawn
    public static final float CIRCLE_SIZE = 5; // 5px radius circles
    public static final float ARROW_SCALE = 0.1f; // arrows are drawn at 1/10 of the derivative's length

    /**
     * Draws every curve of the given spline as short line segments, plus its
     * end points and derivative arrows, translated by (xmod, ymod).
     *
     * @param cColor color for the curve itself
     * @param pColor color for the end-point markers
     */
    public void drawCurves(Spline spline, Graphics2D g2D, int xmod, int ymod, Color cColor, Color pColor) {
        for (Curve c : spline.connects) {
            // Flatten the curve into segments of roughly DS pixels each, stepping
            // the parameter t by DS divided by the local speed.
            boolean done = false;
            float t = 0.0f;
            float[] lastPos = c.pointAt(t);
            while (!done) {
                if (t >= 1.0f) {
                    done = true;
                    continue;
                }
                t += DS / (c.speedAt(t));
                float[] pos = c.pointAt(t);
                Point2D.Float lastPosP, posP;
                lastPosP = new Point2D.Float(lastPos[0] + xmod, lastPos[1] + ymod);
                posP = new Point2D.Float(pos[0] + xmod, pos[1] + ymod);
                g2D.setColor(cColor);
                g2D.draw(new Line2D.Float(lastPosP, posP));
                lastPos = pos;
            }
            // Draw points and derivatives
            Point p1 = c.p1;
            g2D.setColor(pColor);
            g2D.fill(new Ellipse2D.Float(p1.x - CIRCLE_SIZE + xmod, p1.y - CIRCLE_SIZE + ymod, 2 * CIRCLE_SIZE, 2 * CIRCLE_SIZE));
            Point p2 = c.p2;
            g2D.setColor(pColor);
            g2D.fill(new Ellipse2D.Float(p2.x - CIRCLE_SIZE + xmod, p2.y - CIRCLE_SIZE + ymod, 2 * CIRCLE_SIZE, 2 * CIRCLE_SIZE));
            // Tangent arrow (red) plus its perpendicular at p1.
            g2D.setColor(Color.RED);
            g2D.draw(new Line2D.Float(new Point2D.Float(p1.x + xmod, p1.y + ymod), new Point2D.Float(p1.x + xmod + p1.dx * ARROW_SCALE, p1.y + ymod + p1.dy * ARROW_SCALE)));
            g2D.fill(new Ellipse2D.Float(p1.x + xmod + p1.dx * ARROW_SCALE - CIRCLE_SIZE, p1.y + ymod + p1.dy * ARROW_SCALE - CIRCLE_SIZE, 2 * CIRCLE_SIZE, 2 * CIRCLE_SIZE));
            g2D.draw(new Line2D.Float(new Point2D.Float(p1.x + xmod, p1.y + ymod), new Point2D.Float(p1.x + xmod + p1.dy * ARROW_SCALE, p1.y + ymod - p1.dx * ARROW_SCALE)));
            // Opposite perpendicular, twice as long, in purple.
            g2D.setColor(new Color(160, 32, 240)); // Purple
            g2D.draw(new Line2D.Float(new Point2D.Float(p1.x + xmod - p1.dy * ARROW_SCALE * 2, p1.y + ymod + p1.dx * ARROW_SCALE * 2), new Point2D.Float(p1.x + xmod, p1.y + ymod)));
            g2D.fill(new Ellipse2D.Float(p1.x + xmod - p1.dy * ARROW_SCALE * 2 - CIRCLE_SIZE, p1.y + ymod + p1.dx * ARROW_SCALE * 2 - CIRCLE_SIZE, 2 * CIRCLE_SIZE, 2 * CIRCLE_SIZE));
        }
    }

    /** Draws the bounding box, the base spline, and all splits. */
    public void draw(Graphics2D g2D) {
        // Draw a box around the drawable area.
        g2D.drawRect(0, 0, (width() * 3 / 2), (height() * 3 / 2));
        // Draw base spline
        drawCurves(s, g2D, xmod, ymod, Color.black, Color.gray);
        // Draw splits
        for (Spline spline : splits) {
            drawCurves(spline, g2D, xmod, ymod, Color.green, Color.blue);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.factories;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.table.api.AmbiguousTableFactoryException;
import org.apache.flink.table.api.NoMatchingTableFactoryException;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.descriptors.Descriptor;
import org.apache.flink.table.descriptors.FormatDescriptorValidator;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.ServiceConfigurationError;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.stream.Collectors;
import static org.apache.flink.table.descriptors.CatalogDescriptorValidator.CATALOG_PROPERTY_VERSION;
import static org.apache.flink.table.descriptors.ConnectorDescriptorValidator.CONNECTOR_PROPERTY_VERSION;
import static org.apache.flink.table.descriptors.FormatDescriptorValidator.FORMAT_PROPERTY_VERSION;
/**
* Unified class to search for a {@link TableFactory} of provided type and properties.
*/
public class TableFactoryService {

    private static final Logger LOG = LoggerFactory.getLogger(TableFactoryService.class);

    /**
     * Finds a table factory of the given class and descriptor.
     *
     * @param factoryClass desired factory class
     * @param descriptor descriptor describing the factory configuration
     * @param <T> factory class type
     * @return the matching factory
     */
    public static <T extends TableFactory> T find(Class<T> factoryClass, Descriptor descriptor) {
        Preconditions.checkNotNull(descriptor);
        return findSingleInternal(factoryClass, descriptor.toProperties(), Optional.empty());
    }

    /**
     * Finds a table factory of the given class, descriptor, and classloader.
     *
     * @param factoryClass desired factory class
     * @param descriptor descriptor describing the factory configuration
     * @param classLoader classloader for service loading
     * @param <T> factory class type
     * @return the matching factory
     */
    public static <T extends TableFactory> T find(
            Class<T> factoryClass,
            Descriptor descriptor,
            ClassLoader classLoader) {
        Preconditions.checkNotNull(descriptor);
        Preconditions.checkNotNull(classLoader);
        return findSingleInternal(factoryClass, descriptor.toProperties(), Optional.of(classLoader));
    }

    /**
     * Finds a table factory of the given class and property map.
     *
     * @param factoryClass desired factory class
     * @param propertyMap properties that describe the factory configuration
     * @param <T> factory class type
     * @return the matching factory
     */
    public static <T extends TableFactory> T find(Class<T> factoryClass, Map<String, String> propertyMap) {
        return findSingleInternal(factoryClass, propertyMap, Optional.empty());
    }

    /**
     * Finds a table factory of the given class, property map, and classloader.
     *
     * @param factoryClass desired factory class
     * @param propertyMap properties that describe the factory configuration
     * @param classLoader classloader for service loading
     * @param <T> factory class type
     * @return the matching factory
     */
    public static <T extends TableFactory> T find(
            Class<T> factoryClass,
            Map<String, String> propertyMap,
            ClassLoader classLoader) {
        Preconditions.checkNotNull(classLoader);
        return findSingleInternal(factoryClass, propertyMap, Optional.of(classLoader));
    }

    /**
     * Finds all table factories of the given class and property map.
     *
     * @param factoryClass desired factory class
     * @param propertyMap properties that describe the factory configuration
     * @param <T> factory class type
     * @return all the matching factories
     */
    public static <T extends TableFactory> List<T> findAll(Class<T> factoryClass, Map<String, String> propertyMap) {
        return findAllInternal(factoryClass, propertyMap, Optional.empty());
    }

    /**
     * Finds the single table factory of the given class, property map, and classloader.
     *
     * @param factoryClass desired factory class
     * @param properties properties that describe the factory configuration
     * @param classLoader classloader for service loading
     * @param <T> factory class type
     * @return the matching factory
     */
    private static <T extends TableFactory> T findSingleInternal(
            Class<T> factoryClass,
            Map<String, String> properties,
            Optional<ClassLoader> classLoader) {
        List<TableFactory> tableFactories = discoverFactories(classLoader);
        List<T> filtered = filter(tableFactories, factoryClass, properties);
        // filter() guarantees a non-empty result (it throws otherwise), so the
        // only remaining failure mode here is more than one match.
        if (filtered.size() > 1) {
            throw new AmbiguousTableFactoryException(
                filtered,
                factoryClass,
                tableFactories,
                properties);
        } else {
            return filtered.get(0);
        }
    }

    /**
     * Finds all table factories of the given class, property map, and classloader.
     *
     * @param factoryClass desired factory class
     * @param properties properties that describe the factory configuration
     * @param classLoader classloader for service loading
     * @param <T> factory class type
     * @return all matching factories
     */
    private static <T extends TableFactory> List<T> findAllInternal(
            Class<T> factoryClass,
            Map<String, String> properties,
            Optional<ClassLoader> classLoader) {
        List<TableFactory> tableFactories = discoverFactories(classLoader);
        return filter(tableFactories, factoryClass, properties);
    }

    /**
     * Filters found factories by factory class and with matching context.
     * Never returns an empty list; throws {@link NoMatchingTableFactoryException}
     * instead.
     */
    private static <T extends TableFactory> List<T> filter(
            List<TableFactory> foundFactories,
            Class<T> factoryClass,
            Map<String, String> properties) {
        Preconditions.checkNotNull(factoryClass);
        Preconditions.checkNotNull(properties);
        List<T> classFactories = filterByFactoryClass(
            factoryClass,
            properties,
            foundFactories);
        List<T> contextFactories = filterByContext(
            factoryClass,
            properties,
            foundFactories,
            classFactories);
        return filterBySupportedProperties(
            factoryClass,
            properties,
            foundFactories,
            contextFactories);
    }

    /**
     * Searches for factories using Java service providers.
     *
     * <p>A fresh {@link ServiceLoader} is created per invocation: ServiceLoader
     * instances and their lazy iterators are not thread-safe, so a shared
     * static loader must not be iterated from concurrent lookups.
     *
     * @return all factories in the classpath
     */
    private static List<TableFactory> discoverFactories(Optional<ClassLoader> classLoader) {
        try {
            List<TableFactory> result = new LinkedList<>();
            if (classLoader.isPresent()) {
                ServiceLoader
                    .load(TableFactory.class, classLoader.get())
                    .iterator()
                    .forEachRemaining(result::add);
            } else {
                ServiceLoader
                    .load(TableFactory.class)
                    .iterator()
                    .forEachRemaining(result::add);
            }
            return result;
        } catch (ServiceConfigurationError e) {
            LOG.error("Could not load service provider for table factories.", e);
            throw new TableException("Could not load service provider for table factories.", e);
        }
    }

    /**
     * Filters factories by the desired factory class.
     */
    @SuppressWarnings("unchecked")
    private static <T> List<T> filterByFactoryClass(
            Class<T> factoryClass,
            Map<String, String> properties,
            List<TableFactory> foundFactories) {
        List<TableFactory> classFactories = foundFactories.stream()
            .filter(p -> factoryClass.isAssignableFrom(p.getClass()))
            .collect(Collectors.toList());
        if (classFactories.isEmpty()) {
            throw new NoMatchingTableFactoryException(
                String.format("No factory implements '%s'.", factoryClass.getCanonicalName()),
                factoryClass,
                foundFactories,
                properties);
        }
        return (List<T>) classFactories;
    }

    /**
     * Filters for factories with matching context.
     *
     * @return all matching factories
     */
    private static <T extends TableFactory> List<T> filterByContext(
            Class<T> factoryClass,
            Map<String, String> properties,
            List<TableFactory> foundFactories,
            List<T> classFactories) {
        List<T> matchingFactories = classFactories.stream().filter(factory -> {
            Map<String, String> requestedContext = normalizeContext(factory);
            Map<String, String> plainContext = new HashMap<>(requestedContext);
            // we remove the version for now until we have the first backwards compatibility case
            // with the version we can provide mappings in case the format changes
            plainContext.remove(CONNECTOR_PROPERTY_VERSION);
            plainContext.remove(FORMAT_PROPERTY_VERSION);
            plainContext.remove(CATALOG_PROPERTY_VERSION);
            // check if required context is met
            return plainContext.keySet()
                .stream()
                .allMatch(e -> properties.containsKey(e) && properties.get(e).equals(plainContext.get(e)));
        }).collect(Collectors.toList());
        if (matchingFactories.isEmpty()) {
            throw new NoMatchingTableFactoryException(
                "No context matches.",
                factoryClass,
                foundFactories,
                properties);
        }
        return matchingFactories;
    }

    /**
     * Prepares the properties of a context to be used for match operations
     * (keys are lower-cased).
     */
    private static Map<String, String> normalizeContext(TableFactory factory) {
        Map<String, String> requiredContext = factory.requiredContext();
        if (requiredContext == null) {
            throw new TableException(
                String.format("Required context of factory '%s' must not be null.", factory.getClass().getName()));
        }
        return requiredContext.keySet().stream()
            .collect(Collectors.toMap(String::toLowerCase, requiredContext::get));
    }

    /**
     * Filters the matching class factories by supported properties.
     */
    private static <T extends TableFactory> List<T> filterBySupportedProperties(
            Class<T> factoryClass,
            Map<String, String> properties,
            List<TableFactory> foundFactories,
            List<T> classFactories) {
        final List<String> plainGivenKeys = new LinkedList<>();
        properties.keySet().forEach(k -> {
            // Replace array/index parts with a wildcard: only a literal dot
            // followed by digits marks an index (e.g. "schema.0.name" ->
            // "schema.#.name"). The dot must be escaped in the regex, otherwise
            // keys with trailing digits such as "field1" would also be mangled.
            String key = k.replaceAll("\\.\\d+", ".#");
            // ignore duplicates
            if (!plainGivenKeys.contains(key)) {
                plainGivenKeys.add(key);
            }
        });
        // remembers the property key checked last, for a precise error message
        Optional<String> lastKey = Optional.empty();
        List<T> supportedFactories = new LinkedList<>();
        for (T factory: classFactories) {
            Set<String> requiredContextKeys = normalizeContext(factory).keySet();
            Tuple2<List<String>, List<String>> tuple2 = normalizeSupportedProperties(factory);
            // ignore context keys
            List<String> givenContextFreeKeys = plainGivenKeys.stream()
                .filter(p -> !requiredContextKeys.contains(p))
                .collect(Collectors.toList());
            List<String> givenFilteredKeys = filterSupportedPropertiesFactorySpecific(
                factory,
                givenContextFreeKeys);
            // a factory is supported iff every given key is either listed
            // explicitly or covered by a wildcard prefix
            boolean allTrue = true;
            for (String k: givenFilteredKeys) {
                lastKey = Optional.of(k);
                if (!(tuple2.f0.contains(k) || tuple2.f1.stream().anyMatch(k::startsWith))) {
                    allTrue = false;
                    break;
                }
            }
            if (allTrue) {
                supportedFactories.add(factory);
            }
        }
        if (supportedFactories.isEmpty() && classFactories.size() == 1 && lastKey.isPresent()) {
            // special case: when there is only one matching factory but the last property key
            // was incorrect
            TableFactory factory = classFactories.get(0);
            Tuple2<List<String>, List<String>> tuple2 = normalizeSupportedProperties(factory);
            String errorMessage = String.format(
                "The matching factory '%s' doesn't support '%s'.\n\nSupported properties of " +
                    "this factory are:\n%s",
                factory.getClass().getName(),
                lastKey.get(),
                String.join("\n", tuple2.f0));
            throw new NoMatchingTableFactoryException(
                errorMessage,
                factoryClass,
                foundFactories,
                properties);
        } else if (supportedFactories.isEmpty()) {
            throw new NoMatchingTableFactoryException(
                "No factory supports all properties.",
                factoryClass,
                foundFactories,
                properties);
        }
        return supportedFactories;
    }

    /**
     * Prepares the supported properties of a factory to be used for match operations.
     *
     * @return tuple of (lower-cased explicit keys, wildcard prefixes)
     */
    private static Tuple2<List<String>, List<String>> normalizeSupportedProperties(TableFactory factory) {
        List<String> supportedProperties = factory.supportedProperties();
        if (supportedProperties == null) {
            throw new TableException(
                String.format("Supported properties of factory '%s' must not be null.",
                    factory.getClass().getName()));
        }
        List<String> supportedKeys = supportedProperties.stream()
            .map(String::toLowerCase)
            .collect(Collectors.toList());
        // extract wildcard prefixes
        List<String> wildcards = extractWildcardPrefixes(supportedKeys);
        return Tuple2.of(supportedKeys, wildcards);
    }

    /**
     * Converts the prefix of properties with wildcards (e.g., "format.*")
     * into plain prefixes ("format.").
     */
    private static List<String> extractWildcardPrefixes(List<String> propertyKeys) {
        return propertyKeys.stream()
            .filter(p -> p.endsWith("*"))
            .map(s -> s.substring(0, s.length() - 1))
            .collect(Collectors.toList());
    }

    /**
     * Performs filtering for special cases (i.e. table format factories with schema derivation):
     * only format.* keys (and, with schema derivation, schema.* keys) are considered.
     */
    private static List<String> filterSupportedPropertiesFactorySpecific(TableFactory factory, List<String> keys) {
        if (factory instanceof TableFormatFactory) {
            boolean includeSchema = ((TableFormatFactory) factory).supportsSchemaDerivation();
            return keys.stream().filter(k -> {
                if (includeSchema) {
                    return k.startsWith(Schema.SCHEMA + ".") ||
                        k.startsWith(FormatDescriptorValidator.FORMAT + ".");
                } else {
                    return k.startsWith(FormatDescriptorValidator.FORMAT + ".");
                }
            }).collect(Collectors.toList());
        } else {
            return keys;
        }
    }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.kotlin.idea.highlighter;
import com.intellij.testFramework.TestDataPath;
import org.jetbrains.kotlin.test.JUnit3RunnerWithInners;
import org.jetbrains.kotlin.test.KotlinTestUtils;
import org.jetbrains.kotlin.test.TestMetadata;
import org.jetbrains.kotlin.test.TestRoot;
import org.junit.runner.RunWith;
/**
* This class is generated by {@link org.jetbrains.kotlin.testGenerator.generator.TestGenerator}.
* DO NOT MODIFY MANUALLY.
*/
@SuppressWarnings("all")
@TestRoot("idea/tests")
@TestDataPath("$CONTENT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/highlighter")
public abstract class HighlightingTestGenerated extends AbstractHighlightingTest {
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/highlighter/deprecated")
public static class Deprecated extends AbstractHighlightingTest {
    // Generated delegation tests: each method highlights exactly one file under
    // testData/highlighter/deprecated. Regenerate via TestGenerator — do not
    // edit by hand (see the class-level notice).
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
    }

    @TestMetadata("Class.kt")
    public void testClass() throws Exception {
        runTest("testData/highlighter/deprecated/Class.kt");
    }

    @TestMetadata("ClassObject.kt")
    public void testClassObject() throws Exception {
        runTest("testData/highlighter/deprecated/ClassObject.kt");
    }

    @TestMetadata("Constructor.kt")
    public void testConstructor() throws Exception {
        runTest("testData/highlighter/deprecated/Constructor.kt");
    }

    @TestMetadata("ExtensionFunction.kt")
    public void testExtensionFunction() throws Exception {
        runTest("testData/highlighter/deprecated/ExtensionFunction.kt");
    }

    @TestMetadata("Function.kt")
    public void testFunction() throws Exception {
        runTest("testData/highlighter/deprecated/Function.kt");
    }

    @TestMetadata("Get.kt")
    public void testGet() throws Exception {
        runTest("testData/highlighter/deprecated/Get.kt");
    }

    @TestMetadata("Getter.kt")
    public void testGetter() throws Exception {
        runTest("testData/highlighter/deprecated/Getter.kt");
    }

    @TestMetadata("Inc.kt")
    public void testInc() throws Exception {
        runTest("testData/highlighter/deprecated/Inc.kt");
    }

    @TestMetadata("Invalid.kt")
    public void testInvalid() throws Exception {
        runTest("testData/highlighter/deprecated/Invalid.kt");
    }

    @TestMetadata("Invoke.kt")
    public void testInvoke() throws Exception {
        runTest("testData/highlighter/deprecated/Invoke.kt");
    }

    @TestMetadata("Operation.kt")
    public void testOperation() throws Exception {
        runTest("testData/highlighter/deprecated/Operation.kt");
    }

    @TestMetadata("Property.kt")
    public void testProperty() throws Exception {
        runTest("testData/highlighter/deprecated/Property.kt");
    }

    @TestMetadata("RangeTo.kt")
    public void testRangeTo() throws Exception {
        runTest("testData/highlighter/deprecated/RangeTo.kt");
    }

    @TestMetadata("Setter.kt")
    public void testSetter() throws Exception {
        runTest("testData/highlighter/deprecated/Setter.kt");
    }

    @TestMetadata("SuperCall.kt")
    public void testSuperCall() throws Exception {
        runTest("testData/highlighter/deprecated/SuperCall.kt");
    }

    @TestMetadata("Trait.kt")
    public void testTrait() throws Exception {
        runTest("testData/highlighter/deprecated/Trait.kt");
    }
}
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/highlighter")
public abstract static class Uncategorized extends AbstractHighlightingTest {
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/highlighter")
public static class TestBucket001 extends AbstractHighlightingTest {
    // Generated delegation tests (first bucket of uncategorized cases): each
    // method highlights exactly one file under testData/highlighter.
    // Regenerate via TestGenerator — do not edit by hand.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
    }

    @TestMetadata("Annotations.kt")
    public void testAnnotations() throws Exception {
        runTest("testData/highlighter/Annotations.kt");
    }

    @TestMetadata("AutoCreatedItParameter.kt")
    public void testAutoCreatedItParameter() throws Exception {
        runTest("testData/highlighter/AutoCreatedItParameter.kt");
    }

    @TestMetadata("DelegatingCtor.kt")
    public void testDelegatingCtor() throws Exception {
        runTest("testData/highlighter/DelegatingCtor.kt");
    }

    @TestMetadata("Destructuring.kt")
    public void testDestructuring() throws Exception {
        runTest("testData/highlighter/Destructuring.kt");
    }

    @TestMetadata("Dynamic.kt")
    public void testDynamic() throws Exception {
        runTest("testData/highlighter/Dynamic.kt");
    }

    @TestMetadata("Enums.kt")
    public void testEnums() throws Exception {
        runTest("testData/highlighter/Enums.kt");
    }

    @TestMetadata("Field.kt")
    public void testField() throws Exception {
        runTest("testData/highlighter/Field.kt");
    }

    @TestMetadata("Functions.kt")
    public void testFunctions() throws Exception {
        runTest("testData/highlighter/Functions.kt");
    }

    @TestMetadata("InaccessibleExpression.kt")
    public void testInaccessibleExpression() throws Exception {
        runTest("testData/highlighter/InaccessibleExpression.kt");
    }

    @TestMetadata("InvokeCall.kt")
    public void testInvokeCall() throws Exception {
        runTest("testData/highlighter/InvokeCall.kt");
    }

    @TestMetadata("JavaTypes.kt")
    public void testJavaTypes() throws Exception {
        runTest("testData/highlighter/JavaTypes.kt");
    }

    @TestMetadata("KDoc.kt")
    public void testKDoc() throws Exception {
        runTest("testData/highlighter/KDoc.kt");
    }

    @TestMetadata("KotlinInjection.kt")
    public void testKotlinInjection() throws Exception {
        runTest("testData/highlighter/KotlinInjection.kt");
    }

    @TestMetadata("Labels.kt")
    public void testLabels() throws Exception {
        runTest("testData/highlighter/Labels.kt");
    }

    @TestMetadata("NamedArguments.kt")
    public void testNamedArguments() throws Exception {
        runTest("testData/highlighter/NamedArguments.kt");
    }

    @TestMetadata("NonNullAssertion.kt")
    public void testNonNullAssertion() throws Exception {
        runTest("testData/highlighter/NonNullAssertion.kt");
    }

    @TestMetadata("Object.kt")
    public void testObject() throws Exception {
        runTest("testData/highlighter/Object.kt");
    }

    @TestMetadata("PropertiesWithPropertyDeclarations.kt")
    public void testPropertiesWithPropertyDeclarations() throws Exception {
        runTest("testData/highlighter/PropertiesWithPropertyDeclarations.kt");
    }

    @TestMetadata("RecursiveTypes.kt")
    public void testRecursiveTypes() throws Exception {
        runTest("testData/highlighter/RecursiveTypes.kt");
    }

    @TestMetadata("SmartCast.kt")
    public void testSmartCast() throws Exception {
        runTest("testData/highlighter/SmartCast.kt");
    }
}
// Second generated bucket of syntax-highlighting tests; same delegation
// pattern as TestBucket001. Generated code: keep in sync with the generator.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/highlighter")
public static class TestBucket002 extends AbstractHighlightingTest {
// Runs a single highlighting test for the given test-data file.
private void runTest(String testDataFilePath) throws Exception {
KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
}
@TestMetadata("Suspend.kt")
public void testSuspend() throws Exception {
runTest("testData/highlighter/Suspend.kt");
}
@TestMetadata("SyntheticExtensionProperty.kt")
public void testSyntheticExtensionProperty() throws Exception {
runTest("testData/highlighter/SyntheticExtensionProperty.kt");
}
@TestMetadata("Todo.kt")
public void testTodo() throws Exception {
runTest("testData/highlighter/Todo.kt");
}
@TestMetadata("TopLevelDestructuring.kt")
public void testTopLevelDestructuring() throws Exception {
runTest("testData/highlighter/TopLevelDestructuring.kt");
}
@TestMetadata("TopLevelOpenSuspendFun.kt")
public void testTopLevelOpenSuspendFun() throws Exception {
runTest("testData/highlighter/TopLevelOpenSuspendFun.kt");
}
@TestMetadata("TypeAlias.kt")
public void testTypeAlias() throws Exception {
runTest("testData/highlighter/TypeAlias.kt");
}
@TestMetadata("TypeAlias2.kt")
public void testTypeAlias2() throws Exception {
runTest("testData/highlighter/TypeAlias2.kt");
}
@TestMetadata("TypesAndAnnotations.kt")
public void testTypesAndAnnotations() throws Exception {
runTest("testData/highlighter/TypesAndAnnotations.kt");
}
@TestMetadata("Variables.kt")
public void testVariables() throws Exception {
runTest("testData/highlighter/Variables.kt");
}
@TestMetadata("VariablesAsFunctions.kt")
public void testVariablesAsFunctions() throws Exception {
runTest("testData/highlighter/VariablesAsFunctions.kt");
}
}
}
}
| |
/*
* Copyright (c) 2008, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/* @test
* @bug 4607272 6842687 6878369 6944810 7023403
* @summary Unit test for AsynchronousSocketChannel
* @run main Basic -skipSlowConnectTest
*/
import java.nio.ByteBuffer;
import java.nio.channels.*;
import static java.net.StandardSocketOptions.*;
import java.net.*;
import java.util.Random;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
import java.io.Closeable;
import java.io.IOException;
/**
 * Unit test for AsynchronousSocketChannel. Exercises bind, socket options,
 * connect, asynchronous close while an operation is pending, cancellation,
 * scattering/gathering reads and writes, shutdown, and read/write timeouts,
 * all against a minimal loopback {@link Server}.
 *
 * Fixes relative to the previous revision: the TCP_NODELAY failure message
 * wrongly said "SO_KEEPALIVE did not change", and the first gathering-write
 * completion handler in testWrite2 wrongly said "No bytes read".
 */
public class Basic {
    static final Random rand = new Random();

    // set by the -skipSlowConnectTest flag; skips the connect-to-probably-
    // nonexistent-host test, which may take a long time to fail
    static boolean skipSlowConnectTest = false;

    public static void main(String[] args) throws Exception {
        for (String arg: args) {
            switch (arg) {
                case "-skipSlowConnectTest" :
                    skipSlowConnectTest = true;
                    break;
                default:
                    throw new RuntimeException("Unrecognized argument: " + arg);
            }
        }
        testBind();
        testSocketOptions();
        testConnect();
        testCloseWhenPending();
        testCancel();
        testRead1();
        testRead2();
        testRead3();
        testWrite1();
        testWrite2();
        // skip timeout tests until 7052549 is fixed
        if (!System.getProperty("os.name").startsWith("Windows"))
            testTimeout();
        testShutdown();
    }

    /**
     * Minimal loopback server bound to an ephemeral port; used as the peer
     * for the asynchronous channel under test.
     */
    static class Server implements Closeable {
        private final ServerSocketChannel ssc;
        private final InetSocketAddress address;

        Server() throws IOException {
            ssc = ServerSocketChannel.open().bind(new InetSocketAddress(0));
            InetAddress lh = InetAddress.getLocalHost();
            int port = ((InetSocketAddress)(ssc.getLocalAddress())).getPort();
            address = new InetSocketAddress(lh, port);
        }

        InetSocketAddress address() {
            return address;
        }

        SocketChannel accept() throws IOException {
            return ssc.accept();
        }

        public void close() throws IOException {
            ssc.close();
        }
    }

    /** Tests bind: initial null local address, wildcard bind, re-bind, closed channel. */
    static void testBind() throws Exception {
        System.out.println("-- bind --");
        try (AsynchronousSocketChannel ch = AsynchronousSocketChannel.open()) {
            if (ch.getLocalAddress() != null)
                throw new RuntimeException("Local address should be 'null'");
            ch.bind(new InetSocketAddress(0));
            // check local address after binding
            InetSocketAddress local = (InetSocketAddress)ch.getLocalAddress();
            if (local.getPort() == 0)
                throw new RuntimeException("Unexpected port");
            if (!local.getAddress().isAnyLocalAddress())
                throw new RuntimeException("Not bound to a wildcard address");
            // try to re-bind
            try {
                ch.bind(new InetSocketAddress(0));
                throw new RuntimeException("AlreadyBoundException expected");
            } catch (AlreadyBoundException x) {
            }
        }
        // check ClosedChannelException
        AsynchronousSocketChannel ch = AsynchronousSocketChannel.open();
        ch.close();
        try {
            ch.bind(new InetSocketAddress(0));
            throw new RuntimeException("ClosedChannelException expected");
        } catch (ClosedChannelException x) {
        }
    }

    /** Tests getting/setting of the standard socket options. */
    static void testSocketOptions() throws Exception {
        System.out.println("-- socket options --");
        try (AsynchronousSocketChannel ch = AsynchronousSocketChannel.open()) {
            ch.setOption(SO_RCVBUF, 128*1024)
              .setOption(SO_SNDBUF, 128*1024)
              .setOption(SO_REUSEADDR, true);
            // check SO_SNDBUF/SO_RCVBUF limits
            int before, after;
            before = ch.getOption(SO_SNDBUF);
            after = ch.setOption(SO_SNDBUF, Integer.MAX_VALUE).getOption(SO_SNDBUF);
            if (after < before)
                throw new RuntimeException("setOption caused SO_SNDBUF to decrease");
            before = ch.getOption(SO_RCVBUF);
            after = ch.setOption(SO_RCVBUF, Integer.MAX_VALUE).getOption(SO_RCVBUF);
            if (after < before)
                throw new RuntimeException("setOption caused SO_RCVBUF to decrease");
            ch.bind(new InetSocketAddress(0));
            // default values
            if (ch.getOption(SO_KEEPALIVE))
                throw new RuntimeException("Default of SO_KEEPALIVE should be 'false'");
            if (ch.getOption(TCP_NODELAY))
                throw new RuntimeException("Default of TCP_NODELAY should be 'false'");
            // set and check
            if (!ch.setOption(SO_KEEPALIVE, true).getOption(SO_KEEPALIVE))
                throw new RuntimeException("SO_KEEPALIVE did not change");
            if (!ch.setOption(TCP_NODELAY, true).getOption(TCP_NODELAY))
                throw new RuntimeException("TCP_NODELAY did not change");
            // read others (can't check as actual value is implementation dependent)
            ch.getOption(SO_RCVBUF);
            ch.getOption(SO_SNDBUF);
        }
    }

    /** Tests connect: address checks, double connect, closed channel, failed connect. */
    static void testConnect() throws Exception {
        System.out.println("-- connect --");
        SocketAddress address;
        try (Server server = new Server()) {
            address = server.address();
            // connect to server and check local/remote addresses
            try (AsynchronousSocketChannel ch = AsynchronousSocketChannel.open()) {
                ch.connect(address).get();
                // check local address
                if (ch.getLocalAddress() == null)
                    throw new RuntimeException("Not bound to local address");
                // check remote address
                InetSocketAddress remote = (InetSocketAddress)ch.getRemoteAddress();
                if (remote.getPort() != server.address().getPort())
                    throw new RuntimeException("Connected to unexpected port");
                if (!remote.getAddress().equals(server.address().getAddress()))
                    throw new RuntimeException("Connected to unexpected address");
                // try to connect again
                try {
                    ch.connect(server.address()).get();
                    throw new RuntimeException("AlreadyConnectedException expected");
                } catch (AlreadyConnectedException x) {
                }
                // clean-up
                server.accept().close();
            }
            // check that connect fails with ClosedChannelException
            AsynchronousSocketChannel ch = AsynchronousSocketChannel.open();
            ch.close();
            try {
                ch.connect(server.address()).get();
                throw new RuntimeException("ExecutionException expected");
            } catch (ExecutionException x) {
                if (!(x.getCause() instanceof ClosedChannelException))
                    throw new RuntimeException("Cause of ClosedChannelException expected");
            }
            final AtomicReference<Throwable> connectException = new AtomicReference<>();
            ch.connect(server.address(), (Void)null, new CompletionHandler<Void,Void>() {
                public void completed(Void result, Void att) {
                }
                public void failed(Throwable exc, Void att) {
                    connectException.set(exc);
                }
            });
            while (connectException.get() == null) {
                Thread.sleep(100);
            }
            if (!(connectException.get() instanceof ClosedChannelException))
                throw new RuntimeException("ClosedChannelException expected");
        }
        // test that failure to connect closes the channel
        try (AsynchronousSocketChannel ch = AsynchronousSocketChannel.open()) {
            try {
                ch.connect(address).get();
            } catch (ExecutionException x) {
                // failed to establish connection
                if (ch.isOpen())
                    throw new RuntimeException("Channel should be closed");
            }
        }
        // repeat test by connecting to a (probably) non-existent host. This
        // improves the chance that the connect will not fail immediately.
        if (!skipSlowConnectTest) {
            try (AsynchronousSocketChannel ch = AsynchronousSocketChannel.open()) {
                try {
                    ch.connect(genSocketAddress()).get();
                } catch (ExecutionException x) {
                    // failed to establish connection
                    if (ch.isOpen())
                        throw new RuntimeException("Channel should be closed");
                }
            }
        }
    }

    /** Tests that closing the channel completes pending connect/read/write operations. */
    static void testCloseWhenPending() throws Exception {
        System.out.println("-- asynchronous close when connecting --");
        AsynchronousSocketChannel ch;
        // asynchronous close while connecting
        ch = AsynchronousSocketChannel.open();
        Future<Void> connectResult = ch.connect(genSocketAddress());
        // give time to initiate the connect (SYN)
        Thread.sleep(50);
        // close
        ch.close();
        // check that exception is thrown in timely manner
        try {
            connectResult.get(5, TimeUnit.SECONDS);
        } catch (TimeoutException x) {
            throw new RuntimeException("AsynchronousCloseException not thrown");
        } catch (ExecutionException x) {
            // expected
        }
        System.out.println("-- asynchronous close when reading --");
        try (Server server = new Server()) {
            ch = AsynchronousSocketChannel.open();
            ch.connect(server.address()).get();
            ByteBuffer dst = ByteBuffer.allocateDirect(100);
            Future<Integer> result = ch.read(dst);
            // attempt a second read - should fail with ReadPendingException
            ByteBuffer buf = ByteBuffer.allocateDirect(100);
            try {
                ch.read(buf);
                throw new RuntimeException("ReadPendingException expected");
            } catch (ReadPendingException x) {
            }
            // close channel (should cause initial read to complete)
            ch.close();
            server.accept().close();
            // check that AsynchronousCloseException is thrown
            try {
                result.get();
                throw new RuntimeException("Should not read");
            } catch (ExecutionException x) {
                if (!(x.getCause() instanceof AsynchronousCloseException))
                    throw new RuntimeException(x);
            }
            System.out.println("-- asynchronous close when writing --");
            ch = AsynchronousSocketChannel.open();
            ch.connect(server.address()).get();
            final AtomicReference<Throwable> writeException =
                new AtomicReference<Throwable>();
            // write bytes to fill socket buffer
            ch.write(genBuffer(), ch, new CompletionHandler<Integer,AsynchronousSocketChannel>() {
                public void completed(Integer result, AsynchronousSocketChannel ch) {
                    ch.write(genBuffer(), ch, this);
                }
                public void failed(Throwable x, AsynchronousSocketChannel ch) {
                    writeException.set(x);
                }
            });
            // give time for socket buffer to fill up.
            Thread.sleep(5*1000);
            // attempt a concurrent write - should fail with WritePendingException
            try {
                ch.write(genBuffer());
                throw new RuntimeException("WritePendingException expected");
            } catch (WritePendingException x) {
            }
            // close channel - should cause initial write to complete
            ch.close();
            server.accept().close();
            // wait for exception
            while (writeException.get() == null) {
                Thread.sleep(100);
            }
            if (!(writeException.get() instanceof AsynchronousCloseException))
                throw new RuntimeException("AsynchronousCloseException expected");
        }
    }

    /** Tests Future.cancel of a pending read, with and without interruption. */
    static void testCancel() throws Exception {
        System.out.println("-- cancel --");
        try (Server server = new Server()) {
            for (int i=0; i<2; i++) {
                boolean mayInterruptIfRunning = (i == 0) ? false : true;
                // establish loopback connection
                AsynchronousSocketChannel ch = AsynchronousSocketChannel.open();
                ch.connect(server.address()).get();
                SocketChannel peer = server.accept();
                // start read operation
                ByteBuffer buf = ByteBuffer.allocate(1);
                Future<Integer> res = ch.read(buf);
                // cancel operation
                boolean cancelled = res.cancel(mayInterruptIfRunning);
                // check post-conditions
                if (!res.isDone())
                    throw new RuntimeException("isDone should return true");
                if (res.isCancelled() != cancelled)
                    throw new RuntimeException("isCancelled not consistent");
                try {
                    res.get();
                    throw new RuntimeException("CancellationException expected");
                } catch (CancellationException x) {
                }
                try {
                    res.get(1, TimeUnit.SECONDS);
                    throw new RuntimeException("CancellationException expected");
                } catch (CancellationException x) {
                }
                // check that the cancel doesn't impact writing to the channel
                if (!mayInterruptIfRunning) {
                    buf = ByteBuffer.wrap("a".getBytes());
                    ch.write(buf).get();
                }
                ch.close();
                peer.close();
            }
        }
    }

    /** Tests simple reads: zero-remaining buffer, read-to-EOF, closed channel. */
    static void testRead1() throws Exception {
        System.out.println("-- read (1) --");
        try (Server server = new Server()) {
            final AsynchronousSocketChannel ch = AsynchronousSocketChannel.open();
            ch.connect(server.address()).get();
            // read with 0 bytes remaining should complete immediately
            ByteBuffer buf = ByteBuffer.allocate(1);
            buf.put((byte)0);
            int n = ch.read(buf).get();
            if (n != 0)
                throw new RuntimeException("0 expected");
            // write bytes and close connection
            ByteBuffer src = genBuffer();
            try (SocketChannel sc = server.accept()) {
                sc.setOption(SO_SNDBUF, src.remaining());
                while (src.hasRemaining())
                    sc.write(src);
            }
            // reads should complete immediately
            final ByteBuffer dst = ByteBuffer.allocateDirect(src.capacity() + 100);
            final CountDownLatch latch = new CountDownLatch(1);
            ch.read(dst, (Void)null, new CompletionHandler<Integer,Void>() {
                public void completed(Integer result, Void att) {
                    int n = result;
                    if (n > 0) {
                        ch.read(dst, (Void)null, this);
                    } else {
                        latch.countDown();
                    }
                }
                public void failed(Throwable exc, Void att) {
                }
            });
            latch.await();
            // check buffers
            src.flip();
            dst.flip();
            if (!src.equals(dst)) {
                throw new RuntimeException("Contents differ");
            }
            // close channel
            ch.close();
            // check read fails with ClosedChannelException
            try {
                ch.read(dst).get();
                throw new RuntimeException("ExecutionException expected");
            } catch (ExecutionException x) {
                if (!(x.getCause() instanceof ClosedChannelException))
                    throw new RuntimeException("Cause of ClosedChannelException expected");
            }
        }
    }

    /** Tests reads that complete asynchronously while the peer trickles data. */
    static void testRead2() throws Exception {
        System.out.println("-- read (2) --");
        try (Server server = new Server()) {
            final AsynchronousSocketChannel ch = AsynchronousSocketChannel.open();
            ch.connect(server.address()).get();
            SocketChannel sc = server.accept();
            ByteBuffer src = genBuffer();
            // read until the buffer is full
            final ByteBuffer dst = ByteBuffer.allocateDirect(src.capacity());
            final CountDownLatch latch = new CountDownLatch(1);
            ch.read(dst, (Void)null, new CompletionHandler<Integer,Void>() {
                public void completed(Integer result, Void att) {
                    if (dst.hasRemaining()) {
                        ch.read(dst, (Void)null, this);
                    } else {
                        latch.countDown();
                    }
                }
                public void failed(Throwable exc, Void att) {
                }
            });
            // trickle the writing
            do {
                int rem = src.remaining();
                int size = (rem <= 100) ? rem : 50 + rand.nextInt(rem - 100);
                ByteBuffer buf = ByteBuffer.allocate(size);
                for (int i=0; i<size; i++)
                    buf.put(src.get());
                buf.flip();
                Thread.sleep(50 + rand.nextInt(1500));
                while (buf.hasRemaining())
                    sc.write(buf);
            } while (src.hasRemaining());
            // wait until asynchronous reading has completed
            latch.await();
            // check buffers
            src.flip();
            dst.flip();
            if (!src.equals(dst)) {
                throw new RuntimeException("Contents differ");
            }
            sc.close();
            ch.close();
        }
    }

    // exercise scattering read
    static void testRead3() throws Exception {
        System.out.println("-- read (3) --");
        try (Server server = new Server()) {
            final AsynchronousSocketChannel ch = AsynchronousSocketChannel.open();
            ch.connect(server.address()).get();
            SocketChannel sc = server.accept();
            ByteBuffer[] dsts = new ByteBuffer[3];
            for (int i=0; i<dsts.length; i++) {
                dsts[i] = ByteBuffer.allocateDirect(100);
            }
            // scattering read that completes asynchronously
            final CountDownLatch l1 = new CountDownLatch(1);
            ch.read(dsts, 0, dsts.length, 0L, TimeUnit.SECONDS, (Void)null,
                new CompletionHandler<Long,Void>() {
                    public void completed(Long result, Void att) {
                        long n = result;
                        if (n <= 0)
                            throw new RuntimeException("No bytes read");
                        l1.countDown();
                    }
                    public void failed(Throwable exc, Void att) {
                    }
                });
            // write some bytes
            sc.write(genBuffer());
            // read should now complete
            l1.await();
            // write more bytes
            sc.write(genBuffer());
            // read should complete immediately
            for (int i=0; i<dsts.length; i++) {
                dsts[i].rewind();
            }
            final CountDownLatch l2 = new CountDownLatch(1);
            ch.read(dsts, 0, dsts.length, 0L, TimeUnit.SECONDS, (Void)null,
                new CompletionHandler<Long,Void>() {
                    public void completed(Long result, Void att) {
                        long n = result;
                        if (n <= 0)
                            throw new RuntimeException("No bytes read");
                        l2.countDown();
                    }
                    public void failed(Throwable exc, Void att) {
                    }
                });
            l2.await();
            ch.close();
            sc.close();
        }
    }

    /** Tests simple writes: zero-remaining buffer, write-all, closed channel. */
    static void testWrite1() throws Exception {
        System.out.println("-- write (1) --");
        try (Server server = new Server()) {
            final AsynchronousSocketChannel ch = AsynchronousSocketChannel.open();
            ch.connect(server.address()).get();
            SocketChannel sc = server.accept();
            // write with 0 bytes remaining should complete immediately
            ByteBuffer buf = ByteBuffer.allocate(1);
            buf.put((byte)0);
            int n = ch.write(buf).get();
            if (n != 0)
                throw new RuntimeException("0 expected");
            // write all bytes and close connection when done
            final ByteBuffer src = genBuffer();
            ch.write(src, (Void)null, new CompletionHandler<Integer,Void>() {
                public void completed(Integer result, Void att) {
                    if (src.hasRemaining()) {
                        ch.write(src, (Void)null, this);
                    } else {
                        try {
                            ch.close();
                        } catch (IOException ignore) { }
                    }
                }
                public void failed(Throwable exc, Void att) {
                }
            });
            // read to EOF or buffer full
            ByteBuffer dst = ByteBuffer.allocateDirect(src.capacity() + 100);
            do {
                n = sc.read(dst);
            } while (n > 0);
            sc.close();
            // check buffers
            src.flip();
            dst.flip();
            if (!src.equals(dst)) {
                throw new RuntimeException("Contents differ");
            }
            // check that further I/O on the (now closed) channel fails with
            // ClosedChannelException as the cause
            try {
                ch.read(dst).get();
                throw new RuntimeException("ExecutionException expected");
            } catch (ExecutionException x) {
                if (!(x.getCause() instanceof ClosedChannelException))
                    throw new RuntimeException("Cause of ClosedChannelException expected");
            }
        }
    }

    // exercise gathering write
    static void testWrite2() throws Exception {
        System.out.println("-- write (2) --");
        try (Server server = new Server()) {
            final AsynchronousSocketChannel ch = AsynchronousSocketChannel.open();
            ch.connect(server.address()).get();
            SocketChannel sc = server.accept();
            // number of bytes written
            final AtomicLong bytesWritten = new AtomicLong(0);
            // write buffers (should complete immediately)
            ByteBuffer[] srcs = genBuffers(1);
            final CountDownLatch l1 = new CountDownLatch(1);
            ch.write(srcs, 0, srcs.length, 0L, TimeUnit.SECONDS, (Void)null,
                new CompletionHandler<Long,Void>() {
                    public void completed(Long result, Void att) {
                        long n = result;
                        if (n <= 0)
                            throw new RuntimeException("No bytes written");
                        bytesWritten.addAndGet(n);
                        l1.countDown();
                    }
                    public void failed(Throwable exc, Void att) {
                    }
                });
            l1.await();
            // set to true to signal that no more buffers should be written
            final AtomicBoolean continueWriting = new AtomicBoolean(true);
            // write until socket buffer is full so as to create the conditions
            // for when a write does not complete immediately
            srcs = genBuffers(1);
            ch.write(srcs, 0, srcs.length, 0L, TimeUnit.SECONDS, (Void)null,
                new CompletionHandler<Long,Void>() {
                    public void completed(Long result, Void att) {
                        long n = result;
                        if (n <= 0)
                            throw new RuntimeException("No bytes written");
                        bytesWritten.addAndGet(n);
                        if (continueWriting.get()) {
                            ByteBuffer[] srcs = genBuffers(8);
                            ch.write(srcs, 0, srcs.length, 0L, TimeUnit.SECONDS,
                                (Void)null, this);
                        }
                    }
                    public void failed(Throwable exc, Void att) {
                    }
                });
            // give time for socket buffer to fill up.
            Thread.sleep(5*1000);
            // signal handler to stop further writing
            continueWriting.set(false);
            // read until done
            ByteBuffer buf = ByteBuffer.allocateDirect(4096);
            long total = 0L;
            do {
                int n = sc.read(buf);
                if (n <= 0)
                    throw new RuntimeException("No bytes read");
                buf.rewind();
                total += n;
            } while (total < bytesWritten.get());
            ch.close();
            sc.close();
        }
    }

    /** Tests shutdownInput/shutdownOutput semantics. */
    static void testShutdown() throws Exception {
        System.out.println("-- shutdown--");
        try (Server server = new Server();
             AsynchronousSocketChannel ch = AsynchronousSocketChannel.open())
        {
            ch.connect(server.address()).get();
            try (SocketChannel peer = server.accept()) {
                ByteBuffer buf = ByteBuffer.allocateDirect(1000);
                int n;
                // check read
                ch.shutdownInput();
                n = ch.read(buf).get();
                if (n != -1)
                    throw new RuntimeException("-1 expected");
                // check read with a partially filled buffer
                buf.put(new byte[100]);
                n = ch.read(buf).get();
                if (n != -1)
                    throw new RuntimeException("-1 expected");
                // check write
                ch.shutdownOutput();
                try {
                    ch.write(buf).get();
                    throw new RuntimeException("ClosedChannelException expected");
                } catch (ExecutionException x) {
                    if (!(x.getCause() instanceof ClosedChannelException))
                        throw new RuntimeException("ClosedChannelException expected");
                }
            }
        }
    }

    /** Drives testTimeout(long, TimeUnit) over boundary and positive timeout values. */
    static void testTimeout() throws Exception {
        System.out.println("-- timeouts --");
        testTimeout(Integer.MIN_VALUE, TimeUnit.SECONDS);
        testTimeout(-1L, TimeUnit.SECONDS);
        testTimeout(0L, TimeUnit.SECONDS);
        testTimeout(2L, TimeUnit.SECONDS);
    }

    /**
     * Tests read/write with the given timeout: a positive timeout must fail
     * with InterruptedByTimeoutException (and poison further I/O); a
     * non-positive timeout means "no timeout" and must not fail.
     */
    static void testTimeout(final long timeout, final TimeUnit unit) throws Exception {
        try (Server server = new Server()) {
            AsynchronousSocketChannel ch = AsynchronousSocketChannel.open();
            ch.connect(server.address()).get();
            ByteBuffer dst = ByteBuffer.allocate(512);
            final AtomicReference<Throwable> readException = new AtomicReference<Throwable>();
            // this read should timeout if value is > 0
            ch.read(dst, timeout, unit, null, new CompletionHandler<Integer,Void>() {
                public void completed(Integer result, Void att) {
                    readException.set(new RuntimeException("Should not complete"));
                }
                public void failed(Throwable exc, Void att) {
                    readException.set(exc);
                }
            });
            if (timeout > 0L) {
                // wait for exception
                while (readException.get() == null) {
                    Thread.sleep(100);
                }
                if (!(readException.get() instanceof InterruptedByTimeoutException))
                    throw new RuntimeException("InterruptedByTimeoutException expected");
                // after a timeout then further reading should throw unspecified runtime exception
                boolean exceptionThrown = false;
                try {
                    ch.read(dst);
                } catch (RuntimeException x) {
                    exceptionThrown = true;
                }
                if (!exceptionThrown)
                    throw new RuntimeException("RuntimeException expected after timeout.");
            } else {
                Thread.sleep(1000);
                Throwable exc = readException.get();
                if (exc != null)
                    throw new RuntimeException(exc);
            }
            final AtomicReference<Throwable> writeException = new AtomicReference<Throwable>();
            // write bytes to fill socket buffer
            ch.write(genBuffer(), timeout, unit, ch,
                new CompletionHandler<Integer,AsynchronousSocketChannel>()
            {
                public void completed(Integer result, AsynchronousSocketChannel ch) {
                    ch.write(genBuffer(), timeout, unit, ch, this);
                }
                public void failed(Throwable exc, AsynchronousSocketChannel ch) {
                    writeException.set(exc);
                }
            });
            if (timeout > 0) {
                // wait for exception
                while (writeException.get() == null) {
                    Thread.sleep(100);
                }
                if (!(writeException.get() instanceof InterruptedByTimeoutException))
                    throw new RuntimeException("InterruptedByTimeoutException expected");
                // after a timeout then further writing should throw unspecified runtime exception
                boolean exceptionThrown = false;
                try {
                    ch.write(genBuffer());
                } catch (RuntimeException x) {
                    exceptionThrown = true;
                }
                if (!exceptionThrown)
                    throw new RuntimeException("RuntimeException expected after timeout.");
            } else {
                Thread.sleep(1000);
                Throwable exc = writeException.get();
                if (exc != null)
                    throw new RuntimeException(exc);
            }
            // clean-up
            server.accept().close();
            ch.close();
        }
    }

    // returns ByteBuffer with random bytes; size in [1024, 17023], randomly
    // direct or heap-backed
    static ByteBuffer genBuffer() {
        int size = 1024 + rand.nextInt(16000);
        byte[] buf = new byte[size];
        rand.nextBytes(buf);
        boolean useDirect = rand.nextBoolean();
        if (useDirect) {
            ByteBuffer bb = ByteBuffer.allocateDirect(buf.length);
            bb.put(buf);
            bb.flip();
            return bb;
        } else {
            return ByteBuffer.wrap(buf);
        }
    }

    // return ByteBuffer[] with random bytes; length is 1 when max <= 1,
    // otherwise 1 + rand.nextInt(max)
    static ByteBuffer[] genBuffers(int max) {
        int len = 1;
        if (max > 1)
            len += rand.nextInt(max);
        ByteBuffer[] bufs = new ByteBuffer[len];
        for (int i=0; i<len; i++)
            bufs[i] = genBuffer();
        return bufs;
    }

    // return random SocketAddress in the private 10.0.0.0/8 range (numeric,
    // so no DNS lookup is performed)
    static SocketAddress genSocketAddress() {
        StringBuilder sb = new StringBuilder("10.");
        sb.append(rand.nextInt(256));
        sb.append('.');
        sb.append(rand.nextInt(256));
        sb.append('.');
        sb.append(rand.nextInt(256));
        InetAddress rh;
        try {
            rh = InetAddress.getByName(sb.toString());
        } catch (UnknownHostException x) {
            throw new InternalError("Should not happen");
        }
        return new InetSocketAddress(rh, rand.nextInt(65535)+1);
    }
}
| |
package org.incode.module.document.dom.impl.paperclips;
import java.util.List;
import javax.inject.Inject;
import org.apache.isis.applib.annotation.DomainService;
import org.apache.isis.applib.annotation.NatureOfService;
import org.apache.isis.applib.annotation.Programmatic;
import org.apache.isis.applib.query.QueryDefault;
import org.apache.isis.applib.services.bookmark.Bookmark;
import org.apache.isis.applib.services.bookmark.BookmarkService;
import org.apache.isis.applib.services.eventbus.AbstractDomainEvent;
import org.apache.isis.applib.services.eventbus.ActionDomainEvent;
import org.apache.isis.applib.services.eventbus.EventBusService;
import org.apache.isis.applib.services.repository.RepositoryService;
import org.apache.isis.applib.services.xactn.TransactionService;
import org.incode.module.document.dom.impl.docs.Document;
import org.incode.module.document.dom.impl.docs.DocumentAbstract;
@DomainService(
nature = NatureOfService.DOMAIN,
repositoryFor = Paperclip.class
)
public class PaperclipRepository {
/**
 * Identifier under which this domain service is known to the framework.
 */
public String getId() {
    final String serviceId = "incodeDocuments.PaperclipRepository";
    return serviceId;
}
//region > findByDocument (programmatic)
/**
 * All {@link Paperclip}s referencing the given document.
 */
@Programmatic
public List<Paperclip> findByDocument(final DocumentAbstract document) {
    final QueryDefault<Paperclip> query = new QueryDefault<>(
            Paperclip.class,
            "findByDocument",
            "document", document);
    return repositoryService.allMatches(query);
}
//endregion
//region > findByAttachedTo (programmatic)
/**
 * All {@link Paperclip}s attached to the given object, or {@code null}
 * if no bookmark can be obtained for that object.
 */
@Programmatic
public List<Paperclip> findByAttachedTo(final Object attachedTo) {
    final Bookmark bookmark = bookmarkService.bookmarkFor(attachedTo);
    if (bookmark == null) {
        return null;
    }
    final QueryDefault<Paperclip> query = new QueryDefault<>(
            Paperclip.class,
            "findByAttachedTo",
            "attachedToStr", bookmark.toString());
    return repositoryService.allMatches(query);
}
//endregion
//region > findByAttachedToAndRoleName (programmatic)
/**
 * All {@link Paperclip}s attached to the given object under the given role,
 * or {@code null} if no bookmark can be obtained for that object.
 */
@Programmatic
public List<Paperclip> findByAttachedToAndRoleName(
        final Object attachedTo,
        final String roleName) {
    final Bookmark bookmark = bookmarkService.bookmarkFor(attachedTo);
    if (bookmark == null) {
        return null;
    }
    final QueryDefault<Paperclip> query = new QueryDefault<>(
            Paperclip.class,
            "findByAttachedToAndRoleName",
            "attachedToStr", bookmark.toString(),
            "roleName", roleName);
    return repositoryService.allMatches(query);
}
//endregion
//region > findByDocumentAndAttachedTo (programmatic)
/**
 * All {@link Paperclip}s linking the given document to the given object,
 * or {@code null} if no bookmark can be obtained for that object.
 */
@Programmatic
public List<Paperclip> findByDocumentAndAttachedTo(
        final DocumentAbstract<?> document,
        final Object attachedTo) {
    final Bookmark bookmark = bookmarkService.bookmarkFor(attachedTo);
    if (bookmark == null) {
        return null;
    }
    final QueryDefault<Paperclip> query = new QueryDefault<>(
            Paperclip.class,
            "findByDocumentAndAttachedTo",
            "document", document,
            "attachedToStr", bookmark.toString());
    return repositoryService.allMatches(query);
}
//endregion
//region > findByDocumentAndAttachedToAndRoleName (programmatic)
/**
 * The first {@link Paperclip} linking the given document to the given object
 * under the given role, or {@code null} if none exists or no bookmark can be
 * obtained for that object.
 */
@Programmatic
public Paperclip findByDocumentAndAttachedToAndRoleName(
        final DocumentAbstract<?> document,
        final Object attachedTo,
        final String roleName) {
    final Bookmark bookmark = bookmarkService.bookmarkFor(attachedTo);
    if (bookmark == null) {
        return null;
    }
    final QueryDefault<Paperclip> query = new QueryDefault<>(
            Paperclip.class,
            "findByDocumentAndAttachedToAndRoleName",
            "document", document,
            "attachedToStr", bookmark.toString(),
            "roleName", roleName);
    return repositoryService.firstMatch(query);
}
//endregion
//region > canAttach (programmatic)
/**
 * Whether a {@link Paperclip} subtype is registered (via the
 * {@link SubtypeProvider} SPI) for the candidate object's class.
 */
@Programmatic
public boolean canAttach(
        final Object candidateToAttachTo) {
    return subtypeClassForElseNull(candidateToAttachTo) != null;
}
//endregion
//region > attach (programmatic)
// Domain event posted (already in EXECUTED phase) after a new paperclip
// has been persisted.
public static class PaperclipAttachDomainEvent
extends ActionDomainEvent<Paperclip> {}
/**
* This is an idempotent operation.
* Also fires domain event in order to update derived persisted property {@link IncomingInvoice#barcode} when a new paperclip is attached
*/
@Programmatic
public Paperclip attach(
final DocumentAbstract documentAbstract,
final String roleName,
final Object attachTo) {
// idempotency: if a paperclip already links this (document, attachedTo,
// roleName) triple, return it rather than creating a duplicate
Paperclip paperclip = findByDocumentAndAttachedToAndRoleName(
documentAbstract, attachTo, roleName);
if (paperclip != null) {
return paperclip;
}
final Class<? extends Paperclip> subtype = subtypeClassFor(attachTo);
paperclip = repositoryService.instantiate(subtype);
paperclip.setDocument(documentAbstract);
paperclip.setRoleName(roleName);
if (documentAbstract instanceof Document) {
final Document document = (Document) documentAbstract;
paperclip.setDocumentCreatedAt(document.getCreatedAt());
}
// flush before bookmarking a not-yet-persisted target
// NOTE(review): presumably the flush assigns the identity that
// bookmarkFor needs; confirm, else bookmark could be null below
if (!repositoryService.isPersistent(attachTo)) {
transactionService.flushTransaction();
}
final Bookmark bookmark = bookmarkService.bookmarkFor(attachTo);
paperclip.setAttachedTo(attachTo);
paperclip.setAttachedToStr(bookmark.toString());
repositoryService.persistAndFlush(paperclip);
// fire domain event
final PaperclipAttachDomainEvent event = new PaperclipAttachDomainEvent();
event.setEventPhase(AbstractDomainEvent.Phase.EXECUTED);
event.setSource(paperclip);
eventBusService.post(event);
return paperclip;
}
//endregion
/**
 * The {@link Paperclip} subtype registered for the given object's class;
 * never {@code null} — throws if no {@link SubtypeProvider} claims it.
 */
private Class<? extends Paperclip> subtypeClassFor(final Object toAttachTo) {
    final Class<? extends Paperclip> subtype = subtypeClassForElseNull(toAttachTo);
    if (subtype == null) {
        throw new IllegalStateException(String.format(
                "No subtype of Paperclip was found for '%s'; implement the PaperclipRepository.SubtypeProvider SPI",
                toAttachTo.getClass().getName()));
    }
    return subtype;
}
/**
 * The {@link Paperclip} subtype registered for the given object's class,
 * obtained from the first {@link SubtypeProvider} that claims it;
 * {@code null} if none does.
 */
private Class<? extends Paperclip> subtypeClassForElseNull(final Object toAttachTo) {
    final Class<?> domainClass = toAttachTo.getClass();
    for (final SubtypeProvider subtypeProvider : subtypeProviders) {
        final Class<? extends Paperclip> subtype = subtypeProvider.subtypeFor(domainClass);
        if (subtype == null) {
            continue;
        }
        return subtype;
    }
    return null;
}
//endregion
//region > paperclipAttaches (programmatic)
/**
 * The first object attached (via a paperclip) to the given document that is
 * assignable to {@code typeAttachedTo}; {@code null} if none.
 */
@Programmatic
public <T> T paperclipAttaches(final Document document, Class<T> typeAttachedTo) {
    final List<Paperclip> paperclips = findByDocument(document);
    for (Paperclip paperclip : paperclips) {
        final Object attachedTo = paperclip.getAttachedTo();
        if (typeAttachedTo.isAssignableFrom(attachedTo.getClass())) {
            // Class#cast instead of an unchecked (T) cast; the
            // isAssignableFrom guard guarantees it succeeds
            return typeAttachedTo.cast(attachedTo);
        }
    }
    return null;
}
//endregion
//region > delete, deleteIfAttachedTo
/**
 * Removes the given {@link Paperclip}, ie detaches the document from whatever it was
 * attached to.  The underlying {@link DocumentAbstract} itself is left untouched.
 */
@Programmatic
public void delete(final Paperclip paperclip) {
    repositoryService.remove(paperclip);
}
/**
 * Governs how aggressively {@link #deleteIfAttachedTo(Object, Policy)} cleans up
 * when detaching documents from a domain object.
 */
public enum Policy {
    /**
     * Delete the paperclips only; the documents themselves are left in place.
     */
    PAPERCLIPS_ONLY,
    /**
     * Delete the paperclips, and also delete the documents if they are no longer attached to any objects.
     */
    PAPERCLIPS_AND_DOCUMENTS_IF_ORPHANED
}
/**
 * Deletes all {@link Paperclip}s attached to the given domain object; the documents
 * themselves are preserved (equivalent to {@link Policy#PAPERCLIPS_ONLY}).
 */
@Programmatic
public void deleteIfAttachedTo(final Object domainObject) {
    deleteIfAttachedTo(domainObject, Policy.PAPERCLIPS_ONLY);
}
/**
 * Deletes every {@link Paperclip} attached to the given domain object; depending on the
 * {@link Policy}, also deletes each underlying document that this leaves attached to
 * nothing at all.
 */
@Programmatic
public void deleteIfAttachedTo(final Object domainObject, final Policy policy) {
    final boolean alsoDeleteOrphanedDocuments =
            policy == Policy.PAPERCLIPS_AND_DOCUMENTS_IF_ORPHANED;
    for (final Paperclip eachPaperclip : findByAttachedTo(domainObject)) {
        final DocumentAbstract underlyingDocument = eachPaperclip.getDocument();
        delete(eachPaperclip);
        if (alsoDeleteOrphanedDocuments && orphaned(underlyingDocument, domainObject)) {
            repositoryService.remove(underlyingDocument);
        }
    }
}
/**
 * Whether the given document is (via its paperclips) attached only to the given object,
 * ie would be left dangling once detached from it.  Note the comparison against
 * {@code attachedTo} is deliberately by reference identity, as in the original.
 */
private boolean orphaned(final DocumentAbstract document, final Object attachedTo) {
    for (final Paperclip eachPaperclip : findByDocument(document)) {
        final Object otherAttachedTo = eachPaperclip.getAttachedTo();
        if (otherAttachedTo != attachedTo) {
            // attached to some other object as well, so not orphaned
            return false;
        }
    }
    return true;
}
//endregion
//region > listAll - for testing only (programmatic)
/**
 * All {@link Paperclip}s known to the repository; intended for testing only.
 */
@Programmatic
public List<Paperclip> listAll() {
    return repositoryService.allInstances(Paperclip.class);
}
//endregion
//region > SubtypeProvider SPI
/**
 * SPI to be implemented (as a {@link DomainService}) for any domain object to which {@link Paperclip}s can be
 * attached.
 */
public interface SubtypeProvider {
    /**
     * @param domainObject the class of the domain object to which a document is being attached
     * @return the subtype of {@link Paperclip} to use to hold the (type-safe) paperclip of the domain object,
     *         or {@code null} if this provider does not recognise the class.
     */
    @Programmatic
    Class<? extends Paperclip> subtypeFor(Class<?> domainObject);
}
/**
 * Convenience adapter to help implement the {@link SubtypeProvider} SPI; simply returns the
 * {@link Paperclip} subtype passed into the constructor whenever the queried domain type is
 * assignable to the domain type also passed into the constructor.
 */
public abstract static class SubtypeProviderAbstract implements SubtypeProvider {
    // the domain type (or supertype) that this provider recognises
    private final Class<?> attachedToDomainType;
    // the Paperclip subtype to use for objects of that domain type
    private final Class<? extends Paperclip> attachedToSubtype;
    protected SubtypeProviderAbstract(final Class<?> attachedToDomainType, final Class<? extends Paperclip> attachedToSubtype) {
        this.attachedToDomainType = attachedToDomainType;
        this.attachedToSubtype = attachedToSubtype;
    }
    @Override
    public Class<? extends Paperclip> subtypeFor(final Class<?> domainType) {
        return attachedToDomainType.isAssignableFrom(domainType) ? attachedToSubtype : null;
    }
}
//endregion
//region > injected services
@Inject
RepositoryService repositoryService;
@Inject
TransactionService transactionService;
@Inject
BookmarkService bookmarkService;
// all registered SubtypeProvider implementations, injected as a list
@Inject
List<SubtypeProvider> subtypeProviders;
@Inject
EventBusService eventBusService;
//endregion
}
| |
/*
* Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package javax.swing;
import java.util.*;
import java.io.Serializable;
/**
* A <code>SpinnerModel</code> for sequences of <code>Date</code>s.
* The upper and lower bounds of the sequence are defined by properties called
* <code>start</code> and <code>end</code> and the size
* of the increase or decrease computed by the <code>nextValue</code>
* and <code>previousValue</code> methods is defined by a property
* called <code>calendarField</code>. The <code>start</code>
* and <code>end</code> properties can be <code>null</code> to
* indicate that the sequence has no lower or upper limit.
* <p>
* The value of the <code>calendarField</code> property must be one of the
* <code>java.util.Calendar</code> constants that specify a field
* within a <code>Calendar</code>. The <code>getNextValue</code>
* and <code>getPreviousValue</code>
* methods change the date forward or backwards by this amount.
* For example, if <code>calendarField</code> is <code>Calendar.DAY_OF_WEEK</code>,
* then <code>nextValue</code> produces a <code>Date</code> that's 24
* hours after the current <code>value</code>, and <code>previousValue</code>
* produces a <code>Date</code> that's 24 hours earlier.
* <p>
* The legal values for <code>calendarField</code> are:
* <ul>
* <li><code>Calendar.ERA</code>
* <li><code>Calendar.YEAR</code>
* <li><code>Calendar.MONTH</code>
* <li><code>Calendar.WEEK_OF_YEAR</code>
* <li><code>Calendar.WEEK_OF_MONTH</code>
* <li><code>Calendar.DAY_OF_MONTH</code>
* <li><code>Calendar.DAY_OF_YEAR</code>
* <li><code>Calendar.DAY_OF_WEEK</code>
* <li><code>Calendar.DAY_OF_WEEK_IN_MONTH</code>
* <li><code>Calendar.AM_PM</code>
* <li><code>Calendar.HOUR</code>
* <li><code>Calendar.HOUR_OF_DAY</code>
* <li><code>Calendar.MINUTE</code>
* <li><code>Calendar.SECOND</code>
* <li><code>Calendar.MILLISECOND</code>
* </ul>
* However some UIs may set the calendarField before committing the edit
* to spin the field under the cursor. If you only want one field to
* spin you can subclass and ignore the setCalendarField calls.
* <p>
* This model inherits a <code>ChangeListener</code>. The
* <code>ChangeListeners</code> are notified whenever the models
* <code>value</code>, <code>calendarField</code>,
* <code>start</code>, or <code>end</code> properties changes.
*
* @see JSpinner
* @see SpinnerModel
* @see AbstractSpinnerModel
* @see SpinnerListModel
* @see SpinnerNumberModel
* @see Calendar#add
*
* @author Hans Muller
* @since 1.4
*/
public class SpinnerDateModel extends AbstractSpinnerModel implements Serializable
{
    // Sequence bounds; typically Dates, but any Comparable whose compareTo accepts
    // a Date will do (see setStart).  null means the sequence is unbounded on that side.
    private Comparable start, end;
    // The current value, held in a Calendar so nextValue/previousValue can step it
    // with Calendar.add; getValue()/getDate() expose it as a Date.
    private Calendar value;
    // The Calendar field constant (eg Calendar.DAY_OF_MONTH) that getNextValue and
    // getPreviousValue increment or decrement by one unit.
    private int calendarField;

    // Whether calendarField is one of the Calendar field constants this model supports.
    private boolean calendarFieldOK(int calendarField) {
        switch(calendarField) {
        case Calendar.ERA:
        case Calendar.YEAR:
        case Calendar.MONTH:
        case Calendar.WEEK_OF_YEAR:
        case Calendar.WEEK_OF_MONTH:
        case Calendar.DAY_OF_MONTH:
        case Calendar.DAY_OF_YEAR:
        case Calendar.DAY_OF_WEEK:
        case Calendar.DAY_OF_WEEK_IN_MONTH:
        case Calendar.AM_PM:
        case Calendar.HOUR:
        case Calendar.HOUR_OF_DAY:
        case Calendar.MINUTE:
        case Calendar.SECOND:
        case Calendar.MILLISECOND:
            return true;
        default:
            return false;
        }
    }

    /**
     * Creates a <code>SpinnerDateModel</code> that represents a sequence of dates
     * between <code>start</code> and <code>end</code>.  The
     * <code>nextValue</code> and <code>previousValue</code> methods
     * compute elements of the sequence by advancing or reversing
     * the current date <code>value</code> by the
     * <code>calendarField</code> time unit.  For a precise description
     * of what it means to increment or decrement a <code>Calendar</code>
     * <code>field</code>, see the <code>add</code> method in
     * <code>java.util.Calendar</code>.
     * <p>
     * The <code>start</code> and <code>end</code> parameters can be
     * <code>null</code> to indicate that the range doesn't have an
     * upper or lower bound.  If <code>value</code> is <code>null</code>,
     * if <code>calendarField</code> is not a valid <code>Calendar</code>
     * field constant, or if both <code>start</code> and <code>end</code>
     * are specified and <code>minimum > maximum</code> then an
     * <code>IllegalArgumentException</code> is thrown.
     * Similarly if <code>(minimum <= value <= maximum)</code> is false,
     * an IllegalArgumentException is thrown.
     *
     * @param value the current (non <code>null</code>) value of the model
     * @param start the first date in the sequence or <code>null</code>
     * @param end the last date in the sequence or <code>null</code>
     * @param calendarField one of
     *   <ul>
     *    <li><code>Calendar.ERA</code>
     *    <li><code>Calendar.YEAR</code>
     *    <li><code>Calendar.MONTH</code>
     *    <li><code>Calendar.WEEK_OF_YEAR</code>
     *    <li><code>Calendar.WEEK_OF_MONTH</code>
     *    <li><code>Calendar.DAY_OF_MONTH</code>
     *    <li><code>Calendar.DAY_OF_YEAR</code>
     *    <li><code>Calendar.DAY_OF_WEEK</code>
     *    <li><code>Calendar.DAY_OF_WEEK_IN_MONTH</code>
     *    <li><code>Calendar.AM_PM</code>
     *    <li><code>Calendar.HOUR</code>
     *    <li><code>Calendar.HOUR_OF_DAY</code>
     *    <li><code>Calendar.MINUTE</code>
     *    <li><code>Calendar.SECOND</code>
     *    <li><code>Calendar.MILLISECOND</code>
     *   </ul>
     *
     * @throws IllegalArgumentException if <code>value</code> is
     *    <code>null</code>, if <code>calendarField</code> isn't valid,
     *    or if the following expression is
     *    false: <code>(start <= value <= end)</code>.
     *
     * @see Calendar#add
     * @see #setValue
     * @see #setStart
     * @see #setEnd
     * @see #setCalendarField
     */
    public SpinnerDateModel(Date value, Comparable start, Comparable end, int calendarField) {
        if (value == null) {
            throw new IllegalArgumentException("value is null");
        }
        if (!calendarFieldOK(calendarField)) {
            throw new IllegalArgumentException("invalid calendarField");
        }
        if (!(((start == null) || (start.compareTo(value) <= 0)) &&
              ((end == null) || (end.compareTo(value) >= 0)))) {
            throw new IllegalArgumentException("(start <= value <= end) is false");
        }
        // the Date argument is copied into an internal Calendar; the caller's Date is
        // not retained, so later mutation of it cannot affect the model
        this.value = Calendar.getInstance();
        this.start = start;
        this.end = end;
        this.calendarField = calendarField;
        this.value.setTime(value);
    }

    /**
     * Constructs a <code>SpinnerDateModel</code> whose initial
     * <code>value</code> is the current date, <code>calendarField</code>
     * is equal to <code>Calendar.DAY_OF_MONTH</code>, and for which
     * there are no <code>start</code>/<code>end</code> limits.
     */
    public SpinnerDateModel() {
        this(new Date(), null, null, Calendar.DAY_OF_MONTH);
    }

    /**
     * Changes the lower limit for Dates in this sequence.
     * If <code>start</code> is <code>null</code>,
     * then there is no lower limit.  No bounds checking is done here:
     * the new start value may invalidate the
     * <code>(start <= value <= end)</code>
     * invariant enforced by the constructors.  This is to simplify updating
     * the model.  Naturally one should ensure that the invariant is true
     * before calling the <code>nextValue</code>, <code>previousValue</code>,
     * or <code>setValue</code> methods.
     * <p>
     * Typically this property is a <code>Date</code> however it's possible to use
     * a <code>Comparable</code> with a <code>compareTo</code> method for Dates.
     * For example <code>start</code> might be an instance of a class like this:
     * <pre>
     * MyStartDate implements Comparable {
     *     long t = 12345;
     *     public int compareTo(Date d) {
     *            return (t < d.getTime() ? -1 : (t == d.getTime() ? 0 : 1));
     *     }
     *     public int compareTo(Object o) {
     *            return compareTo((Date)o);
     *     }
     * }
     * </pre>
     * Note that the above example will throw a <code>ClassCastException</code>
     * if the <code>Object</code> passed to <code>compareTo(Object)</code>
     * is not a <code>Date</code>.
     * <p>
     * This method fires a <code>ChangeEvent</code> if the
     * <code>start</code> has changed.
     *
     * @param start defines the first date in the sequence
     * @see #getStart
     * @see #setEnd
     * @see #addChangeListener
     */
    public void setStart(Comparable start) {
        // null-safe inequality check: only update and notify when the bound actually changes
        if ((start == null) ? (this.start != null) : !start.equals(this.start)) {
            this.start = start;
            fireStateChanged();
        }
    }

    /**
     * Returns the first <code>Date</code> in the sequence.
     *
     * @return the value of the <code>start</code> property
     * @see #setStart
     */
    public Comparable getStart() {
        return start;
    }

    /**
     * Changes the upper limit for <code>Date</code>s in this sequence.
     * If <code>end</code> is <code>null</code>, then there is no upper
     * limit.  No bounds checking is done here: the new
     * end value may invalidate the <code>(start <= value <= end)</code>
     * invariant enforced by the constructors.  This is to simplify updating
     * the model.  Naturally, one should ensure that the invariant is true
     * before calling the <code>nextValue</code>, <code>previousValue</code>,
     * or <code>setValue</code> methods.
     * <p>
     * Typically this property is a <code>Date</code> however it's possible to use
     * <code>Comparable</code> with a <code>compareTo</code> method for
     * <code>Date</code>s.  See <code>setStart</code> for an example.
     * <p>
     * This method fires a <code>ChangeEvent</code> if the <code>end</code>
     * has changed.
     *
     * @param end defines the last date in the sequence
     * @see #getEnd
     * @see #setStart
     * @see #addChangeListener
     */
    public void setEnd(Comparable end) {
        if ((end == null) ? (this.end != null) : !end.equals(this.end)) {
            this.end = end;
            fireStateChanged();
        }
    }

    /**
     * Returns the last <code>Date</code> in the sequence.
     *
     * @return the value of the <code>end</code> property
     * @see #setEnd
     */
    public Comparable getEnd() {
        return end;
    }

    /**
     * Changes the size of the date value change computed
     * by the <code>nextValue</code> and <code>previousValue</code> methods.
     * The <code>calendarField</code> parameter must be one of the
     * <code>Calendar</code> field constants like <code>Calendar.MONTH</code>
     * or <code>Calendar.MINUTE</code>.
     * The <code>nextValue</code> and <code>previousValue</code> methods
     * simply move the specified <code>Calendar</code> field forward or backward
     * by one unit with the <code>Calendar.add</code> method.
     * You should use this method with care as some UIs may set the
     * calendarField before committing the edit to spin the field under
     * the cursor. If you only want one field to spin you can subclass
     * and ignore the setCalendarField calls.
     *
     * @param calendarField one of
     *  <ul>
     *    <li><code>Calendar.ERA</code>
     *    <li><code>Calendar.YEAR</code>
     *    <li><code>Calendar.MONTH</code>
     *    <li><code>Calendar.WEEK_OF_YEAR</code>
     *    <li><code>Calendar.WEEK_OF_MONTH</code>
     *    <li><code>Calendar.DAY_OF_MONTH</code>
     *    <li><code>Calendar.DAY_OF_YEAR</code>
     *    <li><code>Calendar.DAY_OF_WEEK</code>
     *    <li><code>Calendar.DAY_OF_WEEK_IN_MONTH</code>
     *    <li><code>Calendar.AM_PM</code>
     *    <li><code>Calendar.HOUR</code>
     *    <li><code>Calendar.HOUR_OF_DAY</code>
     *    <li><code>Calendar.MINUTE</code>
     *    <li><code>Calendar.SECOND</code>
     *    <li><code>Calendar.MILLISECOND</code>
     *  </ul>
     * <p>
     * This method fires a <code>ChangeEvent</code> if the
     * <code>calendarField</code> has changed.
     *
     * @see #getCalendarField
     * @see #getNextValue
     * @see #getPreviousValue
     * @see Calendar#add
     * @see #addChangeListener
     */
    public void setCalendarField(int calendarField) {
        if (!calendarFieldOK(calendarField)) {
            throw new IllegalArgumentException("invalid calendarField");
        }
        if (calendarField != this.calendarField) {
            this.calendarField = calendarField;
            fireStateChanged();
        }
    }

    /**
     * Returns the <code>Calendar</code> field that is added to or subtracted from
     * by the <code>nextValue</code> and <code>previousValue</code> methods.
     *
     * @return the value of the <code>calendarField</code> property
     * @see #setCalendarField
     */
    public int getCalendarField() {
        return calendarField;
    }

    /**
     * Returns the next <code>Date</code> in the sequence, or <code>null</code> if
     * the next date is after <code>end</code>.
     *
     * @return the next <code>Date</code> in the sequence, or <code>null</code> if
     *     the next date is after <code>end</code>.
     *
     * @see SpinnerModel#getNextValue
     * @see #getPreviousValue
     * @see #setCalendarField
     */
    public Object getNextValue() {
        // work on a copy so that the model's current value is not mutated when the
        // candidate turns out to be beyond the end bound
        Calendar cal = Calendar.getInstance();
        cal.setTime(value.getTime());
        cal.add(calendarField, 1);
        Date next = cal.getTime();
        return ((end == null) || (end.compareTo(next) >= 0)) ? next : null;
    }

    /**
     * Returns the previous <code>Date</code> in the sequence, or <code>null</code>
     * if the previous date is before <code>start</code>.
     *
     * @return the previous <code>Date</code> in the sequence, or
     *     <code>null</code> if the previous date
     *     is before <code>start</code>
     *
     * @see SpinnerModel#getPreviousValue
     * @see #getNextValue
     * @see #setCalendarField
     */
    public Object getPreviousValue() {
        // as in getNextValue, step a copy of the current value and bounds-check it
        Calendar cal = Calendar.getInstance();
        cal.setTime(value.getTime());
        cal.add(calendarField, -1);
        Date prev = cal.getTime();
        return ((start == null) || (start.compareTo(prev) <= 0)) ? prev : null;
    }

    /**
     * Returns the current element in this sequence of <code>Date</code>s.
     * This method is equivalent to <code>(Date)getValue</code>.
     *
     * @return the <code>value</code> property
     * @see #setValue
     */
    public Date getDate() {
        return value.getTime();
    }

    /**
     * Returns the current element in this sequence of <code>Date</code>s.
     *
     * @return the <code>value</code> property
     * @see #setValue
     * @see #getDate
     */
    public Object getValue() {
        return value.getTime();
    }

    /**
     * Sets the current <code>Date</code> for this sequence.
     * If <code>value</code> is <code>null</code>,
     * an <code>IllegalArgumentException</code> is thrown.  No bounds
     * checking is done here:
     * the new value may invalidate the <code>(start <= value < end)</code>
     * invariant enforced by the constructors.  Naturally, one should ensure
     * that the <code>(start <= value <= maximum)</code> invariant is true
     * before calling the <code>nextValue</code>, <code>previousValue</code>,
     * or <code>setValue</code> methods.
     * <p>
     * This method fires a <code>ChangeEvent</code> if the
     * <code>value</code> has changed.
     *
     * @param value the current (non <code>null</code>)
     *    <code>Date</code> for this sequence
     * @throws IllegalArgumentException if value is <code>null</code>
     *    or not a <code>Date</code>
     * @see #getDate
     * @see #getValue
     * @see #addChangeListener
     */
    public void setValue(Object value) {
        if ((value == null) || !(value instanceof Date)) {
            throw new IllegalArgumentException("illegal value");
        }
        if (!value.equals(this.value.getTime())) {
            // copy the Date into the existing internal Calendar rather than replacing it
            this.value.setTime((Date)value);
            fireStateChanged();
        }
    }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.plugins.groovy.dsl;
import com.intellij.ide.impl.TrustedProjects;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.impl.LoadTextUtil;
import com.intellij.openapi.fileTypes.FileTypeRegistry;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.vfs.newvfs.BulkFileListener;
import com.intellij.openapi.vfs.newvfs.events.VFileContentChangeEvent;
import com.intellij.openapi.vfs.newvfs.events.VFileEvent;
import com.intellij.psi.*;
import com.intellij.psi.impl.PsiManagerEx;
import com.intellij.psi.search.FileTypeIndex;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.CachedValue;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.psi.util.PsiModificationTracker;
import com.intellij.util.PairProcessor;
import com.intellij.util.PathUtil;
import com.intellij.util.concurrency.AppExecutorUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.io.URLUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.GdslFileType;
import org.jetbrains.plugins.groovy.dsl.DslActivationStatus.Status;
import org.jetbrains.plugins.groovy.dsl.holders.CustomMembersHolder;
import org.jetbrains.plugins.groovy.dsl.holders.OriginAwareMembersHolder;
import org.jetbrains.plugins.groovy.lang.psi.GroovyFile;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrClosableBlock;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition;
import org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions.TypesUtil;
import java.io.File;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
/**
* @author peter
*/
public final class GroovyDslFileIndex {
  // Compiled executor for a .gdsl file paired with the file's modification stamp at the
  // time it was compiled; cached on the VirtualFile itself (see getCachedExecutor).
  private static final Key<Pair<GroovyDslExecutor, Long>> CACHED_EXECUTOR = Key.create("CachedGdslExecutor");
  // Per-project cached list of parsed GroovyDslScripts (see getDslScripts / clearScriptCache).
  private static final Key<CachedValue<List<GroovyDslScript>>> SCRIPTS_CACHE = Key.create("GdslScriptCache");
  private static final Logger LOG = Logger.getInstance(GroovyDslFileIndex.class);

  // Maps a gdsl file URL to the queues of all callers currently waiting for its executor;
  // presence of a key means a parse task for that file is already scheduled.  All access
  // is synchronized on this map (see scheduleParsing).
  private static final MultiMap<String, LinkedBlockingQueue<Pair<VirtualFile, GroovyDslExecutor>>> filesInProcessing =
    MultiMap.createConcurrent();

  // Bounded background pool on which gdsl scripts are compiled and run.
  private static final ExecutorService ourPool = AppExecutorUtil.createBoundedApplicationPoolExecutor("GroovyDSLIndex Pool", 4);

  // Static utility holder; not instantiable.
  private GroovyDslFileIndex() {}

  /**
   * The error message recorded for the given gdsl file, or {@code null} if none was recorded.
   */
  @Nullable
  @NlsSafe
  public static String getError(VirtualFile file) {
    DslActivationStatus.Entry info = DslActivationStatus.getInstance().getGdslFileInfo(file);
    return info == null ? null : info.error;
  }

  /**
   * Whether the given gdsl file is currently active (enabled for execution).
   */
  public static boolean isActivated(@NotNull VirtualFile file) {
    return getStatus(file) == Status.ACTIVE;
  }

  /**
   * Marks the file active (clearing any recorded error) and invalidates the script caches.
   */
  public static void activate(final VirtualFile vfile) {
    setStatusAndError(vfile, Status.ACTIVE, null);
    clearScriptCache();
  }

  /**
   * The activation status for the file; files with no recorded entry default to ACTIVE.
   */
  public static Status getStatus(@NotNull VirtualFile file) {
    DslActivationStatus.Entry info = DslActivationStatus.getInstance().getGdslFileInfo(file);
    return info == null ? Status.ACTIVE : info.status;
  }

  // Drops every open project's cached script list and PSI caches; scheduled on the EDT
  // and skipped if the application is already disposed.
  private static void clearScriptCache() {
    Application app = ApplicationManager.getApplication();
    app.invokeLater(() -> {
      for (Project project : ProjectManager.getInstance().getOpenProjects()) {
        project.putUserData(SCRIPTS_CACHE, null);
        PsiManagerEx.getInstanceEx(project).dropPsiCaches();
      }
    }, app.getDisposed());
  }

  /**
   * Records a non-active status (and optional error) for the file, discards its cached
   * executor and invalidates the script caches.
   */
  public static void disableFile(@NotNull VirtualFile vfile, @NotNull Status status, @NlsSafe @Nullable String error) {
    assert status != Status.ACTIVE;
    setStatusAndError(vfile, status, error);
    vfile.putUserData(CACHED_EXECUTOR, null);
    clearScriptCache();
  }

  // Persists status and error message into the file's DslActivationStatus entry,
  // creating the entry if needed.
  private static void setStatusAndError(@NotNull VirtualFile vfile, @NotNull Status status, @NlsSafe @Nullable String error) {
    DslActivationStatus.Entry entry = DslActivationStatus.getInstance().getGdslFileInfoOrCreate(vfile);
    entry.status = status;
    entry.error = error;
  }

  // Returns the cached executor only if it was compiled from the same modification stamp;
  // a stale stamp means the file changed since compilation, so null is returned.
  @Nullable
  private static GroovyDslExecutor getCachedExecutor(@NotNull final VirtualFile file, final long stamp) {
    final Pair<GroovyDslExecutor, Long> pair = file.getUserData(CACHED_EXECUTOR);
    if (pair == null || pair.second.longValue() != stamp) {
      return null;
    }
    return pair.first;
  }

  /**
   * Resolves the super class type that gdsl scripts declare (via "scriptSuperClass" static
   * info with a "pattern"/"superClass" map) for the given Groovy script file, or
   * {@code null} if the file is not a script or no script's pattern matches its path.
   * When several patterns match, the first match wins (see commented-out code below).
   */
  @Nullable
  public static PsiClassType processScriptSuperClasses(@NotNull GroovyFile scriptFile) {
    if (!scriptFile.isScript()) return null;
    final VirtualFile virtualFile = scriptFile.getOriginalFile().getVirtualFile();
    if (virtualFile == null) return null;
    final String filePath = virtualFile.getPath();
    List<Trinity<String, String, GroovyDslScript>> supers = new ArrayList<>();
    final Project project = scriptFile.getProject();
    for (GroovyDslScript script : getDslScripts(project)) {
      final MultiMap staticInfo = script.getStaticInfo();
      //noinspection unchecked
      final Collection infos = staticInfo.get("scriptSuperClass");
      for (Object info : infos) {
        if (info instanceof Map) {
          @NonNls final Map map = (Map)info;
          final Object _pattern = map.get("pattern");
          final Object _superClass = map.get("superClass");
          if (_pattern instanceof String && _superClass instanceof String) {
            final String pattern = (String)_pattern;
            final String superClass = (String)_superClass;
            try {
              if (Pattern.matches(".*" + pattern, filePath)) {
                supers.add(Trinity.create(superClass, pattern, script));
              }
            }
            catch (RuntimeException e) {
              // a malformed pattern is reported against the script, not propagated
              script.handleDslError(e);
            }
          }
        }
      }
    }
    if (!supers.isEmpty()) {
      final String className = supers.get(0).first;
      final GroovyDslScript script = supers.get(0).third;
      try {
        return TypesUtil.createTypeByFQClassName(className, scriptFile);
      }
      catch (ProcessCanceledException e) {
        throw e;
      }
      catch (RuntimeException e) {
        script.handleDslError(e);
        return null;
      }
    }
    /*else if (supers.size() > 1) {
      StringBuilder buffer = new StringBuilder("Several script super class patterns match file ").append(filePath).append(". <p> ");
      for (Trinity<String, String, GroovyDslScript> aSuper : supers) {
        buffer.append(aSuper.third.getFilePath()).append(" ").append(aSuper.second).append('\n');
      }
      NOTIFICATION_GROUP.createNotification("DSL script execution error", buffer.toString(), NotificationType.ERROR, null).notify(project);
      return null;
    }*/
    else {
      return null;
    }
  }

  /**
   * Runs the processor over the members holder of every applicable gdsl script for the
   * given type at the given place.  Returns {@code false} if the processor stopped the
   * iteration, {@code true} otherwise (including when nothing was applicable).
   */
  public static boolean processExecutors(
    @NotNull PsiClassType psiType,
    @NotNull PsiElement place,
    @NotNull PairProcessor<? super CustomMembersHolder, ? super GroovyClassDescriptor> processor
  ) {
    if (insideAnnotation(place)) {
      // Basic filter, all DSL contexts are applicable for reference expressions only
      return true;
    }
    final PsiFile placeFile = place.getContainingFile().getOriginalFile();
    final PsiClass psiClass = psiType.resolve();
    if (psiClass == null) {
      return true;
    }
    for (GroovyDslScript script : getDslScripts(placeFile.getProject())) {
      GroovyClassDescriptor descriptor = new GroovyClassDescriptor(psiType, psiClass, place, placeFile);
      CustomMembersHolder holder = script.processExecutor(descriptor);
      VirtualFile origin = script.getFile();
      if (origin != null) {
        // remember which gdsl file contributed these members
        holder = new OriginAwareMembersHolder(origin, holder);
      }
      if (!processor.process(holder, descriptor)) {
        return false;
      }
    }
    return true;
  }

  // Whether the element sits inside an annotation, stopping the upward walk at the
  // nearest closure, type definition or file.
  private static boolean insideAnnotation(@Nullable PsiElement place) {
    while (place != null) {
      if (place instanceof PsiAnnotation) return true;
      if (place instanceof GrClosableBlock ||
          place instanceof GrTypeDefinition ||
          place instanceof PsiFile) {
        return false;
      }
      place = place.getParent();
    }
    return false;
  }

  // All gdsl files to consider for a project: the bundled ones, plus — only for trusted
  // projects — those found in the project itself.
  private static List<VirtualFile> getGdslFiles(final Project project) {
    final List<VirtualFile> result = new ArrayList<>(bundledGdslFiles.getValue());
    if (TrustedProjects.isTrusted(project)) {
      result.addAll(getProjectGdslFiles(project));
    }
    return result;
  }

  // Lazily-scanned list of *.gdsl files shipped in the bundled script folders;
  // dropped (and re-scanned) when the GdslScriptProvider extensions change.
  private static final ClearableLazyValue<List<VirtualFile>> bundledGdslFiles = ClearableLazyValue.create(() -> {
    final List<VirtualFile> result = new ArrayList<>();
    for (File file : getBundledScriptFolders()) {
      if (file.exists()) {
        File[] children = file.listFiles();
        if (children != null) {
          for (File child : children) {
            final String fileName = child.getName();
            if (fileName.endsWith(".gdsl")) {
              String path = FileUtil.toSystemIndependentName(child.getPath());
              String url = VirtualFileManager.constructUrl(URLUtil.FILE_PROTOCOL, path);
              ContainerUtil.addIfNotNull(result, VirtualFileManager.getInstance().refreshAndFindFileByUrl(url));
            }
          }
        }
      }
    }
    return result;
  });

  static {
    // invalidate the bundled-file cache whenever GdslScriptProvider extensions change
    GdslScriptProvider.EP_NAME.addChangeListener(() -> {
      bundledGdslFiles.drop();
    }, null);
  }

  // Collects activated project gdsl files, skipping library sources and files in
  // library classes / source content that are invalid or not activated.
  static List<VirtualFile> getProjectGdslFiles(Project project) {
    final List<VirtualFile> result = new ArrayList<>();
    final ProjectFileIndex fileIndex = ProjectRootManager.getInstance(project).getFileIndex();
    final GlobalSearchScope scope = GlobalSearchScope.allScope(project);
    for (VirtualFile vfile : FileTypeIndex.getFiles(GdslFileType.INSTANCE, scope)) {
      if (FileTypeRegistry.getInstance().getFileTypeByFileName(vfile.getNameSequence()) != GdslFileType.INSTANCE) {
        continue;
      }
      if (!vfile.isValid()) {
        continue;
      }
      if (fileIndex.isInLibrarySource(vfile)) {
        continue;
      }
      if (!fileIndex.isInLibraryClasses(vfile)) {
        if (!fileIndex.isInSourceContent(vfile) || !isActivated(vfile)) {
          continue;
        }
      }
      result.add(vfile);
    }
    return result;
  }

  // The "standardDsls" folders next to every jar that declares a GdslScriptProvider
  // (plus the default one next to GdslScriptProvider itself).
  @NotNull
  private static Set<File> getBundledScriptFolders() {
    final GdslScriptProvider[] extensions = GdslScriptProvider.EP_NAME.getExtensions();
    final Set<Class<?>> classes = new HashSet<>(ContainerUtil.map(extensions, GdslScriptProvider::getClass));
    classes.add(GdslScriptProvider.class); // for default extension
    Set<File> scriptFolders = new LinkedHashSet<>();
    for (Class<?> aClass : classes) {
      File jarPath = new File(PathUtil.getJarPathForClass(aClass));
      if (jarPath.isFile()) {
        jarPath = jarPath.getParentFile();
      }
      scriptFolders.add(new File(jarPath, "standardDsls"));
    }
    return scriptFolders;
  }

  // Builds (and caches per project) the list of GroovyDslScripts.  Executors already
  // cached on their files are reused directly; the rest are compiled on the background
  // pool and collected from the queue, polling with cancellation checks.
  private static List<GroovyDslScript> getDslScripts(final Project project) {
    return CachedValuesManager.getManager(project).getCachedValue(project, SCRIPTS_CACHE, () -> {
      if (GdslUtil.ourGdslStopped) {
        return CachedValueProvider.Result.create(Collections.emptyList(), ModificationTracker.NEVER_CHANGED);
      }

      // eagerly initialize some services used by background gdsl parsing threads
      // because service init requires a read action
      // and there could be a deadlock with a write action waiting already on EDT
      // if current thread is inside a non-cancellable read action
      GdslScriptBase.getIdeaVersion();
      DslActivationStatus.getInstance();

      int count = 0;
      List<GroovyDslScript> result = new ArrayList<>();
      final LinkedBlockingQueue<Pair<VirtualFile, GroovyDslExecutor>> queue =
        new LinkedBlockingQueue<>();
      for (VirtualFile vfile : getGdslFiles(project)) {
        final long stamp = vfile.getModificationStamp();
        final GroovyDslExecutor cached = getCachedExecutor(vfile, stamp);
        if (cached == null) {
          scheduleParsing(queue, project, vfile, stamp, LoadTextUtil.loadText(vfile).toString());
          count++;
        }
        else {
          result.add(new GroovyDslScript(project, vfile, cached, vfile.getPath()));
        }
      }

      try {
        // wait for all scheduled parses, polling in short slices so cancellation is honoured
        while (count > 0 && !GdslUtil.ourGdslStopped) {
          ProgressManager.checkCanceled();
          final Pair<VirtualFile, GroovyDslExecutor> pair = queue.poll(20, TimeUnit.MILLISECONDS);
          if (pair != null) {
            count--;
            if (pair.second != null) {
              result.add(new GroovyDslScript(project, pair.first, pair.second, pair.first.getPath()));
            }
          }
        }
      }
      catch (InterruptedException e) {
        LOG.error(e);
      }
      return CachedValueProvider.Result.create(result, PsiModificationTracker.MODIFICATION_COUNT, ProjectRootManager.getInstance(project));
    }, false);
  }

  // Schedules compilation of one gdsl file on the background pool, ensuring only one
  // task per file URL is in flight; every interested caller registers its queue and
  // receives the (possibly null) executor when the task completes.
  private static void scheduleParsing(final LinkedBlockingQueue<Pair<VirtualFile, GroovyDslExecutor>> queue,
                                      final Project project,
                                      final VirtualFile vfile,
                                      final long stamp,
                                      final String text) {
    final String fileUrl = vfile.getUrl();

    final Runnable parseScript = () -> {
      GroovyDslExecutor executor = getCachedExecutor(vfile, stamp);
      try {
        if (executor == null && isActivated(vfile)) {
          executor = createExecutor(text, vfile, project);
          // executor is not only time-consuming to create, but also takes some PermGenSpace
          // => we can't afford garbage-collecting it together with PsiFile
          // => cache globally by file instance
          vfile.putUserData(CACHED_EXECUTOR, Pair.create(executor, stamp));
          if (executor != null) {
            setStatusAndError(vfile, Status.ACTIVE, null);
          }
        }
      }
      finally {
        // access to our MultiMap should be synchronized
        synchronized (filesInProcessing) {
          // put evaluated executor to all queues
          for (LinkedBlockingQueue<Pair<VirtualFile, GroovyDslExecutor>> queue1 : filesInProcessing.remove(fileUrl)) {
            queue1.offer(Pair.create(vfile, executor));
          }
        }
      }
    };

    synchronized (filesInProcessing) { //ensure that only one thread calculates dsl executor
      final boolean isNewRequest = !filesInProcessing.containsKey(fileUrl);
      filesInProcessing.putValue(fileUrl, queue);
      if (isNewRequest) {
        ourPool.execute(parseScript);
      }
    }
  }

  // Compiles and runs a gdsl script, returning null (after reporting) on failure.
  // OutOfMemoryError / NoClassDefFoundError permanently stop gdsl processing and are rethrown.
  @Nullable
  private static GroovyDslExecutor createExecutor(String text, VirtualFile vfile, final Project project) {
    if (GdslUtil.ourGdslStopped) {
      return null;
    }

    try {
      return GroovyDslExecutor.createAndRunExecutor(text, vfile.getName());
    }
    catch (final Throwable e) {
      if (project.isDisposed()) {
        LOG.error(e);
        return null;
      }

      if (ApplicationManager.getApplication().isUnitTestMode()) {
        LOG.error(e);
        return null;
      }
      DslErrorReporter.getInstance().invokeDslErrorPopup(e, project, vfile);

      //noinspection InstanceofCatchParameter
      if (e instanceof OutOfMemoryError) {
        GdslUtil.stopGdsl();
        throw (Error)e;
      }
      //noinspection InstanceofCatchParameter
      if (e instanceof NoClassDefFoundError) {
        GdslUtil.stopGdsl();
        throw (NoClassDefFoundError)e;
      }

      return null;
    }
  }

  /**
   * VFS listener that marks an active gdsl file as MODIFIED when its content changes
   * outside of a refresh, so it must be explicitly re-activated before being executed again.
   */
  public static class MyFileListener implements BulkFileListener {
    @Override
    public void after(@NotNull List<? extends @NotNull VFileEvent> events) {
      for (VFileEvent event : events) {
        if (event instanceof VFileContentChangeEvent && !event.isFromRefresh()) {
          VirtualFile file = event.getFile();
          if (file == null || !GdslUtil.GDSL_FILTER.value(file) || getStatus(file) != Status.ACTIVE) {
            continue;
          }
          disableFile(file, Status.MODIFIED, null);
        }
      }
    }
  }
}
| |
/*
* Copyright 2017 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config;
import com.rits.cloning.Cloner;
import com.thoughtworks.go.config.materials.MaterialConfigs;
import com.thoughtworks.go.config.materials.ScmMaterialConfig;
import com.thoughtworks.go.config.materials.git.GitMaterialConfig;
import com.thoughtworks.go.config.materials.mercurial.HgMaterialConfig;
import com.thoughtworks.go.config.parts.XmlPartialConfigProvider;
import com.thoughtworks.go.config.remote.ConfigRepoConfig;
import com.thoughtworks.go.config.remote.PartialConfig;
import com.thoughtworks.go.config.remote.RepoConfigOrigin;
import com.thoughtworks.go.config.update.CreatePipelineConfigCommand;
import com.thoughtworks.go.config.update.FullConfigUpdateCommand;
import com.thoughtworks.go.config.validation.GoConfigValidity;
import com.thoughtworks.go.domain.materials.MaterialConfig;
import com.thoughtworks.go.helper.*;
import com.thoughtworks.go.listener.ConfigChangedListener;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.service.GoConfigService;
import com.thoughtworks.go.server.service.result.DefaultLocalizedOperationResult;
import com.thoughtworks.go.serverhealth.*;
import com.thoughtworks.go.service.ConfigRepository;
import com.thoughtworks.go.util.*;
import com.thoughtworks.go.util.command.CommandLine;
import com.thoughtworks.go.util.command.ConsoleResult;
import org.apache.commons.io.FileUtils;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.hamcrest.Matchers;
import org.hamcrest.core.Is;
import org.junit.*;
import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.ClassPathResource;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.function.Predicate;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static com.thoughtworks.go.helper.ConfigFileFixture.DEFAULT_XML_WITH_2_AGENTS;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Arrays.asList;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsNull.nullValue;
import static org.hamcrest.core.StringContains.containsString;
import static org.junit.Assert.*;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
"classpath:WEB-INF/applicationContext-global.xml",
"classpath:WEB-INF/applicationContext-dataLocalAccess.xml",
"classpath:WEB-INF/applicationContext-acegi-security.xml",
"classpath:testPropertyConfigurer.xml"
})
public class CachedGoConfigIntegrationTest {
@Autowired
private GoConfigWatchList configWatchList;
@Autowired
private GoRepoConfigDataSource repoConfigDataSource;
@Autowired
private CachedGoConfig cachedGoConfig;
private GoConfigFileHelper configHelper;
@Autowired
private ServerHealthService serverHealthService;
@Autowired
private GoConfigService goConfigService;
@Autowired
private GoConfigDao goConfigDao;
@Autowired
private CachedGoPartials cachedGoPartials;
@Autowired
private GoPartialConfig goPartialConfig;
@Autowired
private GoFileConfigDataSource goFileConfigDataSource;
@Autowired
private ConfigRepository configRepository;
@Rule
public final TemporaryFolder temporaryFolder = new TemporaryFolder();
private String latestCommit;
private ConfigRepoConfig configRepo;
private File externalConfigRepo;
@Rule
public ExpectedException thrown = ExpectedException.none();
// Creates the main config with 2 agents, an external git config-repo with one commit,
// registers that repo in the config and records it as this.configRepo for the tests.
@Before
public void setUp() throws Exception {
configHelper = new GoConfigFileHelper(DEFAULT_XML_WITH_2_AGENTS);
configHelper.usingCruiseConfigDao(goConfigDao).initializeConfigFile();
configHelper.onSetUp();
externalConfigRepo = temporaryFolder.newFolder();
latestCommit = setupExternalConfigRepo(externalConfigRepo);
configHelper.addConfigRepo(new ConfigRepoConfig(new GitMaterialConfig(externalConfigRepo.getAbsolutePath()), XmlPartialConfigProvider.providerName));
// Forces the watch list to pick up the newly added config-repo.
goConfigService.forceNotifyListeners();
configRepo = configWatchList.getCurrentConfigRepos().get(0);
cachedGoPartials.clear();
configHelper.addAgent("hostname1", "uuid1");
}
// Sanity check after each test: no cached partial (valid or merely known) may carry
// leftover validation errors, then the partial cache is cleared for the next test.
@After
public void tearDown() throws Exception {
cachedGoPartials.clear();
for (PartialConfig partial : cachedGoPartials.lastValidPartials()) {
assertThat(ErrorCollector.getAllErrors(partial).isEmpty(), is(true));
}
for (PartialConfig partial : cachedGoPartials.lastKnownPartials()) {
assertThat(ErrorCollector.getAllErrors(partial).isEmpty(), is(true));
}
}
@Test
public void shouldRecoverFromDeepConfigRepoReferencesBug1901When2Repos() throws Exception {
// pipeline references are like this: pipe1 -> downstream
File downstreamExternalConfigRepo = temporaryFolder.newFolder();
/*here is a pipeline 'downstream' with material dependency on 'pipe1' in other repository*/
String downstreamLatestCommit = setupExternalConfigRepo(downstreamExternalConfigRepo, "external_git_config_repo_referencing_first");
configHelper.addConfigRepo(new ConfigRepoConfig(new GitMaterialConfig(downstreamExternalConfigRepo.getAbsolutePath()), "gocd-xml"));
goConfigService.forceNotifyListeners();//TODO what if this is not called?
ConfigRepoConfig downstreamConfigRepo = configWatchList.getCurrentConfigRepos().get(1);
assertThat(configWatchList.getCurrentConfigRepos().size(), is(2));
// And unluckily downstream gets parsed first
repoConfigDataSource.onCheckoutComplete(downstreamConfigRepo.getMaterialConfig(), downstreamExternalConfigRepo, downstreamLatestCommit);
// So parsing fails and proper message is shown:
List<ServerHealthState> messageForInvalidMerge = serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(downstreamConfigRepo));
assertThat(messageForInvalidMerge.isEmpty(), is(false));
assertThat(messageForInvalidMerge.get(0).getDescription(), containsString("tries to fetch artifact from pipeline "pipe1""));
// and current config is still old
assertThat(goConfigService.hasPipelineNamed(new CaseInsensitiveString("downstream")), is(false));
assertThat(cachedGoPartials.lastKnownPartials().size(), is(1));
assertThat(cachedGoPartials.lastValidPartials().size(), is(0));
//here downstream partial is waiting to be merged
assertThat(cachedGoPartials.lastKnownPartials().get(0).getGroups().get(0).hasPipeline(new CaseInsensitiveString("downstream")), is(true));
// Finally upstream config repository is parsed
repoConfigDataSource.onCheckoutComplete(configRepo.getMaterialConfig(), externalConfigRepo, latestCommit);
// now server should be healthy and contain all pipelines
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(configRepo)).isEmpty(), is(true));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(downstreamConfigRepo)).isEmpty(), is(true));
assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("pipe1")), is(true));
assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("downstream")), is(true));
}
@Test
public void shouldRecoverFromDeepConfigRepoReferencesBug1901When3Repos() throws Exception {
// pipeline references are like this: pipe1 -> downstream -> downstream2
File secondDownstreamExternalConfigRepo = temporaryFolder.newFolder();
/*here is a pipeline 'downstream2' with material dependency on 'downstream' in other repository*/
String secondDownstreamLatestCommit = setupExternalConfigRepo(secondDownstreamExternalConfigRepo, "external_git_config_repo_referencing_second");
configHelper.addConfigRepo(new ConfigRepoConfig(new GitMaterialConfig(secondDownstreamExternalConfigRepo.getAbsolutePath()), "gocd-xml"));
File firstDownstreamExternalConfigRepo = temporaryFolder.newFolder();
/*here is a pipeline 'downstream' with material dependency on 'pipe1' in other repository*/
String firstDownstreamLatestCommit = setupExternalConfigRepo(firstDownstreamExternalConfigRepo, "external_git_config_repo_referencing_first");
configHelper.addConfigRepo(new ConfigRepoConfig(new GitMaterialConfig(firstDownstreamExternalConfigRepo.getAbsolutePath()), "gocd-xml"));
goConfigService.forceNotifyListeners();
ConfigRepoConfig firstDownstreamConfigRepo = configWatchList.getCurrentConfigRepos().get(1);
ConfigRepoConfig secondDownstreamConfigRepo = configWatchList.getCurrentConfigRepos().get(2);
assertThat(configWatchList.getCurrentConfigRepos().size(), is(3));
// And unluckily downstream2 gets parsed first
repoConfigDataSource.onCheckoutComplete(secondDownstreamConfigRepo.getMaterialConfig(), secondDownstreamExternalConfigRepo, secondDownstreamLatestCommit);
// So parsing fails and proper message is shown:
List<ServerHealthState> messageForInvalidMerge = serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(secondDownstreamConfigRepo));
assertThat(messageForInvalidMerge.isEmpty(), is(false));
assertThat(messageForInvalidMerge.get(0).getDescription(), containsString("tries to fetch artifact from pipeline "downstream""));
// and current config is still old
assertThat(goConfigService.hasPipelineNamed(new CaseInsensitiveString("downstream2")), is(false));
assertThat(cachedGoPartials.lastKnownPartials().size(), is(1));
assertThat(cachedGoPartials.lastValidPartials().size(), is(0));
//here downstream2 partial is waiting to be merged
assertThat(cachedGoPartials.lastKnownPartials().get(0).getGroups().get(0).hasPipeline(new CaseInsensitiveString("downstream2")), is(true));
// Then middle upstream config repository is parsed
repoConfigDataSource.onCheckoutComplete(firstDownstreamConfigRepo.getMaterialConfig(), firstDownstreamExternalConfigRepo, firstDownstreamLatestCommit);
// and errors are still shown
messageForInvalidMerge = serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(firstDownstreamConfigRepo));
assertThat(messageForInvalidMerge.isEmpty(), is(false));
assertThat(messageForInvalidMerge.get(0).getDescription(), containsString("Pipeline "pipe1" does not exist. It is used from pipeline "downstream""));
// and current config is still old
assertThat(goConfigService.hasPipelineNamed(new CaseInsensitiveString("downstream")), is(false));
assertThat(goConfigService.hasPipelineNamed(new CaseInsensitiveString("downstream2")), is(false));
assertThat(cachedGoPartials.lastKnownPartials().size(), is(2));
assertThat(cachedGoPartials.lastValidPartials().size(), is(0));
// Finally upstream config repository is parsed
repoConfigDataSource.onCheckoutComplete(configRepo.getMaterialConfig(), externalConfigRepo, latestCommit);
// now server should be healthy and contain all pipelines
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(firstDownstreamConfigRepo)).isEmpty(), is(true));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(secondDownstreamConfigRepo)).isEmpty(), is(true));
assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("pipe1")), is(true));
assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("downstream")), is(true));
assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("downstream2")), is(true));
}
// Adding a local pipeline whose name already exists in a merged remote partial must fail
// with a duplicate-name error that mentions the conflicting sources.
@Test
public void shouldFailWhenTryingToAddPipelineDefinedRemotely() throws Exception {
assertThat(configWatchList.getCurrentConfigRepos().size(), is(1));
repoConfigDataSource.onCheckoutComplete(configRepo.getMaterialConfig(), externalConfigRepo, latestCommit);
assertThat(cachedGoConfig.loadMergedForEditing().hasPipelineNamed(new CaseInsensitiveString("pipe1")), is(true));
PipelineConfig dupPipelineConfig = PipelineMother.twoBuildPlansWithResourcesAndSvnMaterialsAtUrl("pipe1", "ut",
"www.spring.com");
try {
goConfigDao.addPipeline(dupPipelineConfig, PipelineConfigs.DEFAULT_GROUP);
} catch (RuntimeException ex) {
assertThat(ex.getMessage(), containsString("You have defined multiple pipelines named 'pipe1'. Pipeline names must be unique. Source(s):"));
return;
}
fail("Should have thrown");
}
// Listeners get one callback at registration and another after every write, even a no-op one.
@Test
public void shouldNotifyListenersWhenConfigChanged() {
ConfigChangeListenerStub listener = new ConfigChangeListenerStub();
cachedGoConfig.registerListener(listener);
assertThat(listener.invocationCount, is(1));
cachedGoConfig.writeWithLock(new UpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
// Identity update: the write itself should still trigger notification.
return cruiseConfig;
}
});
assertThat(listener.invocationCount, is(2));
}
// A successfully parsed remote partial must appear both in the latest partial for the
// material and in the merged current config, with no health errors for the repo scope.
@Test
public void shouldReturnMergedConfig_WhenThereIsValidPartialConfig() throws Exception {
assertThat(configWatchList.getCurrentConfigRepos().size(), is(1));
repoConfigDataSource.onCheckoutComplete(configRepo.getMaterialConfig(), externalConfigRepo, latestCommit);
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(configRepo)).isEmpty(), is(true));
assertThat(repoConfigDataSource.latestPartialConfigForMaterial(configRepo.getMaterialConfig()).getGroups().findGroup("first").findBy(new CaseInsensitiveString("pipe1")), is(not(nullValue())));
assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("pipe1")), is(true));
}
// Entity-level save path: creating a pipeline whose name clashes with a remote one must
// attach a duplicate-name error listing both origins (local and config-repo).
@Test
public void shouldFailWhenTryingToAddPipelineWithTheSameNameAsAnotherPipelineDefinedRemotely_EntitySave() throws Exception {
assertThat(configWatchList.getCurrentConfigRepos().size(), is(1));
repoConfigDataSource.onCheckoutComplete(configRepo.getMaterialConfig(), externalConfigRepo, latestCommit);
assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("pipe1")), is(true));
PipelineConfig dupPipelineConfig = PipelineMother.twoBuildPlansWithResourcesAndSvnMaterialsAtUrl("pipe1", "ut",
"www.spring.com");
try {
goConfigDao.updateConfig(new CreatePipelineConfigCommand(goConfigService, dupPipelineConfig, Username.ANONYMOUS, new DefaultLocalizedOperationResult(), "default"), Username.ANONYMOUS);
fail("Should have thrown");
} catch (RuntimeException ex) {
PipelineConfig pipe1 = goConfigService.pipelineConfigNamed(new CaseInsensitiveString("pipe1"));
String errorMessage = dupPipelineConfig.errors().on(PipelineConfig.NAME);
assertThat(errorMessage, containsString("You have defined multiple pipelines named 'pipe1'. Pipeline names must be unique. Source(s):"));
// Extract the two origin names listed inside "[...]" in the error message.
Matcher matcher = Pattern.compile("^.*\\[(.*),\\s(.*)\\].*$").matcher(errorMessage);
assertThat(matcher.matches(), is(true));
assertThat(matcher.groupCount(), is(2));
List<String> expectedSources = asList(dupPipelineConfig.getOriginDisplayName(), pipe1.getOriginDisplayName());
List<String> actualSources = new ArrayList<>();
for (int i = 1; i <= matcher.groupCount(); i++) {
actualSources.add(matcher.group(i));
}
// Order of sources in the message is not guaranteed, so compare as sets.
assertThat(actualSources.size(), is(expectedSources.size()));
assertThat(actualSources.containsAll(expectedSources), is(true));
}
}
// Full-config save path: same duplicate-name protection as the entity save, but the
// error surfaces via the thrown exception's message rather than entity errors.
@Test
public void shouldFailWhenTryingToAddPipelineWithTheSameNameAsAnotherPipelineDefinedRemotely_FullConfigSave() throws Exception {
assertThat(configWatchList.getCurrentConfigRepos().size(), is(1));
repoConfigDataSource.onCheckoutComplete(configRepo.getMaterialConfig(), externalConfigRepo, latestCommit);
assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("pipe1")), is(true));
final PipelineConfig dupPipelineConfig = PipelineMother.twoBuildPlansWithResourcesAndSvnMaterialsAtUrl("pipe1", "ut",
"www.spring.com");
try {
goConfigDao.updateConfig(new UpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.getGroups().first().add(dupPipelineConfig);
return cruiseConfig;
}
});
fail("Should have thrown");
} catch (RuntimeException ex) {
String errorMessage = ex.getMessage();
assertThat(errorMessage, containsString("You have defined multiple pipelines named 'pipe1'. Pipeline names must be unique. Source(s):"));
// Extract the two origin names listed inside "[...]" in the error message.
Matcher matcher = Pattern.compile("^.*\\[(.*),\\s(.*)\\].*$").matcher(errorMessage);
assertThat(matcher.matches(), is(true));
assertThat(matcher.groupCount(), is(2));
PipelineConfig pipe1 = goConfigService.pipelineConfigNamed(new CaseInsensitiveString("pipe1"));
List<String> expectedSources = asList(dupPipelineConfig.getOriginDisplayName(), pipe1.getOriginDisplayName());
List<String> actualSources = new ArrayList<>();
for (int i = 1; i <= matcher.groupCount(); i++) {
actualSources.add(matcher.group(i));
}
// Order of sources in the message is not guaranteed, so compare as sets.
assertThat(actualSources.size(), is(expectedSources.size()));
assertThat(actualSources.containsAll(expectedSources), is(true));
}
}
// loadMergedForEditing must include remotely-defined pipelines once their partial is merged.
@Test
public void shouldReturnRemotePipelinesAmongAllPipelinesInMergedConfigForEdit() throws Exception {
assertThat(configWatchList.getCurrentConfigRepos().size(), is(1));
repoConfigDataSource.onCheckoutComplete(configRepo.getMaterialConfig(), externalConfigRepo, latestCommit);
assertThat(cachedGoConfig.loadMergedForEditing().hasPipelineNamed(new CaseInsensitiveString("pipe1")), is(true));
}
/**
 * Returns all server-health log entries whose {@link HealthStateType} equals {@code type}.
 *
 * @param type the health state type to filter the server-health log by
 * @return matching entries, possibly empty
 */
private List<ServerHealthState> findMessageFor(final HealthStateType type) {
    // Idiom: lambda instead of an anonymous Predicate implementation (file already uses Java 8 streams).
    return serverHealthService.logs().stream()
            .filter(element -> element.getType().equals(type))
            .collect(Collectors.toList());
}
// When a partial is parsed and merged, registered listeners are notified with the merged config.
@Test
public void shouldNotifyWithMergedConfig_WhenPartUpdated() throws Exception {
ConfigChangeListenerStub listener = new ConfigChangeListenerStub();
cachedGoConfig.registerListener(listener);
// at registration
assertThat(listener.invocationCount, is(1));
repoConfigDataSource.onCheckoutComplete(configRepo.getMaterialConfig(), externalConfigRepo, latestCommit);
assertThat("currentConfigShouldBeMerged", cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("pipe1")), is(true));
assertThat(listener.invocationCount, is(2));
}
// A failed merge (invalid partial) must leave the current config untouched and not
// trigger any listener callback beyond the one made at registration.
@Test
public void shouldNotNotifyListenersWhenMergeFails() throws IOException {
checkinPartial("config_repo_with_invalid_partial");
ConfigRepoConfig configRepo = configWatchList.getCurrentConfigRepos().get(0);
ConfigChangeListenerStub listener = new ConfigChangeListenerStub();
cachedGoConfig.registerListener(listener);
// at registration
assertThat(listener.invocationCount, is(1));
repoConfigDataSource.onCheckoutComplete(configRepo.getMaterialConfig(), externalConfigRepo, latestCommit);
assertThat("currentConfigShouldBeMainXmlOnly", cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("pipeline_with_no_stage")), is(false));
assertThat(listener.invocationCount, is(1));
}
// A failed merge must record an error health state scoped to the offending config-repo.
@Test
public void shouldSetErrorHealthStateWhenMergeFails() throws IOException {
checkinPartial("config_repo_with_invalid_partial");
ConfigRepoConfig configRepo = configWatchList.getCurrentConfigRepos().get(0);
repoConfigDataSource.onCheckoutComplete(configRepo.getMaterialConfig(), externalConfigRepo, latestCommit);
List<ServerHealthState> messageForInvalidMerge = serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(configRepo));
assertThat(messageForInvalidMerge.isEmpty(), is(false));
assertThat(messageForInvalidMerge.get(0).getDescription().contains("Pipeline 'pipeline_with_no_stage' does not have any stages configured"), is(true));
}
// Once the broken partial is removed and the repo is re-parsed, the error health state clears.
@Test
public void shouldUnSetErrorHealthStateWhenMergePasses() throws IOException {
ConfigRepoConfig configRepo = configWatchList.getCurrentConfigRepos().get(0);
checkinPartial("config_repo_with_invalid_partial/bad_partial.gocd.xml");
repoConfigDataSource.onCheckoutComplete(configRepo.getMaterialConfig(), externalConfigRepo, latestCommit);
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(configRepo)).isEmpty(), is(false));
//fix partial
deletePartial("bad_partial.gocd.xml");
repoConfigDataSource.onCheckoutComplete(configRepo.getMaterialConfig(), externalConfigRepo, latestCommit);
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(configRepo)).isEmpty(), is(true));
}
// Saving through the helper must be immediately visible in the cached current config.
@Test
public void shouldUpdateCachedConfigOnSave() throws Exception {
assertThat(cachedGoConfig.currentConfig().agents().size(), Matchers.is(1));
configHelper.addAgent("hostname", "uuid2");
assertThat(cachedGoConfig.currentConfig().agents().size(), Matchers.is(2));
}
// Each writeWithLock must reload the cache so subsequent reads see the latest resources.
@Test
public void shouldReloadCachedConfigWhenWriting() throws Exception {
cachedGoConfig.writeWithLock(updateFirstAgentResources("osx"));
assertThat(cachedGoConfig.currentConfig().agents().get(0).getResourceConfigs().toString(), Matchers.is("osx"));
cachedGoConfig.writeWithLock(updateFirstAgentResources("osx, firefox"));
// Resources render sorted/normalised, hence "firefox | osx".
assertThat(cachedGoConfig.currentConfig().agents().get(0).getResourceConfigs().toString(), Matchers.is("firefox | osx"));
}
// forceReload must pick up an externally rewritten config file from disk.
@Test
public void shouldReloadCachedConfigFromDisk() throws Exception {
assertThat(cachedGoConfig.currentConfig().agents().size(), Matchers.is(1));
configHelper.writeXmlToConfigFile(ConfigFileFixture.TASKS_WITH_CONDITION);
cachedGoConfig.forceReload();
assertThat(cachedGoConfig.currentConfig().agents().size(), Matchers.is(0));
}
// Params (#{...}) declared per pipeline must be substituted into the shared template for
// the runtime config, while the editable config keeps the raw #{...} placeholders.
@Test
public void shouldInterpolateParamsInTemplate() throws Exception {
String content = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'>\n"
+ "<server artifactsdir='artifacts' >"
+ "</server>"
+ "<pipelines>\n"
+ "<pipeline name='dev' template='abc'>\n"
+ "  <params>"
+ "    <param name='command'>ls</param>"
+ "    <param name='dir'>/tmp</param>"
+ "  </params>"
+ "  <materials>\n"
+ "    <svn url =\"svnurl\"/>"
+ "  </materials>\n"
+ "</pipeline>\n"
+ "<pipeline name='acceptance' template='abc'>\n"
+ "  <params>"
+ "    <param name='command'>twist</param>"
+ "    <param name='dir'>./acceptance</param>"
+ "  </params>"
+ "  <materials>\n"
+ "    <svn url =\"svnurl\"/>"
+ "  </materials>\n"
+ "</pipeline>\n"
+ "</pipelines>\n"
+ "<templates>\n"
+ "  <pipeline name='abc'>\n"
+ "    <stage name='stage1'>"
+ "      <jobs>"
+ "        <job name='job1'>"
+ "          <tasks>"
+ "            <exec command='/bin/#{command}' args='#{dir}'/>"
+ "          </tasks>"
+ "        </job>"
+ "      </jobs>"
+ "    </stage>"
+ "  </pipeline>\n"
+ "</templates>\n"
+ "</cruise>";
configHelper.writeXmlToConfigFile(content);
cachedGoConfig.forceReload();
CruiseConfig cruiseConfig = cachedGoConfig.currentConfig();
ExecTask devExec = (ExecTask) cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("dev")).getFirstStageConfig().jobConfigByConfigName(new CaseInsensitiveString("job1")).getTasks().first();
assertThat(devExec, Is.is(new ExecTask("/bin/ls", "/tmp", (String) null)));
ExecTask acceptanceExec = (ExecTask) cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("acceptance")).getFirstStageConfig().jobConfigByConfigName(new CaseInsensitiveString("job1")).getTasks().first();
assertThat(acceptanceExec, Is.is(new ExecTask("/bin/twist", "./acceptance", (String) null)));
// Edit view: template keeps raw placeholders and pipelines carry no inlined stages.
cruiseConfig = cachedGoConfig.loadForEditing();
devExec = (ExecTask) cruiseConfig.getTemplateByName(new CaseInsensitiveString("abc")).get(0).jobConfigByConfigName(new CaseInsensitiveString("job1")).getTasks().first();
assertThat(devExec, Is.is(new ExecTask("/bin/#{command}", "#{dir}", (String) null)));
assertThat(cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("dev")).size(), Matchers.is(0));
assertThat(cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("acceptance")).size(), Matchers.is(0));
}
// "##" escapes a literal "#", so '/bin/#{command}##{b}' with command=ls#{a} must yield
// '/bin/ls#{a}#{b}' — nested-looking params inside values are not re-expanded.
@Test
public void shouldHandleParamQuotingCorrectly() throws Exception {
String content = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'>\n"
+ "<server artifactsdir='artifacts' />"
+ "<pipelines>\n"
+ "<pipeline name='dev'>\n"
+ "  <params>"
+ "    <param name='command'>ls#{a}</param>"
+ "    <param name='dir'>/tmp</param>"
+ "  </params>"
+ "  <materials>\n"
+ "    <svn url =\"svnurl\"/>"
+ "  </materials>\n"
+ "  <stage name='stage1'>"
+ "    <jobs>"
+ "      <job name='job1'>"
+ "        <tasks>"
+ "          <exec command='/bin/#{command}##{b}' args='#{dir}'/>"
+ "        </tasks>"
+ "      </job>"
+ "    </jobs>"
+ "  </stage>"
+ "</pipeline>\n"
+ "</pipelines>\n"
+ "</cruise>";
configHelper.writeXmlToConfigFile(content);
cachedGoConfig.forceReload();
CruiseConfig cruiseConfig = cachedGoConfig.currentConfig();
ExecTask devExec = (ExecTask) cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("dev")).getFirstStageConfig().jobConfigByConfigName(new CaseInsensitiveString("job1")).getTasks().first();
assertThat(devExec, Is.is(new ExecTask("/bin/ls#{a}#{b}", "/tmp", (String) null)));
}
// Params (#{...}) in labeltemplate are interpolated at load time, while runtime label
// variables (${COUNT}) are left intact for later substitution.
@Test
public void shouldAllowParamsInLabelTemplates() throws Exception {
String content = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'>\n"
+ "<server artifactsdir='artifacts' />"
+ "<pipelines>\n"
+ "<pipeline name='dev' labeltemplate='cruise-#{VERSION}-${COUNT}'>\n"
+ "  <params>"
+ "    <param name='VERSION'>1.2</param>"
+ "  </params>"
+ "  <materials>\n"
+ "    <svn url =\"svnurl\"/>"
+ "  </materials>\n"
+ "  <stage name='stage1'>"
+ "    <jobs>"
+ "      <job name='job1'>"
+ "        <tasks>"
+ "          <exec command='/bin/ls' args='some'/>"
+ "        </tasks>"
+ "      </job>"
+ "    </jobs>"
+ "  </stage>"
+ "</pipeline>\n"
+ "</pipelines>\n"
+ "</cruise>";
configHelper.writeXmlToConfigFile(content);
cachedGoConfig.forceReload();
CruiseConfig cruiseConfig = cachedGoConfig.currentConfig();
assertThat(cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("dev")).getLabelTemplate(), Is.is("cruise-1.2-${COUNT}"));
}
// Duplicate env-var names after param interpolation ('#{product}_WORKING_DIR' expands
// to 'GO_WORKING_DIR') must fail validation with a uniqueness error.
@Test
public void shouldThrowErrorWhenEnvironmentVariablesAreDuplicate() throws Exception {
String content = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'>\n"
+ "<server artifactsdir='artifacts' />"
+ "<pipelines>\n"
+ "<pipeline name='dev'>\n"
+ "  <params>"
+ "    <param name='product'>GO</param>"
+ "  </params>"
+ "  <environmentvariables>"
+ "    <variable name='#{product}_WORKING_DIR'><value>go_dir</value></variable>"
+ "    <variable name='GO_WORKING_DIR'><value>dir</value></variable>"
+ "  </environmentvariables>"
+ "  <materials>\n"
+ "    <svn url =\"svnurl\"/>"
+ "  </materials>\n"
+ "  <stage name='stage1'>"
+ "    <jobs>"
+ "      <job name='job1'>"
+ "        <tasks>"
+ "          <exec command='/bin/ls' args='some'/>"
+ "        </tasks>"
+ "      </job>"
+ "    </jobs>"
+ "  </stage>"
+ "</pipeline>\n"
+ "</pipelines>\n"
+ "</cruise>";
configHelper.writeXmlToConfigFile(content);
GoConfigValidity configValidity = cachedGoConfig.checkConfigFileValid();
assertThat(configValidity.isValid(), Matchers.is(false));
assertThat(configValidity.errorMessage(), containsString("Environment Variable name 'GO_WORKING_DIR' is not unique for pipeline 'dev'"));
}
/**
 * When the on-disk config becomes invalid, forceReload must keep serving the SAME cached
 * CruiseConfig instance (identity, not just equality) and report the file as invalid.
 */
@Test
public void shouldReturnCachedConfigIfConfigFileIsInvalid() throws Exception {
    CruiseConfig before = cachedGoConfig.currentConfig();
    assertThat(before.agents().size(), Matchers.is(1));
    configHelper.writeXmlToConfigFile("invalid-xml");
    cachedGoConfig.forceReload();
    // assertSame documents the identity expectation and yields a useful failure message,
    // unlike assertTrue(a == b) which fails with no diagnostic detail.
    assertSame(before, cachedGoConfig.currentConfig());
    assertThat(cachedGoConfig.checkConfigFileValid().isValid(), Matchers.is(false));
}
// After a broken config, the next valid save must clear the invalid state and serve
// the freshly saved configuration.
@Test
public void shouldClearInvalidExceptionWhenConfigErrorsAreFixed() throws Exception {
configHelper.writeXmlToConfigFile("invalid-xml");
cachedGoConfig.forceReload();
cachedGoConfig.currentConfig();
assertThat(cachedGoConfig.checkConfigFileValid().isValid(), Matchers.is(false));
configHelper.addAgent("hostname", "uuid2");//some valid change
CruiseConfig cruiseConfig = cachedGoConfig.currentConfig();
assertThat(cruiseConfig.agents().size(), Matchers.is(2));
assertThat(cachedGoConfig.checkConfigFileValid().isValid(), Matchers.is(true));
}
// An invalid config file must surface an INVALID_CRUISE_CONFIG_XML error in server health.
@Test
public void shouldSetServerHealthMessageWhenConfigFileIsInvalid() throws IOException {
configHelper.writeXmlToConfigFile("invalid-xml");
cachedGoConfig.forceReload();
assertThat(cachedGoConfig.checkConfigFileValid().isValid(), Matchers.is(false));
List<ServerHealthState> serverHealthStates = serverHealthService.logs();
assertThat(serverHealthStates.isEmpty(), is(false));
assertThat(serverHealthStates.contains(ServerHealthState.error(GoConfigService.INVALID_CRUISE_CONFIG_XML, "Error on line 1: Content is not allowed in prolog.", HealthStateType.invalidConfig())), is(true));
}
// A successful reload of a valid config must clear any pre-existing invalid-config
// health message.
@Test
public void shouldClearServerHealthMessageWhenConfigFileIsValid() throws IOException {
serverHealthService.update(ServerHealthState.error(GoConfigService.INVALID_CRUISE_CONFIG_XML, "Error on line 1: Content is not allowed in prolog.", HealthStateType.invalidConfig()));
Assert.assertThat(findMessageFor(HealthStateType.invalidConfig()).isEmpty(), is(false));
configHelper.writeXmlToConfigFile(ConfigFileFixture.TASKS_WITH_CONDITION);
cachedGoConfig.forceReload();
Assert.assertThat(cachedGoConfig.checkConfigFileValid().isValid(), Matchers.is(true));
Assert.assertThat(findMessageFor(HealthStateType.invalidConfig()).isEmpty(), is(true));
}
// With no previously cached config (field forced to null via reflection) and an invalid
// file on disk, currentConfig falls back to an empty BasicCruiseConfig.
@Test
public void shouldReturnDefaultCruiseConfigIfLoadingTheConfigFailsForTheFirstTime() throws Exception {
ReflectionUtil.setField(cachedGoConfig, "currentConfig", null);
configHelper.writeXmlToConfigFile("invalid-xml");
Assert.assertThat(cachedGoConfig.currentConfig(), Matchers.<CruiseConfig>is(new BasicCruiseConfig()));
}
// currentConfig returns the param-interpolated view; loadForEditing returns the raw
// #{param} placeholders for the same material.
@Test
public void shouldGetConfigForEditAndRead() throws Exception {
CruiseConfig cruiseConfig = configHelper.load();
addPipelineWithParams(cruiseConfig);
configHelper.writeConfigFile(cruiseConfig);
PipelineConfig config = cachedGoConfig.currentConfig().pipelineConfigByName(new CaseInsensitiveString("mingle"));
HgMaterialConfig hgMaterialConfig = (HgMaterialConfig) byFolder(config.materialConfigs(), "folder");
Assert.assertThat(hgMaterialConfig.getUrl(), Matchers.is("http://hg-server/repo-name"));
config = cachedGoConfig.loadForEditing().pipelineConfigByName(new CaseInsensitiveString("mingle"));
hgMaterialConfig = (HgMaterialConfig) byFolder(config.materialConfigs(), "folder");
Assert.assertThat(hgMaterialConfig.getUrl(), Matchers.is("http://#{foo}/#{bar}"));
}
// Saving fresh XML must populate both views: interpolated (currentConfig / holder.config)
// and raw-for-edit (loadForEditing / holder.configForEdit).
@Test
public void shouldLoadConfigForReadAndEditWhenNewXMLIsWritten() throws Exception {
String pipelineName = "mingle";
cachedGoConfig.save(configXmlWithPipeline(pipelineName), false);
PipelineConfig reloadedPipelineConfig = cachedGoConfig.currentConfig().pipelineConfigByName(new CaseInsensitiveString(pipelineName));
HgMaterialConfig hgMaterialConfig = (HgMaterialConfig) byFolder(reloadedPipelineConfig.materialConfigs(), "folder");
Assert.assertThat(hgMaterialConfig.getUrl(), Matchers.is("http://hg-server/repo-name"));
reloadedPipelineConfig = cachedGoConfig.loadForEditing().pipelineConfigByName(new CaseInsensitiveString(pipelineName));
hgMaterialConfig = (HgMaterialConfig) byFolder(reloadedPipelineConfig.materialConfigs(), "folder");
Assert.assertThat(hgMaterialConfig.getUrl(), Matchers.is("http://#{foo}/#{bar}"));
GoConfigHolder configHolder = cachedGoConfig.loadConfigHolder();
reloadedPipelineConfig = configHolder.config.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
hgMaterialConfig = (HgMaterialConfig) byFolder(reloadedPipelineConfig.materialConfigs(), "folder");
Assert.assertThat(hgMaterialConfig.getUrl(), Matchers.is("http://hg-server/repo-name"));
reloadedPipelineConfig = configHolder.configForEdit.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
hgMaterialConfig = (HgMaterialConfig) byFolder(reloadedPipelineConfig.materialConfigs(), "folder");
Assert.assertThat(hgMaterialConfig.getUrl(), Matchers.is("http://#{foo}/#{bar}"));
}
@Test
public void shouldLoadConfigForReadAndEditWhenConfigIsUpdatedThoughACommand() throws Exception {
    // Updating through a command (rather than raw XML) must also keep params
    // unresolved in the editable view while the readable view sees the
    // substituted values.
    cachedGoConfig.writeWithLock(new UpdateConfigCommand() {
        // @Override added for consistency with the other anonymous commands in this file.
        @Override
        public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
            addPipelineWithParams(cruiseConfig);
            return cruiseConfig;
        }
    });
    // Read view: params resolved.
    PipelineConfig reloadedPipelineConfig = cachedGoConfig.currentConfig().pipelineConfigByName(new CaseInsensitiveString("mingle"));
    HgMaterialConfig hgMaterialConfig = (HgMaterialConfig) byFolder(reloadedPipelineConfig.materialConfigs(), "folder");
    Assert.assertThat(hgMaterialConfig.getUrl(), Matchers.is("http://hg-server/repo-name"));
    // Edit view: raw placeholders preserved.
    reloadedPipelineConfig = cachedGoConfig.loadForEditing().pipelineConfigByName(new CaseInsensitiveString("mingle"));
    hgMaterialConfig = (HgMaterialConfig) byFolder(reloadedPipelineConfig.materialConfigs(), "folder");
    Assert.assertThat(hgMaterialConfig.getUrl(), Matchers.is("http://#{foo}/#{bar}"));
}
// Builds a complete, minimal cruise-config XML document containing a single
// pipeline with two params (foo/bar) and an hg material whose URL references
// them, so tests can verify param substitution between read and edit views.
// The serverId/artifactsdir attributes satisfy schema-mandated server config.
private String configXmlWithPipeline(String pipelineName) {
return "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
"<cruise xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:noNamespaceSchemaLocation=\"cruise-config.xsd\" schemaVersion=\"" + GoConstants.CONFIG_SCHEMA_VERSION + "\">\n" +
" <server artifactsdir=\"artifactsDir\" serverId=\"dd8d0f5a-7e8d-4948-a1c7-ddcedbac15d0\" />\n" +
" <pipelines group=\"another\">\n" +
" <pipeline name=\"" + pipelineName + "\">\n" +
" <params>\n" +
" <param name=\"foo\">hg-server</param>\n" +
" <param name=\"bar\">repo-name</param>\n" +
" </params>\n" +
" <materials>\n" +
" <svn url=\"http://some/svn/url\" dest=\"svnDir\" materialName=\"url\" />\n" +
" <hg url=\"http://#{foo}/#{bar}\" dest=\"folder\" />\n" +
" </materials>\n" +
" <stage name=\"dev\">\n" +
" <jobs>\n" +
" <job name=\"ant\" />\n" +
" </jobs>\n" +
" </stage>\n" +
" </pipeline>\n" +
" </pipelines>\n" +
"</cruise>\n" +
"\n";
}
@Test
public void shouldReturnUpdatedStatusWhenConfigIsUpdatedWithLatestCopy() {
    // Saving against the md5 of the live config means no concurrent edit
    // happened, so the save must complete as a plain UPDATED (no merge).
    final String latestMd5 = cachedGoConfig.currentConfig().getMd5();
    ConfigSaveState saveState = cachedGoConfig.writeWithLock(new NoOverwriteUpdateConfigCommand() {
        @Override
        public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
            cruiseConfig.addPipeline("g1", PipelineConfigMother.createPipelineConfig("p1", "s1", "j1"));
            return cruiseConfig;
        }

        @Override
        public String unmodifiedMd5() {
            return latestMd5;
        }
    });
    assertThat(saveState, is(ConfigSaveState.UPDATED));
}
@Test
public void shouldReturnMergedStatusWhenConfigIsMergedWithStaleCopy() {
// Both commands claim the same baseline md5. After the first save succeeds,
// that md5 is stale, so the second save must go through merge and report MERGED.
final String md5 = cachedGoConfig.currentConfig().getMd5();
// First save: md5 is current -> plain UPDATED.
ConfigSaveState firstSaveState = cachedGoConfig.writeWithLock(new NoOverwriteUpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.addPipeline("g1", PipelineConfigMother.createPipelineConfig("p1", "s1", "j1"));
return cruiseConfig;
}
@Override
public String unmodifiedMd5() {
return md5;
}
});
assertThat(firstSaveState, is(ConfigSaveState.UPDATED));
// Second save: same md5 is now stale; the non-conflicting change (artifacts
// dir) should be merged rather than rejected.
ConfigSaveState secondSaveState = cachedGoConfig.writeWithLock(new NoOverwriteUpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.server().setArtifactsDir("something");
return cruiseConfig;
}
@Override
public String unmodifiedMd5() {
return md5;
}
});
assertThat(secondSaveState, is(ConfigSaveState.MERGED));
}
@Test
public void shouldNotAllowAGitMergeOfConcurrentChangesIfTheChangeCausesMergedPartialsToBecomeInvalid() {
// A remote (config-repo) pipeline depends on a stage of an upstream pipeline
// defined in the main XML. A merged save that renames that stage would make
// the remote partial invalid, so the merge must be rejected.
final String upstream = UUID.randomUUID().toString();
String remoteDownstream = "remote-downstream";
setupExternalConfigRepoWithDependencyMaterialOnPipelineInMainXml(upstream, remoteDownstream);
final String md5 = cachedGoConfig.currentConfig().getMd5();
// some random unrelated change to force a git merge workflow
cachedGoConfig.writeWithLock(new NoOverwriteUpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.server().setCommandRepositoryLocation("new_location");
return cruiseConfig;
}
@Override
public String unmodifiedMd5() {
return md5;
}
});
// Expectation must be registered before the failing save below.
thrown.expectMessage(String.format("Stage with name 'stage' does not exist on pipeline '%s', it is being referred to from pipeline 'remote-downstream' (%s at r1)", upstream, configRepo.getMaterialConfig().getDisplayName()));
// Stale-md5 save renaming the referenced stage -> merge must fail validation.
cachedGoConfig.writeWithLock(new NoOverwriteUpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.getPipelineConfigByName(new CaseInsensitiveString(upstream)).getFirstStageConfig().setName(new CaseInsensitiveString("new_name"));
return cruiseConfig;
}
@Override
public String unmodifiedMd5() {
return md5;
}
});
}
@Test
public void shouldMarkAPartialAsValidIfItBecomesValidBecauseOfNewerChangesInMainXml_GitMergeWorkflow() {
// A newer revision (r2) of the remote partial references a stage name that
// does not yet exist in the main XML, so it is known-but-invalid. Once a
// merged save renames the upstream stage to match, the partial must be
// promoted to the valid set.
final String upstream = UUID.randomUUID().toString();
String remoteDownstream = "remote-downstream";
setupExternalConfigRepoWithDependencyMaterialOnPipelineInMainXml(upstream, remoteDownstream);
// Clone the last partial, point its dependency at the (not yet existing)
// stage "new_name", and register it as revision r2 of the config repo.
PartialConfig partialWithStageRenamed = new Cloner().deepClone(goPartialConfig.lastPartials().get(0));
PipelineConfig pipelineInRemoteConfigRepo = partialWithStageRenamed.getGroups().get(0).getPipelines().get(0);
pipelineInRemoteConfigRepo.materialConfigs().getDependencyMaterial().setStageName(new CaseInsensitiveString("new_name"));
partialWithStageRenamed.setOrigin(new RepoConfigOrigin(configRepo, "r2"));
goPartialConfig.onSuccessPartialConfig(configRepo, partialWithStageRenamed);
final String md5 = cachedGoConfig.currentConfig().getMd5();
// some random unrelated change to force a git merge workflow
cachedGoConfig.writeWithLock(new NoOverwriteUpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.server().setCommandRepositoryLocation("new_location");
return cruiseConfig;
}
@Override
public String unmodifiedMd5() {
return md5;
}
});
// Stale-md5 save renaming the upstream stage to "new_name" satisfies the
// r2 partial's dependency, so the merge must succeed.
ConfigSaveState saveState = cachedGoConfig.writeWithLock(new NoOverwriteUpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.getPipelineConfigByName(new CaseInsensitiveString(upstream)).getFirstStageConfig().setName(new CaseInsensitiveString("new_name"));
return cruiseConfig;
}
@Override
public String unmodifiedMd5() {
return md5;
}
});
assertThat(saveState, is(ConfigSaveState.MERGED));
// The r2 partial is now the valid one, and both config views reflect the rename.
assertThat(cachedGoPartials.lastValidPartials().get(0).getGroups().first().get(0).materialConfigs().getDependencyMaterial().getStageName(), is(new CaseInsensitiveString("new_name")));
assertThat(goConfigService.getConfigForEditing().getPipelineConfigByName(new CaseInsensitiveString(upstream)).getFirstStageConfig().name(), is(new CaseInsensitiveString("new_name")));
assertThat(goConfigService.getCurrentConfig().getPipelineConfigByName(new CaseInsensitiveString(upstream)).getFirstStageConfig().name(), is(new CaseInsensitiveString("new_name")));
}
// Creates an upstream pipeline in the main XML and registers a remote
// (config-repo, revision r1) pipeline that has a dependency material on it.
private void setupExternalConfigRepoWithDependencyMaterialOnPipelineInMainXml(String upstream, String remoteDownstreamPipelineName) {
    PipelineConfig mainXmlPipeline = GoConfigMother.createPipelineConfigWithMaterialConfig(upstream, new GitMaterialConfig("FOO"));
    goConfigService.addPipeline(mainXmlPipeline, "default");
    PartialConfig remotePartial = PartialConfigMother.pipelineWithDependencyMaterial(
            remoteDownstreamPipelineName, mainXmlPipeline, new RepoConfigOrigin(configRepo, "r1"));
    goPartialConfig.onSuccessPartialConfig(configRepo, remotePartial);
}
@Test
public void shouldSaveConfigChangesWhenFullConfigIsBeingSavedFromConfigXmlTabAndAllKnownConfigRepoPartialsAreInvalid() throws Exception {
// A full-XML save (admin "Config XML" tab) must still succeed when every
// known config-repo partial is invalid: the invalid partials are kept in
// the known set but never promoted to valid.
cachedGoPartials.clear();
PartialConfig invalidPartial = PartialConfigMother.invalidPartial("invalid", new RepoConfigOrigin(configRepo, "revision1"));
goPartialConfig.onSuccessPartialConfig(configRepo, invalidPartial);
// Edit a copy of the current config and write it back through the file saver.
CruiseConfig updatedConfig = new Cloner().deepClone(goConfigService.getConfigForEditing());
updatedConfig.server().setCommandRepositoryLocation("foo");
String updatedXml = goFileConfigDataSource.configAsXml(updatedConfig, false);
FileUtils.writeStringToFile(new File(goConfigDao.fileLocation()), updatedXml, UTF_8);
GoConfigValidity validity = goConfigService.fileSaver(false).saveXml(updatedXml, goConfigDao.md5OfConfigFile());
assertThat(validity.isValid(), is(true));
// The invalid partial stays known but is never marked valid.
assertThat(cachedGoPartials.lastValidPartials().isEmpty(), is(true));
assertThat(cachedGoPartials.lastKnownPartials().contains(invalidPartial), is(true));
}
@Test
public void shouldAllowFallbackMergeAndSaveWhenKnownPartialHasAnInvalidEnvironmentThatRefersToAnUnknownPipeline() throws Exception {
// A known partial whose environment refers to a pipeline that does not
// exist must not block unrelated config updates: the save should fall back
// to merging without that partial and still succeed as UPDATED.
cachedGoPartials.clear();
PartialConfig partialConfigWithInvalidEnvironment = PartialConfigMother.withEnvironment("env", new RepoConfigOrigin(configRepo, "revision1"));
goPartialConfig.onSuccessPartialConfig(configRepo, partialConfigWithInvalidEnvironment);
ConfigSaveState state = cachedGoConfig.writeWithLock(new UpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.server().setCommandRepositoryLocation("newlocation");
return cruiseConfig;
}
});
assertThat(state, is(ConfigSaveState.UPDATED));
assertThat(goConfigService.getCurrentConfig().server().getCommandRepositoryLocation(), is("newlocation"));
}
@Test
public void shouldRemoveCorrespondingRemotePipelinesFromCachedGoConfigIfTheConfigRepoIsDeleted() {
    // Register two config repos, each contributing one remote pipeline.
    final ConfigRepoConfig repoConfig1 = new ConfigRepoConfig(MaterialConfigsMother.gitMaterialConfig("url1"), XmlPartialConfigProvider.providerName);
    final ConfigRepoConfig repoConfig2 = new ConfigRepoConfig(MaterialConfigsMother.gitMaterialConfig("url2"), XmlPartialConfigProvider.providerName);
    goConfigService.updateConfig(new UpdateConfigCommand() {
        @Override
        public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
            cruiseConfig.getConfigRepos().add(repoConfig1);
            cruiseConfig.getConfigRepos().add(repoConfig2);
            return cruiseConfig;
        }
    });
    PartialConfig partialConfigInRepo1 = PartialConfigMother.withPipeline("pipeline_in_repo1", new RepoConfigOrigin(repoConfig1, "repo1_r1"));
    PartialConfig partialConfigInRepo2 = PartialConfigMother.withPipeline("pipeline_in_repo2", new RepoConfigOrigin(repoConfig2, "repo2_r1"));
    goPartialConfig.onSuccessPartialConfig(repoConfig1, partialConfigInRepo1);
    goPartialConfig.onSuccessPartialConfig(repoConfig2, partialConfigInRepo2);
    // introduce an invalid change in repo1 so that there is a server health message corresponding to it
    PartialConfig invalidPartialInRepo1Revision2 = PartialConfigMother.invalidPartial("pipeline_in_repo1", new RepoConfigOrigin(repoConfig1, "repo1_r2"));
    goPartialConfig.onSuccessPartialConfig(repoConfig1, invalidPartialInRepo1Revision2);
    assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).size(), is(1));
    assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getMessage(), is("Invalid Merged Configuration"));
    assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getDescription(), is("1+ errors :: Invalid stage name ''. This must be alphanumeric and can contain underscores and periods (however, it cannot start with a period). The maximum allowed length is 255 characters.;; - Config-Repo: url1 at repo1_r2"));
    assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).isEmpty(), is(true));
    int countBeforeDeletion = cachedGoConfig.currentConfig().getConfigRepos().size();
    // Delete repo1; its pipelines, cached partials and health messages must all go.
    ConfigSaveState configSaveState = cachedGoConfig.writeWithLock(new UpdateConfigCommand() {
        @Override
        public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
            cruiseConfig.getConfigRepos().remove(repoConfig1);
            return cruiseConfig;
        }
    });
    assertThat(configSaveState, is(ConfigSaveState.UPDATED));
    assertThat(cachedGoConfig.currentConfig().getConfigRepos().size(), is(countBeforeDeletion - 1));
    assertThat(cachedGoConfig.currentConfig().getConfigRepos().contains(repoConfig2), is(true));
    assertThat(cachedGoConfig.currentConfig().getAllPipelineNames().contains(new CaseInsensitiveString("pipeline_in_repo1")), is(false));
    assertThat(cachedGoConfig.currentConfig().getAllPipelineNames().contains(new CaseInsensitiveString("pipeline_in_repo2")), is(true));
    assertThat(cachedGoPartials.lastKnownPartials().size(), is(1));
    assertThat(((RepoConfigOrigin) cachedGoPartials.lastKnownPartials().get(0).getOrigin()).getMaterial().getFingerprint().equals(repoConfig2.getMaterialConfig().getFingerprint()), is(true));
    // Anonymous Predicate classes replaced with lambdas — the file already
    // targets Java 8 (streams in use); behavior is unchanged.
    assertThat(cachedGoPartials.lastKnownPartials().stream()
            .filter(item -> ((RepoConfigOrigin) item.getOrigin()).getMaterial().getFingerprint().equals(repoConfig1.getMaterialConfig().getFingerprint()))
            .findFirst().orElse(null), is(nullValue()));
    assertThat(cachedGoPartials.lastValidPartials().size(), is(1));
    assertThat(((RepoConfigOrigin) cachedGoPartials.lastValidPartials().get(0).getOrigin()).getMaterial().getFingerprint().equals(repoConfig2.getMaterialConfig().getFingerprint()), is(true));
    assertThat(cachedGoPartials.lastValidPartials().stream()
            .filter(item -> ((RepoConfigOrigin) item.getOrigin()).getMaterial().getFingerprint().equals(repoConfig1.getMaterialConfig().getFingerprint()))
            .findFirst().orElse(null), is(nullValue()));
    assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).isEmpty(), is(true));
    assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).isEmpty(), is(true));
}
@Test
public void shouldUpdateConfigWhenPartialsAreNotConfigured() throws GitAPIException, IOException {
// Full-config save with no config-repo partials: a new git commit is made,
// both config views carry the committed md5, and the file on disk matches
// the committed content.
String gitShaBeforeSave = configRepository.getCurrentRevCommit().getName();
BasicCruiseConfig config = GoConfigMother.configWithPipelines("pipeline1");
ConfigSaveState state = cachedGoConfig.writeFullConfigWithLock(new FullConfigUpdateCommand(config, goConfigService.configFileMd5()));
String gitShaAfterSave = configRepository.getCurrentRevCommit().getName();
String configXmlFromConfigFolder = FileUtils.readFileToString(new File(goConfigDao.fileLocation()), UTF_8);
assertThat(state, is(ConfigSaveState.UPDATED));
assertThat(cachedGoConfig.loadForEditing(), is(config));
// A fresh commit must have been created.
assertNotEquals(gitShaBeforeSave, gitShaAfterSave);
assertThat(cachedGoConfig.loadForEditing().getMd5(), is(configRepository.getCurrentRevision().getMd5()));
assertThat(cachedGoConfig.currentConfig().getMd5(), is(configRepository.getCurrentRevision().getMd5()));
assertThat(configXmlFromConfigFolder, is(configRepository.getCurrentRevision().getContent()));
}
@Test
public void writeFullConfigWithLockShouldUpdateReloadStrategyToEnsureReloadIsSkippedInAbsenceOfConfigFileChanges() throws GitAPIException, IOException {
    // After a full-config save the reload strategy must know the on-disk
    // state, so a forced reload with no file changes creates no new commit.
    BasicCruiseConfig newConfig = GoConfigMother.configWithPipelines("pipeline1");
    ConfigSaveState saveState = cachedGoConfig.writeFullConfigWithLock(new FullConfigUpdateCommand(newConfig, goConfigService.configFileMd5()));
    assertThat(saveState, is(ConfigSaveState.UPDATED));
    String shaAfterSave = configRepository.getCurrentRevCommit().getName();

    cachedGoConfig.forceReload();

    String shaAfterReload = configRepository.getCurrentRevCommit().getName();
    assertThat(shaAfterReload, is(shaAfterSave));
}
@Test
public void shouldUpdateConfigWhenPartialsAreConfigured() throws GitAPIException, IOException {
// Full-config save while a valid partial exists: the save commits, both
// views carry the committed md5, and the partial stays in both the valid
// and known sets.
String gitShaBeforeSave = configRepository.getCurrentRevCommit().getName();
PartialConfig validPartial = PartialConfigMother.withPipeline("remote_pipeline", new RepoConfigOrigin(configRepo, "revision1"));
goPartialConfig.onSuccessPartialConfig(configRepo, validPartial);
assertThat(cachedGoPartials.lastValidPartials().contains(validPartial), is(true));
assertThat(cachedGoPartials.lastKnownPartials().contains(validPartial), is(true));
// Benign local edit on a deep copy of the editable config.
CruiseConfig config = new Cloner().deepClone(cachedGoConfig.loadForEditing());
config.addEnvironment(UUID.randomUUID().toString());
ConfigSaveState state = cachedGoConfig.writeFullConfigWithLock(new FullConfigUpdateCommand(config, goConfigService.configFileMd5()));
String gitShaAfterSave = configRepository.getCurrentRevCommit().getName();
String configXmlFromConfigFolder = FileUtils.readFileToString(new File(goConfigDao.fileLocation()), UTF_8);
assertThat(state, is(ConfigSaveState.UPDATED));
assertThat(cachedGoConfig.loadForEditing(), is(config));
assertNotEquals(gitShaBeforeSave, gitShaAfterSave);
assertThat(cachedGoConfig.loadForEditing().getMd5(), is(configRepository.getCurrentRevision().getMd5()));
assertThat(cachedGoConfig.currentConfig().getMd5(), is(configRepository.getCurrentRevision().getMd5()));
assertThat(configXmlFromConfigFolder, is(configRepository.getCurrentRevision().getContent()));
assertThat(cachedGoPartials.lastValidPartials().contains(validPartial), is(true));
assertThat(cachedGoPartials.lastKnownPartials().contains(validPartial), is(true));
}
@Test
public void shouldUpdateConfigWithNoValidPartialsAndInvalidKnownPartials() throws GitAPIException, IOException {
// Full-config save while the only known partial is invalid: the save still
// commits, and the invalid partial remains known but never becomes valid.
String gitShaBeforeSave = configRepository.getCurrentRevCommit().getName();
PartialConfig invalidPartial = PartialConfigMother.invalidPartial("invalid", new RepoConfigOrigin(configRepo, "revision1"));
goPartialConfig.onSuccessPartialConfig(configRepo, invalidPartial);
assertTrue(cachedGoPartials.lastValidPartials().isEmpty());
assertTrue(cachedGoPartials.lastKnownPartials().contains(invalidPartial));
// Benign local edit on a deep copy of the editable config.
CruiseConfig config = new Cloner().deepClone(cachedGoConfig.loadForEditing());
config.addEnvironment(UUID.randomUUID().toString());
ConfigSaveState state = cachedGoConfig.writeFullConfigWithLock(new FullConfigUpdateCommand(config, goConfigService.configFileMd5()));
String gitShaAfterSave = configRepository.getCurrentRevCommit().getName();
String configXmlFromConfigFolder = FileUtils.readFileToString(new File(goConfigDao.fileLocation()), UTF_8);
assertThat(state, is(ConfigSaveState.UPDATED));
assertThat(cachedGoConfig.loadForEditing(), is(config));
assertNotEquals(gitShaBeforeSave, gitShaAfterSave);
assertThat(cachedGoConfig.loadForEditing().getMd5(), is(configRepository.getCurrentRevision().getMd5()));
assertThat(cachedGoConfig.currentConfig().getMd5(), is(configRepository.getCurrentRevision().getMd5()));
assertThat(configXmlFromConfigFolder, is(configRepository.getCurrentRevision().getContent()));
assertTrue(cachedGoPartials.lastValidPartials().isEmpty());
assertTrue(cachedGoPartials.lastKnownPartials().contains(invalidPartial));
}
@Test
public void shouldUpdateConfigWithValidPartialsAndInvalidKnownPartials() throws GitAPIException, IOException {
// Full-config save with a mix of partials: an older valid one and a newer
// invalid one from the same repo. The save commits, the valid partial keeps
// being used, and the invalid one stays only in the known set.
String gitShaBeforeSave = configRepository.getCurrentRevCommit().getName();
PartialConfig validPartial = PartialConfigMother.withPipeline("remote_pipeline", new RepoConfigOrigin(configRepo, "revision1"));
PartialConfig invalidPartial = PartialConfigMother.invalidPartial("invalid", new RepoConfigOrigin(configRepo, "revision2"));
goPartialConfig.onSuccessPartialConfig(configRepo, validPartial);
goPartialConfig.onSuccessPartialConfig(configRepo, invalidPartial);
assertTrue(cachedGoPartials.lastValidPartials().contains(validPartial));
assertTrue(cachedGoPartials.lastKnownPartials().contains(invalidPartial));
// Benign local edit on a deep copy of the editable config.
CruiseConfig config = new Cloner().deepClone(cachedGoConfig.loadForEditing());
config.addEnvironment(UUID.randomUUID().toString());
ConfigSaveState state = cachedGoConfig.writeFullConfigWithLock(new FullConfigUpdateCommand(config, goConfigService.configFileMd5()));
String gitShaAfterSave = configRepository.getCurrentRevCommit().getName();
String configXmlFromConfigFolder = FileUtils.readFileToString(new File(goConfigDao.fileLocation()), UTF_8);
assertThat(state, is(ConfigSaveState.UPDATED));
assertThat(cachedGoConfig.loadForEditing(), is(config));
assertNotEquals(gitShaBeforeSave, gitShaAfterSave);
assertThat(cachedGoConfig.loadForEditing().getMd5(), is(configRepository.getCurrentRevision().getMd5()));
assertThat(cachedGoConfig.currentConfig().getMd5(), is(configRepository.getCurrentRevision().getMd5()));
assertThat(configXmlFromConfigFolder, is(configRepository.getCurrentRevision().getContent()));
assertTrue(cachedGoPartials.lastValidPartials().contains(validPartial));
assertTrue(cachedGoPartials.lastKnownPartials().contains(invalidPartial));
}
@Test
public void shouldErrorOutOnUpdateConfigWithValidPartials_WithMainConfigBreakingPartials() throws GitAPIException, IOException {
// Removing the "default" group deletes the upstream pipeline that the valid
// remote partial depends on, so the full-config save must fail and leave
// every piece of state (git, cache, file, partials) untouched.
setupExternalConfigRepoWithDependencyMaterialOnPipelineInMainXml("upstream", "downstream");
String gitShaBeforeSave = configRepository.getCurrentRevCommit().getName();
CruiseConfig originalConfig = cachedGoConfig.loadForEditing();
CruiseConfig editedConfig = new Cloner().deepClone(originalConfig);
editedConfig.getGroups().remove(editedConfig.findGroup("default"));
try {
cachedGoConfig.writeFullConfigWithLock(new FullConfigUpdateCommand(editedConfig, goConfigService.configFileMd5()));
fail("Expected the test to fail");
} catch (Exception e) {
// Nothing may have changed: same commit, same cached config, same file,
// and the partial still valid at its original revision r1.
String gitShaAfterSave = configRepository.getCurrentRevCommit().getName();
String configXmlFromConfigFolder = FileUtils.readFileToString(new File(goConfigDao.fileLocation()), UTF_8);
assertThat(cachedGoConfig.loadForEditing(), is(originalConfig));
assertEquals(gitShaBeforeSave, gitShaAfterSave);
assertThat(cachedGoConfig.loadForEditing().getMd5(), is(configRepository.getCurrentRevision().getMd5()));
assertThat(cachedGoConfig.currentConfig().getMd5(), is(configRepository.getCurrentRevision().getMd5()));
assertThat(configXmlFromConfigFolder, is(configRepository.getCurrentRevision().getContent()));
RepoConfigOrigin origin = (RepoConfigOrigin) cachedGoPartials.lastValidPartials().get(0).getOrigin();
assertThat(origin.getRevision(), is("r1"));
}
}
@Test
public void shouldMarkAPreviousInvalidPartialAsValid_IfMainXMLSatisfiesTheDependency() throws GitAPIException, IOException {
// A remote partial depends on a pipeline that does not yet exist in the
// main XML, so it is known-but-invalid. Adding that pipeline via a
// full-config save must promote the partial to the valid set.
String gitShaBeforeSave = configRepository.getCurrentRevCommit().getName();
PipelineConfig upstream = PipelineConfigMother.createPipelineConfig("upstream", "S", "J");
PartialConfig partialConfig = PartialConfigMother.pipelineWithDependencyMaterial("downstream", upstream, new RepoConfigOrigin(configRepo, "r2"));
goPartialConfig.onSuccessPartialConfig(configRepo, partialConfig);
assertTrue(cachedGoPartials.lastKnownPartials().contains(partialConfig));
assertTrue(cachedGoPartials.lastValidPartials().isEmpty());
// Add the missing upstream pipeline to the main XML.
CruiseConfig originalConfig = cachedGoConfig.loadForEditing();
CruiseConfig editedConfig = new Cloner().deepClone(originalConfig);
editedConfig.addPipeline("default", upstream);
ConfigSaveState state = cachedGoConfig.writeFullConfigWithLock(new FullConfigUpdateCommand(editedConfig, goConfigService.configFileMd5()));
String gitShaAfterSave = configRepository.getCurrentRevCommit().getName();
String configXmlFromConfigFolder = FileUtils.readFileToString(new File(goConfigDao.fileLocation()), UTF_8);
assertThat(state, is(ConfigSaveState.UPDATED));
assertThat(cachedGoConfig.loadForEditing(), is(editedConfig));
assertNotEquals(gitShaBeforeSave, gitShaAfterSave);
assertThat(cachedGoConfig.loadForEditing().getMd5(), is(configRepository.getCurrentRevision().getMd5()));
assertThat(cachedGoConfig.currentConfig().getMd5(), is(configRepository.getCurrentRevision().getMd5()));
assertThat(configXmlFromConfigFolder, is(configRepository.getCurrentRevision().getContent()));
// The r2 partial is now both known and valid.
RepoConfigOrigin origin = (RepoConfigOrigin) cachedGoPartials.lastValidPartials().get(0).getOrigin();
assertThat(origin.getRevision(), is("r2"));
assertTrue(cachedGoPartials.lastKnownPartials().contains(partialConfig));
assertTrue(cachedGoPartials.lastValidPartials().contains(partialConfig));
}
// Adds a "mingle" pipeline (group "another") whose hg material URL is built
// from two params: http://#{foo}/#{bar} resolves to http://hg-server/repo-name.
private void addPipelineWithParams(CruiseConfig cruiseConfig) {
    PipelineConfig mingle = PipelineConfigMother.createPipelineConfig("mingle", "dev", "ant");
    mingle.addParam(new ParamConfig("foo", "hg-server"));
    mingle.addParam(new ParamConfig("bar", "repo-name"));
    mingle.addMaterialConfig(MaterialConfigsMother.hgMaterialConfig("http://#{foo}/#{bar}", "folder"));
    cruiseConfig.addPipeline("another", mingle);
}
/**
 * Returns the first SCM material in the given collection whose destination
 * folder equals {@code folder}, or {@code null} when none matches.
 */
public MaterialConfig byFolder(MaterialConfigs materialConfigs, String folder) {
    return materialConfigs.stream()
            .filter(material -> material instanceof ScmMaterialConfig && Objects.equals(folder, material.getFolder()))
            .findFirst()
            .orElse(null);
}
// Builds a command that overwrites the resource list of the first registered
// agent with the given comma-separated resources string.
private UpdateConfigCommand updateFirstAgentResources(final String resources) {
    return new UpdateConfigCommand() {
        // @Override added for consistency with the other anonymous commands in this file.
        @Override
        public CruiseConfig update(CruiseConfig cruiseConfig) {
            AgentConfig agentConfig = cruiseConfig.agents().get(0);
            agentConfig.setResourceConfigs(new ResourceConfigs(resources));
            return cruiseConfig;
        }
    };
}
// Removes the named partial file/dir from the external config repo and
// commits the deletion so the repo reflects the change.
private void deletePartial(String partial) {
FileUtils.deleteQuietly(new File(externalConfigRepo, partial));
gitAddDotAndCommit(externalConfigRepo);
}
// Checks the named classpath partial into the test's shared external config
// repo. (The previous pointless local alias of the field has been removed.)
private void checkinPartial(String partial) throws IOException {
    checkInPartial(partial, externalConfigRepo);
}
// Copies the named classpath resource (file or directory) into the given
// repo working directory and commits the result.
private void checkInPartial(String partial, File externalConfigRepo) throws IOException {
    File source = new ClassPathResource(partial).getFile();
    if (source.isDirectory()) {
        FileUtils.copyDirectory(source, externalConfigRepo);
    } else {
        FileUtils.copyFileToDirectory(source, externalConfigRepo);
    }
    gitAddDotAndCommit(externalConfigRepo);
}
// Listener stub that counts onConfigChange callbacks.
// NOTE(review): invocationCount has no accessor here — presumably read by
// tests earlier in this file; confirm before removing.
private class ConfigChangeListenerStub implements ConfigChangedListener {
private int invocationCount = 0;
@Override
public void onConfigChange(CruiseConfig newCruiseConfig) {
invocationCount++;
}
}
// Seeds the repo from the default "external_git_config_repo" test fixture;
// returns the short SHA of the initial commit.
private String setupExternalConfigRepo(File configRepo) throws IOException {
    return setupExternalConfigRepo(configRepo, "external_git_config_repo");
}
// Copies the named classpath fixture into configRepo, turns it into a git
// repository (gpg signing disabled so commits work on signed-by-default
// hosts), commits everything, and returns the short SHA of that commit.
private String setupExternalConfigRepo(File configRepo, String configRepoTestResource) throws IOException {
ClassPathResource resource = new ClassPathResource(configRepoTestResource);
FileUtils.copyDirectory(resource.getFile(), configRepo);
CommandLine.createCommandLine("git").withEncoding("utf-8").withArg("init").withArg(configRepo.getAbsolutePath()).runOrBomb("");
CommandLine.createCommandLine("git").withEncoding("utf-8").withArgs("config", "commit.gpgSign", "false").withWorkingDir(configRepo.getAbsoluteFile()).runOrBomb("");
gitAddDotAndCommit(configRepo);
// %h = abbreviated commit hash of the commit just created.
ConsoleResult consoleResult = CommandLine.createCommandLine("git").withEncoding("utf-8").withArg("log").withArg("-1").withArg("--pretty=format:%h").withWorkingDir(configRepo).runOrBomb("");
return consoleResult.outputAsString();
}
// Stages everything in the repo, sets a throwaway committer identity (must
// happen before the commit so git does not reject it), and commits.
private void gitAddDotAndCommit(File configRepo) {
    runGit(configRepo, "add", "-A", ".");
    runGit(configRepo, "config", "user.email", "go_test@go_test.me");
    runGit(configRepo, "config", "user.name", "user");
    runGit(configRepo, "commit", "-m", "initial commit");
}

// Runs a single git command in the given working directory, failing the test
// on a non-zero exit (runOrBomb).
private void runGit(File workingDir, String... args) {
    CommandLine.createCommandLine("git").withEncoding("utf-8").withArgs(args).withWorkingDir(workingDir).runOrBomb("");
}
}
| |
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.finders;
import static org.scalatest.finders.LocationUtils.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class FlatSpecFinder implements Finder {
// Finds the test selection for the given AST node by walking up the tree:
// a ConstructorBlock selects every test in the spec; an "of"/"in"/"should"/
// "must" invocation selects the test(s) under its enclosing scope. Returns
// null when no selectable node is found on the path to the root.
public Selection find(AstNode node) {
Selection result = null;
while (result == null) {
if (node instanceof ConstructorBlock)
result = getAllTestSelection(node.className(), node.children());
else if (node instanceof MethodInvocation) {
MethodInvocation invocation = (MethodInvocation) node;
if (invocation.name().equals("of") || invocation.name().equals("in") || invocation.name().equals("should") || invocation.name().equals("must")) {
// Only invocations inside the class constructor are candidates.
ConstructorBlock constructor = getParentOfType(node, ConstructorBlock.class);
if (constructor != null) {
// Locate the scope ("X should ...") this invocation belongs to,
// which supplies the prefix of the test name.
AstNode scopeNode = getScopeNode(node, constructor.children());
if (scopeNode != null) {
String prefix = getPrefix((MethodInvocation) scopeNode);
result = getNodeTestSelection(node, prefix, constructor.children());
}
}
}
}
// Not resolvable here: retry from the parent, or give up at the root.
if (result == null) {
if (node.parent() != null)
node = node.parent();
else
break;
}
}
return result;
}
// Collects every test in the spec: walk the constructor body in order; each
// scope node updates the current prefix, and every "in" invocation seen under
// a known prefix contributes one test name.
private Selection getAllTestSelection(String className, AstNode[] constructorChildren) {
    List<String> testNames = new ArrayList<String>();
    String prefix = null;
    for (AstNode child : constructorChildren) {
        if (isScope(child)) {
            prefix = getPrefix((MethodInvocation) child);
        }
        if (prefix != null && child instanceof MethodInvocation && child.name().equals("in")) {
            testNames.add(getTestName(prefix, (MethodInvocation) child));
        }
    }
    return new Selection(className, className, testNames.toArray(new String[testNames.size()]));
}
// Extracts the scope prefix (the subject of the behaviour) from a scope
// invocation: for `behavior of "X"` it is the first argument; for
// `"X" should/must ...` chains it walks through the should/must target until
// it reaches the subject expression.
private String getPrefix(MethodInvocation invocation) {
String result = null;
while (result == null) {
if (invocation.name().equals("of"))
// `behavior of "X"`: the subject is the argument, not the target.
result = invocation.args()[0].toString();
else {
if (invocation.target() instanceof MethodInvocation) {
MethodInvocation invocationTarget = (MethodInvocation) invocation.target();
// Step through should/must to the underlying subject.
if (invocationTarget.name().equals("should") || invocationTarget.name().equals("must"))
invocation = invocationTarget;
else
result = invocation.target().toString();
}
else
result = invocation.target().toString();
}
}
return result;
}
// Finds the scope node governing `node`: first climb to the top-level child
// of the constructor that contains `node`; if that child is itself a scope,
// return it, otherwise scan backwards through the constructor children that
// precede it and return the nearest earlier scope (or null if none exists).
private AstNode getScopeNode(AstNode node, AstNode[] constructorChildren) {
AstNode topLevelNode = null;
while (node != null && topLevelNode == null) {
if (node.parent() instanceof ConstructorBlock)
topLevelNode = node;
else
node = node.parent();
}
if (topLevelNode != null) {
if (isScope(topLevelNode))
return topLevelNode;
else {
// Collect the constructor children that appear before topLevelNode.
List<AstNode> beforeTopLevelNodeList = new ArrayList<AstNode>();
for (AstNode child : constructorChildren) {
if (!child.equals(topLevelNode))
beforeTopLevelNodeList.add(child);
else
break;
}
// Nearest preceding scope wins.
AstNode scopeNode = null;
for(int i=beforeTopLevelNodeList.size() - 1; i >= 0; i--) {
AstNode tnode = beforeTopLevelNodeList.get(i);
if (isScope(tnode)) {
scopeNode = tnode;
break;
}
}
return scopeNode;
}
}
else
return null;
}
// A node opens a new scope when it is a `behavior of`, an `"X" should/must`
// clause, or an "in" invocation whose target is itself a should/must clause.
private boolean isScope(AstNode node) {
    if (!(node instanceof MethodInvocation))
        return false;
    MethodInvocation invocation = (MethodInvocation) node;
    if (invocation.name().equals("of") || isScopeShould(invocation))
        return true;
    // `instanceof` is false for null, so no separate null check is needed.
    return invocation.name().equals("in")
            && invocation.target() instanceof MethodInvocation
            && isScopeShould((MethodInvocation) invocation.target());
}
// A should/must invocation starts a scope when it has at least one argument
// and a real subject as target ("it" refers back to an existing scope).
private boolean isScopeShould(MethodInvocation invocation) {
    String name = invocation.name();
    if (!name.equals("should") && !name.equals("must"))
        return false;
    if (invocation.args().length == 0)
        return false;
    AstNode target = invocation.target();
    return target != null && !target.toString().equals("it");
}
// Builds the Selection for a specific node:
// - ConstructorBlock: every test in the block under the given prefix;
// - "of": all tests between this scope and the next scope in the constructor;
// - "should"/"must": the single test of the enclosing "in" parent, if any;
// - "in": that single test;
// - anything else: null.
private Selection getNodeTestSelection(AstNode node, String prefix, AstNode[] constructorChildren) {
if (node instanceof ConstructorBlock) {
List<String> testNames = getTestNamesFromChildren(prefix, Arrays.asList(node.children()));
// Empty prefix falls back to the class name as display name.
return new Selection(node.className(), prefix.length() > 0 ? prefix : node.className(), testNames.toArray(new String[testNames.size()]));
}
else if (node instanceof MethodInvocation) {
MethodInvocation invocation = (MethodInvocation) node;
String name = invocation.name();
if (name.equals("of")) {
// Take the siblings after this scope up to (not including) the next scope.
List<AstNode> constructorChildrenList = Arrays.asList(constructorChildren);
int nodeIdx = constructorChildrenList.indexOf(node);
if (nodeIdx >= 0) {
List<AstNode> startList = constructorChildrenList.subList(nodeIdx + 1, constructorChildrenList.size());
List<AstNode> subList = new ArrayList<AstNode>();
for (AstNode snode : startList) {
if (!isScope(snode))
subList.add(snode);
else
break;
}
List<String> testNames = getTestNamesFromChildren(prefix, subList);
return new Selection(node.className(), prefix, testNames.toArray(new String[testNames.size()]));
}
else
return null;
}
else if (name.equals("should") || name.equals("must")) {
// A bare should/must clause only selects a test when it is wrapped
// by an "in" invocation.
AstNode parent = invocation.parent();
if (parent instanceof MethodInvocation && parent.name().equals("in")) {
String testName = getTestName(prefix, (MethodInvocation) parent);
return new Selection(invocation.className(), testName, new String[] { testName });
}
else
return null;
}
else if (name.equals("in")) {
String testName = getTestName(prefix, invocation);
return new Selection(invocation.className(), testName, new String[] { testName });
}
else
return null;
}
else
return null;
}
// Collects the test name of every "in" invocation among `children`,
// prefixing each with the current scope text.
private List<String> getTestNamesFromChildren(String prefix, List<AstNode> children) {
  // Only "in" invocations register runnable tests.
  Set<String> validNames = new HashSet<String>();
  validNames.add("in");
  List<String> testNames = new ArrayList<String>();
  for (AstNode child : children) {
    if (!(child instanceof MethodInvocation))
      continue;
    if (!isValidName(child.name(), validNames))
      continue;
    testNames.add(getTestName(prefix, (MethodInvocation) child));
  }
  return testNames;
}
/**
 * Returns the textual fragment for {@code target} used when composing a test
 * name: "should &lt;arg&gt;" / "must &lt;arg&gt;" for should/must invocations,
 * otherwise the target's own string form. {@code postfix} is the fallback when
 * there is no target; {@code prefix} is currently unused but kept for
 * interface compatibility with callers.
 */
private String getTargetString(AstNode target, String prefix, String postfix) {
  if (target == null)
    return postfix;
  if (target instanceof MethodInvocation) {
    MethodInvocation invocation = (MethodInvocation) target;
    String name = invocation.name();
    // Guard args() length before indexing: the previous code threw
    // ArrayIndexOutOfBoundsException for an argument-less should/must
    // invocation. The Scala original applied the same length check.
    if ((name.equals("should") || name.equals("must")) && invocation.args().length > 0)
      return name + " " + invocation.args()[0];
  }
  return target.toString();
}
// A full test name is the scope prefix, a space, then the invocation's
// target text (empty postfix when the invocation has no target).
private String getTestName(String prefix, MethodInvocation invocation) {
  String targetText = getTargetString(invocation.target(), prefix, "");
  return prefix + " " + targetText;
}
}
/*package org.scalatest.finders
import LocationUtils._
import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer
class FlatSpecFinder extends Finder {
def find(node: AstNode): Option[Selection] = {
node match {
case constructor: ConstructorBlock =>
getAllTestSelection(node.className, constructor.children)
case invocation: MethodInvocation
if invocation.name == "of" || invocation.name == "in" || invocation.name == "should" =>
val constructorOpt: Option[ConstructorBlock] = node match {
case constructor: ConstructorBlock => Some(constructor)
case _ =>
getParentOfType(node, classOf[ConstructorBlock])
}
constructorOpt match {
case Some(constructor) =>
val scopeNodeOpt = getScopeNode(node, constructor.children)
scopeNodeOpt match {
case Some(scopeNode) =>
val prefix = getPrefix(scopeNode.asInstanceOf[MethodInvocation])
getNodeTestSelection(node, prefix, constructor.children)
case None =>
if (node.parent != null)
find(node.parent)
else
None
}
case None => None
}
case _ =>
if (node.parent != null)
find(node.parent)
else
None
}
}
private def getAllTestSelection(className: String, constructorChildren: Array[AstNode]) = {
var prefix: String = null
val listBuffer = new ListBuffer[String]()
for (child <- constructorChildren) {
if (isScope(child))
prefix = getPrefix(child.asInstanceOf[MethodInvocation])
if(prefix != null && child.isInstanceOf[MethodInvocation] && child.name == "in")
listBuffer += getTestName(prefix, child.asInstanceOf[MethodInvocation])
}
Some(new Selection(className, className, listBuffer.toArray))
}
@tailrec
private def getPrefix(scopeInvocation: MethodInvocation): String = {
if (scopeInvocation.name == "of")
scopeInvocation.args(0).toString
else {
scopeInvocation.target match {
case inInvocation @ MethodInvocation(className, target, parent, children, "should", args) => // in
getPrefix(inInvocation)
case _ =>
scopeInvocation.target.toString
}
}
}
private def getScopeNode(node: AstNode, constructorChildren: Array[AstNode]): Option[AstNode] = {
@tailrec
def getTopLevelNode(node: AstNode): AstNode =
if (node.parent.isInstanceOf[ConstructorBlock])
node
else
getTopLevelNode(node.parent)
val topLevelNode = getTopLevelNode(node)
if (isScope(topLevelNode))
return Some(topLevelNode)
else
constructorChildren.takeWhile(_ != topLevelNode).reverse.find(isScope(_))
}
private def isScope(node: AstNode): Boolean = {
def isScopeShould(invocation: MethodInvocation) = invocation.name == "should" && invocation.args.length > 0 && invocation.target != null && invocation.target.toString != "it"
node match {
case invocation: MethodInvocation //(className, target, parent, children, name, args)
if invocation.name == "of" ||
isScopeShould(invocation) ||
(invocation.name == "in" && invocation.target != null && invocation.target.isInstanceOf[MethodInvocation] && isScopeShould(invocation.target.asInstanceOf[MethodInvocation]))
=>
true
case _ =>
false
}
}
private def getNodeTestSelection(node: AstNode, prefix: String, constructorChildren: Array[AstNode]) = {
node match {
case ConstructorBlock(className, children) =>
val testNames = getTestNamesFromChildren(prefix, children)
Some(new Selection(className, if (prefix.length > 0) prefix else className, testNames))
case invocation @ MethodInvocation(className, target, parent, children, name, args) =>
if (name == "of") {
val nodeIdx = constructorChildren.indexOf(node)
if (nodeIdx >= 0) {
val startList = constructorChildren.drop(nodeIdx + 1)
val subList = startList.takeWhile(!isScope(_))
val testNames = getTestNamesFromChildren(prefix, subList)
Some(new Selection(className, prefix, testNames))
}
else
None
}
else if (name == "should") {
invocation.parent match {
case invocationParent @ MethodInvocation(className, target, parent, children, "in", args) =>
val testName = getTestName(prefix, invocationParent)
Some(new Selection(className, testName, Array(testName)))
case _ =>
None
}
}
else if (name == "in") {
val testName = getTestName(prefix, invocation)
Some(new Selection(className, testName, Array[String](testName)))
}
else
None
case _ => None
}
}
private def getTestNamesFromChildren(prefix: String, children: Array[AstNode]) = {
children
.filter(node => node.isInstanceOf[MethodInvocation] && isValidName(node.name, Set("in")))
.map { node =>
val invocation = node.asInstanceOf[MethodInvocation]
getTestName(prefix, invocation)
}
}
private def getTargetString(target: AstNode, prefix: String, postfix: String): String = {
if (target == null)
postfix
else {
target match {
case MethodInvocation(className, targetTarget, parent, children, "should", args) if (args.length > 0) =>
"should " + args(0).toString
case _ =>
target.toString
}
}
}
private def getTestName(prefix: String, invocation: MethodInvocation) = {
prefix + " " + getTargetString(invocation.target, prefix, "")
}
}*/
| |
package com.example.android.odometer;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import com.example.android.odometer.database.OdometerContract;
import com.example.android.odometer.database.OdometerDbHelper;
import com.github.mikephil.charting.data.BarEntry;
//import com.github.mikephil.charting.data.LineEntry;
import com.github.mikephil.charting.data.Entry;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
//import static android.R.id.input;
/**
* Created by ACornelius on 6/7/2017.
*/
/**
 * Holds odometer readings and lease details loaded from the app database, and
 * derives the chart series (bar/line entries) used elsewhere in the app.
 *
 * NOTE(review): this class extends AppCompatActivity but is also used like a
 * plain data object via setDBConnection()/refresh() — confirm the intended
 * lifecycle with the callers.
 */
public class Data extends AppCompatActivity {
    private OdometerDbHelper mDbHelper;
    private Cursor cursor;

    // Latest-reading summary, refreshed by updateReadings().
    public Integer lastOdometerValue = 0;
    public String lastOdometerDate = "";
    public Integer numberOfReadings = 0;
    public Integer lastRecordID = 0;

    // Lease parameters, refreshed by updateLeaseDetails().
    public Integer leaseStartMileage;
    public Date leaseStartDate;
    public Double leaseOverageCost;
    public Integer leaseDuration;  // lease length in months
    public Integer leaseMileage;   // total miles allowed over the whole lease
    public Double dailyDistance;   // allowed miles per day, derived from the two above

    // Chart series rebuilt by updateReadings().
    public ArrayList<Integer> readings;
    public ArrayList<BarEntry> entries;
    public ArrayList<Entry> lineDataEntries;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        System.out.println("__Debug: Data.onCreate");
        // updateLeaseDetails() guards against the DB helper not yet being injected.
        updateLeaseDetails(1);
    }

    @Override
    protected void onStart() {
        super.onStart();
        System.out.println("__Debug: Data.onStart");
    }

    /** Injects the database helper used by all queries below. */
    public void setDBConnection(OdometerDbHelper mDbHelper) {
        this.mDbHelper = mDbHelper;
    }

    /** Reloads both the odometer readings and the lease details for vehicle 1. */
    public void refresh() {
        System.out.println("......... Data.refresh()");
        updateReadings(1);
        refreshLeaseDetatils();
    }

    /** Reloads the lease details for vehicle 1.
     *  NOTE: the method name keeps its historical misspelling for caller compatibility. */
    public void refreshLeaseDetatils() {
        System.out.println("......... Data.refreshLeaseDetails()");
        updateLeaseDetails(1);
    }

    /**
     * Loads every odometer reading for the given vehicle, rebuilding the
     * readings list, both chart-entry lists, and the last-reading summary
     * fields. When no rows exist the summary fields are reset to defaults.
     */
    private void updateReadings(int vehicle_ID) {
        System.out.println("-------- Data.updateReadings...");
        if (mDbHelper == null) {
            // setDBConnection() has not been called yet; bail out instead of NPE-ing.
            System.out.println("-------- Data.updateReadings: no DB connection yet");
            return;
        }
        SQLiteDatabase db = mDbHelper.getReadableDatabase();
        readings = new ArrayList<>();
        entries = new ArrayList<>();
        lineDataEntries = new ArrayList<>();
        // Columns actually consumed below.
        String[] projection = { OdometerContract.OdometerEntry._ID,
                OdometerContract.OdometerEntry.COLUMN_ODOMETER,
                OdometerContract.OdometerEntry.COLUMN_DATETIME };
        cursor = db.query(
                OdometerContract.OdometerEntry.TABLE_NAME,            // table
                projection,                                           // columns
                OdometerContract.OdometerEntry.COLUMN_VEHICLE_ID + "=" + vehicle_ID, // WHERE
                null,
                null,                                                 // no GROUP BY
                null,                                                 // no HAVING
                OdometerContract.OdometerEntry._ID + " ASC");         // sort order
        try {
            if (cursor.getCount() > 0) {
                Long seconds = 0L;
                numberOfReadings = cursor.getCount();
                cursor.moveToFirst();
                for (int i = 0; i < cursor.getCount(); i++) {
                    readings.add(cursor.getInt(cursor.getColumnIndex(OdometerContract.OdometerEntry.COLUMN_ODOMETER)));
                    String dt = cursor.getString(cursor.getColumnIndex(OdometerContract.OdometerEntry.COLUMN_DATETIME));
                    // Two date formats exist in the table; try both. The epoch value is
                    // divided by 1,000,000 — presumably to fit the chart's float x-axis
                    // without precision loss; the name "seconds" is therefore only
                    // approximate. TODO confirm the intended scale.
                    try {
                        seconds = new SimpleDateFormat("MM/dd/yyyy").parse(dt).getTime() / 1000000;
                    } catch (ParseException e) {
                        try {
                            seconds = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse(dt).getTime() / 1000000;
                        } catch (ParseException e2) {
                            System.out.println("XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX " + dt);
                            // Advance past the unparseable row; the previous code
                            // continued without moving, re-reading the same bad row
                            // and silently dropping all rows after it.
                            cursor.moveToNext();
                            continue;
                        }
                    }
                    Long xVal = seconds;
                    Integer yVal = cursor.getInt(cursor.getColumnIndex(OdometerContract.OdometerEntry.COLUMN_ODOMETER));
                    System.out.println("entries = " + xVal + yVal + " " + dt);
                    entries.add(new BarEntry(xVal, yVal));
                    lineDataEntries.add(new Entry(xVal, yVal));
                    cursor.moveToNext();
                }
                cursor.moveToLast();
                lastOdometerValue = cursor.getInt(cursor.getColumnIndex(OdometerContract.OdometerEntry.COLUMN_ODOMETER));
                lastOdometerDate = cursor.getString(cursor.getColumnIndex(OdometerContract.OdometerEntry.COLUMN_DATETIME));
                lastRecordID = cursor.getInt(cursor.getColumnIndex(OdometerContract.OdometerEntry._ID));
            } else {
                // No readings yet: reset the summary and seed the chart with a zero.
                numberOfReadings = 0;
                lastOdometerValue = 0;
                lastOdometerDate = "";
                lastRecordID = 0;
                readings.add(0);
            }
        } finally {
            // Always close the cursor when done reading; this releases its resources.
            cursor.close();
        }
    }

    /**
     * Loads the single lease-details row for the given vehicle and derives
     * dailyDistance from it. Falls back to safe defaults when zero or multiple
     * rows are found, so callers never see null lease fields.
     */
    private void updateLeaseDetails(int vehicle_ID) {
        System.out.println("--------- Data.updateLeaseDetails...");
        if (mDbHelper == null) {
            // setDBConnection() has not been called yet; bail out instead of NPE-ing.
            System.out.println("--------- Data.updateLeaseDetails: no DB connection yet");
            return;
        }
        SQLiteDatabase db = mDbHelper.getReadableDatabase();
        // Columns actually consumed below.
        String[] projection = { OdometerContract.LeaseDetails.COLUMN_START_DATE,
                OdometerContract.LeaseDetails.COLUMN_START_MILEAGE,
                OdometerContract.LeaseDetails.COLUMN_DURATION,
                OdometerContract.LeaseDetails.COLUMN_MAX_MILES,
                OdometerContract.LeaseDetails.COLUMN_OVERAGE_COST };
        cursor = db.query(
                OdometerContract.LeaseDetails.TABLE_NAME,             // table
                projection,                                           // columns
                OdometerContract.LeaseDetails.COLUMN_VEHICLE_ID + "=" + vehicle_ID, // WHERE
                null,
                null,                                                 // no GROUP BY
                null,                                                 // no HAVING
                null);                                                // no sort
        try {
            cursor.moveToFirst();
            if (cursor.getCount() == 1) {
                leaseStartMileage = cursor.getInt(cursor.getColumnIndex(OdometerContract.LeaseDetails.COLUMN_START_MILEAGE));
                leaseDuration = cursor.getInt(cursor.getColumnIndex(OdometerContract.LeaseDetails.COLUMN_DURATION));
                leaseMileage = cursor.getInt(cursor.getColumnIndex(OdometerContract.LeaseDetails.COLUMN_MAX_MILES));
                leaseOverageCost = cursor.getDouble(cursor.getColumnIndex(OdometerContract.LeaseDetails.COLUMN_OVERAGE_COST));
                leaseStartDate = stringToDate(cursor.getString(cursor.getColumnIndex(OdometerContract.LeaseDetails.COLUMN_START_DATE)));
                // Debug: format the parsed start date (the previous code concatenated
                // the SimpleDateFormat object itself, printing the formatter's toString).
                System.out.println("Date = " + new SimpleDateFormat("MM/dd/yyyy").format(leaseStartDate));
                if (leaseDuration > 0) {
                    // Use floating-point math throughout: the previous integer division
                    // truncated, and (leaseDuration / 12) was zero — an
                    // ArithmeticException — for durations under 12 months.
                    dailyDistance = leaseMileage / (leaseDuration / 12.0) / 365.25;
                } else {
                    dailyDistance = 0.0;
                }
            } else {
                if (cursor.getCount() == 0) {
                    System.out.println("********** No data found in Data.updateLeaseDetails");
                }
                if (cursor.getCount() > 1) {
                    System.out.println("********** Multiple rows found in Data.updateLeaseDetails (only 1 expected)");
                }
                // Defaults used when the table is empty or inconsistent.
                leaseStartDate = stringToDate("2016-01-04 00:00:00");
                leaseStartMileage = 0;
                leaseDuration = 12;
                leaseMileage = 12345;
                leaseOverageCost = 0.0;
                dailyDistance = 0.0;
            }
        } finally {
            // Always close the cursor when done reading; this releases its resources.
            cursor.close();
        }
    }

    /**
     * Parses a "MM/dd/yyyy" date string; on failure logs the input and returns
     * Jan 3, 2016 as a sentinel default.
     * NOTE(review): other code in this class also stores "yyyy-MM-dd HH:mm:ss"
     * strings — those always fall into the fallback here; confirm the intended
     * column format with the writers.
     */
    public Date stringToDate(String strDate) {
        System.out.println("--------- Data.stringToDate");
        Date d;
        SimpleDateFormat sdf = new SimpleDateFormat("MM/dd/yyyy");
        try {
            d = sdf.parse(strDate);
        } catch (ParseException e) {
            System.out.println(" ******** Exception parsing this date: " + strDate);
            Calendar calendar = Calendar.getInstance();
            calendar.set(2016, 0, 3, 0, 0, 0); // Calendar months are 0-based: Jan 3, 2016
            d = calendar.getTime();
            e.printStackTrace();
        }
        return d;
    }

    /**
     * Parses a "MM/dd/yyyy HH:mm:ss" date-time string; on failure returns
     * Jan 2, 2016 as a sentinel default.
     */
    public Date stringToDateTime(String strDate) {
        Date d;
        SimpleDateFormat sdf = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss");
        try {
            d = sdf.parse(strDate);
        } catch (ParseException e) {
            Calendar calendar = Calendar.getInstance();
            calendar.set(2016, 0, 2, 0, 0, 0); // Calendar months are 0-based: Jan 2, 2016
            d = calendar.getTime();
            e.printStackTrace();
        }
        return d;
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.actions;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.process.ProcessOutput;
import com.intellij.execution.util.ExecUtil;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationType;
import com.intellij.notification.Notifications;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.AppUIUtil;
import com.intellij.util.PlatformUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.io.File;
import java.io.IOException;
import java.util.Locale;
import java.util.Map;
import static com.intellij.openapi.util.Pair.pair;
import static com.intellij.util.containers.ContainerUtil.newHashMap;
/**
 * "Create Desktop Entry" action: generates a freedesktop.org {@code .desktop}
 * launcher for the IDE from a bundled template and installs it via
 * {@code xdg-desktop-menu}, either for the current user or system-wide
 * (system-wide installation goes through sudo).
 * Available only on Unix systems where {@code xdg-open} is present.
 */
public class CreateDesktopEntryAction extends DumbAwareAction {
private static final Logger LOG = Logger.getInstance("#com.intellij.ide.actions.CreateDesktopEntryAction");
// Gates both the action's visibility and the actual entry creation.
public static boolean isAvailable() {
return SystemInfo.isUnix && SystemInfo.hasXdgOpen();
}
@Override
public void update(@NotNull AnActionEvent event) {
boolean enabled = isAvailable();
event.getPresentation().setEnabledAndVisible(enabled);
}
@Override
public void actionPerformed(@NotNull AnActionEvent event) {
if (!isAvailable()) return;
Project project = event.getProject();
// Ask the user whether the entry should be per-user or system-wide.
CreateDesktopEntryDialog dialog = new CreateDesktopEntryDialog(project);
if (!dialog.showAndGet()) {
return;
}
final boolean globalEntry = dialog.myGlobalEntryCheckBox.isSelected();
// The actual work (template rendering, xdg calls) runs off the EDT.
ProgressManager.getInstance().run(new Task.Backgroundable(project, ApplicationBundle.message("desktop.entry.title")) {
@Override
public void run(@NotNull ProgressIndicator indicator) {
createDesktopEntry(getProject(), indicator, globalEntry);
}
});
}
/**
 * Performs the three-step installation (check tooling, render template,
 * install via xdg), advancing {@code indicator} by a third per step and
 * reporting success or failure through the notification bus. The rendered
 * temp file is always deleted afterwards.
 */
public static void createDesktopEntry(@Nullable Project project, @NotNull ProgressIndicator indicator, boolean globalEntry) {
if (!isAvailable()) return;
double step = (1.0 - indicator.getFraction()) / 3.0;
File entry = null;
try {
indicator.setText(ApplicationBundle.message("desktop.entry.checking"));
check();
indicator.setFraction(indicator.getFraction() + step);
indicator.setText(ApplicationBundle.message("desktop.entry.preparing"));
entry = prepare();
indicator.setFraction(indicator.getFraction() + step);
indicator.setText(ApplicationBundle.message("desktop.entry.installing"));
install(entry, globalEntry);
indicator.setFraction(indicator.getFraction() + step);
// Application may be null when invoked from tests/headless setup code.
if (ApplicationManager.getApplication() != null) {
String message = ApplicationBundle.message("desktop.entry.success", ApplicationNamesInfo.getInstance().getProductName());
Notifications.Bus.notify(
new Notification(Notifications.SYSTEM_MESSAGES_GROUP_ID, "Desktop entry created", message, NotificationType.INFORMATION),
project);
}
}
catch (Exception e) {
// Without an application there is no notification bus; rethrow instead.
if (ApplicationManager.getApplication() == null) {
throw new RuntimeException(e);
}
LOG.warn(e);
String message = e.getMessage();
if (StringUtil.isEmptyOrSpaces(message)) message = "Internal error";
Notifications.Bus.notify(
new Notification(Notifications.SYSTEM_MESSAGES_GROUP_ID, "Failed to create desktop entry", message, NotificationType.ERROR),
project);
}
finally {
// The entry file is only a temp artifact; xdg copies it on install.
if (entry != null) {
FileUtil.delete(entry);
}
}
}
// Verifies that xdg-desktop-menu exists on the PATH before attempting anything.
private static void check() throws ExecutionException, InterruptedException {
int result = new GeneralCommandLine("which", "xdg-desktop-menu").createProcess().waitFor();
if (result != 0) throw new RuntimeException(ApplicationBundle.message("desktop.entry.xdg.missing"));
}
/**
 * Renders the bundled "entry.desktop" template into a temp file, substituting
 * product name, launcher script, icon and WM class. Throws when the icon or
 * launcher script cannot be located under the IDE's bin directory.
 */
private static File prepare() throws IOException {
String homePath = PathManager.getHomePath();
assert new File(homePath).isDirectory() : "Invalid home path: '" + homePath + "'";
String binPath = homePath + "/bin";
assert new File(binPath).isDirectory() : "Invalid bin path: '" + binPath + "'";
String name = ApplicationNamesInfo.getInstance().getFullProductName();
if (PlatformUtils.isIdeaCommunity()) name += " Community Edition";
String iconPath = AppUIUtil.findIcon(binPath);
if (iconPath == null) {
throw new RuntimeException(ApplicationBundle.message("desktop.entry.icon.missing", binPath));
}
String execPath = findScript(binPath);
if (execPath == null) {
throw new RuntimeException(ApplicationBundle.message("desktop.entry.script.missing", binPath));
}
String wmClass = AppUIUtil.getFrameClass();
Map<String, String> vars = newHashMap(pair("$NAME$", name), pair("$SCRIPT$", execPath), pair("$ICON$", iconPath), pair("$WM_CLASS$", wmClass));
String content = ExecUtil.loadTemplate(CreateDesktopEntryAction.class.getClassLoader(), "entry.desktop", vars);
File entryFile = new File(FileUtil.getTempDirectory(), wmClass + ".desktop");
FileUtil.writeToFile(entryFile, content);
return entryFile;
}
// Probes the candidate launcher-script names in order (ProductName.sh,
// productname.sh, scriptName.sh); returns the first executable one, or null.
@Nullable
private static String findScript(String binPath) {
ApplicationNamesInfo names = ApplicationNamesInfo.getInstance();
String execPath = binPath + '/' + names.getProductName() + ".sh";
if (new File(execPath).canExecute()) return execPath;
execPath = binPath + '/' + names.getProductName().toLowerCase(Locale.US) + ".sh";
if (new File(execPath).canExecute()) return execPath;
execPath = binPath + '/' + names.getScriptName() + ".sh";
if (new File(execPath).canExecute()) return execPath;
return null;
}
// Installs the entry and refreshes the menu database. System-wide mode runs
// both xdg commands under sudo with a localized prompt.
private static void install(File entryFile, boolean globalEntry) throws IOException, ExecutionException {
if (globalEntry) {
String prompt = ApplicationBundle.message("desktop.entry.sudo.prompt");
exec(new GeneralCommandLine("xdg-desktop-menu", "install", "--mode", "system", entryFile.getAbsolutePath()), prompt);
exec(new GeneralCommandLine("xdg-desktop-menu", "forceupdate", "--mode", "system"), prompt);
}
else {
exec(new GeneralCommandLine("xdg-desktop-menu", "install", "--mode", "user", entryFile.getAbsolutePath()), null);
exec(new GeneralCommandLine("xdg-desktop-menu", "forceupdate", "--mode", "user"), null);
}
}
/**
 * Runs {@code command} (under sudo when {@code prompt} is non-null) with
 * stderr merged into stdout, and raises a RuntimeException including the
 * captured output on any non-zero exit code.
 */
private static void exec(GeneralCommandLine command, @Nullable String prompt) throws IOException, ExecutionException {
command.setRedirectErrorStream(true);
ProcessOutput result = prompt != null ? ExecUtil.sudoAndGetOutput(command, prompt) : ExecUtil.execAndGetOutput(command);
int exitCode = result.getExitCode();
if (exitCode != 0) {
String message = "Command '" + (prompt != null ? "sudo " : "") + command.getCommandLineString() + "' returned " + exitCode;
String output = result.getStdout();
if (!StringUtil.isEmptyOrSpaces(output)) message += "\nOutput: " + output.trim();
throw new RuntimeException(message);
}
}
// Modal dialog asking whether to create the entry for all users; the form
// itself (myContentPane etc.) is bound from the accompanying .form file.
public static class CreateDesktopEntryDialog extends DialogWrapper {
private JPanel myContentPane;
private JLabel myLabel;
private JCheckBox myGlobalEntryCheckBox;
public CreateDesktopEntryDialog(final Project project) {
super(project);
init();
setTitle(ApplicationBundle.message("desktop.entry.title"));
myLabel.setText(myLabel.getText().replace("$APP_NAME$", ApplicationNamesInfo.getInstance().getProductName()));
}
@Override
protected JComponent createCenterPanel() {
return myContentPane;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.service.reads;
import java.util.concurrent.TimeUnit;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.dht.Murmur3Partitioner;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.locator.EndpointsForRange;
import org.apache.cassandra.locator.ReplicaPlan;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.Util;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.ConsistencyLevel;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.db.SinglePartitionReadCommand;
import org.apache.cassandra.exceptions.ReadFailureException;
import org.apache.cassandra.exceptions.ReadTimeoutException;
import org.apache.cassandra.exceptions.RequestFailureReason;
import org.apache.cassandra.locator.EndpointsForToken;
import org.apache.cassandra.locator.InetAddressAndPort;
import org.apache.cassandra.locator.ReplicaLayout;
import org.apache.cassandra.locator.ReplicaUtils;
import org.apache.cassandra.net.MessageOut;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.schema.KeyspaceParams;
import static org.apache.cassandra.locator.ReplicaUtils.full;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class ReadExecutorTest
{
static Keyspace ks;
static ColumnFamilyStore cfs;
static EndpointsForToken targets;
static Token dummy;
@BeforeClass
public static void setUpClass() throws Throwable
{
SchemaLoader.loadSchema();
SchemaLoader.createKeyspace("Foo", KeyspaceParams.simple(3), SchemaLoader.standardCFMD("Foo", "Bar"));
ks = Keyspace.open("Foo");
cfs = ks.getColumnFamilyStore("Bar");
dummy = Murmur3Partitioner.instance.getMinimumToken();
targets = EndpointsForToken.of(dummy,
full(InetAddressAndPort.getByName("127.0.0.255")),
full(InetAddressAndPort.getByName("127.0.0.254")),
full(InetAddressAndPort.getByName("127.0.0.253"))
);
cfs.sampleReadLatencyNanos = 0;
}
@Before
public void resetCounters() throws Throwable
{
cfs.metric.speculativeInsufficientReplicas.dec(cfs.metric.speculativeInsufficientReplicas.getCount());
cfs.metric.speculativeRetries.dec(cfs.metric.speculativeRetries.getCount());
cfs.metric.speculativeFailedRetries.dec(cfs.metric.speculativeFailedRetries.getCount());
}
/**
* If speculation would have been beneficial but could not be attempted due to lack of replicas
* count that it occured
*/
@Test
public void testUnableToSpeculate() throws Throwable
{
assertEquals(0, cfs.metric.speculativeInsufficientReplicas.getCount());
assertEquals(0, ks.metric.speculativeInsufficientReplicas.getCount());
AbstractReadExecutor executor = new AbstractReadExecutor.NeverSpeculatingReadExecutor(cfs, new MockSinglePartitionReadCommand(), plan(targets, ConsistencyLevel.LOCAL_QUORUM), System.nanoTime(), true);
executor.maybeTryAdditionalReplicas();
try
{
executor.awaitResponses();
fail();
}
catch (ReadTimeoutException e)
{
//expected
}
assertEquals(1, cfs.metric.speculativeInsufficientReplicas.getCount());
assertEquals(1, ks.metric.speculativeInsufficientReplicas.getCount());
//Shouldn't increment
executor = new AbstractReadExecutor.NeverSpeculatingReadExecutor(cfs, new MockSinglePartitionReadCommand(), plan(targets, ConsistencyLevel.LOCAL_QUORUM), System.nanoTime(), false);
executor.maybeTryAdditionalReplicas();
try
{
executor.awaitResponses();
fail();
}
catch (ReadTimeoutException e)
{
//expected
}
assertEquals(1, cfs.metric.speculativeInsufficientReplicas.getCount());
assertEquals(1, ks.metric.speculativeInsufficientReplicas.getCount());
}
/**
* Test that speculation when it is attempted is countedc, and when it succeed
* no failure is counted.
*/
@Test
public void testSpeculateSucceeded() throws Throwable
{
assertEquals(0, cfs.metric.speculativeRetries.getCount());
assertEquals(0, cfs.metric.speculativeFailedRetries.getCount());
assertEquals(0, ks.metric.speculativeRetries.getCount());
assertEquals(0, ks.metric.speculativeFailedRetries.getCount());
AbstractReadExecutor executor = new AbstractReadExecutor.SpeculatingReadExecutor(cfs, new MockSinglePartitionReadCommand(TimeUnit.DAYS.toMillis(365)), plan(ConsistencyLevel.LOCAL_QUORUM, targets, targets.subList(0, 2)), System.nanoTime());
executor.maybeTryAdditionalReplicas();
new Thread()
{
@Override
public void run()
{
//Failures end the read promptly but don't require mock data to be suppleid
executor.handler.onFailure(targets.get(0).endpoint(), RequestFailureReason.READ_TOO_MANY_TOMBSTONES);
executor.handler.onFailure(targets.get(1).endpoint(), RequestFailureReason.READ_TOO_MANY_TOMBSTONES);
executor.handler.condition.signalAll();
}
}.start();
try
{
executor.awaitResponses();
fail();
}
catch (ReadFailureException e)
{
//expected
}
assertEquals(1, cfs.metric.speculativeRetries.getCount());
assertEquals(0, cfs.metric.speculativeFailedRetries.getCount());
assertEquals(1, ks.metric.speculativeRetries.getCount());
assertEquals(0, ks.metric.speculativeFailedRetries.getCount());
}
/**
* Test that speculation failure statistics are incremented if speculation occurs
* and the read still times out.
*/
@Test
public void testSpeculateFailed() throws Throwable
{
assertEquals(0, cfs.metric.speculativeRetries.getCount());
assertEquals(0, cfs.metric.speculativeFailedRetries.getCount());
assertEquals(0, ks.metric.speculativeRetries.getCount());
assertEquals(0, ks.metric.speculativeFailedRetries.getCount());
AbstractReadExecutor executor = new AbstractReadExecutor.SpeculatingReadExecutor(cfs, new MockSinglePartitionReadCommand(), plan(ConsistencyLevel.LOCAL_QUORUM, targets, targets.subList(0, 2)), System.nanoTime());
executor.maybeTryAdditionalReplicas();
try
{
executor.awaitResponses();
fail();
}
catch (ReadTimeoutException e)
{
//expected
}
assertEquals(1, cfs.metric.speculativeRetries.getCount());
assertEquals(1, cfs.metric.speculativeFailedRetries.getCount());
assertEquals(1, ks.metric.speculativeRetries.getCount());
assertEquals(1, ks.metric.speculativeFailedRetries.getCount());
}
public static class MockSinglePartitionReadCommand extends SinglePartitionReadCommand
{
private final long timeout;
MockSinglePartitionReadCommand()
{
this(0);
}
MockSinglePartitionReadCommand(long timeout)
{
super(false, 0, false, cfs.metadata(), 0, null, null, null, Util.dk("ry@n_luvs_teh_y@nk33z"), null, null);
this.timeout = timeout;
}
@Override
public long getTimeout()
{
return timeout;
}
@Override
public MessageOut createMessage()
{
return new MessageOut(MessagingService.Verb.BATCH_REMOVE)
{
@Override
public int serializedSize(int version)
{
return 0;
}
};
}
}
private ReplicaPlan.ForTokenRead plan(EndpointsForToken targets, ConsistencyLevel consistencyLevel)
{
return plan(consistencyLevel, targets, targets);
}
/**
 * Builds a token-read replica plan for the test keyspace with an explicit
 * natural replica set and the subset that will actually be contacted.
 */
private ReplicaPlan.ForTokenRead plan(ConsistencyLevel consistencyLevel, EndpointsForToken natural, EndpointsForToken selected)
{
    return new ReplicaPlan.ForTokenRead(ks, consistencyLevel, natural, selected);
}
}
| |
package com.microsoft.bingads.v12.bulk.entities;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import com.microsoft.bingads.internal.functionalinterfaces.BiConsumer;
import com.microsoft.bingads.internal.functionalinterfaces.Function;
import com.microsoft.bingads.v12.campaignmanagement.CampaignCriterionStatus;
import com.microsoft.bingads.v12.campaignmanagement.AudienceCriterion;
import com.microsoft.bingads.v12.campaignmanagement.NegativeCampaignCriterion;
import com.microsoft.bingads.v12.internal.bulk.BulkMapping;
import com.microsoft.bingads.v12.internal.bulk.MappingHelpers;
import com.microsoft.bingads.v12.internal.bulk.RowValues;
import com.microsoft.bingads.v12.internal.bulk.SimpleBulkMapping;
import com.microsoft.bingads.v12.internal.bulk.StringExtensions;
import com.microsoft.bingads.v12.internal.bulk.StringTable;
import com.microsoft.bingads.v12.internal.bulk.entities.SingleRecordBulkEntity;
/**
 * Base class for all Campaign Audience Association subclasses that can be read or written in a bulk file.
 *
 * @see BulkCampaignNegativeCustomAudienceAssociation
 * @see BulkCampaignNegativeInMarketAudienceAssociation
 * @see BulkCampaignNegativeProductAudienceAssociation
 * @see BulkCampaignNegativeRemarketingListAssociation
 * @see BulkCampaignNegativeSimilarRemarketingListAssociation
 */
public class BulkCampaignNegativeAudienceAssociation extends SingleRecordBulkEntity {

    // The underlying campaign-management criterion this bulk row represents.
    private NegativeCampaignCriterion negativeCampaignCriterion;

    // Read-only display name of the parent campaign ('Campaign' column).
    private String campaignName;

    // Read-only display name of the audience ('Audience' column).
    private String audienceName;

    // Optional historical performance columns attached to the row.
    private PerformanceData performanceData;

    // Bidirectional column mappings (entity field <-> bulk-file column),
    // shared by all instances and frozen after static initialization.
    private static final List<BulkMapping<BulkCampaignNegativeAudienceAssociation>> MAPPINGS;

    static {
        List<BulkMapping<BulkCampaignNegativeAudienceAssociation>> m = new ArrayList<BulkMapping<BulkCampaignNegativeAudienceAssociation>>();

        // 'Status' column <-> criterion status enum (null-safe in both directions).
        m.add(new SimpleBulkMapping<BulkCampaignNegativeAudienceAssociation, String>(StringTable.Status,
                new Function<BulkCampaignNegativeAudienceAssociation, String>() {
                    @Override
                    public String apply(BulkCampaignNegativeAudienceAssociation c) {
                        CampaignCriterionStatus status = c.getNegativeCampaignCriterion().getStatus();
                        return status == null ? null : status.value();
                    }
                },
                new BiConsumer<String, BulkCampaignNegativeAudienceAssociation>() {
                    @Override
                    public void accept(String v, BulkCampaignNegativeAudienceAssociation c) {
                        c.getNegativeCampaignCriterion().setStatus(StringExtensions.parseOptional(v, new Function<String, CampaignCriterionStatus>() {
                            @Override
                            public CampaignCriterionStatus apply(String s) {
                                return CampaignCriterionStatus.fromValue(s);
                            }
                        }));
                    }
                }
        ));

        // 'Id' column <-> criterion id.
        m.add(new SimpleBulkMapping<BulkCampaignNegativeAudienceAssociation, Long>(StringTable.Id,
                new Function<BulkCampaignNegativeAudienceAssociation, Long>() {
                    @Override
                    public Long apply(BulkCampaignNegativeAudienceAssociation c) {
                        return c.getNegativeCampaignCriterion().getId();
                    }
                },
                new BiConsumer<String, BulkCampaignNegativeAudienceAssociation>() {
                    @Override
                    public void accept(String v, BulkCampaignNegativeAudienceAssociation c) {
                        c.getNegativeCampaignCriterion().setId(StringExtensions.<Long>parseOptional(v, new Function<String, Long>() {
                            @Override
                            public Long apply(String value) {
                                return Long.parseLong(value);
                            }
                        }));
                    }
                }
        ));

        // 'Parent Id' column <-> owning campaign id.
        m.add(new SimpleBulkMapping<BulkCampaignNegativeAudienceAssociation, Long>(StringTable.ParentId,
                new Function<BulkCampaignNegativeAudienceAssociation, Long>() {
                    @Override
                    public Long apply(BulkCampaignNegativeAudienceAssociation c) {
                        return c.getNegativeCampaignCriterion().getCampaignId();
                    }
                },
                new BiConsumer<String, BulkCampaignNegativeAudienceAssociation>() {
                    @Override
                    public void accept(String v, BulkCampaignNegativeAudienceAssociation c) {
                        c.getNegativeCampaignCriterion().setCampaignId(StringExtensions.<Long>parseOptional(v, new Function<String, Long>() {
                            @Override
                            public Long apply(String value) {
                                return Long.parseLong(value);
                            }
                        }));
                    }
                }
        ));

        // 'Campaign' column <-> campaign display name.
        m.add(new SimpleBulkMapping<BulkCampaignNegativeAudienceAssociation, String>(StringTable.Campaign,
                new Function<BulkCampaignNegativeAudienceAssociation, String>() {
                    @Override
                    public String apply(BulkCampaignNegativeAudienceAssociation c) {
                        return c.getCampaignName();
                    }
                },
                new BiConsumer<String, BulkCampaignNegativeAudienceAssociation>() {
                    @Override
                    public void accept(String v, BulkCampaignNegativeAudienceAssociation c) {
                        c.setCampaignName(v);
                    }
                }
        ));

        // 'Audience' column <-> audience display name.
        m.add(new SimpleBulkMapping<BulkCampaignNegativeAudienceAssociation, String>(StringTable.Audience,
                new Function<BulkCampaignNegativeAudienceAssociation, String>() {
                    @Override
                    public String apply(BulkCampaignNegativeAudienceAssociation c) {
                        return c.getAudienceName();
                    }
                },
                new BiConsumer<String, BulkCampaignNegativeAudienceAssociation>() {
                    @Override
                    public void accept(String v, BulkCampaignNegativeAudienceAssociation c) {
                        c.setAudienceName(v);
                    }
                }
        ));

        // 'Audience Id' column <-> AudienceCriterion.audienceId.
        // NOTE(review): the instanceof check below tests the criterion HOLDER
        // (already declared as NegativeCampaignCriterion), so it effectively acts
        // only as a null guard before the cast of getCriterion() — confirm intent.
        m.add(new SimpleBulkMapping<BulkCampaignNegativeAudienceAssociation, Long>(StringTable.AudienceId,
                new Function<BulkCampaignNegativeAudienceAssociation, Long>() {
                    @Override
                    public Long apply(BulkCampaignNegativeAudienceAssociation c) {
                        if (c.getNegativeCampaignCriterion() instanceof NegativeCampaignCriterion) {
                            AudienceCriterion audienceCriterion = (AudienceCriterion) c.getNegativeCampaignCriterion().getCriterion();
                            if (audienceCriterion == null) {
                                return null;
                            } else {
                                return audienceCriterion.getAudienceId();
                            }
                        } else {
                            return null;
                        }
                    }
                },
                new BiConsumer<String, BulkCampaignNegativeAudienceAssociation>() {
                    @Override
                    public void accept(String v, BulkCampaignNegativeAudienceAssociation c) {
                        AudienceCriterion audienceCriterion = (AudienceCriterion) c.getNegativeCampaignCriterion().getCriterion();
                        if (audienceCriterion == null) {
                            return;
                        }
                        audienceCriterion.setAudienceId(StringExtensions.<Long>parseOptional(v, new Function<String, Long>() {
                            @Override
                            public Long apply(String value) {
                                return Long.parseLong(value);
                            }
                        }));
                    }
                }
        ));

        MAPPINGS = Collections.unmodifiableList(m);
    }

    /**
     * Populates this entity from a parsed bulk-file row: builds a fresh
     * NegativeCampaignCriterion wrapping an AudienceCriterion, then applies
     * the column mappings and reads any performance-data columns.
     */
    @Override
    public void processMappingsFromRowValues(RowValues values) {
        NegativeCampaignCriterion campaignCriterion = new NegativeCampaignCriterion();
        campaignCriterion.setType(NegativeCampaignCriterion.class.getSimpleName());
        AudienceCriterion audienceCriterion = new AudienceCriterion();
        audienceCriterion.setType(AudienceCriterion.class.getSimpleName());
        campaignCriterion.setCriterion(audienceCriterion);
        this.setNegativeCampaignCriterion(campaignCriterion);
        MappingHelpers.convertToEntity(values, MAPPINGS, this);
        performanceData = PerformanceData.readFromRowValuesOrNull(values);
    }

    /**
     * Writes this entity to a bulk-file row. Requires the criterion to be set;
     * performance data is only emitted when read-only columns are included.
     */
    @Override
    public void processMappingsToRowValues(RowValues values, boolean excludeReadonlyData) {
        validatePropertyNotNull(getNegativeCampaignCriterion(), BulkCampaignNegativeAudienceAssociation.class.getSimpleName());
        MappingHelpers.convertToValues(this, values, MAPPINGS);
        if (!excludeReadonlyData) {
            PerformanceData.writeToRowValuesIfNotNull(performanceData, values);
        }
    }

    /**
     * Gets the negative campaign criterion backing this association.
     */
    public NegativeCampaignCriterion getNegativeCampaignCriterion() {
        return this.negativeCampaignCriterion;
    }

    /**
     * Sets the negative campaign criterion backing this association.
     */
    public void setNegativeCampaignCriterion(NegativeCampaignCriterion campaignCriterion) {
        this.negativeCampaignCriterion = campaignCriterion;
    }

    /**
     * Gets the name of the campaign.
     * Corresponds to the 'Campaign' field in the bulk file.
     */
    public String getCampaignName() {
        return this.campaignName;
    }

    /**
     * Sets the name of the campaign.
     * Corresponds to the 'Campaign' field in the bulk file.
     */
    public void setCampaignName(String campaignName) {
        this.campaignName = campaignName;
    }

    /**
     * Gets the name of the audience.
     * Corresponds to the 'Audience' field in the bulk file.
     */
    public String getAudienceName() {
        return this.audienceName;
    }

    /**
     * Sets the name of the audience.
     * Corresponds to the 'Audience' field in the bulk file.
     */
    public void setAudienceName(String audienceName) {
        this.audienceName = audienceName;
    }
}
| |
/*
* Copyright 2017 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.client.retry;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import com.linecorp.armeria.client.ClientFactory;
import com.linecorp.armeria.client.Endpoint;
import com.linecorp.armeria.client.WebClient;
import com.linecorp.armeria.client.endpoint.EndpointGroup;
import com.linecorp.armeria.client.metric.MetricCollectingClient;
import com.linecorp.armeria.common.HttpHeaderNames;
import com.linecorp.armeria.common.HttpRequest;
import com.linecorp.armeria.common.HttpResponse;
import com.linecorp.armeria.common.HttpStatus;
import com.linecorp.armeria.common.MediaType;
import com.linecorp.armeria.common.SessionProtocol;
import com.linecorp.armeria.common.metric.MeterIdPrefixFunction;
import com.linecorp.armeria.common.metric.MoreMeters;
import com.linecorp.armeria.server.AbstractHttpService;
import com.linecorp.armeria.server.ServerBuilder;
import com.linecorp.armeria.server.ServiceRequestContext;
import com.linecorp.armeria.testing.junit5.server.ServerExtension;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
class RetryingClientWithMetricsTest {
private static final MeterIdPrefixFunction meterIdPrefixFunction = MeterIdPrefixFunction.ofDefault("foo");
@RegisterExtension
final ServerExtension server = new ServerExtension() {
@Override
protected boolean runForEachTest() {
return true;
}
@Override
protected void configure(ServerBuilder sb) throws Exception {
sb.service("/ok", (ctx, req) -> HttpResponse.of(200));
sb.service("/hello", new AbstractHttpService() {
final AtomicInteger reqCount = new AtomicInteger();
@Override
protected HttpResponse doGet(ServiceRequestContext ctx, HttpRequest req)
throws Exception {
ctx.mutateAdditionalResponseTrailers(
mutator -> mutator.add(HttpHeaderNames.of("foo"), "bar"));
if (reqCount.getAndIncrement() < 2) {
return HttpResponse.of(HttpStatus.INTERNAL_SERVER_ERROR);
} else {
return HttpResponse.of(HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, "hello");
}
}
});
}
};
private ClientFactory clientFactory;
private MeterRegistry meterRegistry;
@BeforeEach
public void init() {
meterRegistry = new SimpleMeterRegistry();
clientFactory = ClientFactory.builder()
.meterRegistry(meterRegistry)
.build();
}
@AfterEach
public void destroy() {
if (clientFactory != null) {
clientFactory.closeAsync();
}
}
// WebClient -> RetryingClient -> MetricCollectingClient -> HttpClientDelegate
// In this case, all of the requests and responses are recorded.
@Test
void retryingThenMetricCollecting() throws Exception {
final RetryRuleWithContent<HttpResponse> rule =
(ctx, response, cause) -> response.aggregate().handle((msg, unused) -> {
if ("hello".equals(msg.contentUtf8())) {
return RetryDecision.noRetry();
}
return RetryDecision.retry(Backoff.ofDefault());
});
final WebClient client = WebClient.builder(server.httpUri())
.factory(clientFactory)
.decorator(MetricCollectingClient.newDecorator(meterIdPrefixFunction))
.decorator(RetryingClient.newDecorator(rule))
.build();
assertThat(client.get("/hello").aggregate().join().contentUtf8()).isEqualTo("hello");
// wait until 3 calls are recorded.
await().untilAsserted(() -> {
assertThat(MoreMeters.measureAll(meterRegistry))
.containsEntry("foo.requests#count{http.status=200,method=GET,result=success,service=none}",
1.0)
.containsEntry("foo.requests#count{http.status=200,method=GET,result=failure,service=none}",
0.0)
.containsEntry("foo.requests#count{http.status=500,method=GET,result=success,service=none}",
0.0)
.containsEntry("foo.requests#count{http.status=500,method=GET,result=failure,service=none}",
2.0);
});
}
@Test
void retryingThenMetricCollectingWithConnectionRefused() throws Exception {
// The first request will fail with an UnprocessedException and
// the second request will succeed with 200.
final EndpointGroup group = EndpointGroup.of(Endpoint.of("127.0.0.1", 1),
server.httpEndpoint());
final WebClient client =
WebClient.builder(SessionProtocol.HTTP, group)
.factory(clientFactory)
.decorator(MetricCollectingClient.newDecorator(meterIdPrefixFunction))
.decorator(RetryingClient.newDecorator(RetryRule.onUnprocessed()))
.build();
assertThat(client.get("/ok").aggregate().join().status()).isEqualTo(HttpStatus.OK);
// wait until 2 calls are recorded.
await().untilAsserted(() -> {
assertThat(MoreMeters.measureAll(meterRegistry))
.containsEntry("foo.requests#count{http.status=200,method=GET,result=success,service=none}",
1.0)
.containsEntry("foo.requests#count{http.status=200,method=GET,result=failure,service=none}",
0.0)
.containsEntry("foo.requests#count{http.status=0,method=GET,result=success,service=none}",
0.0)
.containsEntry("foo.requests#count{http.status=0,method=GET,result=failure,service=none}",
1.0);
});
}
// WebClient -> MetricCollectingClient -> RetryingClient -> HttpClientDelegate
// In this case, only the first request and the last response are recorded.
@Test
public void metricCollectingThenRetrying() throws Exception {
final WebClient client =
WebClient.builder(server.httpUri())
.factory(clientFactory)
.decorator(RetryingClient.newDecorator(
RetryRule.builder().onServerErrorStatus().onException().thenBackoff()))
.decorator(MetricCollectingClient.newDecorator(meterIdPrefixFunction))
.build();
assertThat(client.get("/hello").aggregate().join().contentUtf8()).isEqualTo("hello");
// wait until 1 call is recorded.
await().untilAsserted(() -> {
assertThat(MoreMeters.measureAll(meterRegistry))
.containsEntry("foo.requests#count{http.status=200,method=GET,result=success,service=none}",
1.0)
.containsEntry("foo.requests#count{http.status=200,method=GET,result=failure,service=none}",
0.0);
});
}
@Test
public void metricCollectingThenRetryingWithConnectionRefused() throws Exception {
// The first request will fail with an UnprocessedException and
// the second request will succeed with 200.
final EndpointGroup group = EndpointGroup.of(Endpoint.of("127.0.0.1", 1),
server.httpEndpoint());
final WebClient client =
WebClient.builder(SessionProtocol.HTTP, group)
.factory(clientFactory)
.decorator(RetryingClient.newDecorator(RetryRule.onUnprocessed()))
.decorator(MetricCollectingClient.newDecorator(MeterIdPrefixFunction.ofDefault("foo")))
.build();
assertThat(client.get("/ok").aggregate().join().status()).isEqualTo(HttpStatus.OK);
// wait until 1 call is recorded.
await().untilAsserted(() -> {
assertThat(MoreMeters.measureAll(meterRegistry))
.containsEntry("foo.requests#count{http.status=200,method=GET,result=success,service=none}",
1.0)
.containsEntry("foo.requests#count{http.status=200,method=GET,result=failure,service=none}",
0.0);
});
}
}
| |
package com.neo4j.docker.neo4jserver;
import com.github.dockerjava.api.command.CreateContainerCmd;
import com.github.dockerjava.api.model.Bind;
import com.neo4j.docker.utils.DatabaseIO;
import com.neo4j.docker.utils.HostFileSystemOperations;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.ContainerLaunchException;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.output.OutputFrame;
import org.testcontainers.containers.output.Slf4jLogConsumer;
import com.neo4j.docker.utils.SetContainerUser;
import com.neo4j.docker.utils.Neo4jVersion;
import com.neo4j.docker.utils.TestSettings;
import org.testcontainers.containers.startupcheck.OneShotStartupCheckStrategy;
import org.testcontainers.containers.wait.strategy.Wait;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.nio.file.attribute.UserPrincipal;
import java.time.Duration;
import java.util.Random;
import java.util.function.Consumer;
import java.util.stream.Stream;
/**
 * Integration tests for mounting host folders and named volumes into the Neo4j
 * docker image, covering combinations of running as root vs. the current user
 * and the SECURE_FILE_PERMISSIONS flag.
 */
public class TestMounting
{
    private static Logger log = LoggerFactory.getLogger( TestMounting.class );

    /**
     * Parameter source: (asCurrentUser, isSecurityFlagSet) combinations.
     */
    static Stream<Arguments> defaultUserFlagSecurePermissionsFlag()
    {
        // "asUser={0}, secureFlag={1}"
        // expected behaviour is that if you set --user flag, your data should be read/writable
        // if you don't set --user flag then read/writability should be controlled by the secure file permissions flag
        // the asCurrentUser=false, secureflag=true combination is tested separately because the container should fail to start.
        return Stream.of(
                Arguments.arguments( false, false ),
                Arguments.arguments( true, false ),
                Arguments.arguments( true, true ));
    }

    /**
     * Creates a Neo4j container with auth disabled and the license accepted,
     * optionally running as the current (non-root) user and/or with
     * SECURE_FILE_PERMISSIONS enabled.
     */
    private GenericContainer setupBasicContainer( boolean asCurrentUser, boolean isSecurityFlagSet )
    {
        log.info( "Running as user {}, {}",
                  asCurrentUser?"non-root":"root",
                  isSecurityFlagSet?"with secure file permissions":"with unsecured file permissions" );
        GenericContainer container = new GenericContainer( TestSettings.IMAGE_ID );
        container.withExposedPorts( 7474, 7687 )
                 .withLogConsumer( new Slf4jLogConsumer( log ) )
                 .withEnv( "NEO4J_ACCEPT_LICENSE_AGREEMENT", "yes" )
                 .withEnv( "NEO4J_AUTH", "none" );
        if(asCurrentUser)
        {
            SetContainerUser.nonRootUser( container );
        }
        if(isSecurityFlagSet)
        {
            container.withEnv( "SECURE_FILE_PERMISSIONS", "yes" );
        }
        return container;
    }

    /**
     * Asserts the folder exists on the host and, when expected, that it is
     * readable and writable by the test process.
     */
    private void verifySingleFolder( Path folderToCheck, boolean shouldBeWritable )
    {
        String folderForDiagnostics = folderToCheck.toAbsolutePath().toString();
        Assertions.assertTrue( folderToCheck.toFile().exists(), "did not create " + folderForDiagnostics + " folder on host" );
        if( shouldBeWritable )
        {
            Assertions.assertTrue( folderToCheck.toFile().canRead(), "cannot read host "+folderForDiagnostics+" folder" );
            Assertions.assertTrue(folderToCheck.toFile().canWrite(), "cannot write to host "+folderForDiagnostics+" folder" );
        }
    }

    /**
     * Checks that Neo4j populated the mounted /data folder on the host.
     * The "transactions" subfolder only exists from Neo4j 4.0 onwards.
     */
    private void verifyDataFolderContentsArePresentOnHost( Path dataMount, boolean shouldBeWritable )
    {
        //verifySingleFolder( dataMount.resolve( "dbms" ), shouldBeWritable );
        verifySingleFolder( dataMount.resolve( "databases" ), shouldBeWritable );
        if(TestSettings.NEO4J_VERSION.isAtLeastVersion( Neo4jVersion.NEO4J_VERSION_400 ))
        {
            verifySingleFolder( dataMount.resolve( "transactions" ), shouldBeWritable );
        }
    }

    /**
     * Checks that Neo4j wrote debug.log into the mounted /logs folder and that
     * its writability matches expectations.
     */
    private void verifyLogsFolderContentsArePresentOnHost( Path logsMount, boolean shouldBeWritable )
    {
        verifySingleFolder( logsMount, shouldBeWritable );
        Assertions.assertTrue( logsMount.resolve( "debug.log" ).toFile().exists(),
                               "Neo4j did not write a debug.log file to "+logsMount.toString() );
        Assertions.assertEquals( shouldBeWritable,
                                 logsMount.resolve( "debug.log" ).toFile().canWrite(),
                                 String.format( "The debug.log file should %sbe writable", shouldBeWritable ? "" : "not ") );
    }

    /**
     * Running with the "dump-config" command should write neo4j.conf into the
     * mounted /conf folder, owned by the invoking user when not running as root.
     */
    @ParameterizedTest(name = "as current user={0}")
    @ValueSource(booleans = {true, false})
    void canDumpConfig(boolean asCurrentUser) throws Exception
    {
        File confFile;
        Path confMount;
        String assertMsg;
        String mountPrefix;
        if(asCurrentUser)
        {
            assertMsg = "Conf file was not successfully dumped when running container as current user";
            mountPrefix = "candumpconf-user-";
        }
        else
        {
            assertMsg = "Conf file was not successfully dumped when running container as root";
            mountPrefix = "candumpconf-root-";
        }
        try(GenericContainer container = setupBasicContainer(asCurrentUser, false))
        {
            //Mount /conf
            confMount = HostFileSystemOperations.createTempFolderAndMountAsVolume(
                    container, mountPrefix,"/conf" );
            confFile = confMount.resolve( "neo4j.conf" ).toFile();
            //Start the container
            container.setWaitStrategy(
                    Wait.forLogMessage( ".*Config Dumped.*", 1 )
                        .withStartupTimeout( Duration.ofSeconds( 30 ) ) );
            container.setStartupCheckStrategy( new OneShotStartupCheckStrategy() );
            container.setCommand( "dump-config" );
            container.start();
        }
        // verify conf file was written
        Assertions.assertTrue( confFile.exists(), assertMsg );
        // verify conf folder does not have new owner if not running as root
        if(asCurrentUser)
        {
            int fileUID = (Integer) Files.getAttribute( confFile.toPath(), "unix:uid" );
            int expectedUID = Integer.parseInt( SetContainerUser.getNonRootUserString().split( ":" )[0] );
            Assertions.assertEquals( expectedUID, fileUID, "Owner of dumped conf file is not the currently running user" );
        }
    }

    /**
     * "dump-config" without a /conf mount should fail to launch and print a
     * helpful error message on stderr.
     */
    @Test
    void canDumpConfig_errorsWithoutConfMount() throws Exception
    {
        try(GenericContainer container = setupBasicContainer( false, false ))
        {
            container.setWaitStrategy(
                    Wait.forLogMessage( ".*Config Dumped.*", 1 )
                        .withStartupTimeout( Duration.ofSeconds( 30 ) ) );
            container.setStartupCheckStrategy( new OneShotStartupCheckStrategy() );
            container.setCommand( "dump-config" );
            Assertions.assertThrows( ContainerLaunchException.class,
                                     ()->container.start(),
                                     "Did not error when dump config requested without mounted /conf folder");
            String stderr = container.getLogs( OutputFrame.OutputType.STDERR);
            Assertions.assertTrue( stderr.endsWith( "You must mount a folder to /conf so that the configuration file(s) can be dumped to there.\n" ) );
        }
    }

    /**
     * Mounting only /data should leave a readable (and, as current user,
     * writable) populated data folder on the host.
     */
    @ParameterizedTest(name = "asUser={0}, secureFlag={1}")
    @MethodSource( "defaultUserFlagSecurePermissionsFlag" )
    void testCanMountJustDataFolder(boolean asCurrentUser, boolean isSecurityFlagSet) throws IOException
    {
        Assumptions.assumeTrue(TestSettings.NEO4J_VERSION.isAtLeastVersion( new Neo4jVersion( 3,1,0 ) ),
                               "User checks not valid before 3.1" );
        try(GenericContainer container = setupBasicContainer( asCurrentUser, isSecurityFlagSet ))
        {
            Path dataMount = HostFileSystemOperations.createTempFolderAndMountAsVolume(
                    container,
                    "canmountjustdata-",
                    "/data" );
            container.start();
            // neo4j should now have started, so there'll be stuff in the data folder
            // we need to check that stuff is readable and owned by the correct user
            verifyDataFolderContentsArePresentOnHost( dataMount, asCurrentUser );
        }
    }

    /**
     * Mounting only /logs should produce a debug.log on the host with the
     * expected permissions.
     */
    @ParameterizedTest(name = "asUser={0}, secureFlag={1}")
    @MethodSource( "defaultUserFlagSecurePermissionsFlag" )
    void testCanMountJustLogsFolder(boolean asCurrentUser, boolean isSecurityFlagSet) throws IOException
    {
        Assumptions.assumeTrue(TestSettings.NEO4J_VERSION.isAtLeastVersion( new Neo4jVersion( 3,1,0 ) ),
                               "User checks not valid before 3.1" );
        try(GenericContainer container = setupBasicContainer( asCurrentUser, isSecurityFlagSet ))
        {
            Path logsMount = HostFileSystemOperations.createTempFolderAndMountAsVolume(
                    container,
                    "canmountjustlogs-",
                    "/logs" );
            container.start();
            verifyLogsFolderContentsArePresentOnHost( logsMount, asCurrentUser );
        }
    }

    /**
     * Mounting /data and /logs together should behave the same as mounting
     * each individually.
     */
    @ParameterizedTest(name = "asUser={0}, secureFlag={1}")
    @MethodSource( "defaultUserFlagSecurePermissionsFlag" )
    void testCanMountDataAndLogsFolder(boolean asCurrentUser, boolean isSecurityFlagSet) throws IOException
    {
        Assumptions.assumeTrue(TestSettings.NEO4J_VERSION.isAtLeastVersion( new Neo4jVersion( 3,1,0 ) ),
                               "User checks not valid before 3.1" );
        try(GenericContainer container = setupBasicContainer( asCurrentUser, isSecurityFlagSet ))
        {
            Path testOutputFolder = HostFileSystemOperations.createTempFolder( "canmountdataandlogs-" );
            Path dataMount = HostFileSystemOperations.createTempFolderAndMountAsVolume(
                    container,
                    "data-", "/data", testOutputFolder
            );
            Path logsMount = HostFileSystemOperations.createTempFolderAndMountAsVolume(
                    container,
                    "logs-", "/logs", testOutputFolder
            );
            container.start();
            verifyDataFolderContentsArePresentOnHost( dataMount, asCurrentUser );
            verifyLogsFolderContentsArePresentOnHost( logsMount, asCurrentUser );
        }
    }

    /**
     * With SECURE_FILE_PERMISSIONS set and an unwritable /data mount, the
     * container must refuse to start.
     */
    @Test
    void testCantWriteIfSecureEnabledAndNoPermissions_data() throws IOException
    {
        Assumptions.assumeTrue(TestSettings.NEO4J_VERSION.isAtLeastVersion( new Neo4jVersion( 3,1,0 ) ),
                               "User checks not valid before 3.1" );
        try(GenericContainer container = setupBasicContainer( false, true ))
        {
            HostFileSystemOperations.createTempFolderAndMountAsVolume(
                    container,
                    "nopermissioninsecuremode-data-",
                    "/data" );
            // currently Neo4j will try to start and fail. It should be fixed to throw an error and not try starting
            container.setWaitStrategy( Wait.forLogMessage( "[fF]older /data is not accessible for user", 1 )
                                           .withStartupTimeout( Duration.ofSeconds( 20 ) ) );
            Assertions.assertThrows( org.testcontainers.containers.ContainerLaunchException.class,
                                     () -> container.start(),
                                     "Neo4j should not start in secure mode if data folder is unwritable" );
        }
    }

    /**
     * With SECURE_FILE_PERMISSIONS set and an unwritable /logs mount, the
     * container must refuse to start.
     */
    @Test
    void testCantWriteIfSecureEnabledAndNoPermissions_logs() throws IOException
    {
        Assumptions.assumeTrue(TestSettings.NEO4J_VERSION.isAtLeastVersion( new Neo4jVersion( 3,1,0 ) ),
                               "User checks not valid before 3.1" );
        try(GenericContainer container = setupBasicContainer( false, true ))
        {
            HostFileSystemOperations.createTempFolderAndMountAsVolume(
                    container,
                    "nopermissioninsecuremode-logs-",
                    "/logs" );
            // currently Neo4j will try to start and fail. It should be fixed to throw an error and not try starting
            container.setWaitStrategy( Wait.forLogMessage( "[fF]older /logs is not accessible for user", 1 )
                                           .withStartupTimeout( Duration.ofSeconds( 20 ) ) );
            Assertions.assertThrows( org.testcontainers.containers.ContainerLaunchException.class,
                                     () -> container.start(),
                                     "Neo4j should not start in secure mode if logs folder is unwritable" );
        }
    }

    /**
     * Mounts every supported folder as a host bind-mount and verifies the
     * database can still read and write data.
     */
    @ParameterizedTest(name = "as current user={0}")
    @ValueSource(booleans = {true, false})
    void canMountAllTheThings_fileMounts(boolean asCurrentUser) throws Exception
    {
        Path testOutputFolder = HostFileSystemOperations.createTempFolder( "mount-everything-" );
        try(GenericContainer container = setupBasicContainer( asCurrentUser, false ))
        {
            HostFileSystemOperations.createTempFolderAndMountAsVolume( container, "conf", "/conf", testOutputFolder );
            HostFileSystemOperations.createTempFolderAndMountAsVolume( container, "data", "/data", testOutputFolder );
            HostFileSystemOperations.createTempFolderAndMountAsVolume( container, "import", "/import",
                                                                       testOutputFolder );
            HostFileSystemOperations.createTempFolderAndMountAsVolume( container, "logs", "/logs", testOutputFolder );
            HostFileSystemOperations.createTempFolderAndMountAsVolume( container, "metrics", "/metrics",
                                                                       testOutputFolder );
            HostFileSystemOperations.createTempFolderAndMountAsVolume( container, "plugins", "/plugins",
                                                                       testOutputFolder );
            container.start();
            DatabaseIO databaseIO = new DatabaseIO( container );
            // do some database writes so that we try writing to writable folders.
            databaseIO.putInitialDataIntoContainer( "neo4j", "none" );
            databaseIO.verifyInitialDataInContainer( "neo4j", "none" );
        }
    }

    /**
     * Same as the file-mount test, but using docker named volumes (randomised
     * suffix avoids collisions across test runs).
     */
    @ParameterizedTest(name = "as current user={0}")
    @ValueSource(booleans = {true, false})
    void canMountAllTheThings_namedVolumes(boolean asCurrentUser) throws Exception
    {
        String id = String.format( "%04d", new Random().nextInt( 10000 ));
        try(GenericContainer container = setupBasicContainer( asCurrentUser, false ))
        {
            container.withCreateContainerCmdModifier(
                    (Consumer<CreateContainerCmd>) cmd -> cmd.getHostConfig().withBinds(
                            Bind.parse("conf-"+id+":/conf"),
                            Bind.parse("data-"+id+":/data"),
                            Bind.parse("import-"+id+":/import"),
                            Bind.parse("logs-"+id+":/logs"),
                            //Bind.parse("metrics-"+id+":/metrics"), //todo metrics needs to be writable but we aren't chowning in the dockerfile, so a named volume for metrics will fail
                            Bind.parse("plugins-"+id+":/plugins")
                    ));
            container.start();
            DatabaseIO databaseIO = new DatabaseIO( container );
            // do some database writes so that we try writing to writable folders.
            databaseIO.putInitialDataIntoContainer( "neo4j", "none" );
            databaseIO.verifyInitialDataInContainer( "neo4j", "none" );
        }
    }
}
| |
package com.cmu.scout.ui;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Map;
import android.content.Intent;
import android.content.res.Configuration;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.view.ViewPager;
import android.view.View;
import android.view.ViewGroup;
import android.widget.EditText;
import android.widget.Toast;
import com.actionbarsherlock.app.ActionBar;
import com.actionbarsherlock.app.SherlockFragmentActivity;
import com.actionbarsherlock.view.Menu;
import com.actionbarsherlock.view.MenuItem;
import com.cmu.scout.R;
import com.cmu.scout.fragment.MatchFragment;
import com.cmu.scout.fragment.MatchInputAutoFragment;
import com.cmu.scout.fragment.MatchInputGeneralFragment;
import com.cmu.scout.fragment.MatchInputTeleOpFragment;
import com.viewpagerindicator.PageIndicator;
import com.viewpagerindicator.TabPageIndicator;
import com.viewpagerindicator.TitleProvider;
public class MatchPagerActivity extends SherlockFragmentActivity {
// private static final String TAG = "MatchPagerActivity";
// private static final boolean DEBUG = true;
private int mTeamId = -1;
private int mMatchId = -1;
private int mTeamNum = -1;
private int mMatchNum = -1;
public static final int MAX_SCORE = 999;
private MatchFragmentAdapter mAdapter;
private ViewPager mPager;
private PageIndicator mIndicator;
/**
 * Initialises the match-scouting pager: reads team/match identifiers from the
 * launching intent, configures the action bar, and wires the ViewPager to its
 * fragment adapter and tab indicator.
 * Fix: added the missing {@code @Override} annotation on this framework
 * lifecycle override so the compiler verifies the signature.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // if (DEBUG) Log.v(TAG, "+++ ON CREATE +++");
    setContentView(R.layout.match_scout_pager);
    final Intent data = getIntent();
    /* this should NEVER be null */
    if (data != null) {
        // retrieve team/match information (defaults to -1 when an extra is absent)
        mTeamId = data.getIntExtra(DashboardActivity.INTENT_TEAM_ID, -1);
        mMatchId = data.getIntExtra(DashboardActivity.INTENT_MATCH_ID, -1);
        mTeamNum = data.getIntExtra(DashboardActivity.INTENT_TEAM_NUM, -1);
        mMatchNum = data.getIntExtra(DashboardActivity.INTENT_MATCH_NUM, -1);
        ActionBar actionBar = getSupportActionBar();
        actionBar.setDisplayHomeAsUpEnabled(true);
        setActionBarTitle(getResources().getString(R.string.match_scouting_title));
        setActionBarSubtitle("Team " + mTeamNum + ", Match " + mMatchNum);
    }
    mAdapter = new MatchFragmentAdapter(getSupportFragmentManager());
    mPager = (ViewPager) findViewById(R.id.match_pager);
    mPager.setAdapter(mAdapter);
    // Keep every page alive so fragment state survives swipes.
    mPager.setOffscreenPageLimit(MatchFragmentAdapter.NUM_TITLES);
    mIndicator = (TabPageIndicator) findViewById(R.id.match_indicator);
    mIndicator.setViewPager(mPager);
}
// bypass any recreation when screen rotate
// save operations for efficiency
/**
 * Overridden (together with the manifest's configChanges entry — TODO confirm)
 * so the activity is not recreated on rotation; no extra work is needed here.
 */
@Override
public void onConfigurationChanged(Configuration newconfig){
    super.onConfigurationChanged(newconfig);
}
/**
 * Shows a "match saved" toast whenever the activity leaves the foreground.
 * NOTE(review): the toast is shown unconditionally — this method itself
 * performs no persistence; presumably the attached fragments save their own
 * state, so verify a save actually happens before this message is shown.
 */
@Override
public void onPause() {
    super.onPause();
    Toast.makeText(this, R.string.save_match_successful, Toast.LENGTH_SHORT).show();
}
/**
 * Persists the team/match identifiers so they survive process death;
 * restored in {@code onRestoreInstanceState} under the same keys.
 */
@Override
public void onSaveInstanceState(Bundle outState) {
    outState.putInt("mTeamId", mTeamId);
    outState.putInt("mMatchId", mMatchId);
    outState.putInt("mTeamNum", mTeamNum);
    outState.putInt("mMatchNum", mMatchNum);
    super.onSaveInstanceState(outState);
}
/**
 * Restores the team/match identifiers saved in {@code onSaveInstanceState}.
 */
@Override
public void onRestoreInstanceState(Bundle savedInstanceState) {
    super.onRestoreInstanceState(savedInstanceState);
    mTeamId = savedInstanceState.getInt("mTeamId");
    mMatchId = savedInstanceState.getInt("mMatchId");
    mTeamNum = savedInstanceState.getInt("mTeamNum");
    mMatchNum = savedInstanceState.getInt("mMatchNum");
}
/** Updates the action bar's main title; null input is ignored. */
private void setActionBarTitle(String title) {
    // if (DEBUG) Log.v(TAG, "setActionBarTitle()");
    if (title == null) {
        return;
    }
    getSupportActionBar().setTitle(title);
}
/** Updates the action bar's subtitle; null input is ignored. */
private void setActionBarSubtitle(String subtitle) {
    // if (DEBUG) Log.v(TAG, "setActionBarSubtitle()");
    if (subtitle == null) {
        return;
    }
    getSupportActionBar().setSubtitle(subtitle);
}
/** Returns the team row id supplied via the launching intent (-1 if absent). */
public int getTeamId() {
return mTeamId;
}
/** Returns the match row id supplied via the launching intent (-1 if absent). */
public int getMatchId() {
return mMatchId;
}
/** Inflates the match-input options menu (provides the "clear data" action). */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getSupportMenuInflater().inflate(R.menu.match_input_options_menu, menu);
return super.onCreateOptionsMenu(menu);
}
/**
 * Dispatches action-bar selections: the app icon returns to the dashboard,
 * "clear data" resets all input screens; anything else defers to the superclass.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    final int selectedId = item.getItemId();
    if (selectedId == android.R.id.home) {
        // App icon tapped: go back to the dashboard, clearing activities above it.
        Intent intent = new Intent(this, DashboardActivity.class);
        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        startActivity(intent);
        return true;
    }
    if (selectedId == R.id.clear_data) {
        clearScreen();
        return true;
    }
    return super.onOptionsItemSelected(item);
}
/**
 * Resets all three input pages (auto, tele-op, general) and confirms with a toast.
 * Pages that have not been instantiated yet (or were reclaimed) are skipped.
 */
public void clearScreen() {
    // getFragment() already returns MatchFragment — the previous downcasts to the
    // concrete fragment classes were redundant and risked ClassCastException.
    MatchFragment fragAuto = mAdapter.getFragment(MatchFragmentAdapter.POSITION_AUTO);
    MatchFragment fragTeleOp = mAdapter.getFragment(MatchFragmentAdapter.POSITION_TELEOP);
    MatchFragment fragGeneral = mAdapter.getFragment(MatchFragmentAdapter.POSITION_GENERAL);
    if (fragAuto != null) fragAuto.clearScreen();
    if (fragTeleOp != null) fragTeleOp.clearScreen();
    if (fragGeneral != null) fragGeneral.clearScreen();
    Toast.makeText(this, R.string.screen_reset, Toast.LENGTH_SHORT).show();
}
/**
 * Handles clicks forwarded from this activity's attached fragments, routing the
 * clicked view id to the page fragment that owns that view.
 *
 * @param v the clicked view; its id determines the target page
 */
public void onClickHandler(View v) {
    final int viewId = v.getId();
    final int pos;
    // The previous version repeated an identical assignment in 14 separate case
    // arms; grouped fall-through expresses the same mapping without duplication.
    switch (viewId) {
        // Autonomous-period shot counters live on the auto page.
        case R.id.BT_Auto_Shots_Hit_High:
        case R.id.BT_Auto_Shots_Miss_High:
        case R.id.BT_Auto_Shots_Hit_Med:
        case R.id.BT_Auto_Shots_Miss_Med:
        case R.id.BT_Auto_Shots_Hit_Low:
        case R.id.BT_Auto_Shots_Miss_Low:
            pos = MatchFragmentAdapter.POSITION_AUTO;
            break;
        // Tele-op shot counters live on the tele-op page.
        case R.id.BT_Shots_Hit_High:
        case R.id.BT_Shots_Miss_High:
        case R.id.BT_Shots_Hit_Med:
        case R.id.BT_Shots_Miss_Med:
        case R.id.BT_Shots_Hit_Low:
        case R.id.BT_Shots_Miss_Low:
            pos = MatchFragmentAdapter.POSITION_TELEOP;
            break;
        // Toggle buttons live on the general page.
        case R.id.TBT_Balance:
        case R.id.TBT_did_nothing:
            pos = MatchFragmentAdapter.POSITION_GENERAL;
            break;
        default:
            // Unknown view: getFragment(-1) yields null below and the click is ignored,
            // matching the original behavior.
            pos = -1;
            break;
    }
    MatchFragment frag = mAdapter.getFragment(pos);
    if (frag != null) frag.updateDisplay(viewId);
}
/**
 * Pager adapter supplying the three match-scouting pages (autonomous, tele-op,
 * other) and their tab titles for the indicator.
 */
public static class MatchFragmentAdapter extends FragmentPagerAdapter
        implements TitleProvider {
    public static final int POSITION_AUTO = 0;
    public static final int POSITION_TELEOP = 1;
    public static final int POSITION_GENERAL = 2;

    // Weakly caches instantiated page fragments so the activity can reach them
    // (see getFragment) without preventing destroyed pages from being reclaimed.
    // Made final: the reference is never reassigned.
    private final Map<Integer, WeakReference<MatchFragment>> mPageReferenceMap
            = new HashMap<Integer, WeakReference<MatchFragment>>();

    private static final String[] TITLES = new String[] { "Autonomous", "Tele-Op", "Other" };
    public static final int NUM_TITLES = TITLES.length;

    public MatchFragmentAdapter(FragmentManager fm) {
        super(fm);
    }

    /** Creates the fragment for the given page and records a weak reference to it. */
    @Override
    public Fragment getItem(int position) {
        MatchFragment result = null;
        switch (position) {
            case POSITION_AUTO:
                result = MatchInputAutoFragment.newInstance();
                break;
            case POSITION_TELEOP:
                result = MatchInputTeleOpFragment.newInstance();
                break;
            case POSITION_GENERAL:
                result = MatchInputGeneralFragment.newInstance();
                break;
        }
        mPageReferenceMap.put(position, new WeakReference<MatchFragment>(result));
        return result;
    }

    @Override
    public void destroyItem(ViewGroup container, int position, Object object) {
        super.destroyItem(container, position, object);
        // Drop the cached reference so getFragment() won't hand out a dead page.
        mPageReferenceMap.remove(position);
    }

    @Override
    public int getCount() {
        return NUM_TITLES;
    }

    @Override
    public String getTitle(int position) {
        // NOTE(review): default-locale toUpperCase(); harmless for these ASCII
        // titles, but Locale.ROOT would be safer if titles are ever localized.
        return TITLES[position % NUM_TITLES].toUpperCase();
    }

    /**
     * Returns the cached fragment for the given page, or null if the page has
     * not been created yet (or was destroyed/reclaimed).
     */
    public MatchFragment getFragment(int position) {
        WeakReference<MatchFragment> weakRef = mPageReferenceMap.get(position);
        return (weakRef != null) ? weakRef.get() : null;
    }
}
/**
 * Decrements the numeric value shown in the given EditText, clamping at 0.
 * An empty or non-numeric field is treated as 0 (the previous
 * Integer.valueOf call threw NumberFormatException and crashed in that case).
 *
 * @param v the EditText holding the counter value
 */
public void decCounter(View v) {
    EditText et = (EditText) v;
    int value;
    try {
        value = Integer.parseInt(et.getText().toString());
    } catch (NumberFormatException ignored) {
        // Blank or garbled field: start from zero rather than crashing.
        value = 0;
    }
    value = Math.max(0, value - 1);
    et.setText(String.valueOf(value));
}
}
| |
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.email.activity;
import com.android.email.R;
import com.android.emailcommon.provider.Account;
import com.android.emailcommon.provider.EmailContent.AccountColumns;
import com.android.emailcommon.provider.EmailContent.MailboxColumns;
import com.android.emailcommon.provider.HostAuth;
import com.android.emailcommon.provider.Mailbox;
import android.app.Activity;
import android.app.FragmentTransaction;
import android.app.ListFragment;
import android.app.LoaderManager.LoaderCallbacks;
import android.content.ContentValues;
import android.content.Context;
import android.content.CursorLoader;
import android.content.Loader;
import android.content.res.Resources;
import android.database.Cursor;
import android.database.MatrixCursor;
import android.database.MatrixCursor.RowBuilder;
import android.database.MergeCursor;
import android.net.Uri;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ListView;
import android.widget.SimpleCursorAdapter;
/**
 * Fragment containing a list of accounts to show during shortcut creation.
 * <p>
 * NOTE: In order to receive callbacks, the activity containing this fragment must implement
 * the {@link PickerCallback} interface.
 */
public abstract class ShortcutPickerFragment extends ListFragment
        implements OnItemClickListener, LoaderCallbacks<Cursor> {
    /** Callback methods. Enclosing activities must implement to receive fragment notifications. */
    public static interface PickerCallback {
        /** Builds a mailbox filter for the given account; may return null for "no filter". */
        public Integer buildFilter(Account account);
        /** Invoked when an account and mailbox have been selected. */
        public void onSelected(Account account, long mailboxId);
        /** Required data is missing; either the account and/or mailbox */
        public void onMissingData(boolean missingAccount, boolean missingMailbox);
    }

    /** A no-op callback, used until/unless the host activity implements PickerCallback. */
    private final PickerCallback EMPTY_CALLBACK = new PickerCallback() {
        @Override public Integer buildFilter(Account account) { return null; }
        @Override public void onSelected(Account account, long mailboxId){ getActivity().finish(); }
        @Override public void onMissingData(boolean missingAccount, boolean missingMailbox) { }
    };
    private final static int LOADER_ID = 0;
    private final static int[] TO_VIEWS = new int[] {
        android.R.id.text1,
    };

    PickerCallback mCallback = EMPTY_CALLBACK;
    /** Cursor adapter that provides either the account or mailbox list */
    private SimpleCursorAdapter mAdapter;

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        if (activity instanceof PickerCallback) {
            mCallback = (PickerCallback) activity;
        }
        final String[] fromColumns = getFromColumns();
        mAdapter = new SimpleCursorAdapter(activity,
                android.R.layout.simple_expandable_list_item_1, null, fromColumns, TO_VIEWS, 0);
        setListAdapter(mAdapter);
        getLoaderManager().initLoader(LOADER_ID, null, this);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        ListView listView = getListView();
        listView.setOnItemClickListener(this);
        listView.setItemsCanFocus(false);
    }

    @Override
    public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
        mAdapter.swapCursor(data);
    }

    @Override
    public void onLoaderReset(Loader<Cursor> loader) {
        mAdapter.swapCursor(null);
    }

    /** Returns the cursor columns to map into list */
    abstract String[] getFromColumns();

    // TODO if we add meta-accounts to the database, remove this class entirely
    private static final class AccountPickerLoader extends CursorLoader {
        public AccountPickerLoader(Context context, Uri uri, String[] projection, String selection,
                String[] selectionArgs, String sortOrder) {
            super(context, uri, projection, selection, selectionArgs, sortOrder);
        }

        @Override
        public Cursor loadInBackground() {
            Cursor parentCursor = super.loadInBackground();
            int cursorCount = parentCursor.getCount();
            final Cursor returnCursor;
            if (cursorCount > 1) {
                // Only add "All accounts" if there is more than 1 account defined
                MatrixCursor allAccountCursor = new MatrixCursor(getProjection());
                addCombinedAccountRow(allAccountCursor, cursorCount);
                returnCursor = new MergeCursor(new Cursor[] { allAccountCursor, parentCursor });
            } else {
                returnCursor = parentCursor;
            }
            return returnCursor;
        }

        /** Adds a row for "All Accounts" into the given cursor */
        private void addCombinedAccountRow(MatrixCursor cursor, int accountCount) {
            Context context = getContext();
            Account account = new Account();
            account.mId = Account.ACCOUNT_ID_COMBINED_VIEW;
            Resources res = context.getResources();
            String countString = res.getQuantityString(R.plurals.picker_combined_view_account_count,
                    accountCount, accountCount);
            account.mDisplayName = res.getString(R.string.picker_combined_view_fmt, countString);
            ContentValues values = account.toContentValues();
            RowBuilder row = cursor.newRow();
            for (String rowName : cursor.getColumnNames()) {
                // special case some of the rows ...
                if (AccountColumns.ID.equals(rowName)) {
                    row.add(Account.ACCOUNT_ID_COMBINED_VIEW);
                    continue;
                } else if (AccountColumns.IS_DEFAULT.equals(rowName)) {
                    row.add(0);
                    continue;
                }
                row.add(values.get(rowName));
            }
        }
    }

    /** Account picker */
    public static class AccountShortcutPickerFragment extends ShortcutPickerFragment {
        // Primitive volatile boolean; the previous "new Boolean(false)" used the
        // deprecated boxing constructor for no benefit (volatile already provides
        // the cross-thread visibility needed here).
        private volatile boolean mLoadFinished;
        private final static String[] ACCOUNT_FROM_COLUMNS = new String[] {
            AccountColumns.DISPLAY_NAME,
        };

        @Override
        public void onActivityCreated(Bundle savedInstanceState) {
            super.onActivityCreated(savedInstanceState);
            getActivity().setTitle(R.string.account_shortcut_picker_title);
            // Keep the activity hidden until the loader delivers data, avoiding a
            // flash of an empty list (or one that will immediately auto-select).
            if (!mLoadFinished) {
                getActivity().setVisible(false);
            }
        }

        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            Cursor cursor = (Cursor) parent.getItemAtPosition(position);
            selectAccountCursor(cursor, true);
        }

        @Override
        public Loader<Cursor> onCreateLoader(int id, Bundle args) {
            Context context = getActivity();
            return new AccountPickerLoader(
                context, Account.CONTENT_URI, Account.CONTENT_PROJECTION, null, null, null);
        }

        @Override
        public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
            // No accounts; close the dialog
            if (data.getCount() == 0) {
                mCallback.onMissingData(true, false);
                return;
            }
            // if there is only one account, auto-select it
            if (data.getCount() == 1 && data.moveToFirst()) {
                selectAccountCursor(data, false);
                return;
            }
            super.onLoadFinished(loader, data);
            mLoadFinished = true;
            getActivity().setVisible(true);
        }

        @Override
        String[] getFromColumns() {
            return ACCOUNT_FROM_COLUMNS;
        }

        /** Selects the account specified by the given cursor */
        private void selectAccountCursor(Cursor cursor, boolean allowBack) {
            Account account = new Account();
            account.restore(cursor);
            ShortcutPickerFragment fragment = MailboxShortcutPickerFragment.newInstance(
                    getActivity(), account, mCallback.buildFilter(account));
            FragmentTransaction transaction = getFragmentManager().beginTransaction();
            transaction.replace(R.id.shortcut_list, fragment);
            if (allowBack) {
                transaction.addToBackStack(null);
            }
            transaction.commitAllowingStateLoss();
        }
    }

    // TODO if we add meta-mailboxes to the database, remove this class entirely
    private static final class MailboxPickerLoader extends CursorLoader {
        private final long mAccountId;
        private final boolean mAllowUnread;

        public MailboxPickerLoader(Context context, Uri uri, String[] projection, String selection,
                String[] selectionArgs, String sortOrder, long accountId, boolean allowUnread) {
            super(context, uri, projection, selection, selectionArgs, sortOrder);
            mAccountId = accountId;
            mAllowUnread = allowUnread;
        }

        @Override
        public Cursor loadInBackground() {
            MatrixCursor unreadCursor =
                    new MatrixCursor(MailboxShortcutPickerFragment.MATRIX_PROJECTION);
            Context context = getContext();
            if (mAllowUnread) {
                // For the special mailboxes, their ID is < 0. The UI list does not deal with
                // negative values very well, so, add MAX_VALUE to ensure they're positive, but,
                // don't clash with legitimate mailboxes.
                String mailboxName = context.getString(R.string.picker_mailbox_name_all_unread);
                unreadCursor.addRow(
                        new Object[] {
                            Integer.MAX_VALUE + Mailbox.QUERY_ALL_UNREAD,
                            Mailbox.QUERY_ALL_UNREAD,
                            mailboxName,
                        });
            }
            if (mAccountId == Account.ACCOUNT_ID_COMBINED_VIEW) {
                // Do something special for the "combined" view
                MatrixCursor combinedMailboxesCursor =
                        new MatrixCursor(MailboxShortcutPickerFragment.MATRIX_PROJECTION);
                // For the special mailboxes, their ID is < 0. The UI list does not deal with
                // negative values very well, so, add MAX_VALUE to ensure they're positive, but,
                // don't clash with legitimate mailboxes.
                String mailboxName = context.getString(R.string.picker_mailbox_name_all_inbox);
                combinedMailboxesCursor.addRow(
                        new Object[] {
                            Integer.MAX_VALUE + Mailbox.QUERY_ALL_INBOXES,
                            Mailbox.QUERY_ALL_INBOXES,
                            mailboxName
                        });
                return new MergeCursor(new Cursor[] { combinedMailboxesCursor, unreadCursor });
            }
            // Loading for a regular account; perform a normal load
            return new MergeCursor(new Cursor[] { super.loadInBackground(), unreadCursor });
        }
    }

    /** Mailbox picker */
    public static class MailboxShortcutPickerFragment extends ShortcutPickerFragment {
        // These are bit-flag constants; they were previously non-final public
        // statics, i.e. silently mutable by any caller. Made final.
        /** Allow all mailboxes in the mailbox list */
        public static final int FILTER_ALLOW_ALL = 0;
        /** Only allow an account's INBOX */
        public static final int FILTER_INBOX_ONLY = 1 << 0;
        /** Allow an "unread" mailbox; this is not affected by {@link #FILTER_INBOX_ONLY} */
        public static final int FILTER_ALLOW_UNREAD = 1 << 1;
        /** Fragment argument to set filter values */
        static final String ARG_FILTER = "MailboxShortcutPickerFragment.filter";
        static final String ARG_ACCOUNT = "MailboxShortcutPickerFragment.account";
        private final static String REAL_ID = "realId";
        private final static String[] MAILBOX_FROM_COLUMNS = new String[] {
            MailboxColumns.DISPLAY_NAME,
        };
        /** Loader projection used for IMAP & POP3 accounts */
        private final static String[] IMAP_PROJECTION = new String [] {
            MailboxColumns.ID, MailboxColumns.ID + " as " + REAL_ID,
            MailboxColumns.SERVER_ID + " as " + MailboxColumns.DISPLAY_NAME
        };
        /** Loader projection used for EAS accounts */
        private final static String[] EAS_PROJECTION = new String [] {
            MailboxColumns.ID, MailboxColumns.ID + " as " + REAL_ID,
            MailboxColumns.DISPLAY_NAME
        };
        /** Loader projection used for a matrix cursor */
        private final static String[] MATRIX_PROJECTION = new String [] {
            MailboxColumns.ID, REAL_ID, MailboxColumns.DISPLAY_NAME
        };
        // TODO #ALL_MAILBOX_SELECTION is identical to MailboxesAdapter#ALL_MAILBOX_SELECTION;
        // create a common selection. Move this to the Mailbox class?
        /** Selection for all visible mailboxes for an account */
        private final static String ALL_MAILBOX_SELECTION = MailboxColumns.ACCOUNT_KEY + "=?" +
                " AND " + Mailbox.USER_VISIBLE_MAILBOX_SELECTION;
        /** Selection for just the INBOX of an account */
        private final static String INBOX_ONLY_SELECTION = ALL_MAILBOX_SELECTION +
                " AND " + MailboxColumns.TYPE + " = " + Mailbox.TYPE_INBOX;
        // Primitive volatile boolean (see note in AccountShortcutPickerFragment).
        private volatile boolean mLoadFinished;
        /** The currently selected account */
        private Account mAccount;
        /** The filter values; default to allow all mailboxes */
        private Integer mFilter;

        /**
         * Builds a mailbox shortcut picker for the given account.
         *
         * @param filter bitmask of FILTER_* values, or null for "allow all"
         */
        public static MailboxShortcutPickerFragment newInstance(
                Context context, Account account, Integer filter) {
            MailboxShortcutPickerFragment fragment = new MailboxShortcutPickerFragment();
            Bundle args = new Bundle();
            args.putParcelable(ARG_ACCOUNT, account);
            // PickerCallback#buildFilter may legitimately return null (EMPTY_CALLBACK
            // does); unconditionally unboxing "filter" here previously threw NPE.
            args.putInt(ARG_FILTER, (filter == null) ? FILTER_ALLOW_ALL : filter);
            fragment.setArguments(args);
            return fragment;
        }

        /** Returns the mailbox filter (lazily read from the fragment arguments). */
        int getFilter() {
            if (mFilter == null) {
                mFilter = getArguments().getInt(ARG_FILTER, FILTER_ALLOW_ALL);
            }
            return mFilter;
        }

        @Override
        public void onAttach(Activity activity) {
            // Need to setup the account first thing
            mAccount = getArguments().getParcelable(ARG_ACCOUNT);
            super.onAttach(activity);
        }

        @Override
        public void onActivityCreated(Bundle savedInstanceState) {
            super.onActivityCreated(savedInstanceState);
            getActivity().setTitle(R.string.mailbox_shortcut_picker_title);
            // Hidden until the loader delivers data; see AccountShortcutPickerFragment.
            if (!mLoadFinished) {
                getActivity().setVisible(false);
            }
        }

        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            Cursor cursor = (Cursor) parent.getItemAtPosition(position);
            long mailboxId = cursor.getLong(cursor.getColumnIndex(REAL_ID));
            mCallback.onSelected(mAccount, mailboxId);
        }

        @Override
        public Loader<Cursor> onCreateLoader(int id, Bundle args) {
            Context context = getActivity();
            // TODO Create a fully-qualified path name for Exchange accounts [code should also work
            // for MoveMessageToDialog.java]
            HostAuth recvAuth = mAccount.getOrCreateHostAuthRecv(context);
            final String[] projection;
            final String orderBy;
            final String selection;
            if (recvAuth.isEasConnection()) {
                projection = EAS_PROJECTION;
                orderBy = MailboxColumns.DISPLAY_NAME;
            } else {
                projection = IMAP_PROJECTION;
                orderBy = MailboxColumns.SERVER_ID;
            }
            if ((getFilter() & FILTER_INBOX_ONLY) == 0) {
                selection = ALL_MAILBOX_SELECTION;
            } else {
                selection = INBOX_ONLY_SELECTION;
            }
            return new MailboxPickerLoader(
                    context, Mailbox.CONTENT_URI, projection, selection,
                    new String[] { Long.toString(mAccount.mId) }, orderBy, mAccount.mId,
                    (getFilter() & FILTER_ALLOW_UNREAD) != 0);
        }

        @Override
        public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
            // No accounts; close the dialog
            if (data.getCount() == 0) {
                mCallback.onMissingData(false, true);
                return;
            }
            // if there is only one mailbox, auto-select it
            if (data.getCount() == 1 && data.moveToFirst()) {
                long mailboxId = data.getLong(data.getColumnIndex(REAL_ID));
                mCallback.onSelected(mAccount, mailboxId);
                return;
            }
            super.onLoadFinished(loader, data);
            mLoadFinished = true;
            getActivity().setVisible(true);
        }

        @Override
        String[] getFromColumns() {
            return MAILBOX_FROM_COLUMNS;
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl.join;
import javax.inject.Named;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.physical.config.MergeJoinPOP;
import org.apache.drill.exec.record.VectorContainer;
import org.apache.calcite.rel.core.JoinRelType;
/**
* This join template uses a merge join to combine two ordered streams into a single larger batch. When joining
* single values on each side, the values can be copied to the outgoing batch immediately. The outgoing record batch
* should be sent as needed (e.g. schema change or outgoing batch full). When joining multiple values on one or
* both sides, two passes over the vectors will be made; one to construct the selection vector, and another to
* generate the outgoing batches once the duplicate value is no longer encountered.
*
* Given two tables ordered by 'col1':
*
* t1 t2
* --------------- ---------------
* | key | col2 | | key | col2 |
* --------------- ---------------
* | 1 | 'ab' | | 1 | 'AB' |
* | 2 | 'cd' | | 2 | 'CD' |
* | 2 | 'ef' | | 4 | 'EF' |
* | 4 | 'gh' | | 4 | 'GH' |
* | 4 | 'ij' | | 5 | 'IJ' |
* --------------- ---------------
*
* 'SELECT * FROM t1 INNER JOIN t2 on (t1.key == t2.key)' should generate the following:
*
* ---------------------------------
* | t1.key | t2.key | col1 | col2 |
* ---------------------------------
* | 1 | 1 | 'ab' | 'AB' |
* | 2 | 2 | 'cd' | 'CD' |
* | 2 | 2 | 'ef' | 'CD' |
* | 4 | 4 | 'gh' | 'EF' |
* | 4 | 4 | 'gh' | 'GH' |
* | 4 | 4 | 'ij' | 'EF' |
* | 4 | 4 | 'ij' | 'GH' |
* ---------------------------------
*
* In the simple match case, only one row from each table matches. Additional cases should be considered:
* - a left join key matches multiple right join keys
* - duplicate keys which may span multiple record batches (on the left and/or right side)
* - one or both incoming record batches change schemas
*
* In the case where a left join key matches multiple right join keys:
* - add a reference to all of the right table's matching values to the SV4.
*
* A RecordBatchData object should be used to hold onto all batches which have not been sent.
*
* JoinStatus:
* - all state related to the join operation is stored in the JoinStatus object.
* - this is required since code may be regenerated before completion of an outgoing record batch.
*/
public abstract class JoinTemplate implements JoinWorker {

    /** Runs the generated per-schema setup code against the incoming batches and output container. */
    @Override
    public void setupJoin(FragmentContext context, JoinStatus status, VectorContainer outgoing) throws SchemaChangeException {
        doSetup(context, status, outgoing);
    }

    /**
     * Copy rows from the input record batches until the output record batch is full
     * @param status State of the join operation (persists across multiple record batches/schema changes)
     * @return true if join succeeded; false if the worker needs to be regenerated
     */
    public final boolean doJoin(final JoinStatus status) {
        while(!status.isOutgoingBatchFull()) {
            // for each record
            // validate input iterators (advancing to the next record batch if necessary)
            if (!status.isRightPositionAllowed()) {
                if (((MergeJoinPOP)status.outputBatch.getPopConfig()).getJoinType() == JoinRelType.LEFT) {
                    // we've hit the end of the right record batch; copy any remaining values from the left batch
                    while (status.isLeftPositionAllowed()) {
                        if (status.isOutgoingBatchFull()) {
                            return true;
                        }
                        doCopyLeft(status.getLeftPosition(), status.getOutPosition());
                        status.incOutputPos();
                        status.advanceLeft();
                    }
                }
                return true;
            }
            if (!status.isLeftPositionAllowed()) {
                return true;
            }
            int comparison = doCompare(status.getLeftPosition(), status.getRightPosition());
            switch (comparison) {
            case -1:
                // left key < right key
                // for a LEFT join, emit the unmatched left row; otherwise just skip it
                if (((MergeJoinPOP)status.outputBatch.getPopConfig()).getJoinType() == JoinRelType.LEFT) {
                    doCopyLeft(status.getLeftPosition(), status.getOutPosition());
                    status.incOutputPos();
                }
                status.advanceLeft();
                continue;
            case 0:
                // left key == right key
                // check for repeating values on the left side
                if (!status.isLeftRepeating() &&
                    status.isNextLeftPositionInCurrentBatch() &&
                    doCompareNextLeftKey(status.getLeftPosition()) == 0) {
                    // subsequent record(s) in the left batch have the same key
                    status.notifyLeftRepeating();
                } else if (status.isLeftRepeating() &&
                           status.isNextLeftPositionInCurrentBatch() &&
                           doCompareNextLeftKey(status.getLeftPosition()) != 0) {
                    // this record marks the end of repeated keys
                    status.notifyLeftStoppedRepeating();
                }
                // resume from saved state if the previous doJoin call returned
                // mid-match because the outgoing batch filled up
                boolean crossedBatchBoundaries;
                int initialRightPosition;
                if (status.hasIntermediateData()) {
                    crossedBatchBoundaries = status.getCrossedBatchBoundaries();
                    initialRightPosition = status.getInitialRightPosition();
                    status.resetIntermediateData();
                } else {
                    crossedBatchBoundaries = false;
                    initialRightPosition = status.getRightPosition();
                }
                do {
                    if (status.isOutgoingBatchFull()) {
                        // save where we are in the match so the next call can resume
                        status.setIntermediateData(initialRightPosition, crossedBatchBoundaries);
                        return true;
                    }
                    // copy all equal right keys to the output record batch
                    doCopyLeft(status.getLeftPosition(), status.getOutPosition());
                    doCopyRight(status.getRightPosition(), status.getOutPosition());
                    status.incOutputPos();
                    // If the left key has duplicates and we're about to cross a boundary in the right batch, add the
                    // right table's record batch to the sv4 builder before calling next. These records will need to be
                    // copied again for each duplicate left key.
                    if (status.isLeftRepeating() && !status.isRightPositionInCurrentBatch()) {
                        status.outputBatch.addRightToBatchBuilder();
                        crossedBatchBoundaries = true;
                    }
                    status.advanceRight();
                } while ((!status.isLeftRepeating() || status.isRightPositionInCurrentBatch())
                         && status.isRightPositionAllowed()
                         && doCompare(status.getLeftPosition(), status.getRightPosition()) == 0);
                if (status.getRightPosition() > initialRightPosition &&
                    (status.isLeftRepeating() || ! status.isNextLeftPositionInCurrentBatch())) {
                    // more than one matching result from right table; reset position in case of subsequent left match
                    status.setRightPosition(initialRightPosition);
                }
                status.advanceLeft();
                if (status.isLeftRepeating() && status.isNextLeftPositionInCurrentBatch() &&
                    doCompareNextLeftKey(status.getLeftPosition()) != 0) {
                    // left no longer has duplicates. switch back to incoming batch mode
                    status.setDefaultAdvanceMode();
                    status.notifyLeftStoppedRepeating();
                } else if (status.isLeftRepeating() && crossedBatchBoundaries) {
                    try {
                        // build the right batches and
                        status.outputBatch.batchBuilder.build();
                        status.setSV4AdvanceMode();
                    } catch (SchemaChangeException e) {
                        // NOTE(review): the failure is recorded in status.ok but this
                        // method still returns true below — confirm the caller checks
                        // status.ok before trusting the result.
                        status.ok = false;
                    }
                    // return to indicate recompile in right-sv4 mode
                    return true;
                }
                continue;
            case 1:
                // left key > right key
                status.advanceRight();
                continue;
            default:
                throw new IllegalStateException();
            }
        }
        return true;
    }

    // Generated Methods

    public abstract void doSetup(@Named("context") FragmentContext context,
                                 @Named("status") JoinStatus status,
                                 @Named("outgoing") VectorContainer outgoing) throws SchemaChangeException;

    /**
     * Copy the left-side row at leftIndex into the outgoing batch at outIndex.
     * (The previous doc declared an "@return" but this method is void.)
     *
     * @param leftIndex position of batch (lower 16 bits) and record (upper 16 bits) in left SV4
     * @param outIndex position of the output record batch
     */
    public abstract void doCopyLeft(@Named("leftIndex") int leftIndex, @Named("outIndex") int outIndex);

    /** Copy the right-side row at rightIndex into the outgoing batch at outIndex. */
    public abstract void doCopyRight(@Named("rightIndex") int rightIndex, @Named("outIndex") int outIndex);

    /**
     * Compare the values of the left and right join key to determine whether the left is less than, greater than
     * or equal to the right.
     *
     * @param leftIndex position of the left-side key
     * @param rightIndex position of the right-side key
     * @return 0 if both keys are equal
     *         -1 if left is < right
     *         1 if left is > right
     */
    protected abstract int doCompare(@Named("leftIndex") int leftIndex,
                                     @Named("rightIndex") int rightIndex);

    /**
     * Compare the current left key to the next left key, if it's within the batch.
     * @return 0 if both keys are equal,
     *         1 if the keys are not equal, and
     *         -1 if there are no more keys in this batch
     */
    protected abstract int doCompareNextLeftKey(@Named("leftIndex") int leftIndex);
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.tier.sockets.command;
import static org.apache.geode.util.internal.UncheckedUtils.uncheckedCast;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.LinkedHashMap;
import java.util.Map;
import org.jetbrains.annotations.NotNull;
import org.apache.geode.annotations.Immutable;
import org.apache.geode.cache.DynamicRegionFactory;
import org.apache.geode.cache.RegionDestroyedException;
import org.apache.geode.cache.ResourceException;
import org.apache.geode.cache.client.internal.PutAllOp;
import org.apache.geode.cache.operations.PutAllOperationContext;
import org.apache.geode.cache.operations.internal.UpdateOnlyMap;
import org.apache.geode.distributed.internal.DistributionStats;
import org.apache.geode.internal.cache.CachedDeserializableFactory;
import org.apache.geode.internal.cache.EventID;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.PartitionedRegion;
import org.apache.geode.internal.cache.PutAllPartialResultException;
import org.apache.geode.internal.cache.Token;
import org.apache.geode.internal.cache.ha.ThreadIdentifier;
import org.apache.geode.internal.cache.tier.CachedRegionHelper;
import org.apache.geode.internal.cache.tier.Command;
import org.apache.geode.internal.cache.tier.MessageType;
import org.apache.geode.internal.cache.tier.sockets.BaseCommand;
import org.apache.geode.internal.cache.tier.sockets.CacheServerStats;
import org.apache.geode.internal.cache.tier.sockets.ChunkedMessage;
import org.apache.geode.internal.cache.tier.sockets.Message;
import org.apache.geode.internal.cache.tier.sockets.Part;
import org.apache.geode.internal.cache.tier.sockets.ServerConnection;
import org.apache.geode.internal.cache.tier.sockets.VersionedObjectList;
import org.apache.geode.internal.cache.versions.VersionTag;
import org.apache.geode.internal.security.AuthorizeRequest;
import org.apache.geode.internal.security.SecurityService;
import org.apache.geode.internal.util.Breadcrumbs;
import org.apache.geode.security.ResourcePermission.Operation;
import org.apache.geode.security.ResourcePermission.Resource;
public class PutAll80 extends BaseCommand {
@Immutable
private static final PutAll80 singleton = new PutAll80();
/** Returns the shared singleton instance of this command. */
public static Command getCommand() {
return singleton;
}
// Protected: only the singleton above is created directly; protected (not
// private) so subclasses can extend this command.
protected PutAll80() {}
// Identifier for this operation version; presumably used in log/diagnostic
// messages — TODO confirm at call sites. Subclasses override to report theirs.
protected String putAllClassName() {
return "putAll80";
}
// Hook for subclasses whose message carries a callback argument; the base
// 8.0 putAll message has none, so this returns null.
protected Object getOptionalCallbackArg(Message msg) throws ClassNotFoundException, IOException {
return null;
}
// Number of fixed message parts that precede the key/value pairs: region name,
// event id, skip-callbacks flag, flags, and number of keys (parts 0-4 read in
// cmdExecute).
protected int getBasePartCount() {
return 5;
}
@Override
public void cmdExecute(final @NotNull Message clientMessage,
final @NotNull ServerConnection serverConnection,
final @NotNull SecurityService securityService, long startp)
throws IOException, InterruptedException {
long start = startp; // copy this since we need to modify it
StringBuilder errMessage = new StringBuilder();
CachedRegionHelper crHelper = serverConnection.getCachedRegionHelper();
CacheServerStats stats = serverConnection.getCacheServerStats();
// requiresResponse = true;
serverConnection.setAsTrue(REQUIRES_RESPONSE);
serverConnection.setAsTrue(REQUIRES_CHUNKED_RESPONSE); // new in 8.0
{
long oldStart = start;
start = DistributionStats.getStatTime();
stats.incReadPutAllRequestTime(start - oldStart);
}
final String regionName;
boolean replyWithMetaData = false;
VersionedObjectList response;
try {
// Retrieve the data from the message parts
// part 0: region name
Part regionNamePart = clientMessage.getPart(0);
regionName = regionNamePart.getCachedString();
if (regionName == null) {
String putAllMsg =
"The input region name for the putAll request is null";
logger.warn("{}: {}", serverConnection.getName(), putAllMsg);
errMessage.append(putAllMsg);
writeChunkedErrorResponse(clientMessage, MessageType.PUT_DATA_ERROR, errMessage.toString(),
serverConnection);
serverConnection.setAsTrue(RESPONDED);
return;
}
LocalRegion region = (LocalRegion) crHelper.getRegion(regionName);
if (region == null) {
String reason = " was not found during putAll request";
writeRegionDestroyedEx(clientMessage, regionName, reason, serverConnection);
serverConnection.setAsTrue(RESPONDED);
return;
}
final int BASE_PART_COUNT = getBasePartCount();
// part 1: eventID
Part eventPart = clientMessage.getPart(1);
ByteBuffer eventIdPartsBuffer = ByteBuffer.wrap(eventPart.getSerializedForm());
long threadId = EventID.readEventIdPartsFromOptimizedByteArray(eventIdPartsBuffer);
long sequenceId = EventID.readEventIdPartsFromOptimizedByteArray(eventIdPartsBuffer);
EventID eventId =
new EventID(serverConnection.getEventMemberIDByteArray(), threadId, sequenceId);
Breadcrumbs.setEventId(eventId);
// part 2: invoke callbacks (used by import)
Part callbacksPart = clientMessage.getPart(2);
boolean skipCallbacks = callbacksPart.getInt() == 1;
// part 3: flags
int flags = clientMessage.getPart(3).getInt();
boolean clientIsEmpty = (flags & PutAllOp.FLAG_EMPTY) != 0;
boolean clientHasCCEnabled = (flags & PutAllOp.FLAG_CONCURRENCY_CHECKS) != 0;
// part 4: number of keys
Part numberOfKeysPart = clientMessage.getPart(4);
int numberOfKeys = numberOfKeysPart.getInt();
Object callbackArg = getOptionalCallbackArg(clientMessage);
if (logger.isDebugEnabled()) {
final String buffer = serverConnection.getName() + ": Received "
+ putAllClassName() + " request from "
+ serverConnection.getSocketString() + " for region " + regionName
+ (callbackArg != null ? (" callbackArg " + callbackArg) : "") + " with "
+ numberOfKeys + " entries.";
logger.debug(buffer);
}
// building the map
Map<Object, Object> map = new LinkedHashMap<>();
final Map<Object, VersionTag<?>> retryVersions = new LinkedHashMap<>();
// Map isObjectMap = new LinkedHashMap();
Part valuePart;
Object key;
for (int i = 0; i < numberOfKeys; i++) {
Part keyPart = clientMessage.getPart(BASE_PART_COUNT + i * 2);
key = keyPart.getStringOrObject();
if (key == null) {
String putAllMsg =
"One of the input keys for the putAll request is null";
logger.warn("{}: {}", serverConnection.getName(), putAllMsg);
errMessage.append(putAllMsg);
writeChunkedErrorResponse(clientMessage, MessageType.PUT_DATA_ERROR,
errMessage.toString(), serverConnection);
serverConnection.setAsTrue(RESPONDED);
return;
}
valuePart = clientMessage.getPart(BASE_PART_COUNT + i * 2 + 1);
if (valuePart.isNull()) {
String putAllMsg =
"One of the input values for the putAll request is null";
logger.warn("{}: {}", serverConnection.getName(), putAllMsg);
errMessage.append(putAllMsg);
writeChunkedErrorResponse(clientMessage, MessageType.PUT_DATA_ERROR,
errMessage.toString(), serverConnection);
serverConnection.setAsTrue(RESPONDED);
return;
}
// byte[] value = valuePart.getSerializedForm();
Object value;
if (valuePart.isObject()) {
// We're shoe-horning support for invalidated entries into putAll
// here...however Token.INVALID cannot be wrapped in a DataSerializable.
// Also, this code is using skipCallbacks as an import flag. If we make
// skipCallbacks configurable this code will need to be updated.
if (skipCallbacks && Token.INVALID.isSerializedValue(valuePart.getSerializedForm())) {
value = Token.INVALID;
} else {
value = CachedDeserializableFactory.create(valuePart.getSerializedForm(),
region.getCache());
}
} else {
value = valuePart.getSerializedForm();
}
// put serializedform for auth. It will be modified with auth callback
if (clientMessage.isRetry()) {
// Constuct the thread id/sequence id information for this element in the
// put all map
// The sequence id is constructed from the base sequence id and the offset
EventID entryEventId = new EventID(eventId, i);
// For PRs, the thread id assigned as a fake thread id.
if (region instanceof PartitionedRegion) {
PartitionedRegion pr = (PartitionedRegion) region;
int bucketId = pr.getKeyInfo(key).getBucketId();
long entryThreadId =
ThreadIdentifier.createFakeThreadIDForBulkOp(bucketId, entryEventId.getThreadID());
entryEventId = new EventID(entryEventId.getMembershipID(), entryThreadId,
entryEventId.getSequenceID());
}
VersionTag<?> tag = findVersionTagsForRetriedBulkOp(region, entryEventId);
if (tag != null) {
retryVersions.put(key, tag);
}
// FIND THE VERSION TAG FOR THIS KEY - but how? all we have is the
// putAll eventId, not individual eventIds for entries, right?
}
map.put(key, value);
// isObjectMap.put(key, new Boolean(isObject));
} // for
if (clientMessage.getNumberOfParts() == (BASE_PART_COUNT + 2 * numberOfKeys + 1)) {// it means
// optional
// timeout has been
// added
int timeout = clientMessage.getPart(BASE_PART_COUNT + 2 * numberOfKeys).getInt();
serverConnection.setRequestSpecificTimeout(timeout);
}
securityService.authorize(Resource.DATA, Operation.WRITE, regionName);
AuthorizeRequest authzRequest = serverConnection.getAuthzRequest();
if (authzRequest != null) {
if (DynamicRegionFactory.regionIsDynamicRegionList(regionName)) {
authzRequest.createRegionAuthorize(regionName);
} else {
PutAllOperationContext putAllContext =
authzRequest.putAllAuthorize(regionName, map, callbackArg);
map = putAllContext.getMap();
if (map instanceof UpdateOnlyMap) {
map = uncheckedCast(((UpdateOnlyMap) map).getInternalMap());
}
callbackArg = putAllContext.getCallbackArg();
}
}
response =
region.basicBridgePutAll(map, uncheckedCast(retryVersions), serverConnection.getProxyID(),
eventId, skipCallbacks, callbackArg, clientMessage.isRetry());
if (!region.getConcurrencyChecksEnabled() || clientIsEmpty || !clientHasCCEnabled) {
// the client only needs this if versioning is being used and the client
// has storage
if (logger.isTraceEnabled()) {
logger.trace(
"setting response to null. region-cc-enabled={}; clientIsEmpty={}; client-cc-enabled={}",
region.getConcurrencyChecksEnabled(), clientIsEmpty, clientHasCCEnabled);
}
response = null;
}
if (region instanceof PartitionedRegion) {
PartitionedRegion pr = (PartitionedRegion) region;
if (pr.getNetworkHopType() != PartitionedRegion.NETWORK_HOP_NONE) {
writeReplyWithRefreshMetadata(clientMessage, response, serverConnection, pr,
pr.getNetworkHopType());
pr.clearNetworkHopData();
replyWithMetaData = true;
}
}
} catch (RegionDestroyedException | ResourceException | PutAllPartialResultException rde) {
writeChunkedException(clientMessage, rde, serverConnection);
serverConnection.setAsTrue(RESPONDED);
return;
} catch (Exception ce) {
// If an interrupted exception is thrown , rethrow it
checkForInterrupt(serverConnection, ce);
// If an exception occurs during the put, preserve the connection
writeChunkedException(clientMessage, ce, serverConnection);
serverConnection.setAsTrue(RESPONDED);
logger.warn(String.format("%s: Unexpected Exception",
serverConnection.getName()), ce);
return;
} finally {
long oldStart = start;
start = DistributionStats.getStatTime();
stats.incProcessPutAllTime(start - oldStart);
}
if (logger.isDebugEnabled()) {
logger.debug("{}: Sending {} response back to {} for regin {} {}", serverConnection.getName(),
putAllClassName(), serverConnection.getSocketString(), regionName,
(logger.isTraceEnabled() ? ": " + response : ""));
}
// Increment statistics and write the reply
if (!replyWithMetaData) {
writeReply(clientMessage, response, serverConnection);
}
serverConnection.setAsTrue(RESPONDED);
stats.incWritePutAllResponseTime(DistributionStats.getStatTime() - start);
}
@Override
protected void writeReply(@NotNull Message origMsg, @NotNull ServerConnection serverConnection)
throws IOException {
throw new UnsupportedOperationException();
}
protected void writeReply(Message origMsg, VersionedObjectList response,
ServerConnection servConn) throws IOException {
servConn.getCache().getCancelCriterion().checkCancelInProgress(null);
ChunkedMessage replyMsg = servConn.getChunkedResponseMessage();
replyMsg.setMessageType(MessageType.RESPONSE);
replyMsg.setTransactionId(origMsg.getTransactionId());
int listSize = (response == null) ? 0 : response.size();
if (response != null) {
response.setKeys(null);
}
if (logger.isDebugEnabled()) {
logger.debug("sending chunked response header. version list size={}{}", listSize,
(logger.isTraceEnabled() ? " list=" + response : ""));
}
replyMsg.sendHeader();
if (listSize > 0) {
int chunkSize = 2 * MAXIMUM_CHUNK_SIZE;
// Chunker will stream over the list in its toData method
VersionedObjectList.Chunker chunk =
new VersionedObjectList.Chunker(response, chunkSize, false, false);
for (int i = 0; i < listSize; i += chunkSize) {
boolean lastChunk = (i + chunkSize >= listSize);
replyMsg.setNumberOfParts(1);
replyMsg.setMessageType(MessageType.RESPONSE);
replyMsg.setLastChunk(lastChunk);
replyMsg.setTransactionId(origMsg.getTransactionId());
replyMsg.addObjPart(chunk);
if (logger.isDebugEnabled()) {
logger.debug("sending chunk at index {} last chunk={} numParts={}", i, lastChunk,
replyMsg.getNumberOfParts());
}
replyMsg.sendChunk(servConn);
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("sending only header");
}
replyMsg.addObjPart(null);
replyMsg.setLastChunk(true);
replyMsg.sendChunk(servConn);
}
servConn.setAsTrue(RESPONDED);
if (logger.isTraceEnabled()) {
logger.trace("{}: rpl tx: {}", servConn.getName(), origMsg.getTransactionId());
}
}
private void writeReplyWithRefreshMetadata(Message origMsg, VersionedObjectList response,
ServerConnection servConn, PartitionedRegion pr, byte nwHop) throws IOException {
servConn.getCache().getCancelCriterion().checkCancelInProgress(null);
ChunkedMessage replyMsg = servConn.getChunkedResponseMessage();
replyMsg.setMessageType(MessageType.RESPONSE);
replyMsg.setTransactionId(origMsg.getTransactionId());
replyMsg.sendHeader();
int listSize = (response == null) ? 0 : response.size();
if (logger.isDebugEnabled()) {
logger.debug(
"sending chunked response header with metadata refresh status. Version list size = {}{}",
listSize, (logger.isTraceEnabled() ? "; list=" + response : ""));
}
if (response != null) {
response.setKeys(null);
}
replyMsg.setNumberOfParts(1);
replyMsg.setTransactionId(origMsg.getTransactionId());
replyMsg.addBytesPart(new byte[] {pr.getMetadataVersion(), nwHop});
if (listSize > 0) {
replyMsg.setLastChunk(false);
replyMsg.sendChunk(servConn);
int chunkSize = 2 * MAXIMUM_CHUNK_SIZE; // MAXIMUM_CHUNK_SIZE
// Chunker will stream over the list in its toData method
VersionedObjectList.Chunker chunk =
new VersionedObjectList.Chunker(response, chunkSize, false, false);
for (int i = 0; i < listSize; i += chunkSize) {
boolean lastChunk = (i + chunkSize >= listSize);
replyMsg.setNumberOfParts(1); // resets the message
replyMsg.setMessageType(MessageType.RESPONSE);
replyMsg.setLastChunk(lastChunk);
replyMsg.setTransactionId(origMsg.getTransactionId());
replyMsg.addObjPart(chunk);
if (logger.isDebugEnabled()) {
logger.debug("sending chunk at index {} last chunk={} numParts={}", i, lastChunk,
replyMsg.getNumberOfParts());
}
replyMsg.sendChunk(servConn);
}
} else {
replyMsg.setLastChunk(true);
if (logger.isDebugEnabled()) {
logger.debug("sending first and only part of chunked message");
}
replyMsg.sendChunk(servConn);
}
pr.getPrStats().incPRMetaDataSentCount();
if (logger.isTraceEnabled()) {
logger.trace("{}: rpl with REFRESH_METADATA tx: {}", servConn.getName(),
origMsg.getTransactionId());
}
}
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.step;
import java.util.List;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.changed.ChangedFlag;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaInteger;
import org.pentaho.di.core.row.value.ValueMetaString;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.core.xml.XMLInterface;
import org.w3c.dom.Node;
/**
* This class contains the metadata to handle proper error handling on a step level.
*
* @author Matt
*
*/
public class StepErrorMeta extends ChangedFlag implements XMLInterface, Cloneable {
  public static final String XML_TAG = "error";

  /** The source step that can send the error rows */
  private StepMeta sourceStep;

  /** The target step to send the error rows to */
  private StepMeta targetStep;

  /** Is the error handling enabled? */
  private boolean enabled;

  /** the name of the field value to contain the number of errors (null or empty means it's not needed) */
  private String nrErrorsValuename;

  /** the name of the field value to contain the error description(s) (null or empty means it's not needed) */
  private String errorDescriptionsValuename;

  /**
   * the name of the field value to contain the fields for which the error(s) occurred (null or empty means it's not
   * needed)
   */
  private String errorFieldsValuename;

  /** the name of the field value to contain the error code(s) (null or empty means it's not needed) */
  private String errorCodesValuename;

  /** The maximum number of errors allowed before we stop processing with a hard error */
  private String maxErrors = "";

  /** The maximum percent of errors allowed before we stop processing with a hard error */
  private String maxPercentErrors = "";

  /** The minimum number of rows to read before the percentage evaluation takes place */
  private String minPercentRows = "";

  /** Variable space used to resolve variable references in the field names and limits. */
  private VariableSpace variables;

  /**
   * Create a new step error handling metadata object
   *
   * @param space
   *          the variable space used to resolve variables in the metadata
   * @param sourceStep
   *          The source step that can send the error rows
   */
  public StepErrorMeta( VariableSpace space, StepMeta sourceStep ) {
    this.sourceStep = sourceStep;
    this.enabled = false;
    this.variables = space;
  }

  /**
   * Create a new step error handling metadata object
   *
   * @param space
   *          the variable space used to resolve variables in the metadata
   * @param sourceStep
   *          The source step that can send the error rows
   * @param targetStep
   *          The target step to send the error rows to
   */
  public StepErrorMeta( VariableSpace space, StepMeta sourceStep, StepMeta targetStep ) {
    this.sourceStep = sourceStep;
    this.targetStep = targetStep;
    this.enabled = false;
    this.variables = space;
  }

  /**
   * Create a new step error handling metadata object
   *
   * @param space
   *          the variable space used to resolve variables in the metadata
   * @param sourceStep
   *          The source step that can send the error rows
   * @param targetStep
   *          The target step to send the error rows to
   * @param nrErrorsValuename
   *          the name of the field value to contain the number of errors (null or empty means it's not needed)
   * @param errorDescriptionsValuename
   *          the name of the field value to contain the error description(s) (null or empty means it's not needed)
   * @param errorFieldsValuename
   *          the name of the field value to contain the fields for which the error(s) occurred (null or empty means
   *          it's not needed)
   * @param errorCodesValuename
   *          the name of the field value to contain the error code(s) (null or empty means it's not needed)
   */
  public StepErrorMeta( VariableSpace space, StepMeta sourceStep, StepMeta targetStep, String nrErrorsValuename,
    String errorDescriptionsValuename, String errorFieldsValuename, String errorCodesValuename ) {
    this.sourceStep = sourceStep;
    this.targetStep = targetStep;
    this.enabled = false;
    this.nrErrorsValuename = nrErrorsValuename;
    this.errorDescriptionsValuename = errorDescriptionsValuename;
    this.errorFieldsValuename = errorFieldsValuename;
    this.errorCodesValuename = errorCodesValuename;
    this.variables = space;
  }

  /**
   * Shallow clone of this metadata object. Returns {@code null} if cloning is not
   * supported, which should never happen since this class implements Cloneable.
   */
  @Override
  public StepErrorMeta clone() {
    try {
      return (StepErrorMeta) super.clone();
    } catch ( CloneNotSupportedException e ) {
      // unreachable in practice: the class implements Cloneable
      return null;
    }
  }

  /**
   * Serializes this error-handling metadata to its XML representation inside the
   * {@value #XML_TAG} tag.
   *
   * @return the XML fragment describing this object
   */
  @Override
  public String getXML() {
    StringBuilder xml = new StringBuilder( 300 );

    xml.append( "      " ).append( XMLHandler.openTag( XML_TAG ) ).append( Const.CR );
    xml.append( "        " ).append(
      XMLHandler.addTagValue( "source_step", sourceStep != null ? sourceStep.getName() : "" ) );
    xml.append( "        " ).append(
      XMLHandler.addTagValue( "target_step", targetStep != null ? targetStep.getName() : "" ) );
    xml.append( "        " ).append( XMLHandler.addTagValue( "is_enabled", enabled ) );
    xml.append( "        " ).append( XMLHandler.addTagValue( "nr_valuename", nrErrorsValuename ) );
    xml
      .append( "        " ).append(
        XMLHandler.addTagValue( "descriptions_valuename", errorDescriptionsValuename ) );
    xml.append( "        " ).append( XMLHandler.addTagValue( "fields_valuename", errorFieldsValuename ) );
    xml.append( "        " ).append( XMLHandler.addTagValue( "codes_valuename", errorCodesValuename ) );
    xml.append( "        " ).append( XMLHandler.addTagValue( "max_errors", maxErrors ) );
    xml.append( "        " ).append( XMLHandler.addTagValue( "max_pct_errors", maxPercentErrors ) );
    xml.append( "        " ).append( XMLHandler.addTagValue( "min_pct_rows", minPercentRows ) );
    xml.append( "      " ).append( XMLHandler.closeTag( XML_TAG ) ).append( Const.CR );

    return xml.toString();
  }

  /**
   * Reconstructs a step error handling metadata object from its XML node.
   *
   * @param variables
   *          the variable space used to resolve variables in the metadata
   * @param node
   *          the XML node produced by {@link #getXML()}
   * @param steps
   *          the list of steps used to resolve the source and target step references
   */
  public StepErrorMeta( VariableSpace variables, Node node, List<StepMeta> steps ) {
    this.variables = variables;

    sourceStep = StepMeta.findStep( steps, XMLHandler.getTagValue( node, "source_step" ) );
    targetStep = StepMeta.findStep( steps, XMLHandler.getTagValue( node, "target_step" ) );
    enabled = "Y".equals( XMLHandler.getTagValue( node, "is_enabled" ) );
    nrErrorsValuename = XMLHandler.getTagValue( node, "nr_valuename" );
    errorDescriptionsValuename = XMLHandler.getTagValue( node, "descriptions_valuename" );
    errorFieldsValuename = XMLHandler.getTagValue( node, "fields_valuename" );
    errorCodesValuename = XMLHandler.getTagValue( node, "codes_valuename" );
    maxErrors = XMLHandler.getTagValue( node, "max_errors" );
    maxPercentErrors = XMLHandler.getTagValue( node, "max_pct_errors" );
    minPercentRows = XMLHandler.getTagValue( node, "min_pct_rows" );
  }

  /**
   * @return the error codes valuename
   */
  public String getErrorCodesValuename() {
    return errorCodesValuename;
  }

  /**
   * @param errorCodesValuename
   *          the error codes valuename to set
   */
  public void setErrorCodesValuename( String errorCodesValuename ) {
    this.errorCodesValuename = errorCodesValuename;
  }

  /**
   * @return the error descriptions valuename
   */
  public String getErrorDescriptionsValuename() {
    return errorDescriptionsValuename;
  }

  /**
   * @param errorDescriptionsValuename
   *          the error descriptions valuename to set
   */
  public void setErrorDescriptionsValuename( String errorDescriptionsValuename ) {
    this.errorDescriptionsValuename = errorDescriptionsValuename;
  }

  /**
   * @return the error fields valuename
   */
  public String getErrorFieldsValuename() {
    return errorFieldsValuename;
  }

  /**
   * @param errorFieldsValuename
   *          the error fields valuename to set
   */
  public void setErrorFieldsValuename( String errorFieldsValuename ) {
    this.errorFieldsValuename = errorFieldsValuename;
  }

  /**
   * @return the nr errors valuename
   */
  public String getNrErrorsValuename() {
    return nrErrorsValuename;
  }

  /**
   * @param nrErrorsValuename
   *          the nr errors valuename to set
   */
  public void setNrErrorsValuename( String nrErrorsValuename ) {
    this.nrErrorsValuename = nrErrorsValuename;
  }

  /**
   * @return the target step
   */
  public StepMeta getTargetStep() {
    return targetStep;
  }

  /**
   * @param targetStep
   *          the target step to set
   */
  public void setTargetStep( StepMeta targetStep ) {
    this.targetStep = targetStep;
  }

  /**
   * @return The source step can send the error rows
   */
  public StepMeta getSourceStep() {
    return sourceStep;
  }

  /**
   * @param sourceStep
   *          The source step can send the error rows
   */
  public void setSourceStep( StepMeta sourceStep ) {
    this.sourceStep = sourceStep;
  }

  /**
   * @return the enabled flag: Is the error handling enabled?
   */
  public boolean isEnabled() {
    return enabled;
  }

  /**
   * @param enabled
   *          the enabled flag to set: Is the error handling enabled?
   */
  public void setEnabled( boolean enabled ) {
    this.enabled = enabled;
  }

  /**
   * @return the row metadata of the configured error fields, with no sample values filled in
   */
  public RowMetaInterface getErrorFields() {
    return getErrorRowMeta( 0L, null, null, null );
  }

  /**
   * Builds the row metadata for the error fields that are configured (each of the four
   * field names is only added when its variable-substituted name is non-empty).
   *
   * @param nrErrors
   *          unused; kept for signature compatibility
   * @param errorDescriptions
   *          unused; kept for signature compatibility
   * @param fieldNames
   *          unused; kept for signature compatibility
   * @param errorCodes
   *          unused; kept for signature compatibility
   * @return the row metadata describing the configured error fields, in a fixed order:
   *         nr of errors, descriptions, fields, codes
   */
  public RowMetaInterface getErrorRowMeta( long nrErrors, String errorDescriptions, String fieldNames,
    String errorCodes ) {
    RowMetaInterface row = new RowMeta();

    String nrErr = variables.environmentSubstitute( getNrErrorsValuename() );
    if ( !Utils.isEmpty( nrErr ) ) {
      ValueMetaInterface v = new ValueMetaInteger( nrErr );
      v.setLength( 3 );
      row.addValueMeta( v );
    }
    String errDesc = variables.environmentSubstitute( getErrorDescriptionsValuename() );
    if ( !Utils.isEmpty( errDesc ) ) {
      ValueMetaInterface v = new ValueMetaString( errDesc );
      row.addValueMeta( v );
    }
    String errFields = variables.environmentSubstitute( getErrorFieldsValuename() );
    if ( !Utils.isEmpty( errFields ) ) {
      ValueMetaInterface v = new ValueMetaString( errFields );
      row.addValueMeta( v );
    }
    String errCodes = variables.environmentSubstitute( getErrorCodesValuename() );
    if ( !Utils.isEmpty( errCodes ) ) {
      ValueMetaInterface v = new ValueMetaString( errCodes );
      row.addValueMeta( v );
    }

    return row;
  }

  /**
   * Fills in the error values into {@code row}, starting at {@code startIndex}, in the
   * same order and under the same conditions as {@link #getErrorRowMeta}.
   *
   * @param row
   *          the row data array to write into
   * @param startIndex
   *          the first index to write at
   * @param nrErrors
   *          the number of errors
   * @param errorDescriptions
   *          the error description(s)
   * @param fieldNames
   *          the field names for which the error(s) occurred
   * @param errorCodes
   *          the error code(s)
   */
  public void addErrorRowData( Object[] row, int startIndex, long nrErrors, String errorDescriptions,
    String fieldNames, String errorCodes ) {
    int index = startIndex;

    String nrErr = variables.environmentSubstitute( getNrErrorsValuename() );
    if ( !Utils.isEmpty( nrErr ) ) {
      // use valueOf instead of the deprecated Long constructor
      row[index] = Long.valueOf( nrErrors );
      index++;
    }
    String errDesc = variables.environmentSubstitute( getErrorDescriptionsValuename() );
    if ( !Utils.isEmpty( errDesc ) ) {
      row[index] = errorDescriptions;
      index++;
    }
    String errFields = variables.environmentSubstitute( getErrorFieldsValuename() );
    if ( !Utils.isEmpty( errFields ) ) {
      row[index] = fieldNames;
      index++;
    }
    String errCodes = variables.environmentSubstitute( getErrorCodesValuename() );
    if ( !Utils.isEmpty( errCodes ) ) {
      row[index] = errorCodes;
      index++;
    }
  }

  /**
   * @return the maxErrors
   */
  public String getMaxErrors() {
    return maxErrors;
  }

  /**
   * @param maxErrors
   *          the maxErrors to set
   */
  public void setMaxErrors( String maxErrors ) {
    this.maxErrors = maxErrors;
  }

  /**
   * @return the maxPercentErrors
   */
  public String getMaxPercentErrors() {
    return maxPercentErrors;
  }

  /**
   * @param maxPercentErrors
   *          the maxPercentErrors to set
   */
  public void setMaxPercentErrors( String maxPercentErrors ) {
    this.maxPercentErrors = maxPercentErrors;
  }

  /**
   * @return the minRowsForPercent
   */
  public String getMinPercentRows() {
    return minPercentRows;
  }

  /**
   * @param minRowsForPercent
   *          the minRowsForPercent to set
   */
  public void setMinPercentRows( String minRowsForPercent ) {
    this.minPercentRows = minRowsForPercent;
  }
}
| |
/**
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.nbp;
import static org.junit.Assert.*;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.InOrder;
import io.reactivex.*;
import io.reactivex.NbpObservable.*;
import io.reactivex.disposables.Disposable;
import io.reactivex.internal.subscribers.nbp.NbpCancelledSubscriber;
import io.reactivex.schedulers.*;
import io.reactivex.subjects.nbp.NbpReplaySubject;
import io.reactivex.subscribers.nbp.NbpTestSubscriber;
public class NbpOnSubscribeRefCountTest {
@Test
public void testRefCountAsync() {
final AtomicInteger subscribeCount = new AtomicInteger();
final AtomicInteger nextCount = new AtomicInteger();
NbpObservable<Long> r = NbpObservable.interval(0, 5, TimeUnit.MILLISECONDS)
.doOnSubscribe(s -> subscribeCount.incrementAndGet())
.doOnNext(l -> nextCount.incrementAndGet())
.publish().refCount();
final AtomicInteger receivedCount = new AtomicInteger();
Disposable s1 = r.subscribe(l -> receivedCount.incrementAndGet());
Disposable s2 = r.subscribe();
// give time to emit
try {
Thread.sleep(52);
} catch (InterruptedException e) {
}
// now unsubscribe
s2.dispose(); // unsubscribe s2 first as we're counting in 1 and there can be a race between unsubscribe and one NbpSubscriber getting a value but not the other
s1.dispose();
System.out.println("onNext: " + nextCount.get());
// should emit once for both subscribers
assertEquals(nextCount.get(), receivedCount.get());
// only 1 subscribe
assertEquals(1, subscribeCount.get());
}
@Test
public void testRefCountSynchronous() {
final AtomicInteger subscribeCount = new AtomicInteger();
final AtomicInteger nextCount = new AtomicInteger();
NbpObservable<Integer> r = NbpObservable.just(1, 2, 3, 4, 5, 6, 7, 8, 9)
.doOnSubscribe(s -> subscribeCount.incrementAndGet())
.doOnNext(l -> nextCount.incrementAndGet())
.publish().refCount();
final AtomicInteger receivedCount = new AtomicInteger();
Disposable s1 = r.subscribe(l -> receivedCount.incrementAndGet());
Disposable s2 = r.subscribe();
// give time to emit
try {
Thread.sleep(50);
} catch (InterruptedException e) {
}
// now unsubscribe
s2.dispose(); // unsubscribe s2 first as we're counting in 1 and there can be a race between unsubscribe and one NbpSubscriber getting a value but not the other
s1.dispose();
System.out.println("onNext Count: " + nextCount.get());
// it will emit twice because it is synchronous
assertEquals(nextCount.get(), receivedCount.get() * 2);
// it will subscribe twice because it is synchronous
assertEquals(2, subscribeCount.get());
}
@Test
public void testRefCountSynchronousTake() {
final AtomicInteger nextCount = new AtomicInteger();
NbpObservable<Integer> r = NbpObservable.just(1, 2, 3, 4, 5, 6, 7, 8, 9)
.doOnNext(l -> {
System.out.println("onNext --------> " + l);
nextCount.incrementAndGet();
})
.take(4)
.publish().refCount();
final AtomicInteger receivedCount = new AtomicInteger();
r.subscribe(l -> receivedCount.incrementAndGet());
System.out.println("onNext: " + nextCount.get());
assertEquals(4, receivedCount.get());
assertEquals(4, receivedCount.get());
}
    /**
     * Verifies that a refCount()ed interval connects once per subscribe cycle and fully
     * disconnects when the last subscriber is disposed, across ten repeat cycles.
     */
    @Test
    public void testRepeat() {
        final AtomicInteger subscribeCount = new AtomicInteger();
        final AtomicInteger unsubscribeCount = new AtomicInteger();
        NbpObservable<Long> r = NbpObservable.interval(0, 1, TimeUnit.MILLISECONDS)
                .doOnSubscribe(s -> {
                    System.out.println("******************************* Subscribe received");
                    // when we are subscribed
                    subscribeCount.incrementAndGet();
                })
                .doOnCancel(() -> {
                    System.out.println("******************************* Unsubscribe received");
                    // when we are unsubscribed
                    unsubscribeCount.incrementAndGet();
                })
                .publish().refCount();

        for (int i = 0; i < 10; i++) {
            NbpTestSubscriber<Long> ts1 = new NbpTestSubscriber<>();
            NbpTestSubscriber<Long> ts2 = new NbpTestSubscriber<>();
            r.subscribe(ts1);
            r.subscribe(ts2);
            try {
                // let the shared interval emit a few values to both subscribers
                Thread.sleep(50);
            } catch (InterruptedException e) {
                // NOTE(review): interrupt is swallowed; restoring it here would make the
                // remaining loop iterations' sleeps fail fast — confirm before changing.
            }
            ts1.dispose();
            ts2.dispose();
            ts1.assertNoErrors();
            ts2.assertNoErrors();
            // both subscribers shared the connection and must have received data
            assertTrue(ts1.valueCount() > 0);
            assertTrue(ts2.valueCount() > 0);
        }

        // exactly one connect/disconnect pair per loop iteration
        assertEquals(10, subscribeCount.get());
        assertEquals(10, unsubscribeCount.get());
    }
    /**
     * Verifies that disposing the single refCount() subscriber propagates cancellation
     * to the underlying synchronous source. Latches order the subscribe, dispose and
     * cancel steps so the test is deterministic up to a 3s timeout.
     */
    @Test
    public void testConnectUnsubscribe() throws InterruptedException {
        final CountDownLatch unsubscribeLatch = new CountDownLatch(1);
        final CountDownLatch subscribeLatch = new CountDownLatch(1);

        NbpObservable<Long> o = synchronousInterval()
                .doOnSubscribe(s -> {
                    System.out.println("******************************* Subscribe received");
                    // when we are subscribed
                    subscribeLatch.countDown();
                })
                .doOnCancel(() -> {
                    System.out.println("******************************* Unsubscribe received");
                    // when we are unsubscribed
                    unsubscribeLatch.countDown();
                });

        NbpTestSubscriber<Long> s = new NbpTestSubscriber<>();
        // subscribeOn keeps the blocking synchronous source off the test thread
        o.publish().refCount().subscribeOn(Schedulers.newThread()).subscribe(s);
        System.out.println("send unsubscribe");
        // wait until connected
        subscribeLatch.await();
        // now unsubscribe
        s.dispose();
        System.out.println("DONE sending unsubscribe ... now waiting");
        if (!unsubscribeLatch.await(3000, TimeUnit.MILLISECONDS)) {
            System.out.println("Errors: " + s.errors());
            if (s.errors().size() > 0) {
                s.errors().get(0).printStackTrace();
            }
            fail("timed out waiting for unsubscribe");
        }
        s.assertNoErrors();
    }
// Can fail if it takes too much time
@Test
@Ignore
public void testConnectUnsubscribeRaceConditionLoop() throws InterruptedException {
for (int i = 0; i < 100; i++) {
testConnectUnsubscribeRaceCondition();
}
}
    /**
     * Races an immediate dispose() against the scheduled subscribeOn connect. Whether
     * the subscription ever happens or not, the subscribe/unsubscribe counter must end
     * balanced at zero.
     */
    @Test
    public void testConnectUnsubscribeRaceCondition() throws InterruptedException {
        final AtomicInteger subUnsubCount = new AtomicInteger();
        NbpObservable<Long> o = synchronousInterval()
                .doOnCancel(() -> {
                    System.out.println("******************************* Unsubscribe received");
                    // when we are unsubscribed
                    subUnsubCount.decrementAndGet();
                })
                .doOnSubscribe(s -> {
                    System.out.println("******************************* SUBSCRIBE received");
                    subUnsubCount.incrementAndGet();
                });

        NbpTestSubscriber<Long> s = new NbpTestSubscriber<>();

        o.publish().refCount().subscribeOn(Schedulers.computation()).subscribe(s);
        System.out.println("send unsubscribe");
        // now immediately unsubscribe while subscribeOn is racing to subscribe
        s.dispose();
        // this generally will mean it won't even subscribe as it is already unsubscribed by the time connect() gets scheduled
        // give time to the counter to update
        Thread.sleep(10);
        // either we subscribed and then unsubscribed, or we didn't ever even subscribe
        assertEquals(0, subUnsubCount.get());

        System.out.println("DONE sending unsubscribe ... now waiting");
        System.out.println("Errors: " + s.errors());
        if (s.errors().size() > 0) {
            s.errors().get(0).printStackTrace();
        }
        s.assertNoErrors();
    }
private NbpObservable<Long> synchronousInterval() {
return NbpObservable.create(NbpSubscriber -> {
AtomicBoolean cancel = new AtomicBoolean();
NbpSubscriber.onSubscribe(() -> cancel.set(true));
for (;;) {
if (cancel.get()) {
break;
}
try {
Thread.sleep(100);
} catch (InterruptedException e) {
}
NbpSubscriber.onNext(1L);
}
});
}
// Verifies refCount() semantics: the upstream is connected exactly once by
// the first subscriber and disconnected exactly once by the last one.
@Test
public void onlyFirstShouldSubscribeAndLastUnsubscribe() {
final AtomicInteger subscribeEvents = new AtomicInteger();
final AtomicInteger unsubscribeEvents = new AtomicInteger();
NbpObservable<Integer> source = NbpObservable.create(observer -> {
subscribeEvents.incrementAndGet();
observer.onSubscribe(() -> unsubscribeEvents.incrementAndGet());
});
NbpObservable<Integer> shared = source.publish().refCount();
Disposable first = shared.subscribe();
assertEquals(1, subscribeEvents.get());
Disposable second = shared.subscribe();
// The second subscriber joins the existing connection — no new subscribe.
assertEquals(1, subscribeEvents.get());
first.dispose();
// One subscriber remains, so the upstream must stay connected.
assertEquals(0, unsubscribeEvents.get());
second.dispose();
// The last subscriber left: the upstream is disconnected exactly once.
assertEquals(1, unsubscribeEvents.get());
}
// Drives a published+refCounted interval on a TestScheduler and checks that
// the connection is shared while subscribers overlap, survives the loss of
// one subscriber, and restarts from tick 0 after all subscribers are gone.
@Test
public void testRefCount() {
TestScheduler scheduler = new TestScheduler();
NbpObservable<Long> interval = NbpObservable.interval(100, TimeUnit.MILLISECONDS, scheduler).publish().refCount();
// First subscriber alone for 200ms: observes ticks 0 and 1.
final List<Long> first = new ArrayList<>();
Disposable firstSubscription = interval.subscribe(first::add);
scheduler.advanceTimeBy(200, TimeUnit.MILLISECONDS);
assertEquals(2, first.size());
assertEquals(Long.valueOf(0), first.get(0));
assertEquals(Long.valueOf(1), first.get(1));
// Second subscriber joins the live connection; both observe ticks 2..4.
final List<Long> second = new ArrayList<>();
Disposable secondSubscription = interval.subscribe(second::add);
scheduler.advanceTimeBy(300, TimeUnit.MILLISECONDS);
assertEquals(5, first.size());
assertEquals(Long.valueOf(2), first.get(2));
assertEquals(Long.valueOf(3), first.get(3));
assertEquals(Long.valueOf(4), first.get(4));
assertEquals(3, second.size());
assertEquals(Long.valueOf(2), second.get(0));
assertEquals(Long.valueOf(3), second.get(1));
assertEquals(Long.valueOf(4), second.get(2));
// Dropping the first subscriber must not disturb the second.
firstSubscription.dispose();
scheduler.advanceTimeBy(300, TimeUnit.MILLISECONDS);
assertEquals(5, first.size());
assertEquals(6, second.size());
assertEquals(Long.valueOf(5), second.get(3));
assertEquals(Long.valueOf(6), second.get(4));
assertEquals(Long.valueOf(7), second.get(5));
// No subscribers left: the connection is torn down, so a later subscriber
// starts a brand-new interval from tick 0.
secondSubscription.dispose();
scheduler.advanceTimeBy(1000, TimeUnit.MILLISECONDS);
final List<Long> third = new ArrayList<>();
interval.subscribe(third::add);
scheduler.advanceTimeBy(200, TimeUnit.MILLISECONDS);
assertEquals(2, third.size());
assertEquals(Long.valueOf(0), third.get(0));
assertEquals(Long.valueOf(1), third.get(1));
}
// A subscriber that is already cancelled must not consume the refCount
// connection: a later live subscriber still receives the full sequence.
@Test
public void testAlreadyUnsubscribedClient() {
NbpSubscriber<Integer> cancelled = NbpCancelledSubscriber.instance();
NbpSubscriber<Integer> observer = TestHelper.mockNbpSubscriber();
NbpObservable<Integer> shared = NbpObservable.just(1).publish().refCount();
shared.subscribe(cancelled);
shared.subscribe(observer);
verify(observer).onNext(1);
verify(observer).onComplete();
verify(observer, never()).onError(any(Throwable.class));
}
// Interleaves an already-cancelled subscriber between emissions and checks
// that the live subscriber's delivery order is unaffected.
@Test
public void testAlreadyUnsubscribedInterleavesWithClient() {
NbpReplaySubject<Integer> upstream = NbpReplaySubject.create();
NbpSubscriber<Integer> cancelled = NbpCancelledSubscriber.instance();
NbpSubscriber<Integer> observer = TestHelper.mockNbpSubscriber();
InOrder inOrder = inOrder(observer);
NbpObservable<Integer> shared = upstream.publish().refCount();
shared.subscribe(observer);
upstream.onNext(1);
// Subscribing a dead client mid-stream must be a no-op for others.
shared.subscribe(cancelled);
upstream.onNext(2);
upstream.onComplete();
inOrder.verify(observer).onNext(1);
inOrder.verify(observer).onNext(2);
inOrder.verify(observer).onComplete();
verify(observer, never()).onError(any(Throwable.class));
}
// combineLatest over two single-element sources completes immediately; both
// refCount subscribers must observe the combined value and terminate cleanly.
@Test
public void testConnectDisconnectConnectAndSubjectState() {
NbpObservable<Integer> left = NbpObservable.just(10);
NbpObservable<Integer> right = NbpObservable.just(20);
NbpObservable<Integer> shared = NbpObservable
.combineLatest(left, right, (a, b) -> a + b)
.publish()
.refCount();
NbpTestSubscriber<Integer> firstSubscriber = new NbpTestSubscriber<>();
NbpTestSubscriber<Integer> secondSubscriber = new NbpTestSubscriber<>();
shared.subscribe(firstSubscriber);
shared.subscribe(secondSubscriber);
firstSubscriber.assertTerminated();
firstSubscriber.assertNoErrors();
firstSubscriber.assertValue(30);
secondSubscriber.assertTerminated();
secondSubscriber.assertNoErrors();
secondSubscriber.assertValue(30);
}
// Checks that an upstream error tears down the refCount connection so that
// retry() can re-subscribe and trigger a fresh connection each time.
// NOTE(review): relies on real-time Thread.sleep() windows and is therefore
// timing-sensitive; the expected count of 6 assumes the first subscriber's
// 5 retries plus the initial subscription each reconnect the source.
@Test(timeout = 10000)
public void testUpstreamErrorAllowsRetry() throws InterruptedException {
// Counts how many times the underlying interval is (re-)subscribed.
final AtomicInteger intervalSubscribed = new AtomicInteger();
NbpObservable<String> interval =
NbpObservable.interval(200,TimeUnit.MILLISECONDS)
.doOnSubscribe(s -> {
System.out.println("Subscribing to interval " + intervalSubscribed.incrementAndGet());
}
)
.flatMap(t1 -> {
// Every tick is turned into an error, forcing the retry path.
return NbpObservable.defer(() -> {
return NbpObservable.<String>error(new Exception("Some exception"));
});
})
.onErrorResumeNext(t1 -> {
return NbpObservable.error(t1);
})
.publish()
.refCount();
interval
.doOnError(t1 -> {
System.out.println("NbpSubscriber 1 onError: " + t1);
})
.retry(5)
.subscribe(t1 -> {
System.out.println("NbpSubscriber 1: " + t1);
});
Thread.sleep(100);
// Second subscriber joins while the first is still retrying.
interval
.doOnError(t1 -> {
System.out.println("NbpSubscriber 2 onError: " + t1);
})
.retry(5)
.subscribe(t1 -> {
System.out.println("NbpSubscriber 2: " + t1);
});
Thread.sleep(1300);
System.out.println(intervalSubscribed.get());
assertEquals(6, intervalSubscribed.get());
}
}
| |
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java;
import com.facebook.buck.event.CompilerErrorEvent;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.jvm.core.SuggestBuildRules;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.Step;
import com.facebook.buck.util.CapturingPrintStream;
import com.facebook.buck.util.Verbosity;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collection;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Nullable;
/**
* Command used to compile java libraries with a variety of ways to handle dependencies.
*/
public class JavacStep implements Step {
// Destination directory for compiled .class files.
private final Path outputDirectory;
// If present, javac records the class files it actually read into this file.
private final Optional<Path> usedClassesFile;
private final Optional<StandardJavaFileManagerFactory> fileManagerFactory;
// Working directory for the javac invocation, when relative paths matter.
private final Optional<Path> workingDirectory;
// The .java files to compile.
private final ImmutableSortedSet<Path> javaSourceFilePaths;
// File listing all source paths (passed to javac to keep the command line short).
private final Path pathToSrcsList;
private final JavacOptions javacOptions;
// Classpath built from the rule's declared (first-order) dependencies only.
private final ImmutableSortedSet<Path> declaredClasspathEntries;
// The build target on whose behalf this compile runs; used in error reporting.
private final BuildTarget invokingRule;
// Optional helper that maps missing symbols to suggested deps to add.
private final Optional<SuggestBuildRules> suggestBuildRules;
private final SourcePathResolver resolver;
private final ProjectFilesystem filesystem;
private final Javac javac;
// Patterns that recognize "missing import"-style javac errors; in each,
// group(1) captures the symbol/package that could not be resolved.
private static final Pattern IMPORT_FAILURE =
Pattern.compile("import ([\\w\\.\\*]*);");
private static final Pattern PACKAGE_FAILURE =
Pattern.compile(".*?package ([\\w\\.\\*]*) does not exist");
private static final Pattern ACCESS_FAILURE =
Pattern.compile(".*?error: cannot access ([\\w\\.\\*]*)");
private static final Pattern CLASS_NOT_FOUND =
Pattern.compile(".*?class file for ([\\w\\.\\*]*) not found");
private static final Pattern CLASS_SYMBOL_NOT_FOUND =
Pattern.compile(".*?symbol:\\s*class\\s*([\\w\\.\\*]*)");
private static final ImmutableList<Pattern> MISSING_IMPORT_PATTERNS =
ImmutableList.of(
IMPORT_FAILURE,
PACKAGE_FAILURE,
ACCESS_FAILURE,
CLASS_NOT_FOUND,
CLASS_SYMBOL_NOT_FOUND);
// NOTE(review): System.getProperty("line.separator") cannot return null, so the
// @Nullable annotation appears unnecessary — confirm before removing it.
@Nullable
private static final String LINE_SEPARATOR = System.getProperty("line.separator");
public JavacStep(
Path outputDirectory,
Optional<Path> usedClassesFile,
Optional<StandardJavaFileManagerFactory> fileManagerFactory,
Optional<Path> workingDirectory,
ImmutableSortedSet<Path> javaSourceFilePaths,
Path pathToSrcsList,
ImmutableSortedSet<Path> declaredClasspathEntries,
Javac javac,
JavacOptions javacOptions,
BuildTarget invokingRule,
Optional<SuggestBuildRules> suggestBuildRules,
SourcePathResolver resolver,
ProjectFilesystem filesystem) {
this.outputDirectory = outputDirectory;
this.usedClassesFile = usedClassesFile;
this.fileManagerFactory = fileManagerFactory;
this.workingDirectory = workingDirectory;
this.javaSourceFilePaths = javaSourceFilePaths;
this.pathToSrcsList = pathToSrcsList;
this.javacOptions = javacOptions;
this.declaredClasspathEntries = declaredClasspathEntries;
this.javac = javac;
this.invokingRule = invokingRule;
this.suggestBuildRules = suggestBuildRules;
this.resolver = resolver;
this.filesystem = filesystem;
}
// Runs the compile and returns javac's exit code (0 on success).
@Override
public final int execute(ExecutionContext context) throws IOException, InterruptedException {
return tryBuildWithFirstOrderDeps(context, filesystem);
}
// Compiles against the declared (first-order) classpath only, capturing javac's
// stdout/stderr. On failure, posts a CompilerErrorEvent and — when a
// SuggestBuildRules helper is available — appends dep suggestions derived from
// the missing-import patterns to the printed error message.
private int tryBuildWithFirstOrderDeps(ExecutionContext context, ProjectFilesystem filesystem)
throws InterruptedException, IOException {
// Even in silent mode we keep at least STANDARD_INFORMATION so compile
// errors are not lost.
Verbosity verbosity =
context.getVerbosity().isSilent() ? Verbosity.STANDARD_INFORMATION : context.getVerbosity();
try (
CapturingPrintStream stdout = new CapturingPrintStream();
CapturingPrintStream stderr = new CapturingPrintStream();
ExecutionContext firstOrderContext = context.createSubContext(
stdout,
stderr,
Optional.of(verbosity))) {
Javac javac = getJavac();
int declaredDepsResult = javac.buildWithClasspath(
firstOrderContext,
filesystem,
resolver,
invokingRule,
getOptions(context, declaredClasspathEntries),
javaSourceFilePaths,
pathToSrcsList,
workingDirectory,
usedClassesFile,
fileManagerFactory);
String firstOrderStdout = stdout.getContentsAsString(Charsets.UTF_8);
String firstOrderStderr = stderr.getContentsAsString(Charsets.UTF_8);
if (declaredDepsResult != 0) {
ImmutableList.Builder<String> errorMessage = ImmutableList.builder();
errorMessage.add(firstOrderStderr);
if (suggestBuildRules.isPresent()) {
// Map unresolved symbols in stderr to dep suggestions.
ImmutableSet<String> failedImports = findFailedImports(firstOrderStderr);
ImmutableSet<String> suggestions = suggestBuildRules.get().suggest(failedImports);
if (!suggestions.isEmpty()) {
String invoker = invokingRule.toString();
errorMessage.add(String.format("Rule %s has failed to build.", invoker));
errorMessage.add(Joiner.on(LINE_SEPARATOR).join(failedImports));
errorMessage.add("Try adding the following deps:");
errorMessage.add(Joiner.on(LINE_SEPARATOR).join(suggestions));
errorMessage.add("");
errorMessage.add("");
}
CompilerErrorEvent evt = CompilerErrorEvent.create(
invokingRule,
firstOrderStderr,
CompilerErrorEvent.CompilerType.Java,
suggestions
);
context.postEvent(evt);
} else {
// No suggestion helper configured: post the error with no suggestions.
ImmutableSet<String> suggestions = ImmutableSet.of();
CompilerErrorEvent evt = CompilerErrorEvent.create(
invokingRule,
firstOrderStderr,
CompilerErrorEvent.CompilerType.Java,
suggestions
);
context.postEvent(evt);
}
if (!context.getVerbosity().isSilent()) {
context.getStdOut().print(firstOrderStdout);
context.getStdErr().println(Joiner.on("\n").join(errorMessage.build()));
}
}
return declaredDepsResult;
}
}
@VisibleForTesting
Javac getJavac() {
return javac;
}
@Override
public String getDescription(ExecutionContext context) {
return getJavac().getDescription(
getOptions(context, getClasspathEntries()),
javaSourceFilePaths,
pathToSrcsList);
}
@Override
public String getShortName() {
return getJavac().getShortName();
}
// Scans compiler output line-by-line for the missing-import patterns and
// returns the sorted set of unresolved symbols (group 1 of each pattern).
// NOTE(review): splits on the JVM's platform line separator; assumes javac
// output uses the same separator — confirm on cross-platform captures.
@VisibleForTesting
static ImmutableSet<String> findFailedImports(String output) {
Iterable<String> lines = Splitter.on(LINE_SEPARATOR).split(output);
ImmutableSortedSet.Builder<String> failedImports = ImmutableSortedSet.naturalOrder();
for (String line : lines) {
for (Pattern missingImportPattern : MISSING_IMPORT_PATTERNS) {
Matcher lineMatch = missingImportPattern.matcher(line);
if (lineMatch.matches()) {
failedImports.add(lineMatch.group(1));
// Each line contributes at most one symbol.
break;
}
}
}
return failedImports.build();
}
/**
* Returns a list of command-line options to pass to javac. These options reflect
* the configuration of this javac command.
*
* @param context the ExecutionContext with in which javac will run
* @return list of String command-line options.
*/
@VisibleForTesting
ImmutableList<String> getOptions(
ExecutionContext context,
ImmutableSortedSet<Path> buildClasspathEntries) {
return getOptions(
javacOptions,
filesystem,
outputDirectory,
context,
buildClasspathEntries);
}
// Static variant so option construction can be reused without a JavacStep
// instance: appends configured options, then -verbose/-d/-classpath.
public static ImmutableList<String> getOptions(
JavacOptions javacOptions,
ProjectFilesystem filesystem,
Path outputDirectory,
ExecutionContext context,
ImmutableSortedSet<Path> buildClasspathEntries) {
final ImmutableList.Builder<String> builder = ImmutableList.builder();
javacOptions.appendOptionsTo(new OptionsConsumer() {
@Override
public void addOptionValue(String option, String value) {
builder.add("-" + option).add(value);
}
@Override
public void addFlag(String flagName) {
builder.add("-" + flagName);
}
@Override
public void addExtras(Collection<String> extras) {
builder.addAll(extras);
}
}, filesystem.getAbsolutifier());
// verbose flag, if appropriate.
if (context.getVerbosity().shouldUseVerbosityFlagIfAvailable()) {
builder.add("-verbose");
}
// Specify the output directory.
Function<Path, Path> pathAbsolutifier = filesystem.getAbsolutifier();
builder.add("-d").add(pathAbsolutifier.apply(outputDirectory).toString());
// Build up and set the classpath.
if (!buildClasspathEntries.isEmpty()) {
String classpath = Joiner.on(File.pathSeparator).join(buildClasspathEntries);
builder.add("-classpath", classpath);
} else {
// Explicit empty classpath prevents javac from defaulting to the CWD.
builder.add("-classpath", "''");
}
return builder.build();
}
/**
* @return The classpath entries used to invoke javac.
*/
@VisibleForTesting
ImmutableSortedSet<Path> getClasspathEntries() {
return declaredClasspathEntries;
}
@VisibleForTesting
ImmutableSortedSet<Path> getSrcs() {
return javaSourceFilePaths;
}
}
| |
package graphene.dao.neo4j;
import graphene.dao.UserDAO;
import graphene.dao.UserWorkspaceDAO;
import graphene.dao.WorkspaceDAO;
import graphene.model.idl.G_User;
import graphene.model.idl.G_UserFields;
import graphene.model.idl.G_UserSpaceRelationshipType;
import graphene.model.idl.G_UserWorkspace;
import graphene.model.idl.G_Workspace;
import graphene.util.ExceptionUtil;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.tapestry5.ioc.annotations.Inject;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Relationship;
import org.neo4j.graphdb.RelationshipType;
import org.neo4j.graphdb.ResourceIterator;
import org.neo4j.graphdb.Transaction;
public class UserWorkspaceDAONeo4JEImpl extends GenericUserSpaceDAONeo4jE implements UserWorkspaceDAO {
@Inject
private WorkspaceDAO workspaceDAO;
@Inject
private UserDAO userDAO;
@Override
public boolean addRelationToWorkspace(final String userId, final G_UserSpaceRelationshipType rel, final String id) {
boolean success = false;
boolean createRelationship = true;
final Node u = getUserNodeById(userId);
final Node w = getWorkspaceNodeById(id);
if (u == null) {
logger.error("Could not find user " + userId);
return false;
} else if (w == null) {
logger.error("Could not find workspace " + id);
return false;
}
try (Transaction tx = beginTx()) {
for (final Relationship r : u.getRelationships(relfunnel.to(rel))) {
logger.debug("r.getEndNode().getId() " + r.getEndNode().getId());
logger.debug("wNode.getId() " + u.getId());
if (r.getEndNode().getId() == w.getId()) {
createRelationship = false;
break;
}
}
success = true;
tx.success();
}
if (success) {
if (createRelationship) {
try (Transaction tx = beginTx()) {
u.createRelationshipTo(w, relfunnel.to(rel));
tx.success();
}
}
return true;
} else {
return false;
}
}
@Override
public int countUsersForWorkspace(final String workspaceId) {
// TODO Auto-generated method stub
return 0;
}
@Override
public boolean delete(final String id) {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean deleteWorkspaceRelations(final String workspaceId) {
// TODO Auto-generated method stub
return false;
}
@Override
public List<G_UserWorkspace> getAll() {
// TODO Auto-generated method stub
return null;
}
@Override
public G_UserWorkspace getById(final String id) {
// TODO Auto-generated method stub
return null;
}
@Override
public List<G_UserWorkspace> getByUserId(final String id) {
// TODO Auto-generated method stub
return null;
}
@Override
public List<G_UserWorkspace> getByUserIdAndWorkspaceId(final String userId, final String workspaceId) {
// TODO Auto-generated method stub
return null;
}
@Override
public List<G_UserWorkspace> getByWorkspaceId(final String id) {
// TODO Auto-generated method stub
return null;
}
@Override
public List<G_Workspace> getMostRecentWorkspacesForUser(final String userId, final int quantity) {
// TODO Auto-generated method stub
return null;
}
@Override
public List<G_User> getUsersForWorkspace(final String workspaceId) {
// TODO Auto-generated method stub
return null;
}
@Override
public List<G_Workspace> getWorkspacesForUser(final String userId) {
final List<G_Workspace> list = new ArrayList<G_Workspace>();
try (Transaction tx = beginTx()) {
final String queryString = "match (n:" + GrapheneNeo4JConstants.userLabel.name() + ")-[r:"
+ G_UserSpaceRelationshipType.EDITOR_OF.name() + "]-w where n." + G_UserFields.id + " = '" + userId
+ "' return w";
final Map<String, Object> parameters = new HashMap<String, Object>();
final ResourceIterator<Object> resultIterator = n4jService.getExecutionEngine()
.execute(queryString, parameters).columnAs("w");
while (resultIterator.hasNext()) {
final G_Workspace d = workspaceFunnel.from((Node) resultIterator.next());
if (d != null) {
list.add(d);
}
}
resultIterator.close();
// tx.success(); //FIXME: this causes an error, although in similar
// RO cases it hasn't caused an error.
}
return list;
}
@Override
public boolean hasRelationship(final String userId, final String id, final G_UserSpaceRelationshipType... rel) {
final Node u = getUserNodeById(userId);
final Node w = getWorkspaceNodeById(id);
if ((u == null) || (w == null)) {
logger.warn("Could not find the user or workspace requested.");
return false;
}
boolean has = false;
try (Transaction tx = beginTx()) {
// iterate through all the relationships of the given types.
final List<RelationshipType> relList = new ArrayList<RelationshipType>();
for (final G_UserSpaceRelationshipType r : rel) {
relList.add(relfunnel.to(r));
}
final Iterable<Relationship> matchingRels = w.getRelationships(relList.toArray(new RelationshipType[0]));
for (final Relationship r : matchingRels) {
if (r.getStartNode().getId() == u.getId()) {
has = true;
break;
}
}
tx.success();
} catch (final Exception e) {
logger.error(ExceptionUtil.getRootCauseMessage(e));
}
return has;
}
@Override
public boolean removeUserFromWorkspace(final String userId, final String workspaceId) {
boolean success = false;
Node uNode, wNode;
uNode = getUserNodeById(userId);
wNode = getWorkspaceNodeById(workspaceId);
if ((uNode != null) && (wNode != null)) {
try (Transaction tx = beginTx()) {
for (final Relationship r : uNode.getRelationships(relfunnel.to(G_UserSpaceRelationshipType.EDITOR_OF))) {
logger.debug("r.getEndNode().getId() " + r.getEndNode().getId());
logger.debug("wNode.getId() " + wNode.getId());
if (r.getEndNode().getId() == wNode.getId()) {
r.delete();
success = true;
break;
}
}
tx.success();
}
}
return success;
}
@Override
public boolean removeUserPermissionFromWorkspace(final String userId, final String permission,
final String workspaceId) {
boolean success = false;
Node uNode, wNode;
uNode = getUserNodeById(userId);
wNode = getWorkspaceNodeById(workspaceId);
final G_UserSpaceRelationshipType rel = G_UserSpaceRelationshipType.valueOf(permission);
if ((uNode != null) && (wNode != null)) {
try (Transaction tx = beginTx()) {
for (final Relationship r : uNode.getRelationships(relfunnel.to(G_UserSpaceRelationshipType.EDITOR_OF))) {
if (r.getOtherNode(uNode).equals(wNode)) {
logger.info("Removing relationship '" + rel + "' between " + userId + " and " + workspaceId);
r.delete();
success = true;
break;
}
}
tx.success();
}
}
return success;
}
@Override
public G_UserWorkspace save(final G_UserWorkspace g) {
// TODO Auto-generated method stub
return null;
}
}
| |
package cgeo.geocaching.gcvote;
import cgeo.geocaching.R;
import cgeo.geocaching.connector.ConnectorFactory;
import cgeo.geocaching.connector.IConnector;
import cgeo.geocaching.connector.capability.ICredentials;
import cgeo.geocaching.connector.capability.IVotingCapability;
import cgeo.geocaching.models.Geocache;
import cgeo.geocaching.network.Network;
import cgeo.geocaching.network.Parameters;
import cgeo.geocaching.settings.Credentials;
import cgeo.geocaching.settings.Settings;
import cgeo.geocaching.utils.LeastRecentlyUsedMap;
import cgeo.geocaching.utils.Log;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import org.xmlpull.v1.XmlPullParserFactory;
public final class GCVote implements ICredentials {
// gcvote.com does not have a https certificate. However, Guido (the owner of gcvote.com) told
// us on 2017-03-21 that the site is accessible through its provider https endpoint at
// https://ssl.webpack.de/gcvote.com
// Sentinel meaning "no rating available".
public static final float NO_RATING = 0;
private static final int MAX_CACHED_RATINGS = 1000;
// LRU cache of ratings keyed by cache guid (only guid lookups are cached here).
private static final LeastRecentlyUsedMap<String, GCVoteRating> RATINGS_CACHE = new LeastRecentlyUsedMap.LruCache<>(MAX_CACHED_RATINGS);
private GCVote() {
// utility class
}
@NonNull
public static GCVote getInstance() {
return SingletonHolder.INSTANCE;
}
/**
* Get user rating for a given guid or geocode. For a guid first the ratings cache is checked
* before a request to gcvote.com is made.
*/
@Nullable
public static GCVoteRating getRating(final String guid, final String geocode) {
if (StringUtils.isNotBlank(guid) && RATINGS_CACHE.containsKey(guid)) {
return RATINGS_CACHE.get(guid);
}
// Cache miss: fetch from gcvote.com; a single-element request yields at most one rating.
final Map<String, GCVoteRating> ratings = getRating(singletonOrNull(guid), singletonOrNull(geocode));
return MapUtils.isNotEmpty(ratings) ? ratings.values().iterator().next() : null;
}
// Wraps a non-blank string in a singleton list, else returns null.
@Nullable
private static List<String> singletonOrNull(final String item) {
return StringUtils.isNotBlank(item) ? Collections.singletonList(item) : null;
}
/**
* Get user ratings from gcvote.com
*/
@NonNull
private static Map<String, GCVoteRating> getRating(final List<String> guids, final List<String> geocodes) {
if (guids == null && geocodes == null) {
return Collections.emptyMap();
}
final Parameters params = new Parameters("version", "cgeo");
// Credentials are optional; when present, the response also carries the user's own vote.
final Credentials login = Settings.getGCVoteLogin();
if (login.isValid()) {
params.put("userName", login.getUserName(), "password", login.getPassword());
}
// use guid or gccode for lookup
final boolean requestByGuids = CollectionUtils.isNotEmpty(guids);
if (requestByGuids) {
params.put("cacheIds", StringUtils.join(guids, ','));
} else {
params.put("waypoints", StringUtils.join(geocodes, ','));
}
final InputStream response = Network.getResponseStream(Network.getRequest("https://ssl.webpack.de/gcvote.com/getVotes.php", params));
if (response == null) {
return Collections.emptyMap();
}
try {
return getRatingsFromXMLResponse(response, requestByGuids);
} finally {
IOUtils.closeQuietly(response);
}
}
// Parses the <votes>/<vote> XML payload into a map keyed by cacheId or waypoint,
// and populates the ratings cache as a side effect.
@NonNull
static Map<String, GCVoteRating> getRatingsFromXMLResponse(@NonNull final InputStream response, final boolean requestByGuids) {
try {
final XmlPullParserFactory factory = XmlPullParserFactory.newInstance();
final XmlPullParser xpp = factory.newPullParser();
xpp.setInput(response, StandardCharsets.UTF_8.name());
// NOTE(review): loggedIn is only meaningful if the <votes> element precedes
// the <vote> elements in the response — confirm against the gcvote.com format.
boolean loggedIn = false;
final Map<String, GCVoteRating> ratings = new HashMap<>();
int eventType = xpp.getEventType();
while (eventType != XmlPullParser.END_DOCUMENT) {
if (eventType == XmlPullParser.START_TAG) {
final String tagName = xpp.getName();
if (StringUtils.equals(tagName, "vote")) {
final String id = xpp.getAttributeValue(null, requestByGuids ? "cacheId" : "waypoint");
// NOTE(review): a missing voteUser/voteAvg/voteCnt attribute would
// cause an NPE here, which is NOT caught below (only
// NumberFormatException is) — confirm the attributes are guaranteed.
final float myVote = loggedIn ? Float.parseFloat(xpp.getAttributeValue(null, "voteUser")) : 0;
final GCVoteRating voteRating = new GCVoteRating(Float.parseFloat(xpp.getAttributeValue(null, "voteAvg")),
Integer.parseInt(xpp.getAttributeValue(null, "voteCnt")),
myVote);
ratings.put(id, voteRating);
} else if (StringUtils.equals(tagName, "votes")) {
loggedIn = StringUtils.equals(xpp.getAttributeValue(null, "loggedIn"), "true");
}
}
eventType = xpp.next();
}
RATINGS_CACHE.putAll(ratings);
return ratings;
} catch (final NumberFormatException | XmlPullParserException | IOException e) {
// A single malformed entry discards the whole batch; best-effort by design.
Log.e("Cannot parse GCVote result", e);
return Collections.emptyMap();
}
}
/**
* Transmit user vote to gcvote.com
*
* @param cache the geocache (supported by GCVote)
* @param rating the rating
* @return {@code true} if the rating was submitted successfully
* @throws IllegalArgumentException if the cache's connector does not support
*         voting or the rating is out of range
*/
public static boolean setRating(@NonNull final Geocache cache, final float rating) {
final IConnector connector = ConnectorFactory.getConnector(cache);
if (!(connector instanceof IVotingCapability)) {
throw new IllegalArgumentException("Service does not support voting" + cache);
}
final IVotingCapability votingConnector = (IVotingCapability) connector;
if (!votingConnector.supportsVoting(cache)) {
throw new IllegalArgumentException("voting is not possible for " + cache);
}
if (!votingConnector.isValidRating(rating)) {
throw new IllegalArgumentException("invalid rating " + rating);
}
// Voting requires valid gcvote.com credentials.
final Credentials login = Settings.getGCVoteLogin();
if (login.isInvalid()) {
Log.e("GCVote.setRating: cannot find credentials");
return false;
}
final Parameters params = new Parameters(
"userName", login.getUserName(),
"password", login.getPassword(),
"cacheId", cache.getGuid(),
"waypoint", cache.getGeocode(),
"voteUser", String.format(Locale.US, "%.1f", rating),
"version", "cgeo");
// The service replies with a plain-text body; "ok" signals success.
final String result = StringUtils.trim(Network.getResponseData(Network.getRequest("https://ssl.webpack.de/gcvote.com/setVote.php", params)));
if (!StringUtils.equalsIgnoreCase(result, "ok")) {
Log.e("GCVote.setRating: could not post rating, answer was " + result);
return false;
}
return true;
}
// Bulk-fetches ratings for the votable caches in the list and writes
// rating/votes/myVote back onto each matching Geocache.
public static void loadRatings(@NonNull final List<Geocache> caches) {
if (!Settings.isRatingWanted()) {
return;
}
final List<String> geocodes = getVotableGeocodes(caches);
if (geocodes.isEmpty()) {
return;
}
try {
final Map<String, GCVoteRating> ratings = getRating(null, geocodes);
// save found cache coordinates
for (final Geocache cache : caches) {
if (ratings.containsKey(cache.getGeocode())) {
final GCVoteRating rating = ratings.get(cache.getGeocode());
cache.setRating(rating.getRating());
cache.setVotes(rating.getVotes());
cache.setMyVote(rating.getMyVote());
}
}
} catch (final Exception e) {
// Best-effort: rating retrieval must never break cache loading.
Log.e("GCVote.loadRatings", e);
}
}
/**
* Get geocodes of all the caches, which can be used with GCVote. Non-GC caches will be filtered out.
*/
@NonNull
private static List<String> getVotableGeocodes(@NonNull final Collection<Geocache> caches) {
final List<String> geocodes = new ArrayList<>(caches.size());
for (final Geocache cache : caches) {
final String geocode = cache.getGeocode();
final IConnector connector = ConnectorFactory.getConnector(cache);
if (StringUtils.isNotBlank(geocode) && connector instanceof IVotingCapability && ((IVotingCapability) connector).supportsVoting(cache)) {
geocodes.add(geocode);
}
}
return geocodes;
}
@NonNull
public static String getWebsite() {
return "http://gcvote.com";
}
@NonNull
public static String getCreateAccountUrl() {
return "http://gcvote.com/help_en.php";
}
@Override
public int getUsernamePreferenceKey() {
return R.string.pref_user_vote;
}
@Override
public int getPasswordPreferenceKey() {
return R.string.pref_pass_vote;
}
@Override
public Credentials getCredentials() {
return Settings.getCredentials(R.string.pref_user_vote, R.string.pref_pass_vote);
}
// Initialization-on-demand holder: INSTANCE is created lazily and thread-safely
// by class loading, without explicit synchronization.
private static class SingletonHolder {
@NonNull
private static final GCVote INSTANCE = new GCVote();
}
}
| |
package org.dstadler.htmlunit;
import com.gargoylesoftware.htmlunit.*;
import com.gargoylesoftware.htmlunit.html.*;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterators;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;
public class HtmlUnitUtils {
private static final Log logger = LogFactory.getLog(HtmlUnitUtils.class);
// Utility class: prevent instantiation.
private HtmlUnitUtils() {
}
/**
 * Creates a pre-configured {@link WebClient} with JavaScript enabled.
 */
public static WebClient createWebClient() {
return createWebClient(true);
}
/**
 * Creates a pre-configured {@link WebClient} emulating Firefox 78.
 *
 * @param enableJavaScript whether JavaScript execution is enabled
 */
public static WebClient createWebClient(boolean enableJavaScript) {
return createWebClient(enableJavaScript, BrowserVersion.FIREFOX_78);
}
/**
 * Creates a {@link WebClient} configured for robust scraping: 60s timeout,
 * CSS and applets off, redirects followed, script errors and page-correctness
 * warnings silenced.
 *
 * @param enableJavaScript whether JavaScript execution is enabled
 * @param browserVersion   the browser to emulate
 */
public static WebClient createWebClient(boolean enableJavaScript, BrowserVersion browserVersion) {
logger.debug("Creating client");
final WebClient client = new WebClient(browserVersion);
client.waitForBackgroundJavaScriptStartingBefore(1000);
final WebClientOptions options = client.getOptions();
options.setTimeout(60000);
options.setJavaScriptEnabled(enableJavaScript);
options.setCssEnabled(false);
options.setAppletEnabled(false);
// Follow old-school HTTP 302 redirects - standard browser behaviour.
options.setRedirectEnabled(true);
// Ignore script errors instead of aborting page loads.
options.setThrowExceptionOnScriptError(false);
client.setHTMLParserListener(null);
client.setIncorrectnessListener((message, origin) -> {
// Swallow for now, but maybe collect it for optional retrieval?
});
client.setCssErrorHandler(new SilentCssErrorHandler());
return client;
}
/**
 * Fetches the page at {@code url} with the given client.
 *
 * @throws IOException on network failure or a failing HTTP status code
 *                     (the latter wrapped, preserving the cause)
 */
public static HtmlPage getInitialPage(final WebClient webClient, final String url) throws IOException {
try {
final HtmlPage page = webClient.getPage(url);
logger.debug("Page title = " + page.getTitleText());
/*
 * webClient.setAjaxController(new MyAjaxController());
 * page.addDomChangeListener( new MyDomChangeListener());
 */
return page;
} catch (FailingHttpStatusCodeException e) {
// Normalize HTTP-level failures to IOException for callers.
throw new IOException(e);
}
}
/**
 * Looks up the element with the given DOM id and checks it has the expected type.
 *
 * @throws NoElementFoundException if no element with that id exists
 * @throws WrongElementException   if the element is not assignable to {@code type}
 */
public static <T extends HtmlElement> T getElementById(final HtmlPage page, String id, Class<T> type) throws HtmlUnitException {
final DomElement element = page.getElementById(id);
if (element == null) {
logger.warn("Page contents (" + page.getUrl() + "): " + page.asXml());
throw new NoElementFoundException("Could not find element with id '" + id + "' on page " + page.getUrl());
}
if (!type.isAssignableFrom(element.getClass())) {
logger.warn("Page contents (" + page.getUrl() + "): " + page.asXml());
throw new WrongElementException("Expected a field with id '" + id + "' and type " + type.getName() +
", but had an element of type " + element.getClass() + " on page: " + page.getUrl());
}
// Safe after the isAssignableFrom check; Class.cast avoids an unchecked cast.
return type.cast(element);
}
/**
 * Looks up the element with the given {@code name} attribute and checks it
 * has the expected type. (Removed a stale commented-out null check:
 * {@code getElementByName} already throws when nothing matches.)
 *
 * @throws NoElementFoundException if no element with that name exists
 * @throws WrongElementException   if the element is not assignable to {@code type}
 */
@SuppressWarnings("unchecked")
public static <T extends HtmlElement> T getElementByName(final HtmlPage page, String name, Class<T> type) throws HtmlUnitException {
final HtmlElement element;
try {
element = page.getElementByName(name);
} catch (ElementNotFoundException e) {
// NOTE(review): only the message of 'e' is kept; if NoElementFoundException
// has a (String, Throwable) constructor, prefer it to preserve the cause.
throw new NoElementFoundException("Could not find element with name '" + name + "' on page " + page.getUrl() + ": " + e);
}
if(!type.isAssignableFrom(element.getClass())) {
logger.warn("Page contents (" + page.getUrl() + "): " + page.asXml());
throw new WrongElementException("Expected a field with name '" + name + "' and type " + type.getName() +
", but had an element of type " + element.getClass() + " on page: " + page.getUrl());
}
return (T) element;
}
/**
 * Collects all elements with the given tag whose attribute equals the given
 * value, casting each to the given type.
 *
 * @param page the page to search
 * @param tagName the tag to look at
 * @param attribute the attribute to compare
 * @param value the exact attribute value to match
 * @param type the expected element type
 * @return all matching elements, possibly empty
 * @throws WrongElementException if a matching element has an unexpected type
 */
@SuppressWarnings("unchecked")
public static <T extends HtmlElement> List<T> getElementsByAttribute(final HtmlPage page, String tagName, String attribute, String value, Class<T> type) throws HtmlUnitException {
    final List<T> matches = new ArrayList<>();
    for (DomElement candidate : page.getElementsByTagName(tagName)) {
        if (!candidate.getAttribute(attribute).equals(value)) {
            continue;
        }
        if (!type.isAssignableFrom(candidate.getClass())) {
            logger.warn("Page contents (" + page.getUrl() + "): " + page.asXml());
            throw new WrongElementException("Expected a field with tag '" + tagName + "', attribute '" + attribute +
                    "', value '" + value + "' and type " + type.getName() +
                    ", but had an element of type " + candidate.getClass() + " on page: " + page.getUrl());
        }
        matches.add((T) candidate);
    }
    return matches;
}
/**
 * Collects all elements with the given tag whose attribute contains the
 * given value as a substring, casting each to the given type.
 *
 * @param page the page to search
 * @param tagName the tag to look at
 * @param attribute the attribute to inspect
 * @param value the substring that the attribute value must contain
 * @param type the expected element type
 * @return all matching elements, possibly empty
 * @throws WrongElementException if a matching element has an unexpected type
 */
@SuppressWarnings("unchecked")
public static <T extends HtmlElement> List<T> getElementsByAttributeContains(final HtmlPage page, String tagName, String attribute, String value, Class<T> type) throws WrongElementException {
    final List<T> matches = new ArrayList<>();
    for (DomElement candidate : page.getElementsByTagName(tagName)) {
        if (!candidate.getAttribute(attribute).contains(value)) {
            continue;
        }
        if (!type.isAssignableFrom(candidate.getClass())) {
            logger.warn("Page contents (" + page.getUrl() + "): " + page.asXml());
            throw new WrongElementException("Expected a field with tag '" + tagName + "', attribute '" + attribute +
                    "', which contains value '" + value + "' and type " + type.getName() +
                    ", but had an element of type " + candidate.getClass() + " on page: " + page.getUrl());
        }
        matches.add((T) candidate);
    }
    return matches;
}
/**
 * Collects all elements with the given tag whose text content equals the
 * given text, casting each to the given type.
 *
 * @param page the page to search
 * @param tagName the tag to look at
 * @param text the exact text content to match
 * @param type the expected element type
 * @return all matching elements, possibly empty
 * @throws WrongElementException if a matching element has an unexpected type
 */
@SuppressWarnings("unchecked")
public static <T extends HtmlElement> List<T> getElementsByTextContents(final HtmlPage page, String tagName, String text, Class<T> type) throws WrongElementException {
    final List<T> matches = new ArrayList<>();
    for (DomElement candidate : page.getElementsByTagName(tagName)) {
        if (!candidate.getTextContent().equals(text)) {
            continue;
        }
        if (!type.isAssignableFrom(candidate.getClass())) {
            logger.warn("Page contents (" + page.getUrl() + "): " + page.asXml());
            throw new WrongElementException("Expected a field with tag '" + tagName + "', " +
                    "which contains text '" + text + "' and type " + type.getName() +
                    ", but had an element of type " + candidate.getClass() + " on page: " + page.getUrl());
        }
        matches.add((T) candidate);
    }
    return matches;
}
/**
 * Finds the single element of the given type inside the named form,
 * descending into all nested elements.
 *
 * @param page the page holding the form
 * @param formName the name attribute of the form
 * @param type the element type to look for
 * @return the single matching element, never null
 * @throws NoElementFoundException if the form or a matching element is missing
 * @throws HtmlUnitException if more than one element of that type exists
 */
@SuppressWarnings("unchecked")
public static <T extends HtmlElement> T getFormElementByType(HtmlPage page, String formName, Class<T> type) throws HtmlUnitException {
    final HtmlForm form;
    try {
        form = page.getFormByName(formName);
    } catch (ElementNotFoundException e) {
        throw new NoElementFoundException("Could not find form with name '" + formName + "' on page " + page.getUrl() + ": " + e);
    }
    // use a stack to recursively walk into all sub-elements, not just the first level
    final Stack<DomElement> pending = new Stack<>();
    Iterators.addAll(pending, form.getChildElements().iterator());
    T match = null;
    while (!pending.isEmpty()) {
        final DomElement current = pending.pop();
        if (type.isAssignableFrom(current.getClass())) {
            // reject ambiguous matches instead of silently picking one
            if (match != null) {
                logger.warn("Form contents (" + page.getUrl() + '/' + formName + "): " + form.asXml());
                throw new HtmlUnitException("Did find more than one element of type " + type.getName() + " in form '" + formName + "' on page " + page.getUrl());
            }
            match = (T) current;
        }
        Iterators.addAll(pending, current.getChildElements().iterator());
    }
    if (match == null) {
        logger.warn("Form contents (" + page.getUrl() + '/' + formName + "): " + form.asXml());
        throw new NoElementFoundException("Could not find element of type " + type.getName() + " in form '" + formName + "' on page " + page.getUrl());
    }
    return match;
}
/**
 * Finds the single element with the given name attribute inside the form,
 * descending into all nested elements, and casts it to the given type.
 *
 * @param form the form to search
 * @param name the name attribute of the wanted element
 * @param type the expected element type
 * @return the single matching element, never null
 * @throws NoElementFoundException if no element with that name exists
 * @throws WrongElementException if the element is not of the expected type
 * @throws HtmlUnitException if more than one element with that name exists
 */
@SuppressWarnings("unchecked")
public static <T extends HtmlElement> T getFormElementByName(final HtmlForm form, String name, Class<T> type) throws HtmlUnitException {
    // use a stack to recursively walk into all sub-elements, not just the first level
    final Stack<DomElement> pending = new Stack<>();
    Iterators.addAll(pending, form.getChildElements().iterator());
    HtmlElement match = null;
    while (!pending.isEmpty()) {
        final DomElement current = pending.pop();
        if (current.getAttribute("name").equals(name)) {
            // don't allow to find it twice
            if (match != null) {
                logger.warn("Form contents: " + form.asXml());
                throw new HtmlUnitException("Did find more than one element with name " + name + " and type " + type.getName() + " in form.");
            }
            match = (HtmlElement) current;
        }
        Iterators.addAll(pending, current.getChildElements().iterator());
    }
    if (match == null) {
        logger.warn("Form contents (" + form.asXml());
        throw new NoElementFoundException("Could not find element with name " + name + " of type " + type.getName() + " in form '" + form.getNameAttribute());
    }
    if (!type.isAssignableFrom(match.getClass())) {
        logger.warn("Form contents: " + form.asXml());
        throw new WrongElementException("Expected a field with name '" + name + "' and type " + type.getName() +
                ", but had an element of type " + match.getClass());
    }
    return (T) match;
}
/**
 * Finds the single element with the given name and value attributes inside
 * the form, descending into all nested elements, and casts it to the given
 * type.
 *
 * @param form the form to search
 * @param name the name attribute of the wanted element
 * @param value the value attribute of the wanted element
 * @param type the expected element type
 * @return the single matching element, never null
 * @throws NoElementFoundException if no matching element exists
 * @throws WrongElementException if the element is not of the expected type
 * @throws HtmlUnitException if more than one matching element exists
 */
@SuppressWarnings("unchecked")
public static <T extends HtmlElement> T getFormElementByNameAndValue(final HtmlForm form, String name, String value, Class<T> type) throws HtmlUnitException {
    // use a stack to recursively walk into all sub-elements, not just the first level
    final Stack<DomElement> pending = new Stack<>();
    Iterators.addAll(pending, form.getChildElements().iterator());
    HtmlElement match = null;
    while (!pending.isEmpty()) {
        final DomElement current = pending.pop();
        if (current.getAttribute("name").equals(name) && current.getAttribute("value").equals(value)) {
            // don't allow to find it twice
            if (match != null) {
                logger.warn("Form contents: " + form.asXml());
                throw new HtmlUnitException("Did find more than one element with name " + name + ", value " + value + " and type " + type.getName() + " in form.");
            }
            match = (HtmlElement) current;
        }
        Iterators.addAll(pending, current.getChildElements().iterator());
    }
    if (match == null) {
        logger.warn("Form contents (" + form.asXml());
        throw new NoElementFoundException("Could not find element with name " + name + ", value " + value + " of type " + type.getName() + " in form '" + form.getNameAttribute());
    }
    if (!type.isAssignableFrom(match.getClass())) {
        logger.warn("Form contents: " + form.asXml());
        throw new WrongElementException("Expected a field with name '" + name + "', value '" + value + "' and type " + type.getName() +
                ", but had an element of type " + match.getClass());
    }
    return (T) match;
}
/**
 * Finds the single element of the given type inside the form, descending
 * into all nested elements.
 *
 * @param form the form to search
 * @param type the element type to look for
 * @return the single matching element, never null
 * @throws NoElementFoundException if no element of that type exists
 * @throws HtmlUnitException if more than one element of that type exists
 */
@SuppressWarnings("unchecked")
public static <T extends HtmlElement> T getFormElementByType(final HtmlForm form, Class<T> type) throws HtmlUnitException {
    // use a stack to recursively walk into all sub-elements, not just the first level
    final Stack<DomElement> pending = new Stack<>();
    Iterators.addAll(pending, form.getChildElements().iterator());
    HtmlElement match = null;
    while (!pending.isEmpty()) {
        final DomElement current = pending.pop();
        if (type.isAssignableFrom(current.getClass())) {
            // don't allow to find it twice
            if (match != null) {
                logger.warn("Form contents: " + form.asXml());
                throw new HtmlUnitException("Did find more than one element of type " + type.getName() + " in form.");
            }
            match = (HtmlElement) current;
        }
        Iterators.addAll(pending, current.getChildElements().iterator());
    }
    if (match == null) {
        logger.warn("Form contents (" + form.asXml());
        throw new NoElementFoundException("Could not find element of type " + type.getName() + " in form '" + form.getNameAttribute());
    }
    return (T) match;
}
/**
 * Returns the first form on the page with the given action-attribute.
 *
 * @param page The page to look at
 * @param action The action-attribute that the form should have.
 * @return The found form, never null
 *
 * @throws NoElementFoundException if no form with the given action attribute is found.
 */
public static HtmlForm getFormByAction(HtmlPage page, String action) throws HtmlUnitException {
    // e.g. <form action="add-perm.action">
    for (HtmlForm candidate : page.getForms()) {
        if (candidate.getActionAttribute().equals(action)) {
            return candidate;
        }
    }
    throw new NoElementFoundException("Could not find form with action '" + action + '\'');
}
/**
 * Wait for up to the given time for the given text to appear.
 *
 * @param page The page to look at
 * @param str The text that is looked for
 * @param waitMS The amount of milliseconds to wait until waiting is ended
 *
 * @throws IllegalStateException If the text does not appear within the given time.
 */
public static void waitForText(SgmlPage page, String str, int waitMS) {
    // Poll in 100ms steps until the text shows up or the timeout elapses.
    // BUG FIX: the condition was inverted — the loop used to break as soon
    // as the text was NOT present and slept while it WAS present, so the
    // method never actually waited for the text to appear (and the final
    // check's message claimed "Still found" while requiring presence).
    for (int i = 0; i < waitMS / 100; i++) {
        if (page.asXml().contains(str)) {
            // found, stop waiting
            break;
        }
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            throw new IllegalStateException(e);
        }
    }
    Preconditions.checkState(page.asXml().contains(str), "Did not find %s", str);
}
/**
 * Call {@link WebClient#waitForBackgroundJavaScript(long)} with
 * 1 second delay until it returns 0 or the given number of seconds
 * has passed.
 *
 * @param client The WebClient to call.
 * @param seconds The number of seconds that the call will take at max
 */
public static void waitForJavascript(WebClient client, int seconds) {
    int remaining = seconds;
    // each call blocks for up to one second while background jobs run
    while (remaining-- > 0 && client.waitForBackgroundJavaScript(1000) != 0) {
        // jobs still pending, keep polling
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.http.server;
import junit.framework.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.DelegationTokenRenewer;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
import org.apache.hadoop.fs.http.client.HttpFSKerberosAuthenticator;
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.test.HFSTestCase;
import org.apache.hadoop.test.KerberosTestUtils;
import org.apache.hadoop.test.TestDir;
import org.apache.hadoop.test.TestDirHelper;
import org.apache.hadoop.test.TestHdfs;
import org.apache.hadoop.test.TestHdfsHelper;
import org.apache.hadoop.test.TestJetty;
import org.apache.hadoop.test.TestJettyHelper;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.junit.After;
import org.junit.Test;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.webapp.WebAppContext;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Writer;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
import java.security.PrivilegedExceptionAction;
import java.util.concurrent.Callable;
public class TestHttpFSWithKerberos extends HFSTestCase {
@After
public void resetUGI() {
Configuration conf = new Configuration();
UserGroupInformation.setConfiguration(conf);
}
private void createHttpFSServer() throws Exception {
File homeDir = TestDirHelper.getTestDir();
Assert.assertTrue(new File(homeDir, "conf").mkdir());
Assert.assertTrue(new File(homeDir, "log").mkdir());
Assert.assertTrue(new File(homeDir, "temp").mkdir());
HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());
File secretFile = new File(new File(homeDir, "conf"), "secret");
Writer w = new FileWriter(secretFile);
w.write("secret");
w.close();
//HDFS configuration
File hadoopConfDir = new File(new File(homeDir, "conf"), "hadoop-conf");
hadoopConfDir.mkdirs();
String fsDefaultName = TestHdfsHelper.getHdfsConf()
.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
Configuration conf = new Configuration(false);
conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsDefaultName);
File hdfsSite = new File(hadoopConfDir, "hdfs-site.xml");
OutputStream os = new FileOutputStream(hdfsSite);
conf.writeXml(os);
os.close();
conf = new Configuration(false);
conf.set("httpfs.proxyuser.client.hosts", "*");
conf.set("httpfs.proxyuser.client.groups", "*");
conf.set("httpfs.authentication.type", "kerberos");
conf.set("httpfs.authentication.signature.secret.file",
secretFile.getAbsolutePath());
File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
os = new FileOutputStream(httpfsSite);
conf.writeXml(os);
os.close();
ClassLoader cl = Thread.currentThread().getContextClassLoader();
URL url = cl.getResource("webapp");
WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs");
Server server = TestJettyHelper.getJettyServer();
server.addHandler(context);
server.start();
HttpFSServerWebApp.get().setAuthority(TestJettyHelper.getAuthority());
}
@Test
@TestDir
@TestJetty
@TestHdfs
public void testValidHttpFSAccess() throws Exception {
createHttpFSServer();
KerberosTestUtils.doAsClient(new Callable<Void>() {
@Override
public Void call() throws Exception {
URL url = new URL(TestJettyHelper.getJettyURL(),
"/webhdfs/v1/?op=GETHOMEDIRECTORY");
AuthenticatedURL aUrl = new AuthenticatedURL();
AuthenticatedURL.Token aToken = new AuthenticatedURL.Token();
HttpURLConnection conn = aUrl.openConnection(url, aToken);
Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
return null;
}
});
}
@Test
@TestDir
@TestJetty
@TestHdfs
public void testInvalidadHttpFSAccess() throws Exception {
createHttpFSServer();
URL url = new URL(TestJettyHelper.getJettyURL(),
"/webhdfs/v1/?op=GETHOMEDIRECTORY");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
Assert.assertEquals(conn.getResponseCode(),
HttpURLConnection.HTTP_UNAUTHORIZED);
}
@Test
@TestDir
@TestJetty
@TestHdfs
public void testDelegationTokenHttpFSAccess() throws Exception {
createHttpFSServer();
KerberosTestUtils.doAsClient(new Callable<Void>() {
@Override
public Void call() throws Exception {
//get delegation token doing SPNEGO authentication
URL url = new URL(TestJettyHelper.getJettyURL(),
"/webhdfs/v1/?op=GETDELEGATIONTOKEN");
AuthenticatedURL aUrl = new AuthenticatedURL();
AuthenticatedURL.Token aToken = new AuthenticatedURL.Token();
HttpURLConnection conn = aUrl.openConnection(url, aToken);
Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) new JSONParser()
.parse(new InputStreamReader(conn.getInputStream()));
json =
(JSONObject) json
.get(HttpFSKerberosAuthenticator.DELEGATION_TOKEN_JSON);
String tokenStr = (String) json
.get(HttpFSKerberosAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON);
//access httpfs using the delegation token
url = new URL(TestJettyHelper.getJettyURL(),
"/webhdfs/v1/?op=GETHOMEDIRECTORY&delegation=" +
tokenStr);
conn = (HttpURLConnection) url.openConnection();
Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
//try to renew the delegation token without SPNEGO credentials
url = new URL(TestJettyHelper.getJettyURL(),
"/webhdfs/v1/?op=RENEWDELEGATIONTOKEN&token=" + tokenStr);
conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("PUT");
Assert.assertEquals(conn.getResponseCode(),
HttpURLConnection.HTTP_UNAUTHORIZED);
//renew the delegation token with SPNEGO credentials
url = new URL(TestJettyHelper.getJettyURL(),
"/webhdfs/v1/?op=RENEWDELEGATIONTOKEN&token=" + tokenStr);
conn = aUrl.openConnection(url, aToken);
conn.setRequestMethod("PUT");
Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
//cancel delegation token, no need for SPNEGO credentials
url = new URL(TestJettyHelper.getJettyURL(),
"/webhdfs/v1/?op=CANCELDELEGATIONTOKEN&token=" +
tokenStr);
conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("PUT");
Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
//try to access httpfs with the canceled delegation token
url = new URL(TestJettyHelper.getJettyURL(),
"/webhdfs/v1/?op=GETHOMEDIRECTORY&delegation=" +
tokenStr);
conn = (HttpURLConnection) url.openConnection();
Assert.assertEquals(conn.getResponseCode(),
HttpURLConnection.HTTP_UNAUTHORIZED);
return null;
}
});
}
@SuppressWarnings("deprecation")
private void testDelegationTokenWithFS(Class fileSystemClass)
throws Exception {
createHttpFSServer();
Configuration conf = new Configuration();
conf.set("fs.webhdfs.impl", fileSystemClass.getName());
conf.set("fs.hdfs.impl.disable.cache", "true");
URI uri = new URI( "webhdfs://" +
TestJettyHelper.getJettyURL().toURI().getAuthority());
FileSystem fs = FileSystem.get(uri, conf);
Token<?> tokens[] = fs.addDelegationTokens("foo", null);
fs.close();
Assert.assertEquals(1, tokens.length);
fs = FileSystem.get(uri, conf);
((DelegationTokenRenewer.Renewable) fs).setDelegationToken(tokens[0]);
fs.listStatus(new Path("/"));
fs.close();
}
private void testDelegationTokenWithinDoAs(
final Class fileSystemClass, boolean proxyUser) throws Exception {
Configuration conf = new Configuration();
conf.set("hadoop.security.authentication", "kerberos");
UserGroupInformation.setConfiguration(conf);
UserGroupInformation.loginUserFromKeytab("client",
"/Users/tucu/tucu.keytab");
UserGroupInformation ugi = UserGroupInformation.getLoginUser();
if (proxyUser) {
ugi = UserGroupInformation.createProxyUser("foo", ugi);
}
conf = new Configuration();
UserGroupInformation.setConfiguration(conf);
ugi.doAs(
new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
testDelegationTokenWithFS(fileSystemClass);
return null;
}
});
}
@Test
@TestDir
@TestJetty
@TestHdfs
public void testDelegationTokenWithHttpFSFileSystem() throws Exception {
testDelegationTokenWithinDoAs(HttpFSFileSystem.class, false);
}
@Test
@TestDir
@TestJetty
@TestHdfs
public void testDelegationTokenWithWebhdfsFileSystem() throws Exception {
testDelegationTokenWithinDoAs(WebHdfsFileSystem.class, false);
}
@Test
@TestDir
@TestJetty
@TestHdfs
public void testDelegationTokenWithHttpFSFileSystemProxyUser()
throws Exception {
testDelegationTokenWithinDoAs(HttpFSFileSystem.class, true);
}
// TODO: WebHdfsFilesystem does work with ProxyUser HDFS-3509
// @Test
// @TestDir
// @TestJetty
// @TestHdfs
// public void testDelegationTokenWithWebhdfsFileSystemProxyUser()
// throws Exception {
// testDelegationTokenWithinDoAs(WebHdfsFileSystem.class, true);
// }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Michael Danilov, Dmitry A. Durnev
* @version $Revision$
*/
package java.awt;
import java.awt.event.ComponentEvent;
import java.awt.event.FocusEvent;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.awt.event.PaintEvent;
import java.awt.event.WindowEvent;
import org.apache.harmony.awt.internal.nls.Messages;
import org.apache.harmony.awt.wtk.NativeEvent;
import org.apache.harmony.awt.wtk.NativeWindow;
/**
* Helper package-private class for managing lightweight components &
* dispatching events from heavyweight source
*/
class Dispatcher {
// Specialized sub-dispatchers; each one handles a single family of native events.
final PopupDispatcher popupDispatcher = new PopupDispatcher();
final FocusDispatcher focusDispatcher;
// Tracks both explicit (startGrab) and synthetic (button-press) mouse grabs.
final MouseGrabManager mouseGrabManager = new MouseGrabManager();
final MouseDispatcher mouseDispatcher;
private final ComponentDispatcher componentDispatcher = new ComponentDispatcher();
private final KeyDispatcher keyDispatcher = new KeyDispatcher();
private final Toolkit toolkit;
// Click interval in ms; presumably the max delay between presses counted as a
// multi-click — confirm against MouseDispatcher's usage.
int clickInterval = 250;
/**
 * Creates a dispatcher bound to the given toolkit. Focus and mouse
 * sub-dispatchers need the toolkit (and the grab manager) at construction.
 *
 * @param toolkit - AWT toolkit
 */
Dispatcher(Toolkit toolkit) {
this.toolkit = toolkit;
focusDispatcher = new FocusDispatcher(toolkit);
mouseDispatcher = new MouseDispatcher(mouseGrabManager, toolkit);
}
/**
 * Dispatch native event: produce appropriate AWT events,
 * update component's fields when needed
 * @param event - native event to dispatch
 * @return - true means default processing by OS is not needed
 */
public boolean onEvent(NativeEvent event) {
int eventId = event.getEventId();
// Window-created and grab-cancel notifications are handled directly by the
// toolkit / grab manager before any per-component routing.
if (eventId == NativeEvent.ID_CREATED) {
return toolkit.onWindowCreated(event.getWindowId());
} else if (eventId == NativeEvent.ID_MOUSE_GRAB_CANCELED) {
return mouseGrabManager.onGrabCanceled();
} else if (popupDispatcher.onEvent(event)) {
// NOTE(review): events consumed by the popup dispatcher still return
// false, i.e. OS default processing is allowed — presumably intentional,
// confirm against PopupDispatcher.onEvent's contract.
return false;
} else {
Component src = toolkit.getComponentById(event.getWindowId());
if (src != null) {
// Route by event-id range: component/window/insets/bounds/theme changes
// first, then mouse events, then paint requests.
if (((eventId >= ComponentEvent.COMPONENT_FIRST) && (eventId <= ComponentEvent.COMPONENT_LAST))
|| ((eventId >= WindowEvent.WINDOW_FIRST) && (eventId <= WindowEvent.WINDOW_LAST))
|| (eventId == NativeEvent.ID_INSETS_CHANGED)
|| (eventId == NativeEvent.ID_BOUNDS_CHANGED)
|| (eventId == NativeEvent.ID_THEME_CHANGED)) {
return componentDispatcher.dispatch(src, event);
} else if ((eventId >= MouseEvent.MOUSE_FIRST)
&& (eventId <= MouseEvent.MOUSE_LAST)) {
return mouseDispatcher.dispatch(src, event);
} else if (eventId == PaintEvent.PAINT) {
// Paint requests accumulate in the redraw manager rather than being
// posted as events.
src.redrawManager.addPaintRegion(src, event.getClipRects());
return true;
}
}
// Focus and key events are dispatched even when src is null (e.g. key
// events targeting a focus proxy window — see KeyDispatcher.dispatch).
if ((eventId >= FocusEvent.FOCUS_FIRST)
&& (eventId <= FocusEvent.FOCUS_LAST)) {
return focusDispatcher.dispatch(src, event);
} else if ((eventId >= KeyEvent.KEY_FIRST)
&& (eventId <= KeyEvent.KEY_LAST)) {
return keyDispatcher.dispatch(src, event);
}
}
return false;
}
/**
 * The dispatcher of native events that affect
 * component's state or bounds
 */
final class ComponentDispatcher {
/**
 * Handle native event that affects component's state or bounds
 * @param src - the component updated by the event
 * @param event - the native event
 * @return - as in Dispatcher.onEvent()
 * @see Dispatcher#onEvent(NativeEvent)
 */
boolean dispatch(Component src, NativeEvent event) {
int id = event.getEventId();
// Theme changes are handled the same way as inset changes: both may alter
// the native decorations of a top-level window.
if ((id == NativeEvent.ID_INSETS_CHANGED)
|| (id == NativeEvent.ID_THEME_CHANGED)) {
return dispatchInsets(event, src);
} else if ((id >= WindowEvent.WINDOW_FIRST)
&& (id <= WindowEvent.WINDOW_LAST)) {
return dispatchWindow(event, src);
} else {
return dispatchPureComponent(event, src);
}
}
/**
 * Handle the change of top-level window's native decorations.
 * Non-Window components are ignored (they have no native insets).
 * @param event - the native event
 * @param src - the component updated by the event
 * @return - as in Dispatcher.onEvent()
 * @see Dispatcher#onEvent(NativeEvent)
 */
boolean dispatchInsets(NativeEvent event, Component src) {
if (src instanceof Window) {
((Window) src).setNativeInsets(event.getInsets());
}
return false;
}
/**
 * Handle the change of top-level window's state
 * @param event - the native event
 * @param src - the component updated by the event
 * @return - as in Dispatcher.onEvent()
 * @see Dispatcher#onEvent(NativeEvent)
 */
boolean dispatchWindow(NativeEvent event, Component src) {
Window window = (Window) src;
int id = event.getEventId();
if (id == WindowEvent.WINDOW_CLOSING) {
// post WINDOW_CLOSING to the EventQueue and suppress the OS default
// close handling (return true)
toolkit.getSystemEventQueueImpl().postEvent(
new WindowEvent(window, WindowEvent.WINDOW_CLOSING));
return true;
} else if (id == WindowEvent.WINDOW_STATE_CHANGED) {
// only Frames track extended state (maximized/iconified)
if (window instanceof Frame) {
((Frame) window)
.updateExtendedState(event.getWindowState());
}
}
return false;
}
/**
 * Handle the change of component's size and/or position.
 * Converts the native window rectangle into parent coordinates for
 * lightweight-capable components and applies it via setBounds().
 * @param event - the native event
 * @param src - the component updated by the event
 * @return - as in Dispatcher.onEvent()
 * @see Dispatcher#onEvent(NativeEvent)
 */
private boolean dispatchPureComponent(NativeEvent event, Component src) {
Rectangle rect = event.getWindowRect();
Point loc = rect.getLocation();
int mask;
// mask tells setBounds which part of the rectangle to ignore:
// moved-only events keep the size, resized-only events keep the position
switch (event.getEventId()) {
case NativeEvent.ID_BOUNDS_CHANGED:
mask = 0;
break;
case ComponentEvent.COMPONENT_MOVED:
mask = NativeWindow.BOUNDS_NOSIZE;
break;
case ComponentEvent.COMPONENT_RESIZED:
mask = NativeWindow.BOUNDS_NOMOVE;
break;
default:
// awt.12E=Unknown component event id.
throw new RuntimeException(Messages.getString("awt.12E")); //$NON-NLS-1$
}
if (!(src instanceof Window)) {
// translate from the heavyweight ancestor's coordinate space into the
// parent's coordinate space
Component compTo = src.getParent();
Component compFrom = src.getHWAncestor();
if ((compTo != null) && (compFrom != null)) {
loc = MouseDispatcher.convertPoint(compFrom, loc, compTo);
}
} else {
// bounds events may also carry a window-state change for Frames
int windowState = event.getWindowState();
if ((windowState >= 0) && (src instanceof Frame)) {
((Frame) src).updateExtendedState(windowState);
}
}
src.setBounds(loc.x, loc.y, rect.width, rect.height, mask, false);
return false;
}
}
/**
 * The dispatcher of the keyboard events
 */
final class KeyDispatcher {
/**
 * Handle the keyboard event using the KeyboardFocusManager
 * @param src - the component receiving the event
 * @param event - the native event
 * @return - as in Dispatcher.onEvent()
 * @see Dispatcher#onEvent(NativeEvent)
 */
boolean dispatch(Component src, NativeEvent event) {
int id = event.getEventId();
int modifiers = event.getInputModifiers();
int location = event.getKeyLocation();
int code = event.getVKey();
StringBuffer chars = event.getKeyChars();
int charsLength = chars.length();
long time = event.getTime();
char keyChar = event.getLastChar();
if (src == null) {
//retarget focus proxy key events to focusOwner:
// NOTE(review): focusProxyOwner is only null-checked; the actual target
// is always KeyboardFocusManager.actualFocusOwner — confirm this is the
// intended retargeting.
Window focusProxyOwner = toolkit.getFocusProxyOwnerById(event
.getWindowId());
if (focusProxyOwner == null) {
return false;
}
src = KeyboardFocusManager.actualFocusOwner;
}
EventQueue eventQueue = toolkit.getSystemEventQueueImpl();
if (src != null) {
eventQueue.postEvent(new KeyEvent(src, id, time, modifiers,
code, keyChar, location));
// KEY_TYPED goes after KEY_PRESSED
// one KEY_TYPED event is synthesized per character produced by the press
if (id == KeyEvent.KEY_PRESSED) {
for (int i = 0; i < charsLength; i++) {
keyChar = chars.charAt(i);
if (keyChar != KeyEvent.CHAR_UNDEFINED) {
eventQueue.postEvent(new KeyEvent(src,
KeyEvent.KEY_TYPED, time, modifiers,
KeyEvent.VK_UNDEFINED, keyChar,
KeyEvent.KEY_LOCATION_UNKNOWN));
}
}
}
}
return false;
}
}
/**
 * Retargets the mouse events to the grab owner when mouse is grabbed,
 * grab and ungrab mouse when mouse buttons are pressed and released
 */
static final class MouseGrabManager {
/**
 * The top-level window holding the mouse grab
 * that was explicitly started by startGrab() method
 */
private Window nativeGrabOwner = null;
/**
 * The component that owns the synthetic
 * mouse grab while at least one of the
 * mouse buttons is pressed
 */
private Component syntheticGrabOwner = null;
/**
 * Previous value of syntheticGrabOwner
 */
private Component lastSyntheticGrabOwner = null;
/**
 * Number of mouse buttons currently pressed
 */
private int syntheticGrabDepth = 0;
/**
 * The callback to be called when the explicit mouse grab ends
 */
private Runnable whenCanceled;
/**
 * Explicitly start the mouse grab
 * @param grabWindow - the window that will own the grab
 * @param whenCanceled - the callback to call when the grab ends.
 * This parameter can be null
 * @throws RuntimeException if a grab is already active or the window has
 * no native peer
 */
void startGrab(Window grabWindow, Runnable whenCanceled) {
if (nativeGrabOwner != null) {
// awt.12F=Attempt to start nested mouse grab
throw new RuntimeException(Messages.getString("awt.12F")); //$NON-NLS-1$
}
NativeWindow win = grabWindow.getNativeWindow();
if (win == null) {
// awt.130=Attempt to grab mouse in not displayable window
throw new RuntimeException(Messages.getString("awt.130")); //$NON-NLS-1$
}
nativeGrabOwner = grabWindow;
this.whenCanceled = whenCanceled;
win.grabMouse();
}
/**
 * Ends the explicit mouse grab. If the non-null callback was provided
 * in the startGrab() method, this callback is called
 */
void endGrab() {
if (nativeGrabOwner == null) {
return;
}
// clear the owner before ungrabbing so re-entrant calls see no grab
Window grabWindow = nativeGrabOwner;
nativeGrabOwner = null;
NativeWindow win = grabWindow.getNativeWindow();
if (win != null) {
win.ungrabMouse();
if (whenCanceled != null) {
whenCanceled.run();
whenCanceled = null;
}
}
}
/**
 * Ends both explicit and synthetic grabs
 * @return - always returns false
 */
boolean onGrabCanceled() {
endGrab();
resetSyntheticGrab();
return false;
}
/**
 * Starts the synthetic mouse grab, increases the counter
 * of currently pressed mouse buttons
 * @param source - the component where mouse press event occurred
 * @return - the component that owns the synthetic grab
 */
Component onMousePressed(Component source) {
// only the first press establishes ownership; further presses nest
if (syntheticGrabDepth == 0) {
syntheticGrabOwner = source;
lastSyntheticGrabOwner = source;
}
syntheticGrabDepth++;
return syntheticGrabOwner;
}
/**
 * Decreases the counter of currently pressed mouse buttons,
 * ends the synthetic mouse grab, when this counter becomes zero
 * @param source - the component where mouse release event occurred
 * @return - the component that owns the synthetic grab,
 * or source parameter if mouse grab was released
 */
Component onMouseReleased(Component source) {
Component ret = source;
// while an explicit grab is active, the synthetic owner is not used
if (syntheticGrabOwner != null && nativeGrabOwner == null) {
ret = syntheticGrabOwner;
}
syntheticGrabDepth--;
if (syntheticGrabDepth <= 0) {
resetSyntheticGrab();
lastSyntheticGrabOwner = null;
}
return ret;
}
/**
 * Update the state of synthetic mouse grab
 * when the mouse is moved/dragged
 * @param event - the native event
 */
void preprocessEvent(NativeEvent event) {
int id = event.getEventId();
switch (id) {
case MouseEvent.MOUSE_MOVED:
// a move without buttons means any synthetic grab is stale
if (syntheticGrabOwner != null) {
syntheticGrabOwner = null;
syntheticGrabDepth = 0;
}
if (lastSyntheticGrabOwner != null) {
lastSyntheticGrabOwner = null;
}
// NOTE(review): no break — MOUSE_MOVED falls through into the
// MOUSE_DRAGGED case. After the reset above the restore branch below
// cannot trigger for MOUSE_MOVED, so the fall-through is harmless,
// but confirm it is intentional.
case MouseEvent.MOUSE_DRAGGED:
// restore a grab that was lost (e.g. button state changed outside the
// window): rebuild the depth from the current button modifier mask
if (syntheticGrabOwner == null
&& lastSyntheticGrabOwner != null) {
syntheticGrabOwner = lastSyntheticGrabOwner;
syntheticGrabDepth = 0;
int mask = event.getInputModifiers();
syntheticGrabDepth += (mask & InputEvent.BUTTON1_DOWN_MASK) != 0 ? 1
: 0;
syntheticGrabDepth += (mask & InputEvent.BUTTON2_DOWN_MASK) != 0 ? 1
: 0;
syntheticGrabDepth += (mask & InputEvent.BUTTON3_DOWN_MASK) != 0 ? 1
: 0;
}
}
}
/**
 * @return the component that currently owns the synthetic grab
 */
Component getSyntheticGrabOwner() {
return syntheticGrabOwner;
}
/**
 * Ends the synthetic grab by clearing the owner and the pressed-button count
 */
private void resetSyntheticGrab() {
syntheticGrabOwner = null;
syntheticGrabDepth = 0;
}
}
/**
 * Dispatches native events related to the pop-up boxes
 * (the non-component windows such as menus and drop lists)
 */
final class PopupDispatcher {
    // The currently active pop-up (holds the mouse grab), or null.
    private PopupBox activePopup;
    // The pop-up currently under the mouse cursor, used for ENTER/EXIT events.
    private PopupBox underCursor;
    // Tracks the mouse grab while buttons are pressed over pop-ups.
    private final MouseGrab grab = new MouseGrab();
    /**
     * Handles the mouse grab for pop-up boxes
     */
    private final class MouseGrab {
        // Number of mouse buttons currently pressed.
        private int depth;
        // The pop-up that owns the grab, captured on the first press.
        private PopupBox owner;
        // Screen location of the initial press; used by closeOnUngrab().
        private final Point start = new Point();
        /**
         * Starts the grab when the mouse is pressed.
         * @param src - the pop-up box where the mouse event occurred
         * @param where - the mouse pointer location
         * @return - the grab owner
         */
        PopupBox mousePressed(PopupBox src, Point where) {
            if (depth == 0) {
                owner = src;
                start.setLocation(where);
            }
            depth++;
            return owner;
        }
        /**
         * Ends the grab when all mouse buttons are released.
         * @param src - the pop-up box where the mouse event occurred
         * @param where - the mouse pointer location
         * @return - the grab owner; the src parameter if there was no owner;
         *           or null when the owner chose to close on ungrab
         */
        PopupBox mouseReleased(PopupBox src, Point where) {
            PopupBox ret = (owner != null) ? owner : src;
            if (depth == 0) {
                // Release without a matching press (grab was reset); nothing to do.
                return ret;
            }
            depth--;
            if (depth == 0) {
                PopupBox tgt = owner;
                owner = null;
                if (tgt != null && src == null) {
                    // Released outside any pop-up: translate the press and
                    // release points into the owner's coordinates and let it
                    // decide whether to close.
                    Point a = new Point(start);
                    Point b = new Point(where);
                    Point pos = tgt.getScreenLocation();
                    a.translate(-pos.x, -pos.y);
                    b.translate(-pos.x, -pos.y);
                    if (tgt.closeOnUngrab(a, b)) {
                        return null;
                    }
                }
            }
            return ret;
        }
        /**
         * Resets the grab state: owner, button count and press location.
         */
        void reset() {
            depth = 0;
            owner = null;
            start.setLocation(0, 0);
        }
        /**
         * @return - the pop-up box currently owning the grab
         */
        public PopupBox getOwner() {
            return owner;
        }
    }
    /**
     * Calls the mouse event handler of the pop-up box, converting the
     * screen location into the pop-up's local coordinates.
     * @param src - the pop-up box where the mouse event occurred
     * @param eventId - the event ID, one of MouseEvent.MOUSE_* constants
     * @param where - the mouse pointer location (screen coordinates)
     * @param event - native event
     */
    private void mouseEvent(PopupBox src, int eventId, Point where,
            NativeEvent event) {
        Point pos = src.getScreenLocation();
        pos.setLocation(where.x - pos.x, where.y - pos.y);
        src.onMouseEvent(eventId, pos, event.getMouseButton(), event
                .getTime(), event.getInputModifiers(), event
                .getWheelRotation());
    }
    /**
     * Handles a native event targeted at a pop-up box. This can be a
     * paint event, a mouse event or a keyboard event.
     * @param event - the native event
     * @return - false if the event was handled and doesn't
     * need further processing; true when further
     * processing is needed
     */
    boolean onEvent(NativeEvent event) {
        PopupBox src = toolkit.getPopupBoxById(event.getWindowId());
        int id = event.getEventId();
        if ((id == PaintEvent.PAINT)) {
            if (src != null) {
                // Paint for a pop-up: repaint it and consume the event.
                src.paint(event.getClipRects());
                return true;
            }
            // Paint for a regular window: a Frame also repaints its menu bar.
            Component c = toolkit.getComponentById(event.getWindowId());
            if ((c != null) && (c instanceof Frame)) {
                ((Frame) c).paintMenuBar(event.getClipRects());
            }
            return false;
        }
        if ((id >= MouseEvent.MOUSE_FIRST) && (id <= MouseEvent.MOUSE_LAST)) {
            Point where = event.getScreenPos();
            if (src != underCursor) {
                // The cursor crossed a pop-up boundary: synthesize EXIT for
                // the old pop-up and ENTER for the new one.
                if (underCursor != null) {
                    mouseEvent(underCursor, MouseEvent.MOUSE_EXITED, where,
                            event);
                }
                underCursor = src;
                if (underCursor != null) {
                    mouseEvent(underCursor, MouseEvent.MOUSE_ENTERED,
                            where, event);
                    underCursor.setDefaultCursor();
                }
            }
            if (id == MouseEvent.MOUSE_EXITED) {
                underCursor = null;
            }
            // Ignore mouse events when no pop-up is active, unless they hit
            // a menu bar (which can open a pop-up).
            if ((activePopup == null) && (src == null || !src.isMenuBar())) {
                return false;
            }
            // Route presses/releases through the grab so drags outside the
            // pop-up still reach the grab owner.
            if (id == MouseEvent.MOUSE_PRESSED) {
                src = grab.mousePressed(src, where);
            } else if (id == MouseEvent.MOUSE_RELEASED) {
                src = grab.mouseReleased(src, where);
            } else if (src == null) {
                src = grab.getOwner();
            }
            PopupBox wasActive = activePopup;
            if (src != null) {
                mouseEvent(src, id, where, event);
                return src.isMenu() || src.contains(where);
            }
            if (wasActive != null && activePopup == null) {
                // The pop-up was deactivated while handling the grab.
                return wasActive.isMenu();
            }
            // Press/release outside every pop-up dismisses the active one.
            // NOTE(review): activePopup may be null here if the grab owner
            // closed itself in closeOnUngrab() while wasActive was already
            // null — verify this path cannot NPE.
            if ((id == MouseEvent.MOUSE_PRESSED)
                    || (id == MouseEvent.MOUSE_RELEASED)) {
                boolean isMenu = activePopup.isMenu();
                deactivateAll();
                return !isMenu;
            }
            return true;
        }
        if (activePopup == null) {
            return false;
        }
        if ((id >= KeyEvent.KEY_FIRST) && (id <= KeyEvent.KEY_LAST)) {
            // Keyboard input goes to the active pop-up; menus consume it.
            boolean isMenu = activePopup.isMenu();
            activePopup.dispatchKeyEvent(id, event.getVKey(), event
                    .getTime(), event.getInputModifiers());
            return isMenu;
        }
        return false;
    }
    /**
     * Remembers the pop-up as active and grabs the mouse on it.
     * The grab's end callback deactivates this same pop-up.
     * @param popup - the pop-up box to activate
     */
    void activate(final PopupBox popup) {
        if (activePopup == null) {
            activePopup = popup;
            mouseGrabManager.startGrab(popup.getOwner(), new Runnable() {
                public void run() {
                    deactivate(popup);
                }
            });
        }
    }
    /**
     * Deactivates the currently active pop-up box.
     */
    void deactivateAll() {
        deactivate(activePopup);
    }
    /**
     * Deactivates the pop-up box and ends the mouse grab. Always resets
     * the internal grab state, even when the pop-up is not the active one.
     */
    void deactivate(PopupBox popup) {
        grab.reset();
        if (activePopup != null && activePopup == popup) {
            activePopup = null;
            mouseGrabManager.endGrab();
            popup.hide();
            underCursor = null;
        }
    }
    /**
     * Checks whether the pop-up box is currently active.
     * @param popup - the pop-up box to check
     * @return - true if active
     */
    boolean isActive(PopupBox popup) {
        return (popup == activePopup) && (popup != null);
    }
}
}
| |
/**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package com.liferay.opensocial.model;
import com.liferay.opensocial.service.ClpSerializer;
import com.liferay.opensocial.service.OAuthTokenLocalServiceUtil;
import com.liferay.portal.kernel.bean.AutoEscapeBeanHandler;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.kernel.util.ProxyUtil;
import com.liferay.portal.kernel.util.StringBundler;
import com.liferay.portal.model.BaseModel;
import com.liferay.portal.model.impl.BaseModelImpl;
import com.liferay.portal.util.PortalUtil;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
 * Client-side (CLP) proxy model for {@code OAuthToken}. Keeps a local copy
 * of every persistent attribute and, when a remote (portal-side) model is
 * attached, forwards each setter call to it via reflection so both copies
 * stay in sync across class loaders.
 *
 * <p>Refactored: the fourteen setters previously each repeated the same
 * getMethod/invoke/try-catch boilerplate; that logic now lives in the
 * single private helper {@link #setRemoteModelValue(String, Class, Object)}.
 * Behavior and all public signatures are unchanged.</p>
 *
 * @author Brian Wing Shun Chan
 */
public class OAuthTokenClp extends BaseModelImpl<OAuthToken>
    implements OAuthToken {
    public OAuthTokenClp() {
    }
    @Override
    public Class<?> getModelClass() {
        return OAuthToken.class;
    }
    @Override
    public String getModelClassName() {
        return OAuthToken.class.getName();
    }
    /**
     * @return the primary key (the OAuth token ID)
     */
    @Override
    public long getPrimaryKey() {
        return _oAuthTokenId;
    }
    @Override
    public void setPrimaryKey(long primaryKey) {
        setOAuthTokenId(primaryKey);
    }
    @Override
    public Serializable getPrimaryKeyObj() {
        return _oAuthTokenId;
    }
    @Override
    public void setPrimaryKeyObj(Serializable primaryKeyObj) {
        setPrimaryKey(((Long)primaryKeyObj).longValue());
    }
    /**
     * @return a snapshot map of all persistent attributes, keyed by name
     */
    @Override
    public Map<String, Object> getModelAttributes() {
        Map<String, Object> attributes = new HashMap<String, Object>();
        attributes.put("oAuthTokenId", getOAuthTokenId());
        attributes.put("companyId", getCompanyId());
        attributes.put("userId", getUserId());
        attributes.put("userName", getUserName());
        attributes.put("createDate", getCreateDate());
        attributes.put("modifiedDate", getModifiedDate());
        attributes.put("gadgetKey", getGadgetKey());
        attributes.put("serviceName", getServiceName());
        attributes.put("moduleId", getModuleId());
        attributes.put("accessToken", getAccessToken());
        attributes.put("tokenName", getTokenName());
        attributes.put("tokenSecret", getTokenSecret());
        attributes.put("sessionHandle", getSessionHandle());
        attributes.put("expiration", getExpiration());
        return attributes;
    }
    /**
     * Applies every non-null entry of the given map through the matching
     * setter, so remote-model forwarding still takes place.
     */
    @Override
    public void setModelAttributes(Map<String, Object> attributes) {
        Long oAuthTokenId = (Long)attributes.get("oAuthTokenId");
        if (oAuthTokenId != null) {
            setOAuthTokenId(oAuthTokenId);
        }
        Long companyId = (Long)attributes.get("companyId");
        if (companyId != null) {
            setCompanyId(companyId);
        }
        Long userId = (Long)attributes.get("userId");
        if (userId != null) {
            setUserId(userId);
        }
        String userName = (String)attributes.get("userName");
        if (userName != null) {
            setUserName(userName);
        }
        Date createDate = (Date)attributes.get("createDate");
        if (createDate != null) {
            setCreateDate(createDate);
        }
        Date modifiedDate = (Date)attributes.get("modifiedDate");
        if (modifiedDate != null) {
            setModifiedDate(modifiedDate);
        }
        String gadgetKey = (String)attributes.get("gadgetKey");
        if (gadgetKey != null) {
            setGadgetKey(gadgetKey);
        }
        String serviceName = (String)attributes.get("serviceName");
        if (serviceName != null) {
            setServiceName(serviceName);
        }
        Long moduleId = (Long)attributes.get("moduleId");
        if (moduleId != null) {
            setModuleId(moduleId);
        }
        String accessToken = (String)attributes.get("accessToken");
        if (accessToken != null) {
            setAccessToken(accessToken);
        }
        String tokenName = (String)attributes.get("tokenName");
        if (tokenName != null) {
            setTokenName(tokenName);
        }
        String tokenSecret = (String)attributes.get("tokenSecret");
        if (tokenSecret != null) {
            setTokenSecret(tokenSecret);
        }
        String sessionHandle = (String)attributes.get("sessionHandle");
        if (sessionHandle != null) {
            setSessionHandle(sessionHandle);
        }
        Long expiration = (Long)attributes.get("expiration");
        if (expiration != null) {
            setExpiration(expiration);
        }
    }
    @Override
    public long getOAuthTokenId() {
        return _oAuthTokenId;
    }
    @Override
    public void setOAuthTokenId(long oAuthTokenId) {
        _oAuthTokenId = oAuthTokenId;
        setRemoteModelValue("setOAuthTokenId", long.class, oAuthTokenId);
    }
    @Override
    public long getCompanyId() {
        return _companyId;
    }
    @Override
    public void setCompanyId(long companyId) {
        _companyId = companyId;
        setRemoteModelValue("setCompanyId", long.class, companyId);
    }
    @Override
    public long getUserId() {
        return _userId;
    }
    @Override
    public void setUserId(long userId) {
        _userId = userId;
        setRemoteModelValue("setUserId", long.class, userId);
    }
    @Override
    public String getUserUuid() throws SystemException {
        return PortalUtil.getUserValue(getUserId(), "uuid", _userUuid);
    }
    @Override
    public void setUserUuid(String userUuid) {
        // The user UUID is resolved lazily in getUserUuid(); it is not
        // forwarded to the remote model.
        _userUuid = userUuid;
    }
    @Override
    public String getUserName() {
        return _userName;
    }
    @Override
    public void setUserName(String userName) {
        _userName = userName;
        setRemoteModelValue("setUserName", String.class, userName);
    }
    @Override
    public Date getCreateDate() {
        return _createDate;
    }
    @Override
    public void setCreateDate(Date createDate) {
        _createDate = createDate;
        setRemoteModelValue("setCreateDate", Date.class, createDate);
    }
    @Override
    public Date getModifiedDate() {
        return _modifiedDate;
    }
    @Override
    public void setModifiedDate(Date modifiedDate) {
        _modifiedDate = modifiedDate;
        setRemoteModelValue("setModifiedDate", Date.class, modifiedDate);
    }
    @Override
    public String getGadgetKey() {
        return _gadgetKey;
    }
    @Override
    public void setGadgetKey(String gadgetKey) {
        _gadgetKey = gadgetKey;
        setRemoteModelValue("setGadgetKey", String.class, gadgetKey);
    }
    @Override
    public String getServiceName() {
        return _serviceName;
    }
    @Override
    public void setServiceName(String serviceName) {
        _serviceName = serviceName;
        setRemoteModelValue("setServiceName", String.class, serviceName);
    }
    @Override
    public long getModuleId() {
        return _moduleId;
    }
    @Override
    public void setModuleId(long moduleId) {
        _moduleId = moduleId;
        setRemoteModelValue("setModuleId", long.class, moduleId);
    }
    @Override
    public String getAccessToken() {
        return _accessToken;
    }
    @Override
    public void setAccessToken(String accessToken) {
        _accessToken = accessToken;
        setRemoteModelValue("setAccessToken", String.class, accessToken);
    }
    @Override
    public String getTokenName() {
        return _tokenName;
    }
    @Override
    public void setTokenName(String tokenName) {
        _tokenName = tokenName;
        setRemoteModelValue("setTokenName", String.class, tokenName);
    }
    @Override
    public String getTokenSecret() {
        return _tokenSecret;
    }
    @Override
    public void setTokenSecret(String tokenSecret) {
        _tokenSecret = tokenSecret;
        setRemoteModelValue("setTokenSecret", String.class, tokenSecret);
    }
    @Override
    public String getSessionHandle() {
        return _sessionHandle;
    }
    @Override
    public void setSessionHandle(String sessionHandle) {
        _sessionHandle = sessionHandle;
        setRemoteModelValue("setSessionHandle", String.class, sessionHandle);
    }
    @Override
    public long getExpiration() {
        return _expiration;
    }
    @Override
    public void setExpiration(long expiration) {
        _expiration = expiration;
        setRemoteModelValue("setExpiration", long.class, expiration);
    }
    public BaseModel<?> getOAuthTokenRemoteModel() {
        return _oAuthTokenRemoteModel;
    }
    public void setOAuthTokenRemoteModel(BaseModel<?> oAuthTokenRemoteModel) {
        _oAuthTokenRemoteModel = oAuthTokenRemoteModel;
    }
    /**
     * Invokes a method on the remote model, translating every parameter
     * into the remote class loader's types via {@link ClpSerializer} and
     * translating the return value back.
     *
     * @param methodName the name of the remote method
     * @param parameterTypes the declared parameter types (local view)
     * @param parameterValues the arguments to pass
     * @return the translated return value, or null
     * @throws Exception if reflection or translation fails
     */
    public Object invokeOnRemoteModel(String methodName,
        Class<?>[] parameterTypes, Object[] parameterValues)
        throws Exception {
        Object[] remoteParameterValues = new Object[parameterValues.length];
        for (int i = 0; i < parameterValues.length; i++) {
            if (parameterValues[i] != null) {
                remoteParameterValues[i] = ClpSerializer.translateInput(parameterValues[i]);
            }
        }
        Class<?> remoteModelClass = _oAuthTokenRemoteModel.getClass();
        ClassLoader remoteModelClassLoader = remoteModelClass.getClassLoader();
        Class<?>[] remoteParameterTypes = new Class[parameterTypes.length];
        for (int i = 0; i < parameterTypes.length; i++) {
            if (parameterTypes[i].isPrimitive()) {
                // Primitive types are shared across class loaders.
                remoteParameterTypes[i] = parameterTypes[i];
            }
            else {
                // Reference types must be reloaded in the remote class loader.
                String parameterTypeName = parameterTypes[i].getName();
                remoteParameterTypes[i] = remoteModelClassLoader.loadClass(parameterTypeName);
            }
        }
        Method method = remoteModelClass.getMethod(methodName,
                remoteParameterTypes);
        Object returnValue = method.invoke(_oAuthTokenRemoteModel,
                remoteParameterValues);
        if (returnValue != null) {
            returnValue = ClpSerializer.translateOutput(returnValue);
        }
        return returnValue;
    }
    /**
     * Adds this model when new, otherwise updates the stored one.
     */
    @Override
    public void persist() throws SystemException {
        if (this.isNew()) {
            OAuthTokenLocalServiceUtil.addOAuthToken(this);
        }
        else {
            OAuthTokenLocalServiceUtil.updateOAuthToken(this);
        }
    }
    @Override
    public OAuthToken toEscapedModel() {
        return (OAuthToken)ProxyUtil.newProxyInstance(OAuthToken.class.getClassLoader(),
            new Class[] { OAuthToken.class }, new AutoEscapeBeanHandler(this));
    }
    @Override
    public Object clone() {
        OAuthTokenClp clone = new OAuthTokenClp();
        clone.setOAuthTokenId(getOAuthTokenId());
        clone.setCompanyId(getCompanyId());
        clone.setUserId(getUserId());
        clone.setUserName(getUserName());
        clone.setCreateDate(getCreateDate());
        clone.setModifiedDate(getModifiedDate());
        clone.setGadgetKey(getGadgetKey());
        clone.setServiceName(getServiceName());
        clone.setModuleId(getModuleId());
        clone.setAccessToken(getAccessToken());
        clone.setTokenName(getTokenName());
        clone.setTokenSecret(getTokenSecret());
        clone.setSessionHandle(getSessionHandle());
        clone.setExpiration(getExpiration());
        return clone;
    }
    /**
     * Orders tokens by primary key; consistent with {@link #equals(Object)}.
     */
    @Override
    public int compareTo(OAuthToken oAuthToken) {
        long primaryKey = oAuthToken.getPrimaryKey();
        if (getPrimaryKey() < primaryKey) {
            return -1;
        }
        else if (getPrimaryKey() > primaryKey) {
            return 1;
        }
        else {
            return 0;
        }
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof OAuthTokenClp)) {
            return false;
        }
        OAuthTokenClp oAuthToken = (OAuthTokenClp)obj;
        long primaryKey = oAuthToken.getPrimaryKey();
        if (getPrimaryKey() == primaryKey) {
            return true;
        }
        else {
            return false;
        }
    }
    @Override
    public int hashCode() {
        return (int)getPrimaryKey();
    }
    @Override
    public String toString() {
        // 14 attributes: 2 appends each plus the closing brace = 29.
        StringBundler sb = new StringBundler(29);
        sb.append("{oAuthTokenId=");
        sb.append(getOAuthTokenId());
        sb.append(", companyId=");
        sb.append(getCompanyId());
        sb.append(", userId=");
        sb.append(getUserId());
        sb.append(", userName=");
        sb.append(getUserName());
        sb.append(", createDate=");
        sb.append(getCreateDate());
        sb.append(", modifiedDate=");
        sb.append(getModifiedDate());
        sb.append(", gadgetKey=");
        sb.append(getGadgetKey());
        sb.append(", serviceName=");
        sb.append(getServiceName());
        sb.append(", moduleId=");
        sb.append(getModuleId());
        sb.append(", accessToken=");
        sb.append(getAccessToken());
        sb.append(", tokenName=");
        sb.append(getTokenName());
        sb.append(", tokenSecret=");
        sb.append(getTokenSecret());
        sb.append(", sessionHandle=");
        sb.append(getSessionHandle());
        sb.append(", expiration=");
        sb.append(getExpiration());
        sb.append("}");
        return sb.toString();
    }
    @Override
    public String toXmlString() {
        // 14 attributes: 3 appends each plus 4 for the model envelope = 46.
        StringBundler sb = new StringBundler(46);
        sb.append("<model><model-name>");
        sb.append("com.liferay.opensocial.model.OAuthToken");
        sb.append("</model-name>");
        sb.append(
            "<column><column-name>oAuthTokenId</column-name><column-value><![CDATA[");
        sb.append(getOAuthTokenId());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>companyId</column-name><column-value><![CDATA[");
        sb.append(getCompanyId());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>userId</column-name><column-value><![CDATA[");
        sb.append(getUserId());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>userName</column-name><column-value><![CDATA[");
        sb.append(getUserName());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>createDate</column-name><column-value><![CDATA[");
        sb.append(getCreateDate());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>modifiedDate</column-name><column-value><![CDATA[");
        sb.append(getModifiedDate());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>gadgetKey</column-name><column-value><![CDATA[");
        sb.append(getGadgetKey());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>serviceName</column-name><column-value><![CDATA[");
        sb.append(getServiceName());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>moduleId</column-name><column-value><![CDATA[");
        sb.append(getModuleId());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>accessToken</column-name><column-value><![CDATA[");
        sb.append(getAccessToken());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>tokenName</column-name><column-value><![CDATA[");
        sb.append(getTokenName());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>tokenSecret</column-name><column-value><![CDATA[");
        sb.append(getTokenSecret());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>sessionHandle</column-name><column-value><![CDATA[");
        sb.append(getSessionHandle());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>expiration</column-name><column-value><![CDATA[");
        sb.append(getExpiration());
        sb.append("]]></column-value></column>");
        sb.append("</model>");
        return sb.toString();
    }
    /**
     * Forwards a single-argument setter call to the remote model, if one is
     * attached. Replaces the reflection boilerplate previously repeated in
     * every setter; behavior (including wrapping any failure in an
     * {@link UnsupportedOperationException}) is unchanged.
     *
     * @param methodName the remote setter name, e.g. "setCompanyId"
     * @param parameterType the setter's declared parameter type
     * @param value the value to forward (boxed for primitives)
     */
    private void setRemoteModelValue(String methodName,
        Class<?> parameterType, Object value) {
        if (_oAuthTokenRemoteModel == null) {
            return;
        }
        try {
            Class<?> clazz = _oAuthTokenRemoteModel.getClass();
            Method method = clazz.getMethod(methodName, parameterType);
            method.invoke(_oAuthTokenRemoteModel, value);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
    private long _oAuthTokenId;
    private long _companyId;
    private long _userId;
    private String _userUuid;
    private String _userName;
    private Date _createDate;
    private Date _modifiedDate;
    private String _gadgetKey;
    private String _serviceName;
    private long _moduleId;
    private String _accessToken;
    private String _tokenName;
    private String _tokenSecret;
    private String _sessionHandle;
    private long _expiration;
    private BaseModel<?> _oAuthTokenRemoteModel;
}
| |
/**
* Derby - Class org.apache.derbyTesting.functionTests.tests.lang.NestedWhereSubqueryTest
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.derbyTesting.functionTests.tests.lang;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import junit.framework.Test;
import org.apache.derbyTesting.junit.BaseJDBCTestCase;
import org.apache.derbyTesting.junit.JDBC;
import org.apache.derbyTesting.junit.TestConfiguration;
/**
* Nested WHERE subquery tests. Tests nested WHERE EXISTS | ANY | IN functionality.
*
* Please refer to DERBY-3301 for more details.
*/
public class NestedWhereSubqueryTest extends BaseJDBCTestCase {
/**
 * Creates a test case with the given name.
 *
 * @param name the name of the test method to run
 */
public NestedWhereSubqueryTest(String name) {
    super(name);
}
/**
* Main test body
*
* @throws SQLException
*/
public void testBasicOperations()
throws SQLException {
Statement s = createStatement();
/*
* Create tables needed for DERBY-3301 regression test
*/
StringBuffer sb = new StringBuffer();
sb.append("CREATE TABLE departments ( ");
sb.append("ID INTEGER NOT NULL, ");
sb.append("NAME VARCHAR(32) NOT NULL, ");
sb.append("COMPANYID INTEGER, ");
sb.append("CONSTRAINT DEPTS_PK PRIMARY KEY (ID) ");
sb.append(")");
s.executeUpdate(sb.toString());
sb = new StringBuffer();
sb.append("CREATE TABLE employees ( ");
sb.append("EMPID INTEGER NOT NULL, ");
sb.append("FIRSTNAME VARCHAR(32) NOT NULL, ");
sb.append("DEPARTMENT INTEGER, ");
sb.append("CONSTRAINT PERS_DEPT_FK FOREIGN KEY (DEPARTMENT) REFERENCES departments, ");
sb.append("CONSTRAINT EMPS_PK PRIMARY KEY (EMPID) ");
sb.append(")");
s.executeUpdate(sb.toString());
sb = new StringBuffer();
sb.append("CREATE TABLE projects ( ");
sb.append("PROJID INTEGER NOT NULL, ");
sb.append("NAME VARCHAR(32) NOT NULL, ");
sb.append("CONSTRAINT PROJS_PK PRIMARY KEY (PROJID) ");
sb.append(")");
s.executeUpdate(sb.toString());
sb = new StringBuffer();
sb.append("CREATE TABLE project_employees ( ");
sb.append("PROJID INTEGER REFERENCES projects NOT NULL, ");
sb.append("EMPID INTEGER REFERENCES employees NOT NULL ");
sb.append(")");
s.executeUpdate(sb.toString());
/*
* Fill some data into the tables
*/
s.executeUpdate("INSERT INTO departments VALUES (1, 'Research', 1)");
s.executeUpdate("INSERT INTO departments VALUES (2, 'Marketing', 1)");
s.executeUpdate("INSERT INTO employees VALUES (11, 'Alex', 1)");
s.executeUpdate("INSERT INTO employees VALUES (12, 'Bill', 1)");
s.executeUpdate("INSERT INTO employees VALUES (13, 'Charles', 1)");
s.executeUpdate("INSERT INTO employees VALUES (14, 'David', 2)");
s.executeUpdate("INSERT INTO employees VALUES (15, 'Earl', 2)");
s.executeUpdate("INSERT INTO projects VALUES (101, 'red')");
s.executeUpdate("INSERT INTO projects VALUES (102, 'orange')");
s.executeUpdate("INSERT INTO projects VALUES (103, 'yellow')");
s.executeUpdate("INSERT INTO project_employees VALUES (102, 13)");
s.executeUpdate("INSERT INTO project_employees VALUES (101, 13)");
s.executeUpdate("INSERT INTO project_employees VALUES (102, 12)");
s.executeUpdate("INSERT INTO project_employees VALUES (103, 15)");
s.executeUpdate("INSERT INTO project_employees VALUES (103, 14)");
s.executeUpdate("INSERT INTO project_employees VALUES (101, 12)");
s.executeUpdate("INSERT INTO project_employees VALUES (101, 11)");
/*
* Preliminary data check
*/
ResultSet rs = s.executeQuery("select * from employees");
String[][] expectedRows = {{"11", "Alex", "1"},
{"12", "Bill", "1"},
{"13", "Charles", "1"},
{"14", "David", "2"},
{"15", "Earl", "2"}};
JDBC.assertUnorderedResultSet(rs, expectedRows);
rs = s.executeQuery("select * from departments");
expectedRows = new String [][] {{"1", "Research", "1"},
{"2","Marketing","1"}};
JDBC.assertUnorderedResultSet(rs, expectedRows);
rs = s.executeQuery("select * from projects");
expectedRows = new String [][] {{"101","red"},
{"102","orange"},
{"103","yellow"}};
JDBC.assertUnorderedResultSet(rs, expectedRows);
rs = s.executeQuery("select * from project_employees");
expectedRows = new String [][] {{"102","13"},
{"101","13"},
{"102","12"},
{"103","15"},
{"103","14"},
{"101","12"},
{"101","11"}};
JDBC.assertUnorderedResultSet(rs, expectedRows);
/*
* DERBY-3301: This query should return 7 rows
*/
sb = new StringBuffer();
sb.append("select unbound_e.empid, unbound_p.projid ");
sb.append("from departments this, ");
sb.append(" employees unbound_e, ");
sb.append(" projects unbound_p ");
sb.append("where exists ( ");
sb.append(" select 1 from employees this_employees_e ");
sb.append(" where exists ( ");
sb.append(" select 1 from project_employees this_employees_e_projects_p ");
sb.append(" where this_employees_e_projects_p.empid = this_employees_e.empid ");
sb.append(" and this_employees_e.department = this.id ");
sb.append(" and unbound_p.projid = this_employees_e_projects_p.projid ");
sb.append(" and unbound_e.empid = this_employees_e.empid) ");
sb.append(" )");
rs = s.executeQuery(sb.toString());
expectedRows = new String [][] {{"13", "101"},
{"12", "101"},
{"11", "101"},
{"13", "102"},
{"12", "102"},
{"15", "103"},
{"14", "103"}};
JDBC.assertUnorderedResultSet(rs, expectedRows);
/* A variation of the above WHERE EXISTS but using IN should return the same rows */
sb = new StringBuffer();
sb.append("select unbound_e.empid, unbound_p.projid ");
sb.append("from departments this, ");
sb.append(" employees unbound_e, ");
sb.append(" projects unbound_p ");
sb.append("where exists ( ");
sb.append(" select 1 from employees this_employees_e ");
sb.append(" where this_employees_e.empid in ( ");
sb.append(" select this_employees_e_projects_p.empid ");
sb.append(" from project_employees this_employees_e_projects_p ");
sb.append(" where this_employees_e_projects_p.empid = this_employees_e.empid ");
sb.append(" and this_employees_e.department = this.id ");
sb.append(" and unbound_p.projid = this_employees_e_projects_p.projid ");
sb.append(" and unbound_e.empid = this_employees_e.empid) ");
sb.append(" )");
rs = s.executeQuery(sb.toString());
JDBC.assertUnorderedResultSet(rs, expectedRows);
/* A variation of the above WHERE EXISTS but using ANY should return the same rows */
sb = new StringBuffer();
sb.append("select unbound_e.empid, unbound_p.projid ");
sb.append("from departments this, ");
sb.append(" employees unbound_e, ");
sb.append(" projects unbound_p ");
sb.append("where exists ( ");
sb.append(" select 1 from employees this_employees_e ");
sb.append(" where this_employees_e.empid = any ( ");
sb.append(" select this_employees_e_projects_p.empid ");
sb.append(" from project_employees this_employees_e_projects_p ");
sb.append(" where this_employees_e_projects_p.empid = this_employees_e.empid ");
sb.append(" and this_employees_e.department = this.id ");
sb.append(" and unbound_p.projid = this_employees_e_projects_p.projid ");
sb.append(" and unbound_e.empid = this_employees_e.empid) ");
sb.append(" )");
rs = s.executeQuery(sb.toString());
JDBC.assertUnorderedResultSet(rs, expectedRows);
/*
* The next 5 queries were also found problematic as part DERBY-3301
*/
sb = new StringBuffer();
sb.append("select unbound_e.empid from departments this, employees unbound_e ");
sb.append("where exists ( ");
sb.append(" select 1 from employees this_employees_e ");
sb.append(" where this_employees_e.department = this.id and ");
sb.append(" unbound_e.empid = this_employees_e.empid and this.id = 2)");
rs = s.executeQuery(sb.toString());
expectedRows = new String [][] {{"14"},{"15"}};
JDBC.assertUnorderedResultSet(rs, expectedRows);
sb = new StringBuffer();
sb.append("select this.id,unbound_e.empid,unbound_p.projid from departments this, ");
sb.append(" employees unbound_e, projects unbound_p ");
sb.append("where exists ( ");
sb.append(" select 1 from employees this_employees_e ");
sb.append(" where exists ( ");
sb.append(" select 1 from project_employees this_employees_e_projects_p ");
sb.append(" where this_employees_e_projects_p.\"EMPID\" = this_employees_e.empid and ");
sb.append(" unbound_p.projid = this_employees_e_projects_p.projid and ");
sb.append(" this_employees_e.department = this.id and ");
sb.append(" unbound_e.empid = this_employees_e.empid ");
sb.append(" )) ");
rs = s.executeQuery(sb.toString());
expectedRows = new String [][] {{"1","11","101"},
{"1","12","101"},
{"1","13","101"},
{"1","12","102"},
{"1","13","102"},
{"2","14","103"},
{"2","15","103"}};
JDBC.assertUnorderedResultSet(rs, expectedRows);
sb = new StringBuffer();
sb.append("select unbound_e.empid,unbound_p.projid from departments this, ");
sb.append(" employees unbound_e, projects unbound_p ");
sb.append("where exists ( ");
sb.append(" select 1 from employees this_employees_e ");
sb.append(" where exists ( ");
sb.append(" select 1 from project_employees this_employees_e_projects_p ");
sb.append(" where this_employees_e_projects_p.\"EMPID\" = this_employees_e.empid ");
sb.append(" and unbound_p.projid = this_employees_e_projects_p.projid ");
sb.append(" and this_employees_e.department = this.id ");
sb.append(" and unbound_e.empid = this_employees_e.empid ");
sb.append(" and this.id = 1)) ");
rs = s.executeQuery(sb.toString());
expectedRows = new String [][] {{"11","101"},
{"12","101"},
{"13","101"},
{"12","102"},
{"13","102"}};
JDBC.assertUnorderedResultSet(rs, expectedRows);
sb = new StringBuffer();
sb.append("select unbound_e.empid,unbound_p.projid from departments this, ");
sb.append(" employees unbound_e, projects unbound_p ");
sb.append("where exists ( ");
sb.append(" select 1 from employees this_employees_e ");
sb.append(" where exists ( ");
sb.append(" select 1 from project_employees this_employees_e_projects_p ");
sb.append(" where this_employees_e_projects_p.\"EMPID\" = this_employees_e.empid ");
sb.append(" and unbound_p.projid = this_employees_e_projects_p.projid ");
sb.append(" and this_employees_e.department = this.id ");
sb.append(" and unbound_e.empid = this_employees_e.empid ");
sb.append(" and this.companyid = 1))");
rs = s.executeQuery(sb.toString());
expectedRows = new String [][] {{"11","101"},
{"12","101"},
{"13","101"},
{"12","102"},
{"13","102"},
{"14","103"},
{"15","103"}};
JDBC.assertUnorderedResultSet(rs, expectedRows);
sb = new StringBuffer();
sb.append("select unbound_e.empid, unbound_p.projid ");
sb.append("from departments this, ");
sb.append(" employees unbound_e, ");
sb.append(" projects unbound_p ");
sb.append("where exists ( ");
sb.append(" select 1 from employees this_employees_e ");
sb.append(" where 1 = 1 and exists ( ");
sb.append(" select 1 from project_employees this_employees_e_projects_p ");
sb.append(" where this_employees_e_projects_p.empid = this_employees_e.empid ");
sb.append(" and this_employees_e.department = this.id ");
sb.append(" and unbound_p.projid = this_employees_e_projects_p.projid ");
sb.append(" and unbound_e.empid = this_employees_e.empid) ");
sb.append(")");
rs = s.executeQuery(sb.toString());
expectedRows = new String [][] {{"11","101"},
{"12","101"},
{"13","101"},
{"12","102"},
{"13","102"},
{"14","103"},
{"15","103"}};
JDBC.assertUnorderedResultSet(rs, expectedRows);
/* Variation of the above using WHERE IN ... WHERE IN */
sb = new StringBuffer();
sb.append("select unbound_e.empid, unbound_p.projid ");
sb.append("from departments this, employees unbound_e, projects unbound_p ");
sb.append("where this.id in ( ");
sb.append(" select this_employees_e.department from employees this_employees_e ");
sb.append(" where this_employees_e.empid in ( ");
sb.append(" select this_employees_e_projects_p.empid ");
sb.append(" from project_employees this_employees_e_projects_p ");
sb.append(" where this_employees_e_projects_p.empid = this_employees_e.empid ");
sb.append(" and this_employees_e.department = this.id ");
sb.append(" and unbound_p.projid = this_employees_e_projects_p.projid ");
sb.append(" and unbound_e.empid = this_employees_e.empid)");
sb.append(")");
rs = s.executeQuery(sb.toString());
/* Expected rows are as above */
JDBC.assertUnorderedResultSet(rs, expectedRows);
/* Variation of the above using WHERE ANY ... WHERE ANY */
sb = new StringBuffer();
sb.append("select unbound_e.empid, unbound_p.projid ");
sb.append("from departments this, employees unbound_e, projects unbound_p ");
sb.append("where this.id = any ( ");
sb.append(" select this_employees_e.department from employees this_employees_e ");
sb.append(" where this_employees_e.empid = any ( ");
sb.append(" select this_employees_e_projects_p.empid ");
sb.append(" from project_employees this_employees_e_projects_p ");
sb.append(" where this_employees_e_projects_p.empid = this_employees_e.empid ");
sb.append(" and this_employees_e.department = this.id ");
sb.append(" and unbound_p.projid = this_employees_e_projects_p.projid ");
sb.append(" and unbound_e.empid = this_employees_e.empid)");
sb.append(")");
rs = s.executeQuery(sb.toString());
/* Expected rows are as above */
JDBC.assertUnorderedResultSet(rs, expectedRows);
/*
* DERBY-3321 revealed an NPE with a subquery in the [NOT] EXIST subuery FromList.
*/
s.executeUpdate("create table a (aa int, bb int)");
s.executeUpdate("create table b (bb int)");
s.executeUpdate("insert into a values (1,1),(1,2),(2,2)");
s.executeUpdate("insert into b values (1)");
/* NOT EXISTS */
sb = new StringBuffer();
sb.append("select * from a ");
sb.append("where not exists ");
sb.append("(select bb from (select bb from b) p where a.bb=p.bb)");
rs = s.executeQuery(sb.toString());
expectedRows = new String [][] {{"1","2"},
{"2","2"}};
JDBC.assertUnorderedResultSet(rs, expectedRows);
/* EXISTS */
sb = new StringBuffer();
sb.append("select * from a ");
sb.append("where exists ");
sb.append("(select bb from (select bb from b) p where a.bb=p.bb)");
rs = s.executeQuery(sb.toString());
expectedRows = new String [][] {{"1","1"}};
JDBC.assertUnorderedResultSet(rs, expectedRows);
/*
* Clean up the tables used.
*/
s.executeUpdate("drop table project_employees");
s.executeUpdate("drop table projects");
s.executeUpdate("drop table employees");
s.executeUpdate("drop table departments");
s.executeUpdate("drop table a");
s.executeUpdate("drop table b");
s.close();
}
    /**
     * Builds the suite for this test class using the framework's default
     * configuration set (delegates entirely to {@code TestConfiguration}).
     */
    public static Test suite() {
        return TestConfiguration.defaultSuite(NestedWhereSubqueryTest.class);
    }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.andes.management.ui.views;
import java.util.LinkedList;
import javax.management.MBeanServerConnection;
import static org.wso2.andes.management.ui.Constants.*;
import org.wso2.andes.management.ui.ApiVersion;
import org.wso2.andes.management.ui.ApplicationRegistry;
import org.wso2.andes.management.ui.ManagedBean;
import org.wso2.andes.management.ui.ManagedServer;
import org.wso2.andes.management.ui.ServerRegistry;
import org.wso2.andes.management.ui.actions.BackAction;
import org.wso2.andes.management.ui.jmx.JMXManagedObject;
import org.wso2.andes.management.ui.jmx.JMXServerRegistry;
import org.wso2.andes.management.ui.jmx.MBeanUtility;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.TabFolder;
import org.eclipse.swt.widgets.TabItem;
import org.eclipse.ui.IActionBars;
import org.eclipse.ui.ISelectionListener;
import org.eclipse.ui.IWorkbenchPart;
import org.eclipse.ui.forms.widgets.Form;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.part.ViewPart;
/**
 * MBean View: creates the appropriate view based on the user selection in the
 * Navigation View. Depending on the selected tree node it shows either a single
 * MBean's tab folder, a type-selection tab folder (Queue/Connection/Exchange),
 * or the virtual-host notifications tab folder.
 */
public class MBeanView extends ViewPart
{
    public static final String ID = "org.wso2.andes.management.ui.mbeanView";

    // Eclipse Forms toolkit and form that host all tab folders of this view.
    private FormToolkit _toolkit = null;
    private Form _form = null;

    // Current form title; defaults to the application name when nothing is selected.
    private String _formText = APPLICATION_NAME;

    // NOTE(review): several fields below are static and mutable. This works while
    // there is a single MBeanView instance in the workbench, but the state would be
    // shared across instances — confirm single-instance assumption before reuse.
    private static ManagedServer _server = null;
    private TreeObject _selectedNode = null;
    private ManagedBean _mbean = null;
    private static String _virtualHostName = null;
    private static MBeanServerConnection _mbsc = null;

    // Tab folder for the currently opened MBean.
    private TabFolder _tabFolder = null;
    private ISelectionListener _selectionListener = new SelectionListenerImpl();

    // TabFolder to list all the mbeans for a given mbean type (eg Connection, Queue, Exchange)
    private TabFolder _typeTabFolder = null;

    private TabFolder _notificationTabFolder = null;

    // Back-navigation history: holds ManagedBean and TreeObject entries (see back()).
    private LinkedList<Object> _backHistory;
    private BackAction _backAction;

    /*
     * Listener for the selection events in the navigation view.
     */
    private class SelectionListenerImpl implements ISelectionListener
    {
        public void selectionChanged(IWorkbenchPart part, ISelection sel)
        {
            if (!(sel instanceof IStructuredSelection))
                return;

            IStructuredSelection ss = (IStructuredSelection) sel;
            _selectedNode = (TreeObject)ss.getFirstElement();

            // mbean should be set to null. A selection done on the navigation view can be either an mbean or
            // an mbean type. For mbean-type selections (eg Connection, Queue, Exchange) _mbean will remain null.
            _mbean = null;
            clearView();

            // Clear the back history; it is only for use when opening subsequent mbeans not in the nav tree.
            _backHistory.clear();
            _backAction.setEnabled(false);

            // If a selected node (mbean) gets unregistered from the mbean server, mbeanview should
            // make the tab folder for that mbean invisible.
            if (_selectedNode == null)
            {
                return;
            }

            setServer();
            if(!ApplicationRegistry.isServerConnected(_server))
            {
                return;
            }

            if (MBEAN.equals(_selectedNode.getType()))
            {
                _mbean = (ManagedBean)_selectedNode.getManagedObject();
            }

            setFormTitle();
            showRelevantTabView();
        }
    }

    /** Opens the given mbean in the view, recording the current view in the back history. */
    public void openMBean(ManagedBean mbean)
    {
        openMBean(mbean, false);
    }

    /**
     * Opens the given mbean. When {@code undoing} is true (invoked from back()),
     * the current view is NOT pushed onto the back history.
     */
    private void openMBean(ManagedBean mbean, boolean undoing)
    {
        if(mbean == null)
        {
            return;
        }

        // If an mbean is about to be opened (but not returning via back) from the mbean view,
        // then record the currently viewed area/object as a back-history entry.
        if(!undoing)
        {
            if(_backHistory.isEmpty())
            {
                // Ensure the button is enabled if this is to be the first history item.
                _backAction.setEnabled(true);
            }

            if(_mbean == null)
            {
                // Queue/etc selection area is open; record the tree object.
                _backHistory.addLast(_selectedNode);
            }
            else
            {
                _backHistory.addLast(_mbean);
            }
        }

        _mbean = mbean;

        try
        {
            clearView();
            setFormTitle();
            showMBean(mbean);

            _form.layout(true);
            _form.getBody().layout(true, true);
        }
        catch(Exception ex)
        {
            MBeanUtility.handleException(mbean, ex);
        }
    }

    /**
     * Updates the form title from the current mbean or selected node.
     * Falls back to the application name when neither carries a virtual host.
     */
    private void setFormTitle()
    {
        if (_mbean != null)
        {
            _formText = _mbean.getType();
            if ((_mbean.getVirtualHostName() != null) && (!DEFAULT_VH.equals(_mbean.getVirtualHostName())) )
            {
                _formText = _formText.replaceFirst(VIRTUAL_HOST, _mbean.getVirtualHostName());
                // NOTE(review): the mbean name suffix is only appended for non-default
                // virtual hosts — confirm this nesting is intentional.
                if (_mbean.getName() != null && _mbean.getName().length() != 0)
                {
                    _formText = _formText + ": " + _mbean.getName();
                }
            }
        }
        // NOTE(review): this branch dereferences _selectedNode without a null check;
        // callers currently guarantee a non-null selection — verify before reusing.
        else if ((_selectedNode.getVirtualHost() != null) && (!DEFAULT_VH.equals(_selectedNode.getVirtualHost())))
        {
            _formText = _selectedNode.getVirtualHost();
        }
        else
        {
            _formText = APPLICATION_NAME;
        }
        _form.setText(_formText);
    }

    /**
     * Shows the tab view matching the type of the currently selected node:
     * type-instance, mbean-type, notifications, a single mbean, or the server node
     * (which may display an API-version advisory).
     */
    public void showRelevantTabView()
    {
        try
        {
            if (_selectedNode == null)
            {
                return;
            }

            String mbeanType = _selectedNode.getType();

            if (NODE_TYPE_TYPEINSTANCE.equals(mbeanType))
            {
                // A virtual host instance is selected.
                generateTypeTabFolder();
            }
            else if (NODE_TYPE_MBEANTYPE.equals(mbeanType))
            {
                showTypeTabFolder(_selectedNode.getName());
            }
            else if (NOTIFICATIONS.equals(mbeanType))
            {
                refreshNotificationPage();
            }
            else if (MBEAN.equals(mbeanType))
            {
                showMBean(_mbean);
            }
            else if(NODE_TYPE_SERVER.equals(mbeanType))
            {
                ServerRegistry serverReg = ApplicationRegistry.getServerRegistry(_server);

                // Check the server is connected.
                if(serverReg != null)
                {
                    // Post a message if the server supports a newer API version.
                    ApiVersion serverAPI = serverReg.getManagementApiVersion();
                    int supportedMajor = ApplicationRegistry.SUPPORTED_QPID_JMX_API_MAJOR_VERSION;
                    int supportedMinor = ApplicationRegistry.SUPPORTED_QPID_JMX_API_MINOR_VERSION;

                    if(serverAPI.greaterThan(supportedMajor, supportedMinor))
                    {
                        _form.setText("The server supports an updated management API and may offer " +
                                "functionality not available with this console. " +
                                "Please check for an updated console release.");
                    }
                }
            }
            else
            {
                return;
            }

            _form.layout(true);
            _form.getBody().layout(true, true);
        }
        catch(Exception ex)
        {
            MBeanUtility.handleException(_mbean, ex);
        }
    }

    /**
     * Sets the managed server based on the selection in the navigation view.
     * At any given time MBeanView will be displaying information for an mbean or mbean type
     * for a specific managed server. This server information is used by the tab controllers
     * to get the server registry; the JMX connection is cached in _mbsc.
     */
    private void setServer()
    {
        if (NODE_TYPE_SERVER.equals(_selectedNode.getType()))
        {
            _server = (ManagedServer)_selectedNode.getManagedObject();
            _virtualHostName = null;
        }
        else
        {
            // Walk up the tree to find the owning server node.
            TreeObject parent = _selectedNode.getParent();
            while (parent != null && !parent.getType().equals(NODE_TYPE_SERVER))
            {
                parent = parent.getParent();
            }

            if (parent != null && parent.getType().equals(NODE_TYPE_SERVER))
                _server = (ManagedServer)parent.getManagedObject();

            _virtualHostName = _selectedNode.getVirtualHost();
        }

        JMXServerRegistry serverRegistry = (JMXServerRegistry)ApplicationRegistry.getServerRegistry(_server);
        if(serverRegistry != null){
            _mbsc = serverRegistry.getServerConnection();
        }
    }

    /** Returns the server whose data is currently displayed (may be null before any selection). */
    public static ManagedServer getServer()
    {
        return _server;
    }

    /** Returns the virtual host of the current selection, or null for server-level selections. */
    public static String getVirtualHost()
    {
        return _virtualHostName;
    }

    /**
     * Builds and displays the tab folder for a single mbean, selecting the
     * notifications tab when the selected node is the Notifications node.
     */
    private void showMBean(ManagedBean mbean) throws Exception
    {
        try
        {
            MBeanUtility.getMBeanInfo(mbean);
        }
        catch(Exception ex)
        {
            MBeanUtility.handleException(mbean, ex);
            return;
        }

        if (_tabFolder != null && !_tabFolder.isDisposed())
        {
            _tabFolder.dispose();
        }

        _tabFolder = MBeanTabFolderFactory.generateMBeanTabFolder(_form.getBody(),(JMXManagedObject)mbean,_mbsc);

        int tabIndex = 0;
        if (NOTIFICATIONS.equals(_selectedNode.getType()))
        {
            tabIndex = _tabFolder.getItemCount() -1;
        }

        TabItem tab = _tabFolder.getItem(tabIndex);
        // If folder is being set as visible after tab refresh, then the tab
        // doesn't have the focus.
        _tabFolder.setSelection(tabIndex);
        refreshTab(tab);
    }

    /** Creates the form, wires the navigation-view selection listener, and installs the back action. */
    public void createPartControl(Composite parent)
    {
        // Create the Form.
        _toolkit = new FormToolkit(parent.getDisplay());
        _form = _toolkit.createForm(parent);
        _form.getBody().setLayout(new FormLayout());
        _form.setText(APPLICATION_NAME);

        // Add selection listener for selection events in the Navigation view.
        getSite().getPage().addSelectionListener(NavigationView.ID, _selectionListener);

        createNotificationsTabFolder();

        ViewUtility.setMBeanView(this);

        _backAction = new BackAction();
        getViewSite().getActionBars().getToolBarManager().add(_backAction);
        _backAction.setEnabled(false);
        _backHistory = new LinkedList<Object>();
    }

    /** Refreshes the given tab via its attached TabControl controller, if any. */
    private void refreshTab(TabItem tab)
    {
        if (tab == null)
        {
            return;
        }

        TabControl controller = (TabControl)tab.getData(TabControl.CONTROLLER);
        if(controller != null)
        {
            controller.refresh(_mbean);
        }
    }

    public void setFocus()
    {
        //_form.setFocus();
    }

    public void dispose()
    {
        _toolkit.dispose();
        super.dispose();
    }

    /** Creates the (initially hidden) virtual-host notifications tab folder. */
    private void createNotificationsTabFolder()
    {
        _notificationTabFolder = new TabFolder(_form.getBody(), SWT.NONE);
        FormData layoutData = new FormData();
        layoutData.left = new FormAttachment(0);
        layoutData.top = new FormAttachment(0);
        layoutData.right = new FormAttachment(100);
        layoutData.bottom = new FormAttachment(100);
        _notificationTabFolder.setLayoutData(layoutData);
        _notificationTabFolder.setVisible(false);

        VHNotificationsTabControl controller = new VHNotificationsTabControl(_notificationTabFolder);
        TabItem tab = new TabItem(_notificationTabFolder, SWT.NONE);
        tab.setText(NOTIFICATIONS);
        tab.setData(TabControl.CONTROLLER, controller);
        tab.setControl(controller.getControl());
    }

    /** Refreshes and shows the notifications tab folder. */
    private void refreshNotificationPage()
    {
        TabItem tab = _notificationTabFolder.getItem(0);
        VHNotificationsTabControl controller = (VHNotificationsTabControl)tab.getData(TabControl.CONTROLLER);
        controller.refresh();
        _notificationTabFolder.setVisible(true);
    }

    /** Rebuilds the full Queue/Connection/Exchange selection tab set for the current host. */
    private void generateTypeTabFolder() throws Exception
    {
        if (_typeTabFolder != null && !_typeTabFolder.isDisposed())
        {
            _typeTabFolder.dispose();
        }

        // Generates the full Queue/Connection/Exchange selection tab set.
        _typeTabFolder = MBeanTabFolderFactory.generateMBeanTypeTabFolder(
                                        _form.getBody(), getServer(), getVirtualHost());
        refreshTab(_typeTabFolder.getItem(0));
    }

    /** Rebuilds the selection tab folder for a single mbean type (Connection, Exchange, or Queue). */
    private void showTypeTabFolder(String type) throws Exception
    {
        if (_typeTabFolder != null && !_typeTabFolder.isDisposed())
        {
            _typeTabFolder.dispose();
        }

        if (CONNECTION.equals(type))
        {
            // Generates the Connection selection tab.
            _typeTabFolder = MBeanTabFolderFactory.generateConnectionTypeTabFolder(
                    _form.getBody(), getServer(), getVirtualHost());
            refreshTab(_typeTabFolder.getItem(0));
        }
        else if (EXCHANGE.equals(type))
        {
            // Generates the Exchange selection tab.
            _typeTabFolder = MBeanTabFolderFactory.generateExchangeTypeTabFolder(
                    _form.getBody(), getServer(), getVirtualHost());
            refreshTab(_typeTabFolder.getItem(0));
        }
        else if (QUEUE.equals(type))
        {
            // Generates the Queue selection tab.
            _typeTabFolder = MBeanTabFolderFactory.generateQueueTypeTabFolder(
                    _form.getBody(), getServer(), getVirtualHost());
            refreshTab(_typeTabFolder.getItem(0));
        }
    }

    /** Hides all tab folders (without disposing them) and resets the title and status bar. */
    private void clearView()
    {
        if (_tabFolder != null && !_tabFolder.isDisposed())
        {
            _tabFolder.setVisible(false);
        }

        if (_typeTabFolder != null && !_typeTabFolder.isDisposed())
        {
            _typeTabFolder.setVisible(false);
        }

        if (_notificationTabFolder != null && !_notificationTabFolder.isDisposed())
        {
            _notificationTabFolder.setVisible(false);
        }

        _form.setText(APPLICATION_NAME);
        clearStatusBar();
    }

    /** Called when an mbean is unregistered on the server; clears the view if it was open. */
    public void mbeanUnregistered(ManagedBean mbean)
    {
        // If the mbean is actually open, clear the view and empty the back history.
        if(mbean == _mbean)
        {
            clearView();
            _backHistory.clear();
            _backAction.setEnabled(false);
            ViewUtility.popupInfoMessage("MBean Unregistered",
                    "The open MBean was unregistered from the server.");
        }
    }

    /** Refreshes whichever tab folder is currently visible (mbean, type, or notifications). */
    public void refresh()
    {
        if(!ApplicationRegistry.isServerConnected(_server))
        {
            return;
        }

        if (_tabFolder != null && !_tabFolder.isDisposed())
        {
            if(_tabFolder.getVisible())
            {
                int selectedTab = _tabFolder.getSelectionIndex();
                TabItem tab = _tabFolder.getItem(selectedTab);
                TabControl controller = (TabControl) tab.getData(TabControl.CONTROLLER);
                if(controller != null)
                {
                    controller.refresh(_mbean);
                }
                return;
            }
        }

        if (_typeTabFolder != null && !_typeTabFolder.isDisposed())
        {
            if(_typeTabFolder.getVisible())
            {
                int selectedTab = _typeTabFolder.getSelectionIndex();
                TabItem tab = _typeTabFolder.getItem(selectedTab);
                TabControl controller = (TabControl) tab.getData(TabControl.CONTROLLER);
                if(controller != null)
                {
                    controller.refresh(_mbean);
                }
                return;
            }
        }

        if (_notificationTabFolder != null && !_notificationTabFolder.isDisposed())
        {
            if(_notificationTabFolder.getVisible())
            {
                int selectedTab = _notificationTabFolder.getSelectionIndex();
                TabItem tab = _notificationTabFolder.getItem(selectedTab);
                TabControl controller = (TabControl) tab.getData(TabControl.CONTROLLER);
                if(controller != null)
                {
                    controller.refresh(_mbean);
                }
                return;
            }
        }
    }

    /** Shows an icon and message in the workbench status bar. */
    public void populateStatusBar(Image icon, String message)
    {
        IActionBars bars = getViewSite().getActionBars();
        bars.getStatusLineManager().setMessage(icon, message);
    }

    /** Shows a message in the workbench status bar. */
    public void populateStatusBar(String message)
    {
        IActionBars bars = getViewSite().getActionBars();
        bars.getStatusLineManager().setMessage(message);
    }

    public void clearStatusBar()
    {
        populateStatusBar("");
    }

    /**
     * Navigates to the previously viewed item. History entries are either
     * ManagedBean (reopen the mbean) or TreeObject (rebuild the type tab folder).
     */
    public void back() throws Exception
    {
        if(_backHistory.isEmpty())
        {
            return;
        }

        Object previous = _backHistory.removeLast();

        if(_backHistory.isEmpty())
        {
            // If this was the last history item, disable the action button.
            _backAction.setEnabled(false);
        }

        if(previous instanceof ManagedBean)
        {
            openMBean((ManagedBean)previous, true);
        }
        else if (previous instanceof TreeObject)
        {
            _mbean = null;
            clearView();
            setFormTitle();

            TreeObject node = (TreeObject) previous;
            String mbeanType = node.getType();

            if (NODE_TYPE_TYPEINSTANCE.equals(mbeanType))
            {
                generateTypeTabFolder();
            }
            else if (NODE_TYPE_MBEANTYPE.equals(mbeanType))
            {
                showTypeTabFolder(node.getName());
            }
        }

        _form.layout(true);
        _form.getBody().layout(true, true);
    }
}
| |
/**
* Copyright 2015-2017 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.swarm.bootstrap.modules;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.jar.JarFile;
import org.jboss.modules.DependencySpec;
import org.jboss.modules.ModuleLoadException;
import org.jboss.modules.ModuleLoader;
import org.jboss.modules.ModuleSpec;
import org.jboss.modules.ResourceLoader;
import org.jboss.modules.ResourceLoaderSpec;
import org.jboss.modules.ResourceLoaders;
import org.jboss.modules.filter.ClassFilters;
import org.jboss.modules.filter.PathFilters;
import org.jboss.modules.maven.ArtifactCoordinates;
import org.wildfly.swarm.bootstrap.env.ApplicationEnvironment;
import org.wildfly.swarm.bootstrap.logging.BootstrapLogger;
import org.wildfly.swarm.bootstrap.util.BootstrapUtil;
import org.wildfly.swarm.bootstrap.util.JarFileManager;
import org.wildfly.swarm.bootstrap.util.TempFileManager;
/**
 * Module-finder used only for loading the module <code>swarm.application</code> when run in a fat-jar scenario.
 *
 * <p>Builds the application module by attaching the application asset (jar/war),
 * the resolved Maven dependencies, any <code>swarm.classpath</code> jars, and the
 * required framework module dependencies.</p>
 *
 * @author Bob McWhirter
 */
public class ApplicationModuleFinder extends AbstractSingleModuleFinder {

    public static final String MODULE_NAME = "swarm.application";

    public ApplicationModuleFinder() {
        super(MODULE_NAME);
    }

    protected ApplicationModuleFinder(String slot) {
        super(MODULE_NAME + ": " + slot);
    }

    /**
     * Assembles the module spec: bootstrap-module dependencies, the application
     * asset, the application's Maven dependencies, classpath jars, and the
     * standard framework dependencies.
     *
     * @throws ModuleLoadException if the asset or classpath jars cannot be attached
     */
    @Override
    public void buildModule(ModuleSpec.Builder builder, ModuleLoader delegateLoader) throws ModuleLoadException {
        ApplicationEnvironment env = ApplicationEnvironment.get();

        env.bootstrapModules()
                .forEach((module) -> {
                    builder.addDependency(
                            DependencySpec.createModuleDependencySpec(
                                    PathFilters.acceptAll(),
                                    PathFilters.acceptAll(),
                                    PathFilters.acceptAll(),
                                    PathFilters.acceptAll(),
                                    ClassFilters.acceptAll(),
                                    ClassFilters.acceptAll(),
                                    null,
                                    module,
                                    false));
                });

        try {
            addAsset(builder, env);
        } catch (IOException e) {
            throw new ModuleLoadException(e);
        }

        addDependencies(builder, env);

        try {
            addClasspathJars(builder);
        } catch (IOException e) {
            throw new ModuleLoadException(e);
        }

        builder.addDependency(DependencySpec.createModuleDependencySpec("org.jboss.modules"));
        builder.addDependency(DependencySpec.createModuleDependencySpec("org.jboss.shrinkwrap"));
        builder.addDependency(DependencySpec.createModuleDependencySpec("org.wildfly.swarm.configuration", false, true));
        builder.addDependency(DependencySpec.createModuleDependencySpec("sun.jdk", false, true));

        builder.addDependency(
                DependencySpec.createModuleDependencySpec(
                        PathFilters.acceptAll(),
                        PathFilters.acceptAll(),
                        PathFilters.acceptAll(),
                        PathFilters.acceptAll(),
                        ClassFilters.acceptAll(),
                        ClassFilters.acceptAll(),
                        null,
                        "org.wildfly.swarm.container:api", true));

        builder.addDependency(DependencySpec.createLocalDependencySpec());
    }

    /**
     * Copies the application asset (jar/war) out of the classpath, explodes it to
     * a temp directory, and mounts it as a resource root. For wars the
     * {@code WEB-INF/classes} directory is mounted as an additional root.
     *
     * @throws IOException if the asset is missing from the classpath or cannot be copied
     */
    protected void addAsset(ModuleSpec.Builder builder, ApplicationEnvironment env) throws IOException {
        String path = env.getAsset();
        if (path == null) {
            return;
        }

        // Derive a base name and extension from the asset path for the temp file/dir names.
        int slashLoc = path.lastIndexOf('/');
        String name = path;
        if (slashLoc > 0) {
            name = path.substring(slashLoc + 1);
        }

        String ext = ".jar";
        int dotLoc = name.lastIndexOf('.');
        if (dotLoc > 0) {
            ext = name.substring(dotLoc);
            name = name.substring(0, dotLoc);
        }

        File tmp = TempFileManager.INSTANCE.newTempFile(name, ext);

        try (InputStream artifactIn = getClass().getClassLoader().getResourceAsStream(path)) {
            if (artifactIn == null) {
                // getResourceAsStream() returns null for a missing resource; fail with a
                // clear message rather than an NPE inside Files.copy().
                throw new IOException("Unable to locate application asset on the classpath: " + path);
            }
            Files.copy(artifactIn, tmp.toPath(), StandardCopyOption.REPLACE_EXISTING);
        }

        final String jarName = tmp.getName();
        final JarFile jarFile = new JarFile(tmp);
        File tmpDir = TempFileManager.INSTANCE.newTempDirectory(name, ext);

        // Explode jar due to some issues in Windows on stopping (JarFiles cannot be deleted)
        BootstrapUtil.explodeJar(jarFile, tmpDir.getAbsolutePath());

        // SWARM-1473: exploded app artifact is also used to back ShrinkWrap archive used by deployment processors
        TempFileManager.INSTANCE.setExplodedApplicationArtifact(tmpDir);
        jarFile.close();
        if (!tmp.delete()) {
            // On Windows the temp jar may still be mapped; clean it up at JVM exit.
            tmp.deleteOnExit();
        }

        final ResourceLoader jarLoader = ResourceLoaders.createFileResourceLoader(jarName, tmpDir);
        builder.addResourceRoot(ResourceLoaderSpec.createResourceLoaderSpec(jarLoader));

        if (".war".equalsIgnoreCase(ext)) {
            final ResourceLoader warLoader = ResourceLoaders.createFileResourceLoader(jarName + "WEBINF",
                    new File(tmpDir.getAbsolutePath() + File.separator + "WEB-INF" + File.separator + "classes"));
            builder.addResourceRoot(ResourceLoaderSpec.createResourceLoaderSpec(warLoader));
        }
    }

    /**
     * Resolves each jar-packaged Maven dependency of the application and mounts it
     * as a resource root. Dependencies are encoded as
     * {@code group:artifact:packaging[:classifier]:version}; malformed entries are
     * logged and skipped instead of failing the whole module build.
     */
    protected void addDependencies(ModuleSpec.Builder builder, ApplicationEnvironment env) {
        env.getDependencies()
                .forEach((dep) -> {
                    String[] parts = dep.split(":");

                    // Guard the packaging lookup: previously a short entry threw
                    // ArrayIndexOutOfBoundsException here. Only jar packaging is mounted.
                    if (parts.length < 3 || !parts[2].equals("jar")) {
                        return;
                    }

                    final ArtifactCoordinates coords;
                    if (parts.length == 4) {
                        // group:artifact:jar:version
                        coords = new ArtifactCoordinates(parts[0], parts[1], parts[3]);
                    } else if (parts.length == 5) {
                        // group:artifact:jar:classifier:version
                        coords = new ArtifactCoordinates(parts[0], parts[1], parts[4], parts[3]);
                    } else {
                        // Previously fell through with null coordinates and NPE'd in the resolver.
                        LOG.error("Unable to parse dependency coordinates: " + dep);
                        return;
                    }

                    try {
                        File artifact = MavenResolvers.get().resolveJarArtifact(coords);
                        if (artifact == null) {
                            LOG.error("Unable to find artifact for " + coords);
                            return;
                        }

                        JarFile jar = JarFileManager.INSTANCE.addJarFile(artifact);

                        builder.addResourceRoot(
                                ResourceLoaderSpec.createResourceLoaderSpec(
                                        ResourceLoaders.createJarResourceLoader(artifact.getName(), jar)
                                )
                        );
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                });
    }

    /**
     * Mounts any jars listed in the {@code swarm.classpath} system property
     * (';'-separated paths); entries that do not exist on disk are ignored.
     */
    private void addClasspathJars(ModuleSpec.Builder builder) throws IOException {
        String driversList = System.getProperty("swarm.classpath");
        if (driversList != null && driversList.trim().length() > 0) {
            String[] drivers = driversList.split(";");
            for (String driver : drivers) {
                File driverFile = new File(driver);
                if (driverFile.exists()) {
                    builder.addResourceRoot(
                            ResourceLoaderSpec.createResourceLoaderSpec(
                                    ResourceLoaders.createJarResourceLoader(driverFile.getName(), new JarFile(driverFile))
                            )
                    );
                }
            }
        }
    }

    private static final BootstrapLogger LOG = BootstrapLogger.logger("org.wildfly.swarm.modules.application");
}
| |
/**
* Copyright 2007-2015, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.jboss.netty.channel.socket;
import static org.jboss.netty.channel.Channels.future;
import static org.jboss.netty.util.TestUtil.getLocalHost;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Random;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.jboss.netty.bootstrap.ClientBootstrap;
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFactory;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
import org.jboss.netty.util.internal.ExecutorUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Abstract echo round-trip test: writes 1 MiB of random data from a client
 * channel to a server channel that echoes it back, and verifies both sides
 * receive the full payload intact. Concrete subclasses supply the channel
 * factories for the transport under test.
 */
public abstract class AbstractSocketEchoTest {

    private static final Random random = new Random();
    // 1 MiB of random payload shared by all runs; filled once in the static initializer.
    static final byte[] data = new byte[1048576];

    private static ExecutorService executor;

    static {
        random.nextBytes(data);
    }

    @BeforeClass
    public static void init() {
        executor = Executors.newCachedThreadPool();
    }

    @AfterClass
    public static void destroy() {
        ExecutorUtil.terminate(executor);
    }

    /** Creates the server-side channel factory for the transport under test. */
    protected abstract ChannelFactory newServerSocketChannelFactory(Executor executor);

    /** Creates the client-side channel factory for the transport under test. */
    protected abstract ChannelFactory newClientSocketChannelFactory(Executor executor);

    @Test(timeout = 5000)
    public void testSimpleEcho() throws Throwable {
        ServerBootstrap sb = new ServerBootstrap(newServerSocketChannelFactory(executor));
        ClientBootstrap cb = new ClientBootstrap(newClientSocketChannelFactory(executor));

        EchoHandler sh = new EchoHandler();
        EchoHandler ch = new EchoHandler();

        try {
            sb.getPipeline().addFirst("handler", sh);
            cb.getPipeline().addFirst("handler", ch);

            // Bind to an ephemeral port and connect the client to it.
            Channel sc = sb.bind(new InetSocketAddress(0));
            int port = ((InetSocketAddress) sc.getLocalAddress()).getPort();

            ChannelFuture ccf = cb.connect(new InetSocketAddress(getLocalHost(), port));
            assertTrue(ccf.awaitUninterruptibly().isSuccess());

            // Write the payload in random-sized chunks (up to 64 KiB each).
            Channel cc = ccf.getChannel();
            for (int i = 0; i < data.length;) {
                int length = Math.min(random.nextInt(1024 * 64), data.length - i);
                cc.write(ChannelBuffers.wrappedBuffer(data, i, length));
                i += length;
            }

            // Busy-wait until the client has received the full echo back,
            // bailing out early if either side recorded an exception.
            while (ch.counter.get() < data.length) {
                if (sh.exception.get() != null) {
                    break;
                }
                if (ch.exception.get() != null) {
                    break;
                }

                try {
                    Thread.sleep(10);
                } catch (InterruptedException e) {
                    // Ignore.
                }
            }

            // Same wait for the server side (it counts bytes it echoed).
            while (sh.counter.get() < data.length) {
                if (sh.exception.get() != null) {
                    break;
                }
                if (ch.exception.get() != null) {
                    break;
                }

                try {
                    Thread.sleep(10);
                } catch (InterruptedException e) {
                    // Ignore.
                }
            }

            sh.channel.close().awaitUninterruptibly();
            ch.channel.close().awaitUninterruptibly();
            sc.close().awaitUninterruptibly();

            // After closing the server socket, a reconnect must fail and the client
            // handler must observe the expected (anticipated) exception.
            ch.expectException(true);
            ChannelFuture connectAfterCloseFuture = cb.connect(new InetSocketAddress(getLocalHost(), port));
            assertTrue(!connectAfterCloseFuture.awaitUninterruptibly().isSuccess());
            assertTrue(ch.caughtExceptionFuture.awaitUninterruptibly().isSuccess());
            ch.expectException(false);
        } finally {
            cb.shutdown();
            sb.shutdown();
            cb.releaseExternalResources();
            sb.releaseExternalResources();
        }

        // Rethrow unexpected (non-IO) exceptions first, then any remaining
        // IOException; the ordering of these four checks is deliberate.
        if (sh.exception.get() != null && !(sh.exception.get() instanceof IOException)) {
            throw sh.exception.get();
        }
        if (ch.exception.get() != null && !(ch.exception.get() instanceof IOException)) {
            throw ch.exception.get();
        }
        if (sh.exception.get() != null) {
            throw sh.exception.get();
        }
        if (ch.exception.get() != null) {
            throw ch.exception.get();
        }
    }

    /**
     * Pipeline handler used on both sides: verifies incoming bytes against the
     * shared payload, counts them, and (on the server side only) echoes them back.
     */
    private static class EchoHandler extends SimpleChannelUpstreamHandler {
        volatile Channel channel;
        final AtomicReference<Throwable> exception = new AtomicReference<Throwable>();
        final AtomicInteger counter = new AtomicInteger();
        volatile boolean expectException;
        // NOTE(review): written by the test thread and read by an I/O thread without
        // volatile; expectException's volatile write/read presumably orders it — verify.
        ChannelFuture caughtExceptionFuture;

        EchoHandler() {
        }

        @Override
        public void channelOpen(ChannelHandlerContext ctx, ChannelStateEvent e)
                throws Exception {
            channel = e.getChannel();
        }

        @Override
        public void messageReceived(ChannelHandlerContext ctx, MessageEvent e)
                throws Exception {
            ChannelBuffer m = (ChannelBuffer) e.getMessage();
            byte[] actual = new byte[m.readableBytes()];
            m.getBytes(0, actual);

            // Compare against the expected slice of the shared payload.
            int lastIdx = counter.get();
            for (int i = 0; i < actual.length; i ++) {
                assertEquals(data[i + lastIdx], actual[i]);
            }

            // Only the server-side channel has a parent (the server socket); echo there.
            if (channel.getParent() != null) {
                channel.write(m);
            }

            counter.addAndGet(actual.length);
        }

        @Override
        public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e)
                throws Exception {
            if (expectException) {
                // Anticipated failure (e.g. connect-after-close): signal success.
                caughtExceptionFuture.setSuccess();
            } else if (exception.compareAndSet(null, e.getCause())) {
                // Record only the first unexpected exception, then close the channel.
                e.getChannel().close();
            }
        }

        /** Arms/disarms anticipation of an exception; arming creates a fresh future to await. */
        public void expectException(boolean on) {
            expectException = on;
            if (on) {
                caughtExceptionFuture = future(channel);
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.test.integration.functions.binary.matrix;
import org.junit.Test;
import org.apache.sysml.api.DMLException;
import org.apache.sysml.runtime.functionobjects.Modulus;
import org.apache.sysml.test.integration.AutomatedTestBase;
import org.apache.sysml.test.integration.TestConfiguration;
/**
 * Integration tests for element-wise matrix modulus. Positive tests compute
 * the expected result locally with the runtime's {@link Modulus} function
 * object and compare it against the DML script output; negative tests feed
 * mismatched dimensions and expect a {@link DMLException}.
 */
public class ElementwiseModulusTest extends AutomatedTestBase
{
    private final static String TEST_DIR = "functions/binary/matrix/";
    private final static String TEST_CLASS_DIR = TEST_DIR + ElementwiseModulusTest.class.getSimpleName() + "/";

    @Override
    public void setUp() {
        // positive tests: all share the "ElementwiseModulusTest" DML script
        addTestConfiguration("DenseTest",
            new TestConfiguration(TEST_CLASS_DIR, "ElementwiseModulusTest", new String[] { "c" }));
        addTestConfiguration("SparseTest",
            new TestConfiguration(TEST_CLASS_DIR, "ElementwiseModulusTest", new String[] { "c" }));
        addTestConfiguration("EmptyTest",
            new TestConfiguration(TEST_CLASS_DIR, "ElementwiseModulusTest", new String[] { "c" }));
        // negative tests: mismatched dimensions, shared variable-dimensions script
        addTestConfiguration("WrongDimensionLessRowsTest",
            new TestConfiguration(TEST_CLASS_DIR, "ElementwiseModulusVariableDimensionsTest", new String[] { "c" }));
        addTestConfiguration("WrongDimensionMoreRowsTest",
            new TestConfiguration(TEST_CLASS_DIR, "ElementwiseModulusVariableDimensionsTest", new String[] { "c" }));
        addTestConfiguration("WrongDimensionLessColsTest",
            new TestConfiguration(TEST_CLASS_DIR, "ElementwiseModulusVariableDimensionsTest", new String[] { "c" }));
        addTestConfiguration("WrongDimensionMoreColsTest",
            new TestConfiguration(TEST_CLASS_DIR, "ElementwiseModulusVariableDimensionsTest", new String[] { "c" }));
        addTestConfiguration("WrongDimensionLessRowsLessColsTest",
            new TestConfiguration(TEST_CLASS_DIR, "ElementwiseModulusVariableDimensionsTest", new String[] { "c" }));
        addTestConfiguration("WrongDimensionMoreRowsMoreColsTest",
            new TestConfiguration(TEST_CLASS_DIR, "ElementwiseModulusVariableDimensionsTest", new String[] { "c" }));
        addTestConfiguration("WrongDimensionLessRowsMoreColsTest",
            new TestConfiguration(TEST_CLASS_DIR, "ElementwiseModulusVariableDimensionsTest", new String[] { "c" }));
        addTestConfiguration("WrongDimensionMoreRowsLessColsTest",
            new TestConfiguration(TEST_CLASS_DIR, "ElementwiseModulusVariableDimensionsTest", new String[] { "c" }));
        // positive test whose second operand may contain zero entries
        addTestConfiguration("DivisionByZeroTest",
            new TestConfiguration(TEST_CLASS_DIR, "ElementwiseModulusTest", new String[] { "c" }));
    }

    /**
     * Computes the expected element-wise modulus of {@code a} and {@code b}
     * using the same {@link Modulus} function object as the runtime.
     *
     * @param a left operand, rows x cols
     * @param b right operand, must have the same shape as {@code a}
     * @return new matrix c with c[i][j] = mod(a[i][j], b[i][j])
     */
    private static double[][] expectedModulus(double[][] a, double[][] b) {
        Modulus fnmod = Modulus.getModulusFnObject();
        int rows = a.length;
        int cols = (rows == 0) ? 0 : a[0].length; // guard against empty input
        double[][] c = new double[rows][cols];
        for (int i = 0; i < rows; i++) {
            for (int j = 0; j < cols; j++) {
                c[i][j] = fnmod.execute(a[i][j], b[i][j]);
            }
        }
        return c;
    }

    /** Loads the named configuration with the given "rows"/"cols" variables. */
    private void loadMatrixConfig(String configName, int rows, int cols) {
        TestConfiguration config = availableTestConfigurations.get(configName);
        config.addVariable("rows", rows);
        config.addVariable("cols", cols);
        loadTestConfiguration(config);
    }

    /**
     * Writes inputs "a" and "b" plus the locally computed expected matrix "c",
     * runs the DML script, and compares actual against expected output.
     */
    private void runAndCompare(double[][] a, double[][] b) {
        writeInputMatrix("a", a);
        writeInputMatrix("b", b);
        writeExpectedMatrix("c", expectedModulus(a, b));
        runTest();
        compareResults();
    }

    /**
     * Shared driver for all wrong-dimension tests: loads the configuration
     * with a rows1 x cols1 first operand and a fixed 10 x 10 second operand,
     * then expects the run to fail with a {@link DMLException}.
     */
    private void runWrongDimensionsTest(String configName, int rows1, int cols1) {
        TestConfiguration config = availableTestConfigurations.get(configName);
        config.addVariable("rows1", rows1);
        config.addVariable("cols1", cols1);
        config.addVariable("rows2", 10);
        config.addVariable("cols2", 10);
        loadTestConfiguration(config);
        runTest(true, DMLException.class);
    }

    @Test
    public void testDense() {
        int rows = 10;
        int cols = 10;
        loadMatrixConfig("DenseTest", rows, cols);
        double[][] a = getRandomMatrix(rows, cols, -5, 5, 1, -1);
        double[][] b = getNonZeroRandomMatrix(rows, cols, -20, 20, -1);
        runAndCompare(a, b);
    }

    @Test
    public void testSparse() {
        int rows = 50;
        int cols = 50;
        loadMatrixConfig("SparseTest", rows, cols);
        double[][] a = getRandomMatrix(rows, cols, -5, 5, 0.05, -1);
        double[][] b = getNonZeroRandomMatrix(rows, cols, -20, 20, -1);
        runAndCompare(a, b);
    }

    @Test
    public void testWrongDimensionsLessRows() {
        runWrongDimensionsTest("WrongDimensionLessRowsTest", 8, 10);
    }

    @Test
    public void testWrongDimensionsMoreRows() {
        runWrongDimensionsTest("WrongDimensionMoreRowsTest", 12, 10);
    }

    @Test
    public void testWrongDimensionsLessCols() {
        runWrongDimensionsTest("WrongDimensionLessColsTest", 10, 8);
    }

    @Test
    public void testWrongDimensionsMoreCols() {
        runWrongDimensionsTest("WrongDimensionMoreColsTest", 10, 12);
    }

    @Test
    public void testWrongDimensionsLessRowsLessCols() {
        runWrongDimensionsTest("WrongDimensionLessRowsLessColsTest", 8, 8);
    }

    @Test
    public void testWrongDimensionsMoreRowsMoreCols() {
        runWrongDimensionsTest("WrongDimensionMoreRowsMoreColsTest", 12, 12);
    }

    @Test
    public void testWrongDimensionsLessRowsMoreCols() {
        runWrongDimensionsTest("WrongDimensionLessRowsMoreColsTest", 8, 12);
    }

    @Test
    public void testWrongDimensionsMoreRowsLessCols() {
        runWrongDimensionsTest("WrongDimensionMoreRowsLessColsTest", 12, 8);
    }

    @Test
    public void testDivisionByZero() {
        int rows = 10;
        int cols = 10;
        loadMatrixConfig("DivisionByZeroTest", rows, cols);
        // b is plain random (not non-zero), so some divisors may be zero
        double[][] a = getRandomMatrix(rows, cols, -1, 1, 0.5, -1);
        double[][] b = getRandomMatrix(rows, cols, -1, 1, 0.5, -1);
        runAndCompare(a, b);
    }
}
| |
package com.strobel.assembler.metadata;
import com.strobel.compilerservices.RuntimeHelpers;
import org.junit.Test;
import static com.strobel.assembler.metadata.MetadataHelper.isAssignableFrom;
import static com.strobel.core.CollectionUtilities.single;
import static java.lang.String.format;
import static org.junit.Assert.*;
@SuppressWarnings("UnusedDeclaration")
public class MetadataHelperTests {
    static {
        // Make sure the metadata system's static state is initialized before
        // any test resolves a type.
        RuntimeHelpers.ensureClassInitialized(MetadataSystem.class);
    }

    /**
     * Primitive assignability matrix indexed [source][target] over the order
     * byte, short, char, int, long, float, double (the same order as the
     * primitiveTypes array in testIsAssignableBetweenPrimitives).
     */
    private static final boolean[][] IS_ASSIGNABLE_BIT_SET = {
        { true, true, false, true, true, true, true }, // byte
        { false, true, false, true, true, true, true }, // short
        { false, false, true, true, true, true, true }, // char
        { false, false, false, true, true, true, true }, // int
        { false, false, false, false, true, true, true }, // long
        { false, false, false, false, false, true, true }, // float
        { false, false, false, false, false, false, true }, // double
    };

    // --- Frequently used type lookups ------------------------------------

    private static TypeReference string() {
        return MetadataSystem.instance().lookupTypeCore("java/lang/String");
    }

    private static TypeReference charSequence() {
        return MetadataSystem.instance().lookupTypeCore("java/lang/CharSequence");
    }

    private static TypeReference integer() {
        return MetadataSystem.instance().lookupTypeCore("java/lang/Integer");
    }

    private static TypeReference list() {
        return MetadataSystem.instance().lookupTypeCore("java/util/List");
    }

    private static TypeReference arrayList() {
        return MetadataSystem.instance().lookupTypeCore("java/util/ArrayList");
    }

    private static TypeReference iterable() {
        return MetadataSystem.instance().lookupTypeCore("java/lang/Iterable");
    }

    /**
     * Asserts that {@code actual} is the same type as {@code expected} under
     * strict comparison, failing with both signatures for easy diagnosis.
     */
    private static void assertSameType(final TypeReference expected, final TypeReference actual) {
        if (MetadataHelper.isSameType(expected, actual, true)) {
            return;
        }
        fail(
            format(
                "Type comparison failed!%nExpected: %s%n  Actual: %s",
                expected != null ? expected.getSignature() : null,
                actual != null ? actual.getSignature() : null
            )
        );
    }

    /**
     * Verifies both isAssignableFrom and getConversionType for every ordered
     * pair of primitive types against IS_ASSIGNABLE_BIT_SET.
     */
    @Test
    public void testIsAssignableBetweenPrimitives() throws Throwable {
        final TypeReference[] primitiveTypes = {
            BuiltinTypes.Byte,
            BuiltinTypes.Short,
            BuiltinTypes.Character,
            BuiltinTypes.Integer,
            BuiltinTypes.Long,
            BuiltinTypes.Float,
            BuiltinTypes.Double,
        };
        for (int i = 0, n = IS_ASSIGNABLE_BIT_SET.length; i < n; i++) {
            for (int j = 0; j < n; j++) {
                // [j][i]: is a value of type j assignable to type i?
                assertEquals(
                    format(
                        "%s (assignable from) %s = %s",
                        primitiveTypes[i],
                        primitiveTypes[j],
                        IS_ASSIGNABLE_BIT_SET[j][i]
                    ),
                    MetadataHelper.isAssignableFrom(
                        primitiveTypes[i],
                        primitiveTypes[j]
                    ),
                    IS_ASSIGNABLE_BIT_SET[j][i]
                );
                // Assignable pairs must be IDENTITY (same type) or IMPLICIT
                // conversions; everything else requires an EXPLICIT cast.
                assertEquals(
                    format(
                        "%s (conversion from) %s = %s",
                        primitiveTypes[i],
                        primitiveTypes[j],
                        IS_ASSIGNABLE_BIT_SET[j][i] ? (i == j ? ConversionType.IDENTITY : ConversionType.IMPLICIT)
                                                    : ConversionType.EXPLICIT
                    ),
                    MetadataHelper.getConversionType(
                        primitiveTypes[i],
                        primitiveTypes[j]
                    ),
                    IS_ASSIGNABLE_BIT_SET[j][i] ? (i == j ? ConversionType.IDENTITY : ConversionType.IMPLICIT)
                                                : ConversionType.EXPLICIT
                );
            }
        }
    }

    @Test
    public void testIsSameTypeWithSimpleGenerics() throws Throwable {
        // TODO(review): entire body is disabled; either re-enable the
        // assertions below or remove the test (an empty @Test passes silently
        // and gives a false sense of coverage).
        /*
        final TypeReference arrayList = arrayList();
        final TypeReference rawArrayList = new RawType(arrayList());
        final TypeReference genericArrayList = arrayList().makeGenericType(string());
        assertTrue(isSameType(rawArrayList, genericArrayList, false));
        assertTrue(isSameType(genericArrayList, rawArrayList, false));
        assertFalse(isSameType(rawArrayList, genericArrayList, true));
        assertFalse(isSameType(genericArrayList, rawArrayList, true));
        assertTrue(isSameType(arrayList, arrayList, false));
        assertTrue(isSameType(rawArrayList, rawArrayList, false));
        assertTrue(isSameType(genericArrayList, genericArrayList, false));
        assertTrue(isSameType(arrayList, arrayList, true));
        assertTrue(isSameType(rawArrayList, rawArrayList, true));
        assertTrue(isSameType(genericArrayList, genericArrayList, true));
        assertFalse(isSameType(arrayList, rawArrayList, false));
        assertFalse(isSameType(arrayList, genericArrayList, false));
        assertFalse(isSameType(rawArrayList, arrayList, false));
        assertFalse(isSameType(genericArrayList, arrayList, false));
        assertFalse(isSameType(arrayList, rawArrayList, true));
        assertFalse(isSameType(arrayList, genericArrayList, true));
        assertFalse(isSameType(rawArrayList, arrayList, true));
        assertFalse(isSameType(genericArrayList, arrayList, true));
        */
    }

    /**
     * asSuper should map ArrayList variants (erased, generic, raw) onto the
     * corresponding Iterable variant.
     */
    @Test
    public void testAsSuperWithSimpleGenerics() throws Throwable {
        final TypeReference arrayList = arrayList();
        final TypeReference rawArrayList = new RawType(arrayList());
        final TypeReference genericArrayList = arrayList().makeGenericType(string());
        final TypeReference iterable = iterable();
        final TypeReference rawIterable = new RawType(iterable());
        final TypeReference genericIterable = iterable().makeGenericType(string());
        final TypeReference t1 = MetadataHelper.asSuper(genericIterable, arrayList);
        final TypeReference t2 = MetadataHelper.asSuper(genericIterable, genericArrayList);
        final TypeReference t3 = MetadataHelper.asSuper(genericIterable, rawArrayList);
        final TypeReference t4 = MetadataHelper.asSuper(iterable, arrayList);
        final TypeReference t5 = MetadataHelper.asSuper(iterable, genericArrayList);
        final TypeReference t6 = MetadataHelper.asSuper(iterable, rawArrayList);
        final TypeReference t7 = MetadataHelper.asSuper(rawIterable, arrayList);
        final TypeReference t8 = MetadataHelper.asSuper(rawIterable, genericArrayList);
        final TypeReference t9 = MetadataHelper.asSuper(rawIterable, rawArrayList);
        assertSameType(iterable.makeGenericType(single(arrayList.getGenericParameters())), t1);
        assertSameType(genericIterable, t2);
        assertSameType(rawIterable, t3);
        assertSameType(iterable.makeGenericType(single(arrayList.getGenericParameters())), t4);
        assertSameType(genericIterable, t5);
        assertSameType(rawIterable, t6);
        assertSameType(iterable.makeGenericType(single(arrayList.getGenericParameters())), t7);
        assertSameType(genericIterable, t8);
        assertSameType(rawIterable, t9);
    }

    /**
     * asSuper with unbounded wildcard arguments on either side should
     * preserve or propagate the wildcard as appropriate.
     */
    @Test
    public void testAsSuperWithWildcards() throws Throwable {
        final TypeReference arrayList = arrayList();
        final TypeReference rawArrayList = new RawType(arrayList());
        final TypeReference genericArrayList = arrayList().makeGenericType(string());
        final TypeReference wildArrayList = arrayList().makeGenericType(WildcardType.unbounded());
        final TypeReference iterable = iterable();
        final TypeReference rawIterable = new RawType(iterable());
        final TypeReference genericIterable = iterable().makeGenericType(string());
        final TypeReference wildIterable = iterable().makeGenericType(WildcardType.unbounded());
        final TypeReference iterableOfE = iterable.makeGenericType(arrayList.getGenericParameters());
        final TypeReference t1 = MetadataHelper.asSuper(rawIterable, wildIterable);
        final TypeReference t2 = MetadataHelper.asSuper(rawIterable, wildArrayList);
        final TypeReference t3 = MetadataHelper.asSuper(wildIterable, iterable);
        final TypeReference t4 = MetadataHelper.asSuper(wildIterable, rawIterable);
        final TypeReference t5 = MetadataHelper.asSuper(wildIterable, genericIterable);
        final TypeReference t6 = MetadataHelper.asSuper(wildIterable, arrayList);
        final TypeReference t7 = MetadataHelper.asSuper(wildIterable, rawArrayList);
        final TypeReference t8 = MetadataHelper.asSuper(wildIterable, genericArrayList);
        final TypeReference t9 = MetadataHelper.asSuper(wildIterable, wildArrayList);
        assertSameType(wildIterable, t1);
        assertSameType(wildIterable, t2);
        assertSameType(iterable, t3);
        assertSameType(rawIterable, t4);
        assertSameType(genericIterable, t5);
        assertSameType(iterableOfE, t6);
        assertSameType(rawIterable, t7);
        assertSameType(genericIterable, t8);
        assertSameType(wildIterable, t9);
    }

    /**
     * Every Iterable/ArrayList variant should be assignable to both the raw
     * and the unbounded-wildcard Iterable.
     */
    @Test
    public void testIsAssignableWithWildcards() throws Throwable {
        final TypeReference arrayList = arrayList();
        final TypeReference rawArrayList = new RawType(arrayList());
        final TypeReference genericArrayList = arrayList().makeGenericType(string());
        final TypeReference wildArrayList = arrayList().makeGenericType(WildcardType.unbounded());
        final TypeReference iterable = iterable();
        final TypeReference rawIterable = new RawType(iterable());
        final TypeReference genericIterable = iterable().makeGenericType(string());
        final TypeReference wildIterable = iterable().makeGenericType(WildcardType.unbounded());
        assertTrue(isAssignableFrom(rawIterable, iterable));
        assertTrue(isAssignableFrom(rawIterable, rawIterable));
        assertTrue(isAssignableFrom(rawIterable, genericIterable));
        assertTrue(isAssignableFrom(rawIterable, wildIterable));
        assertTrue(isAssignableFrom(rawIterable, arrayList));
        assertTrue(isAssignableFrom(rawIterable, rawArrayList));
        assertTrue(isAssignableFrom(rawIterable, genericArrayList));
        assertTrue(isAssignableFrom(rawIterable, wildArrayList));
        assertTrue(isAssignableFrom(wildIterable, iterable));
        assertTrue(isAssignableFrom(wildIterable, rawIterable));
        assertTrue(isAssignableFrom(wildIterable, genericIterable));
        assertTrue(isAssignableFrom(wildIterable, wildIterable));
        assertTrue(isAssignableFrom(wildIterable, arrayList));
        assertTrue(isAssignableFrom(wildIterable, rawArrayList));
        assertTrue(isAssignableFrom(wildIterable, genericArrayList));
        assertTrue(isAssignableFrom(wildIterable, wildArrayList));
    }

    /**
     * asSubType should narrow Iterable variants down to the matching
     * ArrayList variant.
     */
    @Test
    public void testAsSubTypeWithSimpleGenerics() throws Throwable {
        final TypeReference arrayList = arrayList();
        final TypeReference rawArrayList = new RawType(arrayList());
        final TypeReference genericArrayList = arrayList().makeGenericType(string());
        final TypeReference iterable = iterable();
        final TypeReference rawIterable = new RawType(iterable());
        final TypeReference genericIterable = iterable().makeGenericType(string());
        final TypeReference t1 = MetadataHelper.asSubType(arrayList, genericIterable);
        final TypeReference t2 = MetadataHelper.asSubType(genericArrayList, genericIterable);
        final TypeReference t3 = MetadataHelper.asSubType(rawArrayList, genericIterable);
        final TypeReference t4 = MetadataHelper.asSubType(arrayList, iterable);
        final TypeReference t5 = MetadataHelper.asSubType(genericArrayList, iterable);
        final TypeReference t6 = MetadataHelper.asSubType(rawArrayList, iterable);
        final TypeReference t7 = MetadataHelper.asSubType(arrayList, rawIterable);
        final TypeReference t8 = MetadataHelper.asSubType(genericArrayList, rawIterable);
        final TypeReference t9 = MetadataHelper.asSubType(rawArrayList, rawIterable);
        assertSameType(genericArrayList, t1);
        assertSameType(genericArrayList, t2);
        assertSameType(genericArrayList, t3);
        assertSameType(arrayList.makeGenericType(single(iterable.getGenericParameters())), t4);
        assertSameType(genericArrayList, t5);
        assertSameType(arrayList.makeGenericType(single(iterable.getGenericParameters())), t6);
        assertSameType(rawArrayList, t7);
        assertSameType(genericArrayList, t8);
        assertSameType(rawArrayList, t9);
    }
}
| |
// Copyright 2015 Cloudera Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.cloudera.recordservice.tests;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.MalformedURLException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.NotImplementedException;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.json.JSONException;
/**
* This class controls the cluster during tests, both the minicluster and a real
* cluster. To control the minicluster commands are run through the
* MiniClusterController class but using this class as an api allows the tests
* to be cluster agnostic.
*/
public class ClusterController {
    public static final int DEFAULT_NUM_NODES = 3;
    public static final String CM_USER_NAME = "admin";
    public static final String CM_PASSWORD = "admin";

    private static final Logger LOGGER = LoggerFactory.getLogger(ClusterController.class);

    // NOTE(review): these fields are public and the lists are raw types;
    // kept as-is for caller compatibility, but they should eventually be
    // made private and parameterized (e.g. List<ClusterNode>).
    public static ClusterController cluster_;
    public final boolean USE_MINI_CLUSTER;
    public final String RECORD_SERVICE_PLANNER_HOST;
    public ClusterConfiguration clusterConfiguration_;
    public List clusterList_;
    public List activeNodes_;
    public List availableNodes_;
    public String HADOOP_CONF_DIR;

    /**
     * If a miniCluster is being used, this class simply instantiates a
     * MiniClusterController.
     *
     * If a real cluster is being used, this class gets the necessary
     * configuration files via the CM api. The HADOOP_CONF_DIR and
     * RECORD_SERVICE_PLANNER_HOSTS environment variables are set. These only
     * apply within the JVM.
     *
     * @param miniCluster if true, use the minicluster
     * @param hostname the hostname of a CM enabled machine in a cluster
     *
     * TODO: Future work involves doing the necessary steps to ensure that a
     * cluster is healthy and ready for RecordService jobs to be executed.
     */
    public ClusterController(boolean miniCluster, String hostname) {
        USE_MINI_CLUSTER = miniCluster;
        RECORD_SERVICE_PLANNER_HOST = hostname;
        try {
            if (USE_MINI_CLUSTER) {
                cluster_ = MiniClusterController.instance();
                HADOOP_CONF_DIR = System.getenv("HADOOP_CONF_DIR");
            } else {
                clusterConfiguration_ = new ClusterConfiguration(hostname, CM_USER_NAME,
                    CM_PASSWORD);
                Map<String, String> envMap = new HashMap<String, String>();
                HADOOP_CONF_DIR = clusterConfiguration_.getHadoopConfDir();
                String SERVER_HOME = System.getenv("SERVER_HOME");
                String RECORD_SERVICE_HOME = System.getenv("RECORD_SERVICE_HOME");
                envMap.put("HADOOP_CONF_DIR", HADOOP_CONF_DIR);
                envMap.put("RECORD_SERVICE_PLANNER_HOST", RECORD_SERVICE_PLANNER_HOST);
                envMap.put("RECORD_SERVICE_HOME", RECORD_SERVICE_HOME);
                envMap.put("SERVER_HOME", SERVER_HOME);
                envMap.put("HADOOP_HOME", System.getenv("HADOOP_HOME"));
                // Add these additional system variables to the JVM environment.
                // Hadoop and RecordService rely on these variables to execute on a
                // cluster.
                setEnv(envMap);
                LOGGER.debug("HADOOP_CONF_DIR: " + System.getenv("HADOOP_CONF_DIR"));
                LOGGER.debug("HADOOP_HOME: " + System.getenv("HADOOP_HOME"));
                cluster_ = this;
            }
        } catch (IOException | JSONException e) {
            // Multi-catch replaces three identical catch blocks; note that
            // MalformedURLException is an IOException, so it is covered here.
            // Log at error (not debug) since we are about to abort the JVM.
            LOGGER.error("Error getting cluster configuration", e);
            System.exit(1);
        }
    }

    /**
     * This method runs the given job as specified in the JobConf on the cluster.
     */
    public RunningJob runJob(JobConf mrJob) throws IOException {
        return JobClient.runJob(mrJob);
    }

    /**
     * This method adds a node to the cluster. In the case of the minicluster this
     * is a very straightforward procedure, a recordserviced is brought up.
     *
     * TODO: Future work is required to make this method work on a real cluster.
     * In that case this method would add a node that was already in the cluster
     * but had previously been disabled. If there were no such node, this method
     * would not do anything.
     */
    public void addNode() {
        throw new NotImplementedException();
    }

    /**
     * This method returns a JobConf object that allows a map reduce job to be run
     * on the cluster.
     */
    public JobConf getJobConf() throws MalformedURLException {
        JobConf conf = new JobConf();
        populateJobConf(conf);
        return conf;
    }

    /**
     * This method populates a JobConf with the information in the HadoopConfDir,
     * plus the recordservice-site.xml found in the sibling recordservice-conf
     * directory.
     */
    public JobConf populateJobConf(JobConf conf) throws MalformedURLException {
        File confDir = new File(clusterConfiguration_.getHadoopConfDir());
        // NOTE(review): listFiles() returns null if confDir does not exist or
        // is not a directory; this will then NPE just as the original did.
        File[] files = confDir.listFiles();
        for (File file : files) {
            if (file.getName().endsWith(".xml")) {
                conf.addResource(file.getAbsoluteFile().toURI().toURL());
            }
        }
        // recordservice-conf is a sibling of the Hadoop conf directory.
        File rsSite =
            new File(confDir.getParentFile(), "recordservice-conf/recordservice-site.xml");
        conf.addResource(rsSite.getAbsoluteFile().toURI().toURL());
        return conf;
    }

    /**
     * This method allows the caller to add environment variables to the JVM.
     * There is no easy way to do this through a simple call, such as there is to
     * read env variables using System.getenv(variableName). Much of the method
     * was written with guidance from stack overflow:
     * http://stackoverflow.com/questions
     * /318239/how-do-i-set-environment-variables-from-java
     *
     * NOTE(review): relies on deep reflection into java.lang.ProcessEnvironment;
     * this may fail (or require --add-opens) on modern JVMs — confirm on the
     * target Java version.
     */
    @SuppressWarnings("unchecked")
    protected static void setEnv(Map<String, String> newenv) {
        try {
            Class<?> processEnvironmentClass = Class.forName("java.lang.ProcessEnvironment");
            Field theEnvironmentField = processEnvironmentClass
                .getDeclaredField("theEnvironment");
            theEnvironmentField.setAccessible(true);
            Map<String, String> env = (Map<String, String>) theEnvironmentField.get(null);
            env.putAll(newenv);
            Field theCaseInsensitiveEnvironmentField = processEnvironmentClass
                .getDeclaredField("theCaseInsensitiveEnvironment");
            theCaseInsensitiveEnvironmentField.setAccessible(true);
            Map<String, String> cienv = (Map<String, String>) theCaseInsensitiveEnvironmentField
                .get(null);
            cienv.putAll(newenv);
        } catch (NoSuchFieldException e) {
            // The case-insensitive map is absent on some platforms; fall back
            // to mutating the map backing System.getenv().
            try {
                Class<?>[] classes = Collections.class.getDeclaredClasses();
                Map<String, String> env = System.getenv();
                for (Class<?> cl : classes) {
                    if ("java.util.Collections$UnmodifiableMap".equals(cl.getName())) {
                        Field field = cl.getDeclaredField("m");
                        field.setAccessible(true);
                        Map<String, String> map = (Map<String, String>) field.get(env);
                        map.clear();
                        map.putAll(newenv);
                    }
                }
            } catch (Exception e2) {
                e2.printStackTrace();
            }
        } catch (Exception e1) {
            e1.printStackTrace();
        }
    }

    /**
     * This class represents a node in a cluster. It contains basic information
     * such as hostname and open ports.
     */
    public static class ClusterNode {
        public String hostname_;
        public int workerPort_;
        public int plannerPort_;
        public int webserverPort_;

        public ClusterNode(String hostname) {
            hostname_ = hostname;
        }

        public ClusterNode(String hostname, int workerPort, int plannerPort, int webserverPort) {
            hostname_ = hostname;
            workerPort_ = workerPort;
            plannerPort_ = plannerPort;
            webserverPort_ = webserverPort;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.zookeeper;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.component.zookeeper.operations.CreateOperation;
import org.apache.camel.component.zookeeper.operations.DeleteOperation;
import org.apache.camel.component.zookeeper.operations.GetChildrenOperation;
import org.apache.camel.component.zookeeper.operations.OperationResult;
import org.apache.camel.component.zookeeper.operations.SetDataOperation;
import org.apache.camel.support.DefaultProducer;
import org.apache.camel.support.ExchangeHelper;
import org.apache.zookeeper.AsyncCallback.StatCallback;
import org.apache.zookeeper.AsyncCallback.VoidCallback;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException.Code;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.lang.String.format;
import static org.apache.camel.component.zookeeper.ZooKeeperUtils.getAclListFromMessage;
import static org.apache.camel.component.zookeeper.ZooKeeperUtils.getCreateMode;
import static org.apache.camel.component.zookeeper.ZooKeeperUtils.getCreateModeFromString;
import static org.apache.camel.component.zookeeper.ZooKeeperUtils.getNodeFromMessage;
import static org.apache.camel.component.zookeeper.ZooKeeperUtils.getPayloadFromExchange;
import static org.apache.camel.component.zookeeper.ZooKeeperUtils.getVersionFromMessage;
/**
* <code>ZooKeeperProducer</code> attempts to set the content of nodes in the {@link ZooKeeper} cluster with the
* payloads of the of the exchanges it receives.
*/
@SuppressWarnings("rawtypes")
public class ZooKeeperProducer extends DefaultProducer {
public static final String ZK_OPERATION_WRITE = "WRITE";
public static final String ZK_OPERATION_DELETE = "DELETE";
private static final Logger LOG = LoggerFactory.getLogger(ZooKeeperProducer.class);
private final ZooKeeperConfiguration configuration;
private ZooKeeperConnectionManager zkm;
private ZooKeeper connection;
/**
 * Creates a producer bound to the given endpoint, caching the endpoint's
 * configuration and connection manager for use while processing exchanges.
 */
public ZooKeeperProducer(ZooKeeperEndpoint endpoint) {
    super(endpoint);
    this.zkm = endpoint.getConnectionManager();
    this.configuration = endpoint.getConfiguration();
}
/**
 * Dispatches the exchange to ZooKeeper. The ZOOKEEPER_OPERATION header
 * selects delete vs. write; an out-capable exchange is processed
 * synchronously (optionally listing children afterwards) while an in-only
 * exchange is handed off asynchronously without waiting for confirmation.
 */
@Override
public void process(Exchange exchange) throws Exception {
    if (connection == null) {
        // Lazily (re)acquire the connection if doStart's handle was lost.
        connection = this.zkm.getConnection();
    }
    ProductionContext context = new ProductionContext(connection, exchange);
    String operation = exchange.getIn().getHeader(ZooKeeperMessage.ZOOKEEPER_OPERATION, String.class);
    boolean isDelete = ZK_OPERATION_DELETE.equals(operation);
    if (!ExchangeHelper.isOutCapable(exchange)) {
        // Fire-and-forget: no reply expected, so do not block the caller.
        if (isDelete) {
            asynchronouslyDeleteNode(connection, context);
        } else {
            asynchronouslySetDataOnNode(connection, context);
        }
        return;
    }
    OperationResult result;
    if (isDelete) {
        if (LOG.isDebugEnabled()) {
            LOG.debug(format("Deleting znode '%s', waiting for confirmation", context.node));
        }
        result = synchronouslyDelete(context);
    } else {
        if (LOG.isDebugEnabled()) {
            LOG.debug(format("Storing data to znode '%s', waiting for confirmation", context.node));
        }
        result = synchronouslySetData(context);
    }
    // Optionally replace the result with the node's child listing.
    if (configuration.isListChildren()) {
        result = listChildren(context);
    }
    updateExchangeWithResult(context, result);
}
@Override
protected void doStart() throws Exception {
connection = zkm.getConnection();
if (LOG.isTraceEnabled()) {
LOG.trace(String.format("Starting zookeeper producer of '%s'", configuration.getPath()));
}
}
@Override
protected void doStop() throws Exception {
super.doStop();
if (LOG.isTraceEnabled()) {
LOG.trace(String.format("Shutting down zookeeper producer of '%s'", configuration.getPath()));
}
zkm.shutdown();
}
private void asynchronouslyDeleteNode(ZooKeeper connection, ProductionContext context) {
if (LOG.isDebugEnabled()) {
LOG.debug(format("Deleting node '%s', not waiting for confirmation", context.node));
}
connection.delete(context.node, context.version, new AsyncDeleteCallback(), context);
}
private void asynchronouslySetDataOnNode(ZooKeeper connection, ProductionContext context) {
if (LOG.isDebugEnabled()) {
LOG.debug(format("Storing data to node '%s', not waiting for confirmation", context.node));
}
connection.setData(context.node, context.payload, context.version, new AsyncSetDataCallback(), context);
}
private void updateExchangeWithResult(ProductionContext context, OperationResult result) {
ZooKeeperMessage out = new ZooKeeperMessage(
getEndpoint().getCamelContext(), context.node, result.getStatistics(), context.in.getHeaders());
if (result.isOk()) {
out.setBody(result.getResult());
} else {
context.exchange.setException(result.getException());
}
context.exchange.setMessage(out);
}
private OperationResult listChildren(ProductionContext context) throws Exception {
return new GetChildrenOperation(context.connection, configuration.getPath()).get();
}
/** Simple container to avoid passing all these around as parameters */
private class ProductionContext {
ZooKeeper connection;
Exchange exchange;
Message in;
byte[] payload;
int version;
String node;
ProductionContext(ZooKeeper connection, Exchange exchange) {
this.connection = connection;
this.exchange = exchange;
this.in = exchange.getIn();
this.node = getNodeFromMessage(in, configuration.getPath());
this.version = getVersionFromMessage(in);
this.payload = getPayloadFromExchange(exchange);
}
}
private class AsyncSetDataCallback implements StatCallback {
@Override
public void processResult(int rc, String node, Object ctx, Stat statistics) {
if (Code.NONODE.equals(Code.get(rc))) {
if (configuration.isCreate()) {
LOG.warn(format("Node '%s' did not exist, creating it...", node));
ProductionContext context = (ProductionContext) ctx;
OperationResult<String> result = null;
try {
result = createNode(context);
} catch (Exception e) {
LOG.error(format("Error trying to create node '%s'", node), e);
}
if (result == null || !result.isOk()) {
LOG.error(format("Error creating node '%s'", node), result.getException());
}
}
} else {
logStoreComplete(node, statistics);
}
}
}
private class AsyncDeleteCallback implements VoidCallback {
@Override
public void processResult(int rc, String path, Object ctx) {
if (LOG.isDebugEnabled()) {
if (LOG.isTraceEnabled()) {
LOG.trace(format("Removed data node '%s'", path));
} else {
LOG.debug(format("Removed data node '%s'", path));
}
}
}
}
private OperationResult<String> createNode(ProductionContext ctx) throws Exception {
CreateOperation create = new CreateOperation(ctx.connection, ctx.node);
create.setPermissions(getAclListFromMessage(ctx.exchange.getIn()));
CreateMode mode = null;
String modeString = configuration.getCreateMode();
if (modeString != null) {
try {
mode = getCreateModeFromString(modeString, CreateMode.EPHEMERAL);
} catch (Exception e) {
}
} else {
mode = getCreateMode(ctx.exchange.getIn(), CreateMode.EPHEMERAL);
}
create.setCreateMode(mode == null ? CreateMode.EPHEMERAL : mode);
create.setData(ctx.payload);
return create.get();
}
/**
* Tries to set the data first and if a no node error is received then an attempt will be made to create it instead.
*/
private OperationResult synchronouslySetData(ProductionContext ctx) throws Exception {
SetDataOperation setData = new SetDataOperation(ctx.connection, ctx.node, ctx.payload);
setData.setVersion(ctx.version);
OperationResult result = setData.get();
if (!result.isOk() && configuration.isCreate() && result.failedDueTo(Code.NONODE)) {
LOG.warn(format("Node '%s' did not exist, creating it.", ctx.node));
result = createNode(ctx);
}
return result;
}
private OperationResult synchronouslyDelete(ProductionContext ctx) throws Exception {
DeleteOperation setData = new DeleteOperation(ctx.connection, ctx.node);
setData.setVersion(ctx.version);
OperationResult result = setData.get();
if (!result.isOk() && configuration.isCreate() && result.failedDueTo(Code.NONODE)) {
LOG.warn(format("Node '%s' did not exist, creating it.", ctx.node));
result = createNode(ctx);
}
return result;
}
private void logStoreComplete(String path, Stat statistics) {
if (LOG.isDebugEnabled()) {
if (LOG.isTraceEnabled()) {
LOG.trace(format("Stored data to node '%s', and receive statistics %s", path, statistics));
} else {
LOG.debug(format("Stored data to node '%s'", path));
}
}
}
}
| |
package de.peeeq.wurstscript.intermediateLang.optimizer;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import de.peeeq.wurstscript.intermediateLang.optimizer.ControlFlowGraph.Node;
import de.peeeq.wurstscript.jassIm.ImAlloc;
import de.peeeq.wurstscript.jassIm.ImConst;
import de.peeeq.wurstscript.jassIm.ImFunction;
import de.peeeq.wurstscript.jassIm.ImProg;
import de.peeeq.wurstscript.jassIm.ImSet;
import de.peeeq.wurstscript.jassIm.ImStmt;
import de.peeeq.wurstscript.jassIm.ImVar;
import de.peeeq.wurstscript.jassIm.ImVarAccess;
import de.peeeq.wurstscript.translation.imtranslation.ImTranslator;
/**
 * Intra-procedural constant and copy propagation: a forward dataflow analysis over the control flow
 * graph of each function that tracks, per node, which local variables are known to hold either a
 * constant or a copy of another local, and then rewrites variable reads accordingly.
 */
public class ConstantAndCopyPropagation {

    private final ImProg prog;
    private final ImTranslator trans;

    public ConstantAndCopyPropagation(ImTranslator trans) {
        this.prog = trans.getImProg();
        this.trans = trans;
    }

    /** Runs the optimization on every function of the program. */
    public void optimize() {
        for (ImFunction func : prog.getFunctions()) {
            optimizeFunc(func);
        }
    }

    /** The abstract value of a variable: either a copy of another variable or a constant. */
    static class Value {
        // exactly one of the two is null
        final @Nullable ImVar copyVar;
        final @Nullable ImConst constantValue;

        public Value(ImVar copyVar) {
            this.copyVar = copyVar;
            this.constantValue = null;
        }

        public Value(ImConst constantValue) {
            this.copyVar = null;
            this.constantValue = constantValue;
        }

        @Override
        public boolean equals(@Nullable Object obj) {
            if (obj instanceof Value) {
                return equalValue((Value) obj);
            }
            return false;
        }

        @Override
        public int hashCode() {
            // consistent with equals: copies compare by ImVar identity; constants compare via
            // equalValue, whose hashing behavior we cannot rely on, so all constants share a bucket
            return copyVar != null ? System.identityHashCode(copyVar) : 0;
        }

        public boolean equalValue(Value other) {
            if (copyVar != null && other.copyVar != null) {
                // copies are equal only when they refer to the very same variable
                return copyVar == other.copyVar;
            } else if (constantValue != null && other.constantValue != null) {
                return constantValue.equalValue(other.constantValue);
            }
            return false;
        }

        @Override
        public String toString() {
            if (copyVar != null) {
                return "copy of " + copyVar;
            } else {
                return "constant " + constantValue;
            }
        }
    }

    /** Per-node dataflow facts: knowledge before the statement (in) and after it (out). */
    static class Knowledge {
        Map<ImVar, Value> varKnowledge = new HashMap<>();
        Map<ImVar, Value> varKnowledgeOut = new HashMap<>();

        @Override
        public String toString() {
            return "[in =" + varKnowledge + ", out=" + varKnowledgeOut + "]";
        }
    }

    private void optimizeFunc(ImFunction func) {
        ControlFlowGraph cfg = new ControlFlowGraph(func.getBody());
        Map<Node, Knowledge> knowledge = calculateKnowledge(cfg);
        rewriteCode(cfg, knowledge);
    }

    /** Replaces each variable read with its known constant, or chases copy chains to their source. */
    private void rewriteCode(ControlFlowGraph cfg, Map<Node, Knowledge> knowledge) {
        for (Node node : cfg.getNodes()) {
            ImStmt stmt = node.getStmt();
            if (stmt == null) {
                continue;
            }
            Knowledge kn = knowledge.get(node);
            stmt.accept(new ImStmt.DefaultVisitor() {
                @Override
                public void visit(ImVarAccess va) {
                    Value val = kn.varKnowledge.get(va.getVar());
                    if (val == null) {
                        return;
                    }
                    if (val.constantValue != null) {
                        va.replaceWith(val.constantValue.copy());
                    } else if (val.copyVar != null) {
                        va.setVar(val.copyVar);
                        // recursive call, because maybe it is possible to also replace the new var
                        visit(va);
                    }
                }
            });
        }
    }

    /**
     * Worklist fixpoint iteration: a node's in-knowledge is the intersection of its predecessors'
     * out-knowledge; an {@link ImSet} then generates, updates or kills facts on the way out.
     */
    private Map<Node, Knowledge> calculateKnowledge(ControlFlowGraph cfg) {
        Map<Node, Knowledge> knowledge = new HashMap<>();
        // initialize with empty knowledge:
        for (Node n : cfg.getNodes()) {
            knowledge.put(n, new Knowledge());
        }
        Deque<Node> todo = new ArrayDeque<>();
        todo.addAll(cfg.getNodes());
        while (!todo.isEmpty()) {
            Node n = todo.poll();
            Knowledge kn = knowledge.get(n);
            // get knowledge from predecessor out: keep only facts all predecessors agree on
            HashMap<ImVar, Value> newKnowledge = new HashMap<>();
            if (!n.getPredecessors().isEmpty()) {
                Node pred1 = n.getPredecessors().get(0);
                Map<ImVar, Value> predKnowledgeOut = knowledge.get(pred1).varKnowledgeOut;
                for (Entry<ImVar, Value> e : predKnowledgeOut.entrySet()) {
                    ImVar var = e.getKey();
                    Value val = e.getValue();
                    boolean allSame = true;
                    for (int i = 1; i < n.getPredecessors().size(); i++) {
                        Node predi = n.getPredecessors().get(i);
                        Value predi_val = knowledge.get(predi).varKnowledgeOut.get(var);
                        if (predi_val == null || !predi_val.equalValue(val)) {
                            allSame = false;
                            break;
                        }
                    }
                    if (allSame) {
                        newKnowledge.put(var, val);
                    }
                }
            }
            // at the output get all from the input knowledge
            HashMap<ImVar, Value> newOut = new HashMap<>(newKnowledge);
            ImStmt stmt = n.getStmt();
            if (stmt instanceof ImSet) {
                ImSet imSet = (ImSet) stmt;
                ImVar var = imSet.getLeft();
                // only track locals; globals may change behind our back
                if (!var.isGlobal()) {
                    Value newValue = null;
                    if (imSet.getRight() instanceof ImConst) {
                        ImConst imConst = (ImConst) imSet.getRight();
                        newValue = new Value(imConst);
                    } else if (imSet.getRight() instanceof ImVarAccess) {
                        ImVarAccess imVarAccess = (ImVarAccess) imSet.getRight();
                        if (!imVarAccess.getVar().isGlobal()) {
                            newValue = new Value(imVarAccess.getVar());
                        }
                    }
                    if (newValue == null) {
                        // invalidate old value
                        newOut.remove(var);
                    } else {
                        newOut.put(var, newValue);
                    }
                    // invalidate copies of the lhs
                    // for example:
                    // x = a; [x->a]
                    // y = b; [x->a, y->b]
                    // a = 5; [y->b, a->5] // here [x->a] has been invalidated
                    Value copyOfLhs = new Value(var); // hoisted: loop-invariant
                    newOut.values().removeIf(value -> value.equalValue(copyOfLhs));
                }
            }
            // if there are changes, revisit successors:
            if (!kn.varKnowledgeOut.equals(newOut)) {
                todo.addAll(n.getSuccessors());
            }
            // update knowledge
            kn.varKnowledge = newKnowledge;
            kn.varKnowledgeOut = newOut;
        }
        return knowledge;
    }
}
| |
package server;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import util.PeekableIterator;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
@SuppressWarnings("unchecked")
class GameView {

    /** Snapshot of one card pile as shown to the client. (static: needs no enclosing GameView) */
    private static class PileView {
        String topCard;
        int cost;
        int size;
        int embargoTokens;
        boolean hasTradeRouteToken;
    }

    /** Snapshot of an opponent's publicly visible state. (static: needs no enclosing GameView) */
    private static class OpponentView {
        String username;
        int handSize;
        int drawSize;
        int discardSize;
        int victoryPoints;
        List<Count> durations;
        List<Count> inPlay;
    }

    /** A card name paired with how many copies of it appear in some zone. */
    private static class Count {
        String card;
        int count;

        @Override
        public boolean equals(Object other) {
            if (!(other instanceof Count)) {
                return false;
            }
            Count otherCount = (Count) other;
            return otherCount.card.equals(card)
                    && otherCount.count == count;
        }

        @Override
        public int hashCode() {
            return Objects.hash(card, count);
        }
    }

    private Map<String, PileView> pileViews;
    private Map<String, Boolean> prizeCards;
    private List<Count> trash;
    private int tradeRoute;
    private List<OpponentView> opponentViews;
    //String waitingOn; TODO
    private int drawSize;
    private int discardSize;
    private String actions;
    private String buys;
    private int coins;
    private boolean isAutoplayingTreasures;
    private int coinTokens;
    private int pirateShip;
    private int victoryTokens;
    private int victoryPoints;
    private List<Count> nativeVillage;
    private List<Count> island;
    private List<Count> durations;
    private List<Count> inPlay;
    private List<Count> hand;

    /** Captures everything the given player may currently see about the game. */
    GameView(Player player, Game game) {
        pileViews = new HashMap<>();
        for (Card card : game.supply.keySet()) {
            PileView pileView = new PileView();
            pileView.topCard = card.toString();
            pileView.cost = card.cost(game);
            pileView.size = game.supply.get(card);
            pileView.embargoTokens = game.embargoTokens.get(card);
            pileView.hasTradeRouteToken = game.tradeRouteTokenedPiles.contains(card);
            pileViews.put(card.toString(), pileView);
        }
        for (Card card : game.nonSupply.keySet()) {
            PileView pileView = new PileView();
            pileView.topCard = card.toString();
            pileView.cost = card.cost(game);
            pileView.size = game.nonSupply.get(card);
            // non-supply piles cannot be embargoed or trade-route-tokened
            pileView.embargoTokens = 0;
            pileView.hasTradeRouteToken = false;
            pileViews.put(card.toString(), pileView);
        }
        for (Card.MixedPileId mixedPile : game.mixedPiles.keySet()) {
            PileView pileView = new PileView();
            List<Card> mixedPileCards = game.mixedPiles.get(mixedPile);
            if (mixedPileCards.isEmpty()) {
                pileView.topCard = "";
                pileView.cost = -1;
            } else {
                Card topCard = mixedPileCards.get(0);
                pileView.topCard = topCard.toString();
                pileView.cost = topCard.cost(game);
            }
            pileView.size = mixedPileCards.size();
            pileView.embargoTokens = game.mixedPileEmbargoTokens.get(mixedPile);
            pileView.hasTradeRouteToken = false; // mixed piles cannot have trade route tokens (in the current implementation)
            pileViews.put(mixedPile.toString(), pileView);
        }
        if (game.supply.containsKey(Cards.TOURNAMENT)) {
            prizeCards = Cards.PRIZE_CARDS.stream()
                    .collect(Collectors.toMap(Card::toString, game.prizeCards::contains));
        } else {
            prizeCards = Collections.emptyMap();
        }
        trash = counts(game.trash);
        tradeRoute = game.tradeRouteMat;
        opponentViews = game.getOpponents(player).stream()
                .map(opponent -> {
                    OpponentView opponentView = new OpponentView();
                    opponentView.username = opponent.username;
                    opponentView.handSize = opponent.getHand().size();
                    opponentView.drawSize = opponent.getDraw().size();
                    opponentView.discardSize = opponent.getDiscard().size();
                    opponentView.victoryPoints = victoryPoints(opponent);
                    opponentView.durations = counts(opponent.getDurationSetAsideCards());
                    opponentView.inPlay = adjacentCounts(opponent.getPlay());
                    return opponentView;
                }).collect(Collectors.toList());
        drawSize = player.getDraw().size();
        discardSize = player.getDiscard().size();
        // actions are only shown during the player's own action phase; buys during the player's turn
        if (game.currentPlayer() == player && !game.inBuyPhase) {
            actions = "" + player.actions;
        } else {
            actions = "";
        }
        if (game.currentPlayer() == player) {
            buys = "" + player.buys;
        } else {
            buys = "";
        }
        coins = player.getUsableCoins();
        isAutoplayingTreasures = player.isAutoplayingTreasures();
        coinTokens = player.getCoinTokens();
        pirateShip = player.getPirateShipTokens();
        victoryTokens = player.getVictoryTokens();
        victoryPoints = victoryPoints(player);
        nativeVillage = counts(player.nativeVillageMat);
        island = counts(player.islandMat);
        durations = counts(player.durationSetAsideCards);
        inPlay = adjacentCounts(player.getPlay());
        hand = handCounts(player);
    }

    /** Groups cards by name and returns per-card counts in hand display order. */
    private static List<Count> counts(List<Card> cards) {
        // group cards by name and count each group
        Map<Card, Long> longCounts = cards.stream()
                .collect(Collectors.groupingBy(Card::toString, Collectors.counting()))
                .entrySet().stream()
                .collect(Collectors.toMap(e -> Cards.fromName(e.getKey()), Map.Entry::getValue));
        // sort groups
        List<Card> sorted = new ArrayList<>(longCounts.keySet());
        sorted.sort(Player.HAND_ORDER_COMPARATOR);
        // return as counts
        return sorted.stream()
                .map(card -> {
                    Count count = new Count();
                    count.card = card.toString();
                    count.count = longCounts.get(card).intValue();
                    return count;
                }).collect(Collectors.toList());
    }

    /** Collapses runs of identical adjacent cards into counts, preserving the original order. */
    private static List<Count> adjacentCounts(List<Card> cards) {
        List<Count> counts = new ArrayList<>();
        for (PeekableIterator<Card> iter = new PeekableIterator<>(cards); iter.hasNext(); ) {
            Card card = iter.next();
            int count = 1;
            while (iter.hasNext() && iter.peek() == card) {
                iter.next();
                count++;
            }
            Count cardCount = new Count();
            cardCount.card = card.toString();
            cardCount.count = count;
            counts.add(cardCount);
        }
        return counts;
    }

    /** Like {@link #counts}, but maintains the player's persistent hand order as a side effect. */
    private static List<Count> handCounts(Player player) {
        // group cards by name and count each group
        Map<Card, Long> longCounts = player.hand.stream()
                .collect(Collectors.groupingBy(Card::toString, Collectors.counting()))
                .entrySet().stream()
                .collect(Collectors.toMap(e -> Cards.fromName(e.getKey()), Map.Entry::getValue));
        // remove cards from the hand order that are no longer in the hand
        player.handOrder.removeIf(card -> !longCounts.containsKey(card));
        // add cards to the end of the hand order that were not in the hand before
        Set<Card> newCards = new HashSet<>(longCounts.keySet());
        player.handOrder.forEach(newCards::remove);
        List<Card> newCardsOrdered = new ArrayList<>(newCards);
        newCardsOrdered.sort(Player.HAND_ORDER_COMPARATOR);
        player.handOrder.addAll(newCardsOrdered);
        // order counts by hand order
        return player.handOrder.stream()
                .map(card -> {
                    Count count = new Count();
                    count.card = card.toString();
                    count.count = longCounts.get(card).intValue();
                    return count;
                }).collect(Collectors.toList());
    }

    /** Victory points from the deck's victory cards and curses plus the player's victory tokens. */
    private static int victoryPoints(Player player) {
        List<Card> deck = player.getDeck();
        int deckPoints = deck.stream()
                .filter(c -> c.isVictory() || c == Cards.CURSE)
                .map(c -> c.victoryValue(deck))
                .mapToInt(Integer::intValue)
                .sum();
        return deckPoints + player.getVictoryTokens();
    }

    /** Helper that writes a field into the update JSON only when it differs from the previous view. */
    private static class Differ<ViewType> {
        ViewType previous;
        ViewType current;
        JSONObject update;

        Differ(ViewType previous, ViewType current, JSONObject update) {
            this.previous = previous;
            this.current = current;
            this.update = update;
        }

        void diff(Function<ViewType, Object> getter, String tag) {
            diff(getter, tag, Function.identity());
        }

        <T> void diff(Function<ViewType, T> getter, String tag, Function<T, Object> toJson) {
            T currentValue = getter.apply(current);
            // previous == null means "full update": emit every field
            if (previous == null || !getter.apply(previous).equals(currentValue)) {
                update.put(tag, toJson.apply(currentValue));
            }
        }
    }

    /** A full update containing every field, as sent when a client first (re)connects. */
    JSONObject completeUpdate() {
        return computeUpdate(null);
    }

    /**
     * Builds an "updateGameView" command containing only the fields that changed since
     * {@code previousView} (or all fields when {@code previousView} is null).
     */
    JSONObject computeUpdate(GameView previousView) {
        JSONObject command = new JSONObject();
        command.put("command", "updateGameView");
        JSONObject updates = new JSONObject();
        Differ<GameView> differ = new Differ<>(previousView, this, updates);
        Map<String, JSONObject> pileUpdates = pileViews.keySet().stream()
                .map(pile -> new AbstractMap.SimpleEntry<>(pile, pileUpdate(previousView == null ? null : previousView.pileViews.get(pile), pileViews.get(pile))))
                .filter(e -> !e.getValue().isEmpty())
                .collect(Collectors.toMap(AbstractMap.SimpleEntry::getKey, AbstractMap.SimpleEntry::getValue));
        if (!pileUpdates.isEmpty()) {
            JSONObject jsonPileUpdates = new JSONObject();
            pileUpdates.forEach(jsonPileUpdates::put);
            updates.put("piles", jsonPileUpdates);
        }
        Map<String, Boolean> prizeCardUpdates = prizeCards.keySet().stream()
                // compare by value, not reference: Map.get returns boxed Booleans (and null for a
                // previously unknown prize), so != was an identity comparison
                .filter(prize -> previousView == null
                        || !Objects.equals(previousView.prizeCards.get(prize), prizeCards.get(prize)))
                .collect(Collectors.toMap(Function.identity(), prizeCards::get));
        if (!prizeCardUpdates.isEmpty()) {
            updates.put("prizeCards", prizeCardUpdates);
        }
        Function<List<Count>, Object> countsToHtmlList = counts -> {
            if (counts.isEmpty()) {
                return "(empty)";
            }
            Map<Card, Integer> map = new HashMap<>();
            counts.forEach(c -> map.put(Cards.fromName(c.card), c.count));
            return Card.htmlList(map);
        };
        differ.diff(v -> v.trash, "trash", countsToHtmlList);
        differ.diff(v -> v.tradeRoute, "tradeRoute");
        JSONObject opponentUpdates = new JSONObject();
        for (int i = 0; i < opponentViews.size(); i++) {
            JSONObject opponentUpdate = opponentUpdate(previousView == null ? null : previousView.opponentViews.get(i), opponentViews.get(i));
            if (!opponentUpdate.isEmpty()) {
                opponentUpdates.put(i + "", opponentUpdate);
            }
        }
        if (!opponentUpdates.isEmpty()) {
            updates.put("opponents", opponentUpdates);
        }
        differ.diff(v -> v.drawSize, "drawSize");
        differ.diff(v -> v.discardSize, "discardSize");
        differ.diff(v -> v.actions, "actions");
        differ.diff(v -> v.buys, "buys");
        differ.diff(v -> v.coins, "coins");
        differ.diff(v -> v.isAutoplayingTreasures, "isAutoplayingTreasures");
        differ.diff(v -> v.coinTokens, "coinTokens");
        differ.diff(v -> v.pirateShip, "pirateShip");
        differ.diff(v -> v.victoryTokens, "victoryTokens");
        differ.diff(v -> v.victoryPoints, "victoryPoints");
        differ.diff(v -> v.nativeVillage, "nativeVillage", GameView::toJson);
        differ.diff(v -> v.island, "island", GameView::toJson);
        differ.diff(v -> v.durations, "durations", GameView::toJson);
        differ.diff(v -> v.inPlay, "inPlay", GameView::toJson);
        differ.diff(v -> v.hand, "hand", GameView::toJson);
        command.put("updates", updates);
        return command;
    }

    private static JSONObject pileUpdate(PileView previous, PileView current) {
        JSONObject pileUpdate = new JSONObject();
        Differ<PileView> differ = new Differ<>(previous, current, pileUpdate);
        differ.diff(v -> v.topCard, "topCard");
        differ.diff(v -> v.cost, "cost");
        differ.diff(v -> v.size, "size");
        differ.diff(v -> v.embargoTokens, "embargoTokens");
        differ.diff(v -> v.hasTradeRouteToken, "hasTradeRouteToken");
        return pileUpdate;
    }

    private static JSONObject opponentUpdate(OpponentView previous, OpponentView current) {
        JSONObject opponentUpdate = new JSONObject();
        Differ<OpponentView> differ = new Differ<>(previous, current, opponentUpdate);
        differ.diff(v -> v.username, "username");
        differ.diff(v -> v.handSize, "handSize");
        differ.diff(v -> v.drawSize, "drawSize");
        differ.diff(v -> v.discardSize, "discardSize");
        differ.diff(v -> v.victoryPoints, "victoryPoints");
        differ.diff(v -> v.durations, "durations", GameView::toJson);
        differ.diff(v -> v.inPlay, "inPlay", GameView::toJson);
        return opponentUpdate;
    }

    private static JSONArray toJson(List<Count> counts) {
        JSONArray json = new JSONArray();
        counts.forEach(count -> {
            JSONObject jsonCount = new JSONObject();
            jsonCount.put("card", count.card);
            jsonCount.put("count", count.count);
            json.add(jsonCount);
        });
        return json;
    }
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.client.file;
import alluxio.AlluxioURI;
import alluxio.Client;
import alluxio.exception.status.AlluxioStatusException;
import alluxio.exception.status.AlreadyExistsException;
import alluxio.exception.status.NotFoundException;
import alluxio.grpc.CheckAccessPOptions;
import alluxio.grpc.CheckConsistencyPOptions;
import alluxio.grpc.CompleteFilePOptions;
import alluxio.grpc.CreateDirectoryPOptions;
import alluxio.grpc.CreateFilePOptions;
import alluxio.grpc.DeletePOptions;
import alluxio.grpc.ExistsPOptions;
import alluxio.grpc.FreePOptions;
import alluxio.grpc.GetStatusPOptions;
import alluxio.grpc.ListStatusPOptions;
import alluxio.grpc.MountPOptions;
import alluxio.grpc.RenamePOptions;
import alluxio.grpc.ScheduleAsyncPersistencePOptions;
import alluxio.grpc.SetAclAction;
import alluxio.grpc.SetAclPOptions;
import alluxio.grpc.SetAttributePOptions;
import alluxio.grpc.UpdateUfsModePOptions;
import alluxio.master.MasterClientContext;
import alluxio.security.authorization.AclEntry;
import alluxio.wire.MountPointInfo;
import alluxio.wire.SyncPointInfo;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
/**
 * A client to use for interacting with a file system master.
 */
public interface FileSystemMasterClient extends Client {
    /**
     * Factory for {@link FileSystemMasterClient}.
     */
    class Factory {
        private Factory() {} // prevent instantiation

        /**
         * Factory method for {@link FileSystemMasterClient}.
         *
         * @param conf master client configuration
         * @return a new {@link FileSystemMasterClient} instance
         */
        public static FileSystemMasterClient create(MasterClientContext conf) {
            return new RetryHandlingFileSystemMasterClient(conf);
        }
    }

    /**
     * Check access to a path.
     *
     * @param path the path to check
     * @param options method options
     * @throws alluxio.exception.AccessControlException if the access is denied
     */
    void checkAccess(AlluxioURI path, CheckAccessPOptions options)
            throws AlluxioStatusException;

    /**
     * Checks the consistency of Alluxio metadata against the under storage for all files and
     * directories in a given subtree.
     *
     * @param path the root of the subtree to check
     * @param options method options
     * @return a list of inconsistent files and directories
     */
    List<AlluxioURI> checkConsistency(AlluxioURI path, CheckConsistencyPOptions options)
            throws AlluxioStatusException;

    /**
     * Creates a new directory.
     *
     * @param path the directory path
     * @param options method options
     * @throws AlreadyExistsException if the directory already exists
     */
    void createDirectory(AlluxioURI path, CreateDirectoryPOptions options)
            throws AlluxioStatusException;

    /**
     * Creates a new file.
     *
     * @param path the file path
     * @param options method options
     * @throws AlreadyExistsException if the file already exists
     * @return the uri status of the newly created file
     */
    URIStatus createFile(AlluxioURI path, CreateFilePOptions options) throws AlluxioStatusException;

    /**
     * Marks a file as completed.
     *
     * @param path the file path
     * @param options the method options
     */
    void completeFile(AlluxioURI path, CompleteFilePOptions options) throws AlluxioStatusException;

    /**
     * Deletes a file or a directory.
     *
     * @param path the path to delete
     * @param options method options
     */
    void delete(AlluxioURI path, DeletePOptions options) throws AlluxioStatusException;

    /**
     * Checks whether a file or directory exists.
     *
     * @param path the file path to check existence
     * @param options the method options
     * @return whether the file path exists
     */
    boolean exists(AlluxioURI path, ExistsPOptions options) throws AlluxioStatusException;

    /**
     * Frees a file.
     *
     * @param path the path to free
     * @param options method options
     * @throws NotFoundException if the path does not exist
     */
    void free(AlluxioURI path, FreePOptions options) throws AlluxioStatusException;

    /**
     * @param fileId a file id
     * @return the file path for the given file id
     */
    String getFilePath(long fileId) throws AlluxioStatusException;

    /**
     * @param path the file path
     * @param options the getStatus options
     * @return the file info for the given path
     * @throws NotFoundException if the path does not exist
     */
    URIStatus getStatus(AlluxioURI path, GetStatusPOptions options) throws AlluxioStatusException;

    /**
     * @param path the file path
     * @return the next blockId for the file
     */
    long getNewBlockIdForFile(AlluxioURI path) throws AlluxioStatusException;

    /**
     * Gets the list of paths that are currently being actively synced.
     *
     * @return the list of paths
     */
    List<SyncPointInfo> getSyncPathList() throws AlluxioStatusException;

    /**
     * Performs a specific action on each {@code URIStatus} in the result of {@link #listStatus}.
     * This method is preferred when iterating over directories with a large number of files or
     * sub-directories inside. The caller can proceed with partial result without waiting for all
     * result returned.
     *
     * @param path the path to list information about
     * @param options options to associate with this operation
     * @param action action to apply on each {@code URIStatus}
     * @throws NotFoundException if the path does not exist
     */
    void iterateStatus(AlluxioURI path, ListStatusPOptions options,
            Consumer<? super URIStatus> action) throws AlluxioStatusException;

    /**
     * @param path the path to list
     * @param options the listStatus options
     * @return the list of file information for the given path
     * @throws NotFoundException if the path does not exist
     */
    List<URIStatus> listStatus(AlluxioURI path, ListStatusPOptions options)
            throws AlluxioStatusException;

    /**
     * Mounts the given UFS path under the given Alluxio path.
     *
     * @param alluxioPath the Alluxio path
     * @param ufsPath the UFS path
     * @param options mount options
     */
    void mount(AlluxioURI alluxioPath, AlluxioURI ufsPath, MountPOptions options)
            throws AlluxioStatusException;

    /**
     * Updates options of a mount point for the given Alluxio path.
     *
     * @param alluxioPath the Alluxio path
     * @param options mount options
     */
    void updateMount(AlluxioURI alluxioPath, MountPOptions options) throws AlluxioStatusException;

    /**
     * Lists all mount points and their corresponding under storage addresses.
     *
     * @return a map from String to {@link MountPointInfo}
     */
    Map<String, MountPointInfo> getMountTable() throws AlluxioStatusException;

    /**
     * Renames a file or a directory.
     *
     * @param src the path to rename
     * @param dst new file path
     * @throws NotFoundException if the path does not exist
     */
    void rename(AlluxioURI src, AlluxioURI dst) throws AlluxioStatusException;

    /**
     * Renames a file or a directory.
     *
     * @param src the path to rename
     * @param dst new file path
     * @param options rename options
     * @throws NotFoundException if the path does not exist
     */
    void rename(AlluxioURI src, AlluxioURI dst, RenamePOptions options) throws AlluxioStatusException;

    /**
     * Reverse resolve a ufs uri.
     *
     * @param ufsUri the ufs uri
     * @return the alluxio path for the ufsUri
     * @throws AlluxioStatusException if the ufs uri cannot be resolved to an Alluxio path
     */
    AlluxioURI reverseResolve(AlluxioURI ufsUri) throws AlluxioStatusException;

    /**
     * Sets the ACL for a path.
     *
     * @param path the file or directory path
     * @param action the set action to perform
     * @param entries the ACL entries to use
     * @param options the options for setting ACL
     * @throws NotFoundException if the path does not exist
     */
    void setAcl(AlluxioURI path, SetAclAction action, List<AclEntry> entries, SetAclPOptions options)
            throws AlluxioStatusException;

    /**
     * Sets the file or directory attributes.
     *
     * @param path the file or directory path
     * @param options the file or directory attribute options to be set
     * @throws NotFoundException if the path does not exist
     */
    void setAttribute(AlluxioURI path, SetAttributePOptions options) throws AlluxioStatusException;

    /**
     * Start the active syncing process for a specified path.
     *
     * @param path the file or directory to be synced
     * @throws AlluxioStatusException if the syncing process cannot be started
     */
    void startSync(AlluxioURI path) throws AlluxioStatusException;

    /**
     * Stop the active syncing process for a specified path.
     *
     * @param path the file or directory to stop syncing
     * @throws AlluxioStatusException if the syncing process cannot be stopped
     */
    void stopSync(AlluxioURI path) throws AlluxioStatusException;

    /**
     * Schedules the async persistence of the given file.
     *
     * @param path the file path
     * @param options options to use when scheduling the persist
     */
    void scheduleAsyncPersist(AlluxioURI path, ScheduleAsyncPersistencePOptions options)
            throws AlluxioStatusException;

    /**
     * Unmounts the given Alluxio path.
     *
     * @param alluxioPath the Alluxio path
     */
    void unmount(AlluxioURI alluxioPath) throws AlluxioStatusException;

    /**
     * Updates the operation mode for the given ufs path. The path is required to be the scheme and
     * authority only. For example, to update the mode for under storage at hdfs://ns/folder1
     * specify the argument as hdfs://ns/. Note: the mode for any other mounted under storage which
     * shares the prefix (such as hdfs://ns/folder2) is also updated.
     *
     * @param ufsUri the ufs path
     * @param options the options to update ufs operation mode
     */
    void updateUfsMode(AlluxioURI ufsUri, UpdateUfsModePOptions options)
            throws AlluxioStatusException;

    /**
     * @return the state lock waiters and holders thread identifiers
     */
    List<String> getStateLockHolders() throws AlluxioStatusException;
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.ml.integration;
import org.apache.lucene.util.Constants;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.core.ml.action.DeleteForecastAction;
import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;
import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits;
import org.elasticsearch.xpack.core.ml.job.config.DataDescription;
import org.elasticsearch.xpack.core.ml.job.config.Detector;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.results.Bucket;
import org.elasticsearch.xpack.core.ml.job.results.Forecast;
import org.elasticsearch.xpack.core.ml.job.results.ForecastRequestStats;
import org.junit.After;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.stream.Collectors;
import static org.elasticsearch.xpack.core.ml.job.messages.Messages.JOB_FORECAST_NATIVE_PROCESS_KILLED;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
/**
 * Integration tests for the ML forecast feature: creating forecasts with
 * different duration/expiry combinations, deleting forecasts (by id, by
 * wildcard and with {@code _all}), and verifying behaviour under memory
 * pressure, disk overflow and forced job shutdown.
 */
public class ForecastIT extends MlNativeAutodetectIntegTestCase {

    @After
    public void tearDownData() {
        cleanUp();
    }

    /** Forecasts a single mean(value) series and checks records for three duration/expiry variants. */
    public void testSingleSeries() throws Exception {
        Detector.Builder detector = new Detector.Builder("mean", "value");
        TimeValue bucketSpan = TimeValue.timeValueHours(1);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
        analysisConfig.setBucketSpan(bucketSpan);
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setTimeFormat("epoch");
        Job.Builder job = new Job.Builder("forecast-it-test-single-series");
        job.setAnalysisConfig(analysisConfig);
        job.setDataDescription(dataDescription);
        putJob(job);
        openJob(job.getId());
        long now = Instant.now().getEpochSecond();
        long timestamp = now - 50 * bucketSpan.seconds();
        List<String> data = new ArrayList<>();
        while (timestamp < now) {
            // Two records per bucket (10.0 and 30.0) so the per-bucket mean is 20.0.
            data.add(createJsonRecord(createRecord(timestamp, 10.0)));
            data.add(createJsonRecord(createRecord(timestamp, 30.0)));
            timestamp += bucketSpan.seconds();
        }
        postData(job.getId(), data.stream().collect(Collectors.joining()));
        flushJob(job.getId(), false);
        // Now we can start doing forecast requests
        String forecastIdDefaultDurationDefaultExpiry = forecast(job.getId(), null, null);
        String forecastIdDuration1HourNoExpiry = forecast(job.getId(), TimeValue.timeValueHours(1), TimeValue.ZERO);
        String forecastIdDuration3HoursExpiresIn24Hours = forecast(job.getId(), TimeValue.timeValueHours(3), TimeValue.timeValueHours(24));
        waitForecastToFinish(job.getId(), forecastIdDefaultDurationDefaultExpiry);
        waitForecastToFinish(job.getId(), forecastIdDuration1HourNoExpiry);
        waitForecastToFinish(job.getId(), forecastIdDuration3HoursExpiresIn24Hours);
        closeJob(job.getId());
        List<Bucket> buckets = getBuckets(job.getId());
        Bucket lastBucket = buckets.get(buckets.size() - 1);
        long lastBucketTime = lastBucket.getTimestamp().getTime();
        // Now let's verify forecasts
        double expectedForecastValue = 20.0;
        List<ForecastRequestStats> forecastStats = getForecastStats();
        assertThat(forecastStats.size(), equalTo(3));
        Map<String, ForecastRequestStats> idToForecastStats = new HashMap<>();
        forecastStats.forEach(f -> idToForecastStats.put(f.getForecastId(), f));
        {
            // Default duration is 24h and default expiry is 14 days from creation.
            ForecastRequestStats forecastDefaultDurationDefaultExpiry = idToForecastStats.get(forecastIdDefaultDurationDefaultExpiry);
            assertThat(forecastDefaultDurationDefaultExpiry.getExpiryTime().toEpochMilli(),
                equalTo(forecastDefaultDurationDefaultExpiry.getCreateTime().toEpochMilli()
                    + TimeValue.timeValueHours(14 * 24).getMillis()));
            List<Forecast> forecasts = getForecasts(job.getId(), forecastDefaultDurationDefaultExpiry);
            assertThat(forecastDefaultDurationDefaultExpiry.getRecordCount(), equalTo(24L));
            assertThat(forecasts.size(), equalTo(24));
            assertThat(forecasts.get(0).getTimestamp().getTime(), equalTo(lastBucketTime));
            for (int i = 0; i < forecasts.size(); i++) {
                Forecast forecast = forecasts.get(i);
                assertThat(forecast.getTimestamp().getTime(), equalTo(lastBucketTime + i * bucketSpan.getMillis()));
                assertThat(forecast.getBucketSpan(), equalTo(bucketSpan.getSeconds()));
                assertThat(forecast.getForecastPrediction(), closeTo(expectedForecastValue, 0.01));
            }
        }
        {
            // TimeValue.ZERO expiry means "never expires", represented as Instant.EPOCH.
            ForecastRequestStats forecastDuration1HourNoExpiry = idToForecastStats.get(forecastIdDuration1HourNoExpiry);
            assertThat(forecastDuration1HourNoExpiry.getExpiryTime(), equalTo(Instant.EPOCH));
            List<Forecast> forecasts = getForecasts(job.getId(), forecastDuration1HourNoExpiry);
            assertThat(forecastDuration1HourNoExpiry.getRecordCount(), equalTo(1L));
            assertThat(forecasts.size(), equalTo(1));
            assertThat(forecasts.get(0).getTimestamp().getTime(), equalTo(lastBucketTime));
            for (int i = 0; i < forecasts.size(); i++) {
                Forecast forecast = forecasts.get(i);
                assertThat(forecast.getTimestamp().getTime(), equalTo(lastBucketTime + i * bucketSpan.getMillis()));
                assertThat(forecast.getBucketSpan(), equalTo(bucketSpan.getSeconds()));
                assertThat(forecast.getForecastPrediction(), closeTo(expectedForecastValue, 0.01));
            }
        }
        {
            ForecastRequestStats forecastDuration3HoursExpiresIn24Hours = idToForecastStats.get(forecastIdDuration3HoursExpiresIn24Hours);
            assertThat(forecastDuration3HoursExpiresIn24Hours.getExpiryTime().toEpochMilli(),
                equalTo(forecastDuration3HoursExpiresIn24Hours.getCreateTime().toEpochMilli()
                    + TimeValue.timeValueHours(24).getMillis()));
            List<Forecast> forecasts = getForecasts(job.getId(), forecastDuration3HoursExpiresIn24Hours);
            assertThat(forecastDuration3HoursExpiresIn24Hours.getRecordCount(), equalTo(3L));
            assertThat(forecasts.size(), equalTo(3));
            assertThat(forecasts.get(0).getTimestamp().getTime(), equalTo(lastBucketTime));
            for (int i = 0; i < forecasts.size(); i++) {
                Forecast forecast = forecasts.get(i);
                assertThat(forecast.getTimestamp().getTime(), equalTo(lastBucketTime + i * bucketSpan.getMillis()));
                assertThat(forecast.getBucketSpan(), equalTo(bucketSpan.getSeconds()));
                assertThat(forecast.getForecastPrediction(), closeTo(expectedForecastValue, 0.01));
            }
        }
    }

    /** A forecast duration shorter than the bucket span must be rejected. */
    public void testDurationCannotBeLessThanBucketSpan() {
        Detector.Builder detector = new Detector.Builder("mean", "value");
        TimeValue bucketSpan = TimeValue.timeValueHours(1);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
        analysisConfig.setBucketSpan(bucketSpan);
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setTimeFormat("epoch");
        Job.Builder job = new Job.Builder("forecast-it-test-duration-bucket-span");
        job.setAnalysisConfig(analysisConfig);
        job.setDataDescription(dataDescription);
        putJob(job);
        openJob(job.getId());
        ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> forecast(job.getId(),
            TimeValue.timeValueMinutes(10), null));
        assertThat(e.getMessage(),
            equalTo("[duration] must be greater or equal to the bucket span: [10m/1h]"));
    }

    /** Forecasting requires the job to have processed data first. */
    public void testNoData() {
        Detector.Builder detector = new Detector.Builder("mean", "value");
        TimeValue bucketSpan = TimeValue.timeValueMinutes(1);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
        analysisConfig.setBucketSpan(bucketSpan);
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setTimeFormat("epoch");
        Job.Builder job = new Job.Builder("forecast-it-test-no-data");
        job.setAnalysisConfig(analysisConfig);
        job.setDataDescription(dataDescription);
        putJob(job);
        openJob(job.getId());
        ElasticsearchException e = expectThrows(ElasticsearchException.class,
            () -> forecast(job.getId(), TimeValue.timeValueMinutes(120), null));
        assertThat(e.getMessage(),
            equalTo("Cannot run forecast: Forecast cannot be executed as job requires data to have been processed and modeled"));
    }

    /** Forecasting is rejected while the model memory status is not OK. */
    public void testMemoryStatus() {
        Detector.Builder detector = new Detector.Builder("mean", "value");
        detector.setByFieldName("clientIP");
        TimeValue bucketSpan = TimeValue.timeValueHours(1);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
        analysisConfig.setBucketSpan(bucketSpan);
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setTimeFormat("epoch");
        Job.Builder job = new Job.Builder("forecast-it-test-memory-status");
        job.setAnalysisConfig(analysisConfig);
        job.setDataDescription(dataDescription);
        // Set the memory limit to 30MB
        AnalysisLimits limits = new AnalysisLimits(30L, null);
        job.setAnalysisLimits(limits);
        putJob(job);
        openJob(job.getId());
        createDataWithLotsOfClientIps(bucketSpan, job);
        ElasticsearchException e = expectThrows(ElasticsearchException.class,
            () -> forecast(job.getId(), TimeValue.timeValueMinutes(120), null));
        assertThat(e.getMessage(), equalTo("Cannot run forecast: Forecast cannot be executed as model memory status is not OK"));
    }

    /** Runs a large forecast twice so the native process has to overflow to disk. */
    public void testOverflowToDisk() throws Exception {
        assumeFalse("https://github.com/elastic/elasticsearch/issues/44609", Constants.WINDOWS);
        Detector.Builder detector = new Detector.Builder("mean", "value");
        detector.setByFieldName("clientIP");
        TimeValue bucketSpan = TimeValue.timeValueHours(1);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
        analysisConfig.setBucketSpan(bucketSpan);
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setTimeFormat("epoch");
        Job.Builder job = new Job.Builder("forecast-it-test-overflow-to-disk");
        AnalysisLimits limits = new AnalysisLimits(1200L, null);
        job.setAnalysisLimits(limits);
        job.setAnalysisConfig(analysisConfig);
        job.setDataDescription(dataDescription);
        putJob(job);
        openJob(job.getId());
        createDataWithLotsOfClientIps(bucketSpan, job);
        try {
            String forecastId = forecast(job.getId(), TimeValue.timeValueHours(1), null);
            waitForecastToFinish(job.getId(), forecastId);
        } catch (ElasticsearchStatusException e) {
            if (e.getMessage().contains("disk space")) {
                throw new ElasticsearchStatusException(
                    "Test likely fails due to insufficient disk space on test machine, please free up space.", e.status(), e);
            }
            throw e;
        }
        // flushing the job forces an index refresh, see https://github.com/elastic/elasticsearch/issues/31173
        flushJob(job.getId(), false);
        List<ForecastRequestStats> forecastStats = getForecastStats();
        assertThat(forecastStats.size(), equalTo(1));
        ForecastRequestStats forecastRequestStats = forecastStats.get(0);
        List<Forecast> forecasts = getForecasts(job.getId(), forecastRequestStats);
        assertThat(forecastRequestStats.getRecordCount(), equalTo(8000L));
        assertThat(forecasts.size(), equalTo(8000));
        // run forecast a 2nd time
        try {
            String forecastId = forecast(job.getId(), TimeValue.timeValueHours(1), null);
            waitForecastToFinish(job.getId(), forecastId);
        } catch (ElasticsearchStatusException e) {
            if (e.getMessage().contains("disk space")) {
                throw new ElasticsearchStatusException(
                    "Test likely fails due to insufficient disk space on test machine, please free up space.", e.status(), e);
            }
            throw e;
        }
        closeJob(job.getId());
        forecastStats = getForecastStats();
        assertThat(forecastStats.size(), equalTo(2));
        for (ForecastRequestStats stats : forecastStats) {
            forecasts = getForecasts(job.getId(), stats);
            // BUGFIX: assert on the stats of the forecast being iterated, not on the
            // stats of the first forecast (which left the 2nd forecast unverified).
            assertThat(stats.getRecordCount(), equalTo(8000L));
            assertThat(forecasts.size(), equalTo(8000));
        }
    }

    /** Deletes forecasts via wildcard expressions and the "*" catch-all. */
    public void testDeleteWildCard() throws Exception {
        Detector.Builder detector = new Detector.Builder("mean", "value");
        TimeValue bucketSpan = TimeValue.timeValueHours(1);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
        analysisConfig.setBucketSpan(bucketSpan);
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setTimeFormat("epoch");
        Job.Builder job = new Job.Builder("forecast-it-test-delete-wildcard");
        job.setAnalysisConfig(analysisConfig);
        job.setDataDescription(dataDescription);
        putJob(job);
        openJob(job.getId());
        long now = Instant.now().getEpochSecond();
        long timestamp = now - 50 * bucketSpan.seconds();
        List<String> data = new ArrayList<>();
        while (timestamp < now) {
            data.add(createJsonRecord(createRecord(timestamp, 10.0)));
            data.add(createJsonRecord(createRecord(timestamp, 30.0)));
            timestamp += bucketSpan.seconds();
        }
        postData(job.getId(), data.stream().collect(Collectors.joining()));
        flushJob(job.getId(), false);
        String forecastIdDefaultDurationDefaultExpiry = forecast(job.getId(), null, null);
        String forecastIdDuration1HourNoExpiry = forecast(job.getId(), TimeValue.timeValueHours(1), TimeValue.ZERO);
        String forecastId2Duration1HourNoExpiry = forecast(job.getId(), TimeValue.timeValueHours(1), TimeValue.ZERO);
        String forecastId2Duration1HourNoExpiry2 = forecast(job.getId(), TimeValue.timeValueHours(1), TimeValue.ZERO);
        waitForecastToFinish(job.getId(), forecastIdDefaultDurationDefaultExpiry);
        waitForecastToFinish(job.getId(), forecastIdDuration1HourNoExpiry);
        waitForecastToFinish(job.getId(), forecastId2Duration1HourNoExpiry);
        waitForecastToFinish(job.getId(), forecastId2Duration1HourNoExpiry2);
        closeJob(job.getId());
        assertNotNull(getForecastStats(job.getId(), forecastIdDefaultDurationDefaultExpiry));
        assertNotNull(getForecastStats(job.getId(), forecastIdDuration1HourNoExpiry));
        assertNotNull(getForecastStats(job.getId(), forecastId2Duration1HourNoExpiry));
        assertNotNull(getForecastStats(job.getId(), forecastId2Duration1HourNoExpiry2));
        {
            // Mixed expression: a trailing-wildcard prefix plus an explicit id.
            DeleteForecastAction.Request request = new DeleteForecastAction.Request(job.getId(),
                forecastIdDefaultDurationDefaultExpiry.substring(0, forecastIdDefaultDurationDefaultExpiry.length() - 2) + "*"
                    + ","
                    + forecastIdDuration1HourNoExpiry);
            AcknowledgedResponse response = client().execute(DeleteForecastAction.INSTANCE, request).actionGet();
            assertTrue(response.isAcknowledged());
            assertNull(getForecastStats(job.getId(), forecastIdDefaultDurationDefaultExpiry));
            assertNull(getForecastStats(job.getId(), forecastIdDuration1HourNoExpiry));
            assertNotNull(getForecastStats(job.getId(), forecastId2Duration1HourNoExpiry));
            assertNotNull(getForecastStats(job.getId(), forecastId2Duration1HourNoExpiry2));
        }
        {
            DeleteForecastAction.Request request = new DeleteForecastAction.Request(job.getId(), "*");
            AcknowledgedResponse response = client().execute(DeleteForecastAction.INSTANCE, request).actionGet();
            assertTrue(response.isAcknowledged());
            assertNull(getForecastStats(job.getId(), forecastId2Duration1HourNoExpiry));
            assertNull(getForecastStats(job.getId(), forecastId2Duration1HourNoExpiry2));
        }
    }

    /** Deletes forecasts by explicit id and via _all, including the allow-no-forecasts flag. */
    public void testDelete() throws Exception {
        Detector.Builder detector = new Detector.Builder("mean", "value");
        TimeValue bucketSpan = TimeValue.timeValueHours(1);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
        analysisConfig.setBucketSpan(bucketSpan);
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setTimeFormat("epoch");
        Job.Builder job = new Job.Builder("forecast-it-test-delete");
        job.setAnalysisConfig(analysisConfig);
        job.setDataDescription(dataDescription);
        putJob(job);
        openJob(job.getId());
        long now = Instant.now().getEpochSecond();
        long timestamp = now - 50 * bucketSpan.seconds();
        List<String> data = new ArrayList<>();
        while (timestamp < now) {
            data.add(createJsonRecord(createRecord(timestamp, 10.0)));
            data.add(createJsonRecord(createRecord(timestamp, 30.0)));
            timestamp += bucketSpan.seconds();
        }
        postData(job.getId(), data.stream().collect(Collectors.joining()));
        flushJob(job.getId(), false);
        String forecastIdDefaultDurationDefaultExpiry = forecast(job.getId(), null, null);
        String forecastIdDuration1HourNoExpiry = forecast(job.getId(), TimeValue.timeValueHours(1), TimeValue.ZERO);
        String forecastId2Duration1HourNoExpiry = forecast(job.getId(), TimeValue.timeValueHours(1), TimeValue.ZERO);
        String forecastId2Duration1HourNoExpiry2 = forecast(job.getId(), TimeValue.timeValueHours(1), TimeValue.ZERO);
        waitForecastToFinish(job.getId(), forecastIdDefaultDurationDefaultExpiry);
        waitForecastToFinish(job.getId(), forecastIdDuration1HourNoExpiry);
        closeJob(job.getId());
        {
            ForecastRequestStats forecastStats = getForecastStats(job.getId(), forecastIdDefaultDurationDefaultExpiry);
            assertNotNull(forecastStats);
            ForecastRequestStats otherStats = getForecastStats(job.getId(), forecastIdDuration1HourNoExpiry);
            assertNotNull(otherStats);
        }
        {
            DeleteForecastAction.Request request = new DeleteForecastAction.Request(job.getId(),
                forecastIdDefaultDurationDefaultExpiry + "," + forecastIdDuration1HourNoExpiry);
            AcknowledgedResponse response = client().execute(DeleteForecastAction.INSTANCE, request).actionGet();
            assertTrue(response.isAcknowledged());
            assertNull(getForecastStats(job.getId(), forecastIdDefaultDurationDefaultExpiry));
            assertNull(getForecastStats(job.getId(), forecastIdDuration1HourNoExpiry));
            assertNotNull(getForecastStats(job.getId(), forecastId2Duration1HourNoExpiry));
            assertNotNull(getForecastStats(job.getId(), forecastId2Duration1HourNoExpiry2));
        }
        {
            DeleteForecastAction.Request request = new DeleteForecastAction.Request(job.getId(), "forecast-does-not-exist");
            ElasticsearchException e = expectThrows(ElasticsearchException.class,
                () -> client().execute(DeleteForecastAction.INSTANCE, request).actionGet());
            assertThat(e.getMessage(),
                equalTo("No forecast(s) [forecast-does-not-exist] exists for job [forecast-it-test-delete]"));
        }
        {
            DeleteForecastAction.Request request = new DeleteForecastAction.Request(job.getId(), Metadata.ALL);
            AcknowledgedResponse response = client().execute(DeleteForecastAction.INSTANCE, request).actionGet();
            assertTrue(response.isAcknowledged());
            assertNull(getForecastStats(job.getId(), forecastId2Duration1HourNoExpiry));
            assertNull(getForecastStats(job.getId(), forecastId2Duration1HourNoExpiry2));
        }
        {
            // _all on a job that has no forecasts is acknowledged by default.
            Job.Builder otherJob = new Job.Builder("forecasts-delete-with-all-and-allow-no-forecasts");
            otherJob.setAnalysisConfig(analysisConfig);
            otherJob.setDataDescription(dataDescription);
            putJob(otherJob);
            DeleteForecastAction.Request request = new DeleteForecastAction.Request(otherJob.getId(), Metadata.ALL);
            AcknowledgedResponse response = client().execute(DeleteForecastAction.INSTANCE, request).actionGet();
            assertTrue(response.isAcknowledged());
        }
        {
            // ... but fails when allowNoForecasts is explicitly disabled.
            Job.Builder otherJob = new Job.Builder("forecasts-delete-with-all-and-not-allow-no-forecasts");
            otherJob.setAnalysisConfig(analysisConfig);
            otherJob.setDataDescription(dataDescription);
            putJob(otherJob);
            DeleteForecastAction.Request request = new DeleteForecastAction.Request(otherJob.getId(), Metadata.ALL);
            request.setAllowNoForecasts(false);
            ElasticsearchException e = expectThrows(ElasticsearchException.class,
                () -> client().execute(DeleteForecastAction.INSTANCE, request).actionGet());
            assertThat(e.getMessage(),
                equalTo("No forecast(s) [_all] exists for job [forecasts-delete-with-all-and-not-allow-no-forecasts]"));
        }
    }

    /** Force-closing a job must leave a long-running forecast either failed or finished. */
    public void testForceStopSetsForecastToFailed() throws Exception {
        Detector.Builder detector = new Detector.Builder("mean", "value");
        TimeValue bucketSpan = TimeValue.timeValueHours(1);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
        analysisConfig.setBucketSpan(bucketSpan);
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setTimeFormat("epoch");
        Job.Builder job = new Job.Builder("forecast-it-test-failed-on-force-stop");
        job.setAnalysisConfig(analysisConfig);
        job.setDataDescription(dataDescription);
        String jobId = job.getId();
        putJob(job);
        openJob(job.getId());
        long now = Instant.now().getEpochSecond();
        long timestamp = now - 50 * bucketSpan.seconds();
        List<String> data = new ArrayList<>();
        while (timestamp < now) {
            data.add(createJsonRecord(createRecord(timestamp, 10.0)));
            data.add(createJsonRecord(createRecord(timestamp, 30.0)));
            timestamp += bucketSpan.seconds();
        }
        postData(job.getId(), data.stream().collect(Collectors.joining()));
        flushJob(job.getId(), false);
        // A 1000-day forecast is slow enough to still be running when the job is force-closed.
        String forecastId = forecast(jobId, TimeValue.timeValueDays(1000), TimeValue.ZERO);
        waitForecastStatus(jobId, forecastId, ForecastRequestStats.ForecastRequestStatus.values());
        closeJob(jobId, true);
        // On force close job, it should always be at least failed or finished
        waitForecastStatus(jobId,
            forecastId,
            ForecastRequestStats.ForecastRequestStatus.FAILED,
            ForecastRequestStats.ForecastRequestStatus.FINISHED);
        ForecastRequestStats forecastStats = getForecastStats(job.getId(), forecastId);
        assertNotNull(forecastStats);
        if (forecastStats.getStatus().equals(ForecastRequestStats.ForecastRequestStatus.FAILED)) {
            assertThat(forecastStats.getMessages().get(0), equalTo(JOB_FORECAST_NATIVE_PROCESS_KILLED));
        }
    }

    /** Forecasting with an explicit max_model_memory override completes successfully. */
    public void testForecastWithHigherMemoryUse() throws Exception {
        Detector.Builder detector = new Detector.Builder("mean", "value");
        TimeValue bucketSpan = TimeValue.timeValueHours(1);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
        analysisConfig.setBucketSpan(bucketSpan);
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setTimeFormat("epoch");
        // BUGFIX: use a unique job id; this test previously reused
        // "forecast-it-test-single-series" from testSingleSeries.
        Job.Builder job = new Job.Builder("forecast-it-test-higher-memory-use");
        job.setAnalysisConfig(analysisConfig);
        job.setDataDescription(dataDescription);
        putJob(job);
        openJob(job.getId());
        long now = Instant.now().getEpochSecond();
        long timestamp = now - 50 * bucketSpan.seconds();
        List<String> data = new ArrayList<>();
        while (timestamp < now) {
            data.add(createJsonRecord(createRecord(timestamp, 10.0)));
            data.add(createJsonRecord(createRecord(timestamp, 30.0)));
            timestamp += bucketSpan.seconds();
        }
        postData(job.getId(), data.stream().collect(Collectors.joining()));
        flushJob(job.getId(), false);
        // Now we can start doing forecast requests
        String forecastId = forecast(job.getId(),
            TimeValue.timeValueHours(1),
            TimeValue.ZERO,
            ByteSizeValue.ofMb(50).getBytes());
        waitForecastToFinish(job.getId(), forecastId);
        closeJob(job.getId());
        List<ForecastRequestStats> forecastStats = getForecastStats();
        ForecastRequestStats forecastDuration1HourNoExpiry = forecastStats.get(0);
        assertThat(forecastDuration1HourNoExpiry.getExpiryTime(), equalTo(Instant.EPOCH));
        List<Forecast> forecasts = getForecasts(job.getId(), forecastDuration1HourNoExpiry);
        assertThat(forecastDuration1HourNoExpiry.getRecordCount(), equalTo(1L));
        assertThat(forecasts.size(), equalTo(1));
    }

    /**
     * Posts 15 buckets of data with 100 * 80 distinct clientIP values so the
     * by-field cardinality inflates the model's memory footprint.
     */
    private void createDataWithLotsOfClientIps(TimeValue bucketSpan, Job.Builder job) {
        long now = Instant.now().getEpochSecond();
        long timestamp = now - 15 * bucketSpan.seconds();
        List<String> data = new ArrayList<>();
        for (int h = 0; h < 15; h++) {
            double value = 10.0 + h;
            for (int i = 1; i < 101; i++) {
                for (int j = 1; j < 81; j++) {
                    String json = String.format(Locale.ROOT, "{\"time\": %d, \"value\": %f, \"clientIP\": \"192.168.%d.%d\"}\n",
                        timestamp, value, i, j);
                    data.add(json);
                }
            }
            timestamp += bucketSpan.seconds();
        }
        postData(job.getId(), data.stream().collect(Collectors.joining()));
        flushJob(job.getId(), false);
    }

    /** Builds one input record with the given epoch-seconds timestamp and metric value. */
    private static Map<String, Object> createRecord(long timestamp, double value) {
        Map<String, Object> record = new HashMap<>();
        record.put("time", timestamp);
        record.put("value", value);
        return record;
    }
}
| |
package usbong.android.builder.fragments.screens;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.*;
import android.widget.*;
import butterknife.ButterKnife;
import butterknife.InjectView;
import com.activeandroid.query.Select;
import org.w3c.dom.Text;
import usbong.android.builder.R;
import usbong.android.builder.activities.UtreeDetailsActivity;
import usbong.android.builder.controllers.ScreenListController;
import usbong.android.builder.controllers.UtreeListController;
import usbong.android.builder.converters.UtreeConverter;
import usbong.android.builder.exceptions.NoStartingScreenException;
import usbong.android.builder.models.Screen;
import usbong.android.builder.models.Utree;
import android.net.Uri;
import usbong.android.builder.models.UtreeDetails;
import usbong.android.builder.utils.DeviceUtils;
import usbong.android.builder.utils.FileUtils;
import usbong.android.builder.utils.PackageUtils;
import usbong.android.builder.UploadUtree;
import android.provider.MediaStore;
import android.os.Environment;
import android.os.Build;
import android.provider.DocumentsContract;
import android.content.ContentUris;
import java.io.File;
/**
* A fragment representing a list of Items.
* <p/>
* Large screen devices (such as tablets) are supported by replacing the ListView
* with a GridView.
* <p/>
* interface.
*/
public class UtreeDetailsFragment extends Fragment {
private static final String TAG = UtreeDetailsFragment.class.getSimpleName();
// Bundle argument keys used by callers when constructing this fragment.
public static final String EXTRA_TREE_NAME = "EXTRA_TREE_NAME";
public static final String EXTRA_UTREE = "EXTRA_UTREE";
public static final String EXTRA_TREE_ID = "EXTRA_TREE_ID";
// Name of the tree being uploaded, taken from the arguments Bundle.
private String treeName = "";
// Root view inflated in onCreateView.
private View view;
// "choose icon file" and "upload" action buttons.
private Button getFileButton, upload;
// Request code for the icon file picker started from onCreateView.
private static final int ACTIVITY_CHOOSE_FILE = 1;
// String form of the Uri returned by the file picker (not a filesystem path).
private String filePath = "";
private TextView iconPath, treeNameTV;
// Progress dialog handed to the UploadUtree async task.
private ProgressDialog dialog;
// NOTE(review): controller is never assigned in this fragment — verify it is still needed.
private UtreeListController controller;
// Tree record loaded from the database in onCreate.
private Utree utree;
private long treeId;
// Upload form values read from the EditTexts in uploadUtree().
private String uploader = "";
private String description = "";
private String youtubeLink = "";
private EditText uploaderET;
private EditText descriptionET;
private EditText youtubeET;
// Uri of the icon image selected in onActivityResult.
private Uri uri;
/**
 * Builds a new {@code UtreeDetailsFragment} carrying the supplied arguments.
 *
 * @param args the arguments Bundle (expects {@code EXTRA_TREE_NAME} and {@code EXTRA_TREE_ID})
 * @return a freshly constructed fragment with {@code args} attached
 */
public static Fragment newInstance(Bundle args) {
    final UtreeDetailsFragment instance = new UtreeDetailsFragment();
    instance.setArguments(args);
    return instance;
}
/**
 * Mandatory empty constructor for the fragment manager to instantiate the
 * fragment (e.g. upon screen orientation changes). Use {@link #newInstance}
 * to create configured instances.
 */
public UtreeDetailsFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    if (getArguments() != null) {
        treeName = getArguments().getString(EXTRA_TREE_NAME);
        // BUGFIX: Bundle.getLong(key) returns 0 when the key is absent, so the
        // "treeId == -1" guard below could never fire for a missing extra.
        // Supplying -1 as the explicit default makes the validation effective.
        treeId = getArguments().getLong(EXTRA_TREE_ID, -1);
        if (treeId == -1) {
            throw new IllegalArgumentException("tree is required");
        }
        // Load the tree record this fragment describes.
        utree = new Select().from(Utree.class)
            .where(Utree._ID + " = ?", treeId)
            .executeSingle();
    }
    setHasOptionsMenu(true);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
                         Bundle savedInstanceState) {
    // Inflate the layout and wire up every widget this fragment controls.
    view = inflater.inflate(R.layout.fragment_utree_details, container, false);
    upload = (Button) view.findViewById(R.id.upload);
    getFileButton = (Button) view.findViewById(R.id.getFile);
    iconPath = (TextView) view.findViewById(R.id.iconPath);
    treeNameTV = (TextView) view.findViewById(R.id.selectedUtreeName);
    uploaderET = (EditText) view.findViewById(R.id.uploaderName);
    descriptionET = (EditText) view.findViewById(R.id.description);
    youtubeET = (EditText) view.findViewById(R.id.youtubeLink1);
    treeNameTV.setText(treeName);
    // Let the user pick an image for the tree's icon.
    getFileButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            Intent picker = new Intent(Intent.ACTION_GET_CONTENT);
            picker.setType("image/*");
            Intent chooser = Intent.createChooser(picker, "Choose a file");
            startActivityForResult(chooser, ACTIVITY_CHOOSE_FILE);
        }
    });
    // Validate the form and kick off the upload.
    upload.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (!DeviceUtils.hasNetworkConnection(getActivity())) {
                Toast.makeText(getActivity(), "Please connect to the Internet first.", Toast.LENGTH_SHORT).show();
                return;
            }
            boolean missingField = uploaderET.getText().toString().matches("")
                    || descriptionET.getText().toString().matches("")
                    /* || youtubeET.getText().toString().matches("") */
                    || treeNameTV.getText().toString().matches("");
            if (missingField) {
                Toast.makeText(getActivity(), "Please complete all required fields.", Toast.LENGTH_SHORT).show();
            } else {
                uploadUtree();
            }
        }
    });
    return view;
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
// No additional setup beyond the superclass behavior.
super.onViewCreated(view, savedInstanceState);
}
@Override
public void onResume() {
// Nothing to refresh on resume; delegates entirely to the superclass.
super.onResume();
}
/**
 * Receives the Uri picked by the icon-file chooser and mirrors it into
 * {@code uri}/{@code filePath} and the on-screen path label.
 */
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    switch (requestCode) {
        case ACTIVITY_CHOOSE_FILE: {
            // Use the Activity class constant instead of resolving the static
            // RESULT_OK through the activity instance, and guard against a null
            // Intent/Uri (possible when a picker misbehaves or is cancelled).
            if (resultCode == android.app.Activity.RESULT_OK
                    && data != null && data.getData() != null) {
                uri = data.getData();
                filePath = uri.toString();
                iconPath.setText(filePath);
            }
            break; // explicit break: avoids accidental fall-through if cases are added later
        }
    }
}
// Packages the current tree as a .utree zip and hands it to the UploadUtree
// task along with the form fields. If the Usbong app is missing, redirects the
// user to its Play Store page instead of packaging.
// NOTE(review): the "/storage/emulated/legacy/..." staging directory is
// hard-coded and device-specific — confirm it is valid on target devices.
private void uploadUtree() {
//TODO: Async task with dialogue to upload the utree to server
//Check if Usbong andriod app is installed
if (PackageUtils.isPackageInstalled("usbong.android", getActivity())) {
//Uploads the zipped .utree to Usbong/Usbong_trees directory
String treeFolderLocation = getActivity().getFilesDir() + File.separator + "trees" + File.separator + treeName + File.separator;
Toast.makeText(getActivity(), treeFolderLocation, Toast.LENGTH_SHORT).show();
String tempFolderLocation = getActivity().getFilesDir() + File.separator + "temp" + File.separator;
String folderLocation = "/storage/emulated/legacy/usbong/usbong_trees/temp/";
File file = new File(folderLocation);
// Reset the staging directory: create it if missing, otherwise clear it.
if (!file.exists()) {
file.mkdir();
} else {
FileUtils.delete(folderLocation);
}
FileUtils.mkdir(treeFolderLocation);
String xmlFileLocation = treeFolderLocation + treeName + ".xml";
String zipFilePath = folderLocation + File.separator + treeName + ".utree";
// Serialize the tree to XML, stage a copy, zip it, then clean up the temp copy.
UtreeConverter converter = new UtreeConverter();
converter.convert(utree, xmlFileLocation);
FileUtils.delete(tempFolderLocation);
FileUtils.copyAll(treeFolderLocation, tempFolderLocation + treeName + ".utree" + File.separator);
FileUtils.zip(zipFilePath, tempFolderLocation);
FileUtils.delete(tempFolderLocation);
} else {
// Usbong app not installed: send the user to its Play Store listing.
try {
Intent viewIntent =
new Intent("android.intent.action.VIEW",
Uri.parse("https://play.google.com/store/apps/details?id=usbong.android"));
startActivity(viewIntent);
} catch (Exception e) {
Toast.makeText(getActivity(), "Unable to Connect Try Again...",
Toast.LENGTH_LONG).show();
e.printStackTrace();
}
}
// instantiate it within the onCreate method
// Progress dialog shown by the UploadUtree task while the upload runs.
dialog = new ProgressDialog(getActivity());
dialog.setMessage("Uploading: " + treeName);
dialog.setTitle("Saving trees...");
dialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
dialog.setCancelable(false);
dialog.setCanceledOnTouchOutside(false);
String folderLocation = "/storage/emulated/legacy/usbong/usbong_trees/temp/";
UploadUtree u = new UploadUtree(getActivity(), dialog);
uploader = uploaderET.getText().toString();
description = descriptionET.getText().toString();
youtubeLink = youtubeET.getText().toString();
// NOTE(review): screenshotFile is built from the Uri's string form, not a
// filesystem path, and is never used afterwards — verify it can be removed.
File screenshotFile = new File(filePath);
String screenshot = getPath(getActivity(), uri);
// Upload timestamp in epoch seconds.
Long tsLong = System.currentTimeMillis() / 1000;
String ts = tsLong.toString();
UtreeDetails utreeDetails = new UtreeDetails(uploader, folderLocation + treeName + ".utree",
treeName, description, youtubeLink, "", screenshot, "", "", ts);
Log.d(TAG, utreeDetails.toString());
u.execute(utreeDetails);
// u.execute(folderLocation + treeName + ".utree", uploader, description);
}
/**
 * Get a file path from a Uri. This will get the the path for Storage Access
 * Framework Documents, as well as the _data field for the MediaStore and
 * other file-based ContentProviders.
 *
 * @param context The context.
 * @param uri The Uri to query.
 * @return the resolved filesystem path, or {@code null} if the Uri cannot be resolved
 * @author paulburke
 */
public static String getPath(final Context context, final Uri uri) {
    final boolean isKitKat = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT;
    // DocumentProvider (Storage Access Framework) URIs exist on KitKat and later only.
    if (isKitKat && DocumentsContract.isDocumentUri(context, uri)) {
        // ExternalStorageProvider
        if (isExternalStorageDocument(uri)) {
            // docId has the form "<volume>:<relative-path>", e.g. "primary:DCIM/pic.jpg".
            final String docId = DocumentsContract.getDocumentId(uri);
            final String[] split = docId.split(":");
            final String type = split[0];
            if ("primary".equalsIgnoreCase(type)) {
                return Environment.getExternalStorageDirectory() + "/" + split[1];
            }
            // TODO handle non-primary volumes
        }
        // DownloadsProvider
        else if (isDownloadsDocument(uri)) {
            final String id = DocumentsContract.getDocumentId(uri);
            // On newer Android versions the Downloads provider can return ids of the
            // form "raw:/absolute/path"; Long.valueOf() would throw
            // NumberFormatException on those, so handle them directly.
            if (id.startsWith("raw:")) {
                return id.substring("raw:".length());
            }
            final Uri contentUri = ContentUris.withAppendedId(
                    Uri.parse("content://downloads/public_downloads"), Long.valueOf(id));
            return getDataColumn(context, contentUri, null, null);
        }
        // MediaProvider
        else if (isMediaDocument(uri)) {
            // docId has the form "<media-type>:<row-id>", e.g. "image:12345".
            final String docId = DocumentsContract.getDocumentId(uri);
            final String[] split = docId.split(":");
            final String type = split[0];
            Uri contentUri = null;
            if ("image".equals(type)) {
                contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
            } else if ("video".equals(type)) {
                contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
            } else if ("audio".equals(type)) {
                contentUri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
            }
            // Query the media table by row id.
            final String selection = "_id=?";
            final String[] selectionArgs = new String[]{
                    split[1]
            };
            return getDataColumn(context, contentUri, selection, selectionArgs);
        }
    }
    // MediaStore (and general content:// URIs): look up the _data column.
    else if ("content".equalsIgnoreCase(uri.getScheme())) {
        return getDataColumn(context, uri, null, null);
    }
    // file:// URIs carry the path directly.
    else if ("file".equalsIgnoreCase(uri.getScheme())) {
        return uri.getPath();
    }
    return null;
}
/**
 * Reads the value of the {@code _data} column for the given content Uri.
 * Useful for MediaStore Uris and other file-backed ContentProviders.
 *
 * @param context The context.
 * @param uri The Uri to query.
 * @param selection (Optional) Filter used in the query.
 * @param selectionArgs (Optional) Selection arguments used in the query.
 * @return The value of the _data column (typically a file path), or {@code null}
 *     if the query returned no rows
 */
public static String getDataColumn(Context context, Uri uri, String selection,
                                   String[] selectionArgs) {
    final String[] projection = {"_data"};
    Cursor cursor = null;
    try {
        cursor = context.getContentResolver().query(uri, projection, selection, selectionArgs,
                null);
        if (cursor == null || !cursor.moveToFirst()) {
            return null;
        }
        // Single-column projection: fetch the path from the first (only) row.
        return cursor.getString(cursor.getColumnIndexOrThrow("_data"));
    } finally {
        // Always release the cursor, even when the query throws.
        if (cursor != null) {
            cursor.close();
        }
    }
}
/**
 * @param uri The Uri to check.
 * @return Whether the Uri authority is ExternalStorageProvider.
 */
public static boolean isExternalStorageDocument(Uri uri) {
    // Constant-first equals keeps this null-safe when the Uri has no authority.
    final String authority = uri.getAuthority();
    return "com.android.externalstorage.documents".equals(authority);
}
/**
 * @param uri The Uri to check.
 * @return Whether the Uri authority is DownloadsProvider.
 */
public static boolean isDownloadsDocument(Uri uri) {
    // Constant-first equals keeps this null-safe when the Uri has no authority.
    final String authority = uri.getAuthority();
    return "com.android.providers.downloads.documents".equals(authority);
}
/**
 * @param uri The Uri to check.
 * @return Whether the Uri authority is MediaProvider.
 */
public static boolean isMediaDocument(Uri uri) {
    // Constant-first equals keeps this null-safe when the Uri has no authority.
    final String authority = uri.getAuthority();
    return "com.android.providers.media.documents".equals(authority);
}
}
| |
/**
* Copyright 2015-2016 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.swarm.bootstrap.util;
import java.io.File;
import java.io.IOException;
import org.jboss.modules.maven.ArtifactCoordinates;
/**
* @author Bob McWhirter
*/
/**
 * Coordinates of a Maven artifact: group, artifact, version, with an optional
 * packaging type and classifier.
 * <p>
 * Instances order by groupId, artifactId, version, then type and classifier;
 * an absent (null) type/classifier sorts before any present value.
 *
 * @author Bob McWhirter
 */
public class MavenArtifactDescriptor implements Comparable<MavenArtifactDescriptor> {

    private MavenArtifactDescriptor() {
    }

    public MavenArtifactDescriptor(String groupId, String artifactId, String version) {
        this(groupId, artifactId, "jar", null, version);
    }

    public MavenArtifactDescriptor(String groupId, String artifactId, String type, String classifier, String version) {
        this.groupId = groupId;
        this.artifactId = artifactId;
        this.version = version;
        this.type = type;
        // Normalize blank classifiers to null so "" and null behave identically.
        if (classifier != null && !classifier.trim().equals("")) {
            this.classifier = classifier;
        }
    }

    /**
     * Starts a fluent builder with the type defaulted to {@code jar}.
     *
     * @return a new builder
     */
    public static Builder build() {
        return new MavenArtifactDescriptor().builder().type("jar");
    }

    /**
     * Parses an MSC-style GAV: {@code group:artifact:version[:classifier]}.
     *
     * @param gav the GAV string
     * @return the parsed descriptor
     * @throws IOException if the string does not have 3 or 4 segments
     */
    public static MavenArtifactDescriptor fromMscGav(String gav) throws IOException {
        String[] parts = gav.split(":");
        if (parts.length == 3) {
            return new MavenArtifactDescriptor(parts[0], parts[1], parts[2]);
        } else if (parts.length == 4) {
            // MSC order is group:artifact:version:classifier; type defaults to jar.
            return new MavenArtifactDescriptor(parts[0], parts[1], "jar", parts[3], parts[2]);
        } else {
            throw new IOException("Invalid gav: " + gav);
        }
    }

    /**
     * Parses a Maven-style GAV: {@code group:artifact[:type[:classifier]]:version}.
     *
     * @param gav the GAV string
     * @return the parsed descriptor
     * @throws IOException if the string does not have 3 to 6 segments
     */
    public static MavenArtifactDescriptor fromMavenGav(String gav) throws IOException {
        String[] parts = gav.split(":");
        if (parts.length == 3) {
            return new MavenArtifactDescriptor(parts[0], parts[1], parts[2]);
        } else if (parts.length == 4) {
            return new MavenArtifactDescriptor(parts[0], parts[1], parts[2], null, parts[3]);
        } else if (parts.length == 5) {
            return new MavenArtifactDescriptor(parts[0], parts[1], parts[2], parts[3], parts[4]);
        } else if (parts.length == 6) {
            // Historic behavior: a sixth segment is tolerated and ignored.
            return new MavenArtifactDescriptor(parts[0], parts[1], parts[2], parts[3], parts[4]);
        } else {
            throw new IOException("Invalid gav: " + gav);
        }
    }

    @Override
    public int compareTo(MavenArtifactDescriptor that) {
        int result = this.groupId.compareTo(that.groupId);
        if (result != 0) {
            return result;
        }
        result = this.artifactId.compareTo(that.artifactId);
        if (result != 0) {
            return result;
        }
        result = this.version.compareTo(that.version);
        if (result != 0) {
            return result;
        }
        result = compareNullable(this.type, that.type);
        if (result != 0) {
            return result;
        }
        return compareNullable(this.classifier, that.classifier);
    }

    // Null-safe comparison that orders null before any non-null value. This keeps
    // the previous ordering (non-null > null) but fixes an NPE the old code had
    // when BOTH sides were null: it fell through to compareTo on a null field.
    private static int compareNullable(String left, String right) {
        if (left == right) {
            return 0;
        }
        if (left == null) {
            return -1;
        }
        if (right == null) {
            return 1;
        }
        return left.compareTo(right);
    }

    public String groupId() {
        return this.groupId;
    }

    public String artifactId() {
        return this.artifactId;
    }

    public String version() {
        return this.version;
    }

    public String classifier() {
        return this.classifier;
    }

    public String type() {
        return this.type;
    }

    /**
     * Returns the MSC-style GAV string: {@code group:artifact:version[:classifier]}.
     */
    public String mscGav() {
        return this.groupId + ":" +
                this.artifactId + ":" +
                this.version +
                (this.classifier == null ? "" : ":" + this.classifier);
    }

    /**
     * Returns the JBoss Modules coordinates; an absent classifier becomes "".
     */
    public ArtifactCoordinates mscCoordinates() {
        return new ArtifactCoordinates(this.groupId,
                this.artifactId,
                this.version,
                this.classifier == null ? "" : this.classifier);
    }

    /**
     * Returns the Maven-style GAV string: {@code group:artifact:type[:classifier]:version}.
     */
    public String mavenGav() {
        return this.groupId + ":" +
                this.artifactId + ":" +
                (this.type == null ? "jar" : this.type) + ":" +
                (this.classifier == null ? "" : this.classifier + ":") +
                this.version;
    }

    /**
     * Builds the repository-relative path of the artifact file.
     *
     * @param forJar when true use '/' separators (paths inside a jar); otherwise
     *     use the platform file separator
     * @return e.g. {@code org/foo/bar/1.0/bar-1.0.jar}
     */
    public String repoPath(boolean forJar) {
        char delim = File.separatorChar;
        if (forJar) {
            delim = '/';
        }
        String[] groupParts = this.groupId.split("\\.");
        // StringBuilder: no synchronization needed for this local buffer.
        StringBuilder p = new StringBuilder();
        for (String groupPart : groupParts) {
            p.append(groupPart)
                    .append(delim);
        }
        p.append(this.artifactId)
                .append(delim);
        p.append(this.version)
                .append(delim);
        p.append(this.artifactId)
                .append('-')
                .append(this.version);
        if (this.classifier != null) {
            p.append('-')
                    .append(this.classifier);
        }
        p.append('.').append(this.type);
        return p.toString();
    }

    public String toString() {
        return mscGav();
    }

    private Builder builder() {
        return new Builder();
    }

    private String groupId;

    private String artifactId;

    private String version;

    private String classifier;

    private String type;

    /**
     * Fluent builder mutating the enclosing descriptor instance.
     */
    public class Builder {
        public Builder groupId(String groupId) {
            MavenArtifactDescriptor.this.groupId = groupId;
            return this;
        }

        public Builder artifactId(String artifactId) {
            MavenArtifactDescriptor.this.artifactId = artifactId;
            return this;
        }

        public Builder version(String version) {
            MavenArtifactDescriptor.this.version = version;
            return this;
        }

        public Builder type(String type) {
            MavenArtifactDescriptor.this.type = type;
            return this;
        }

        public Builder classifier(String classifier) {
            MavenArtifactDescriptor.this.classifier = classifier;
            return this;
        }

        public MavenArtifactDescriptor build() {
            return MavenArtifactDescriptor.this;
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        MavenArtifactDescriptor that = (MavenArtifactDescriptor) o;
        if (!groupId.equals(that.groupId)) return false;
        if (!artifactId.equals(that.artifactId)) return false;
        if (version != null ? !version.equals(that.version) : that.version != null) return false;
        if (classifier != null ? !classifier.equals(that.classifier) : that.classifier != null) return false;
        // Null-safe: the private no-arg constructor (builder path) can leave type null.
        return type != null ? type.equals(that.type) : that.type == null;
    }

    @Override
    public int hashCode() {
        int result = groupId.hashCode();
        result = 31 * result + artifactId.hashCode();
        result = 31 * result + (version != null ? version.hashCode() : 0);
        result = 31 * result + (classifier != null ? classifier.hashCode() : 0);
        // Null-safe, consistent with equals().
        result = 31 * result + (type != null ? type.hashCode() : 0);
        return result;
    }
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.uamp.ui;
import android.app.ActivityOptions;
import android.app.FragmentManager;
import android.content.Intent;
import android.content.res.Configuration;
import android.os.Bundle;
import android.os.Handler;
import android.support.design.widget.NavigationView;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.app.MediaRouteButton;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import com.example.android.uamp.R;
import com.example.android.uamp.utils.LogHelper;
import com.google.android.gms.cast.framework.CastButtonFactory;
import com.google.android.gms.cast.framework.CastContext;
import com.google.android.gms.cast.framework.CastState;
import com.google.android.gms.cast.framework.CastStateListener;
import com.google.android.gms.cast.framework.IntroductoryOverlay;
/**
* Abstract activity with toolbar, navigation drawer and cast support. Needs to be extended by
* any activity that wants to be shown as a top level activity.
*
* The requirements for a subclass is to call {@link #initializeToolbar()} on onCreate, after
* setContentView() is called and have three mandatory layout elements:
* a {@link android.support.v7.widget.Toolbar} with id 'toolbar',
* a {@link android.support.v4.widget.DrawerLayout} with id 'drawerLayout' and
* a {@link android.widget.ListView} with id 'drawerList'.
*/
public abstract class ActionBarCastActivity extends AppCompatActivity {
private static final String TAG = LogHelper.makeLogTag(ActionBarCastActivity.class);
// Delay before checking whether the Cast menu item became visible so the
// first-time-user overlay can be shown (see mCastStateListener).
private static final int DELAY_MILLIS = 1000;
// Shared Cast framework context, obtained in onCreate.
private CastContext mCastContext;
// Menu item hosting the media-route (Cast) button; assigned in onCreateOptionsMenu.
private MenuItem mMediaRouteMenuItem;
private Toolbar mToolbar;
private ActionBarDrawerToggle mDrawerToggle;
private DrawerLayout mDrawerLayout;
// Set by initializeToolbar(); checked in onStart to fail fast if a subclass
// forgot to call it.
private boolean mToolbarInitialized;
// Menu id of the drawer item the user tapped; navigation is deferred until the
// drawer has fully closed (see mDrawerListener). -1 means nothing pending.
private int mItemToOpenWhenDrawerCloses = -1;
// When a cast device becomes available, wait DELAY_MILLIS and then show the
// first-time-user overlay if the Cast icon is actually visible in the menu.
private CastStateListener mCastStateListener = new CastStateListener() {
@Override
public void onCastStateChanged(int newState) {
if (newState != CastState.NO_DEVICES_AVAILABLE) {
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
if (mMediaRouteMenuItem.isVisible()) {
LogHelper.d(TAG, "Cast Icon is visible");
showFtu();
}
}
}, DELAY_MILLIS);
}
}
};
// Performs the deferred drawer navigation: the target activity is only started
// after the close animation finishes, with a fade transition, and this
// activity is finished so the drawer screens do not stack up.
private final DrawerLayout.DrawerListener mDrawerListener = new DrawerLayout.DrawerListener() {
@Override
public void onDrawerClosed(View drawerView) {
if (mDrawerToggle != null) mDrawerToggle.onDrawerClosed(drawerView);
if (mItemToOpenWhenDrawerCloses >= 0) {
Bundle extras = ActivityOptions.makeCustomAnimation(
ActionBarCastActivity.this, R.anim.fade_in, R.anim.fade_out).toBundle();
Class activityClass = null;
switch (mItemToOpenWhenDrawerCloses) {
case R.id.navigation_allmusic:
activityClass = MusicPlayerActivity.class;
break;
case R.id.navigation_playlists:
activityClass = PlaceholderActivity.class;
break;
}
if (activityClass != null) {
startActivity(new Intent(ActionBarCastActivity.this, activityClass), extras);
finish();
}
}
}
@Override
public void onDrawerStateChanged(int newState) {
if (mDrawerToggle != null) mDrawerToggle.onDrawerStateChanged(newState);
}
@Override
public void onDrawerSlide(View drawerView, float slideOffset) {
if (mDrawerToggle != null) mDrawerToggle.onDrawerSlide(drawerView, slideOffset);
}
@Override
public void onDrawerOpened(View drawerView) {
if (mDrawerToggle != null) mDrawerToggle.onDrawerOpened(drawerView);
if (getSupportActionBar() != null) getSupportActionBar()
.setTitle(R.string.app_name);
}
};
// Keeps the hamburger/up indicator in sync with the fragment back stack.
private final FragmentManager.OnBackStackChangedListener mBackStackChangedListener =
new FragmentManager.OnBackStackChangedListener() {
@Override
public void onBackStackChanged() {
updateDrawerToggle();
}
};
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
LogHelper.d(TAG, "Activity onCreate");
mCastContext = CastContext.getSharedInstance(this);
}
@Override
protected void onStart() {
super.onStart();
// Fail fast if the subclass did not call initializeToolbar() from onCreate.
if (!mToolbarInitialized) {
throw new IllegalStateException("You must run super.initializeToolbar at " +
"the end of your onCreate method");
}
}
@Override
protected void onPostCreate(Bundle savedInstanceState) {
super.onPostCreate(savedInstanceState);
// Sync the toggle state after restoration, as recommended for drawer toggles.
if (mDrawerToggle != null) {
mDrawerToggle.syncState();
}
}
@Override
public void onResume() {
super.onResume();
mCastContext.addCastStateListener(mCastStateListener);
// Whenever the fragment back stack changes, we may need to update the
// action bar toggle: only top level screens show the hamburger-like icon, inner
// screens - either Activities or fragments - show the "Up" icon instead.
getFragmentManager().addOnBackStackChangedListener(mBackStackChangedListener);
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
if (mDrawerToggle != null) {
mDrawerToggle.onConfigurationChanged(newConfig);
}
}
@Override
public void onPause() {
super.onPause();
// Unregister both listeners added in onResume to avoid leaks and duplicates.
mCastContext.removeCastStateListener(mCastStateListener);
getFragmentManager().removeOnBackStackChangedListener(mBackStackChangedListener);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
super.onCreateOptionsMenu(menu);
getMenuInflater().inflate(R.menu.main, menu);
// Wire the Cast button into the menu; the item's visibility drives the FTU overlay.
mMediaRouteMenuItem = CastButtonFactory.setUpMediaRouteButton(getApplicationContext(),
menu, R.id.media_route_menu_item);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Give the drawer toggle first chance at the event (handles the hamburger icon).
if (mDrawerToggle != null && mDrawerToggle.onOptionsItemSelected(item)) {
return true;
}
// If not handled by drawerToggle, home needs to be handled by returning to previous
if (item != null && item.getItemId() == android.R.id.home) {
onBackPressed();
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
public void onBackPressed() {
// If the drawer is open, back will close it
if (mDrawerLayout != null && mDrawerLayout.isDrawerOpen(GravityCompat.START)) {
mDrawerLayout.closeDrawers();
return;
}
// Otherwise, it may return to the previous fragment stack
FragmentManager fragmentManager = getFragmentManager();
if (fragmentManager.getBackStackEntryCount() > 0) {
fragmentManager.popBackStack();
} else {
// Lastly, it will rely on the system behavior for back
super.onBackPressed();
}
}
@Override
public void setTitle(CharSequence title) {
// Mirror the title onto the toolbar so both stay consistent.
super.setTitle(title);
mToolbar.setTitle(title);
}
@Override
public void setTitle(int titleId) {
// Mirror the title onto the toolbar so both stay consistent.
super.setTitle(titleId);
mToolbar.setTitle(titleId);
}
/**
 * Wires up the toolbar and, when the layout provides one, the navigation drawer.
 * Must be called by subclasses at the end of onCreate(), after setContentView().
 *
 * @throws IllegalStateException if the layout lacks a Toolbar with id 'toolbar',
 *     or has a DrawerLayout but no NavigationView with id 'nav_view'
 */
protected void initializeToolbar() {
mToolbar = (Toolbar) findViewById(R.id.toolbar);
if (mToolbar == null) {
throw new IllegalStateException("Layout is required to include a Toolbar with id " +
"'toolbar'");
}
mToolbar.inflateMenu(R.menu.main);
mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
if (mDrawerLayout != null) {
NavigationView navigationView = (NavigationView) findViewById(R.id.nav_view);
if (navigationView == null) {
throw new IllegalStateException("Layout requires a NavigationView " +
"with id 'nav_view'");
}
// Create an ActionBarDrawerToggle that will handle opening/closing of the drawer:
mDrawerToggle = new ActionBarDrawerToggle(this, mDrawerLayout,
mToolbar, R.string.open_content_drawer, R.string.close_content_drawer);
mDrawerLayout.setDrawerListener(mDrawerListener);
populateDrawerItems(navigationView);
setSupportActionBar(mToolbar);
updateDrawerToggle();
} else {
// No drawer in this layout: plain toolbar-as-action-bar setup.
setSupportActionBar(mToolbar);
}
mToolbarInitialized = true;
}
// Registers the drawer's item-selected callback and pre-checks the item matching
// the current activity. Selecting an item only records its id and closes the
// drawer; the actual navigation happens in mDrawerListener.onDrawerClosed.
private void populateDrawerItems(NavigationView navigationView) {
navigationView.setNavigationItemSelectedListener(
new NavigationView.OnNavigationItemSelectedListener() {
@Override
public boolean onNavigationItemSelected(MenuItem menuItem) {
menuItem.setChecked(true);
mItemToOpenWhenDrawerCloses = menuItem.getItemId();
mDrawerLayout.closeDrawers();
return true;
}
});
if (MusicPlayerActivity.class.isAssignableFrom(getClass())) {
navigationView.setCheckedItem(R.id.navigation_allmusic);
} else if (PlaceholderActivity.class.isAssignableFrom(getClass())) {
navigationView.setCheckedItem(R.id.navigation_playlists);
}
}
// Shows the hamburger icon on root screens (empty fragment back stack) and the
// "Up" arrow otherwise.
protected void updateDrawerToggle() {
if (mDrawerToggle == null) {
return;
}
boolean isRoot = getFragmentManager().getBackStackEntryCount() == 0;
mDrawerToggle.setDrawerIndicatorEnabled(isRoot);
if (getSupportActionBar() != null) {
getSupportActionBar().setDisplayShowHomeEnabled(!isRoot);
getSupportActionBar().setDisplayHomeAsUpEnabled(!isRoot);
getSupportActionBar().setHomeButtonEnabled(!isRoot);
}
if (isRoot) {
mDrawerToggle.syncState();
}
}
/**
 * Shows the Cast First Time User experience to the user (an overlay that explains what is
 * the Cast icon)
 */
private void showFtu() {
Menu menu = mToolbar.getMenu();
View view = menu.findItem(R.id.media_route_menu_item).getActionView();
if (view != null && view instanceof MediaRouteButton) {
IntroductoryOverlay overlay = new IntroductoryOverlay.Builder(this, mMediaRouteMenuItem)
.setTitleText(R.string.touch_to_cast)
.setSingleTime()
.build();
overlay.show();
}
}
}
| |
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.market.value;
import java.io.Serializable;
import java.time.LocalDate;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.OptionalDouble;
import java.util.Set;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.ImmutableBean;
import org.joda.beans.ImmutableDefaults;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectFieldsBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.opengamma.strata.basics.index.OvernightIndex;
import com.opengamma.strata.basics.market.Perturbation;
import com.opengamma.strata.collect.ArgChecker;
import com.opengamma.strata.collect.Messages;
import com.opengamma.strata.collect.timeseries.LocalDateDoubleTimeSeries;
import com.opengamma.strata.market.curve.Curve;
import com.opengamma.strata.market.curve.CurveName;
import com.opengamma.strata.market.sensitivity.CurveCurrencyParameterSensitivities;
import com.opengamma.strata.market.sensitivity.CurveUnitParameterSensitivities;
import com.opengamma.strata.market.sensitivity.OvernightRateSensitivity;
import com.opengamma.strata.market.sensitivity.PointSensitivityBuilder;
/**
* An Overnight index curve providing rates from discount factors.
* <p>
* This provides historic and forward rates for a single {@link OvernightIndex}, such as 'EUR-EONIA'.
* <p>
* This implementation is based on an underlying curve that is stored with maturities
* and zero-coupon continuously-compounded rates.
*/
@BeanDefinition(builderScope = "private")
public final class DiscountOvernightIndexRates
implements OvernightIndexRates, ImmutableBean, Serializable {
/**
* The index that the rates are for.
*/
@PropertyDefinition(validate = "notNull", overrideGet = true)
private final OvernightIndex index;
/**
* The time-series.
* This covers known historical fixings and may be empty.
*/
@PropertyDefinition(validate = "notNull", overrideGet = true)
private final LocalDateDoubleTimeSeries timeSeries;
/**
* The underlying discount factor curve.
*/
@PropertyDefinition(validate = "notNull")
private final DiscountFactors discountFactors;
//-------------------------------------------------------------------------
/**
 * Creates a new Overnight index rates instance with no historic fixings.
 * <p>
 * The forward curve is specified by an instance of {@link DiscountFactors}.
 * Equivalent to calling the three-argument {@code of} with an empty time-series.
 *
 * @param index  the Overnight index
 * @param discountFactors  the underlying discount factor forward curve
 * @return the rates instance
 */
public static DiscountOvernightIndexRates of(OvernightIndex index, DiscountFactors discountFactors) {
return of(index, LocalDateDoubleTimeSeries.empty(), discountFactors);
}
/**
 * Creates a new Overnight index rates instance.
 * <p>
 * The forward curve is specified by an instance of {@link DiscountFactors};
 * the time-series supplies known historical fixings and may be empty.
 *
 * @param index  the Overnight index
 * @param knownFixings  the known historical fixings
 * @param discountFactors  the underlying discount factor forward curve
 * @return the rates instance
 */
public static DiscountOvernightIndexRates of(
OvernightIndex index,
LocalDateDoubleTimeSeries knownFixings,
DiscountFactors discountFactors) {
return new DiscountOvernightIndexRates(index, knownFixings, discountFactors);
}
//-------------------------------------------------------------------------
// Joda-Beans hook: defaults the time-series to empty when built via the bean builder.
@ImmutableDefaults
private static void applyDefaults(Builder builder) {
builder.timeSeries = LocalDateDoubleTimeSeries.empty();
}
//-------------------------------------------------------------------------
// The valuation date is taken from the underlying discount factor curve.
@Override
public LocalDate getValuationDate() {
return discountFactors.getValuationDate();
}
// The curve name is delegated to the underlying discount factor curve.
@Override
public CurveName getCurveName() {
return discountFactors.getCurveName();
}
// The parameter count is delegated to the underlying discount factor curve.
@Override
public int getParameterCount() {
return discountFactors.getParameterCount();
}
//-------------------------------------------------------------------------
// Returns the rate for the fixing date: the historic path when the rate's
// publication date is on or before valuation, otherwise the curve-implied forward.
@Override
public double rate(LocalDate fixingDate) {
LocalDate publicationDate = index.calculatePublicationFromFixing(fixingDate);
boolean published = !publicationDate.isAfter(getValuationDate());
return published ? historicRate(fixingDate, publicationDate) : forwardRate(fixingDate);
}
// Looks up a rate whose publication date is on or before the valuation date.
private double historicRate(LocalDate fixingDate, LocalDate publicationDate) {
OptionalDouble fixing = timeSeries.get(fixingDate);
if (fixing.isPresent()) {
return fixing.getAsDouble();
}
if (!publicationDate.isBefore(getValuationDate())) {
// Publishes today but not yet in the series: fall back to the forward curve.
return forwardRate(fixingDate);
}
// Strictly in the past: the fixing is mandatory, so fail loudly.
if (timeSeries.isEmpty()) {
throw new IllegalArgumentException(
Messages.format("Unable to get fixing for {} on date {}, no time-series supplied", index, fixingDate));
}
throw new IllegalArgumentException(Messages.format("Unable to get fixing for {} on date {}", index, fixingDate));
}
// Forward rate implied by discount factors over the index's effective period.
private double forwardRate(LocalDate fixingDate) {
LocalDate effectiveDate = index.calculateEffectiveFromFixing(fixingDate);
LocalDate maturityDate = index.calculateMaturityFromEffective(effectiveDate);
double accrualFactor = index.getDayCount().yearFraction(effectiveDate, maturityDate);
return simplyCompoundForwardRate(effectiveDate, maturityDate, accrualFactor);
}
// Simply-compounded forward rate from discount factors:
// (df(start) / df(end) - 1) / accrualFactor.
private double simplyCompoundForwardRate(LocalDate startDate, LocalDate endDate, double accrualFactor) {
return (discountFactors.discountFactor(startDate) / discountFactors.discountFactor(endDate) - 1) / accrualFactor;
}
//-------------------------------------------------------------------------
// Point sensitivity of the rate: zero once the rate is fixed (published in the
// past, or publishing today with a fixing already recorded), otherwise unit
// sensitivity over the index's effective period.
@Override
public PointSensitivityBuilder ratePointSensitivity(LocalDate fixingDate) {
LocalDate publicationDate = index.calculatePublicationFromFixing(fixingDate);
boolean fixed = publicationDate.isBefore(getValuationDate())
|| (publicationDate.equals(getValuationDate()) && timeSeries.get(fixingDate).isPresent());
if (fixed) {
return PointSensitivityBuilder.none();
}
LocalDate effectiveDate = index.calculateEffectiveFromFixing(fixingDate);
LocalDate maturityDate = index.calculateMaturityFromEffective(effectiveDate);
return OvernightRateSensitivity.of(index, fixingDate, maturityDate, index.getCurrency(), 1d);
}
//-------------------------------------------------------------------------
// Simply-compounded rate over an arbitrary future period [startDate, endDate);
// the period must be strictly ordered and start on or after valuation.
@Override
public double periodRate(LocalDate startDate, LocalDate endDate) {
ArgChecker.inOrderNotEqual(startDate, endDate, "startDate", "endDate");
ArgChecker.inOrderOrEqual(getValuationDate(), startDate, "valuationDate", "startDate");
double accrualFactor = index.getDayCount().yearFraction(startDate, endDate);
return simplyCompoundForwardRate(startDate, endDate, accrualFactor);
}
//-------------------------------------------------------------------------
// Unit point sensitivity for a period rate; same precondition checks as periodRate.
@Override
public PointSensitivityBuilder periodRatePointSensitivity(LocalDate startDate, LocalDate endDate) {
ArgChecker.inOrderNotEqual(startDate, endDate, "startDate", "endDate");
ArgChecker.inOrderOrEqual(getValuationDate(), startDate, "valuationDate", "startDate");
return OvernightRateSensitivity.of(index, startDate, endDate, index.getCurrency(), 1d);
}
//-------------------------------------------------------------------------
// Curve parameter sensitivity: empty once the rate is fixed (same condition as
// ratePointSensitivity), otherwise delegated to the discount factor curve.
@Override
public CurveUnitParameterSensitivities unitParameterSensitivity(LocalDate fixingDate) {
LocalDate publicationDate = index.calculatePublicationFromFixing(fixingDate);
boolean fixed = publicationDate.isBefore(getValuationDate())
|| (publicationDate.equals(getValuationDate()) && timeSeries.get(fixingDate).isPresent());
return fixed ? CurveUnitParameterSensitivities.empty() : discountFactors.unitParameterSensitivity(fixingDate);
}
//-------------------------------------------------------------------------
// Converts an Overnight point sensitivity to curve parameter sensitivities by
// the chain rule. With forward = (dfStart/dfEnd - 1) / accrual (see
// simplyCompoundForwardRate), the partials are:
//   d(forward)/d(dfStart) =  1 / (accrual * dfEnd)
//   d(forward)/d(dfEnd)   = -dfStart / (accrual * dfEnd^2)
// These are then mapped through the zero-rate sensitivities onto the curve
// parameters at the period start and end, and the two contributions combined.
@Override
public CurveCurrencyParameterSensitivities curveParameterSensitivity(OvernightRateSensitivity pointSensitivity) {
OvernightIndex index = pointSensitivity.getIndex();
LocalDate startDate = index.calculateEffectiveFromFixing(pointSensitivity.getFixingDate());
LocalDate endDate = pointSensitivity.getEndDate();
double accrualFactor = index.getDayCount().yearFraction(startDate, endDate);
double forwardBar = pointSensitivity.getSensitivity();
double dfForwardStart = discountFactors.discountFactor(startDate);
double dfForwardEnd = discountFactors.discountFactor(endDate);
double dfStartBar = forwardBar / (accrualFactor * dfForwardEnd);
double dfEndBar = -forwardBar * dfForwardStart / (accrualFactor * dfForwardEnd * dfForwardEnd);
double zrStartBar = discountFactors.zeroRatePointSensitivity(startDate).getSensitivity() * dfStartBar;
double zrEndBar = discountFactors.zeroRatePointSensitivity(endDate).getSensitivity() * dfEndBar;
CurveUnitParameterSensitivities dzrdpStart = discountFactors.unitParameterSensitivity(startDate);
CurveUnitParameterSensitivities dzrdpEnd = discountFactors.unitParameterSensitivity(endDate);
// combine unit and point sensitivities at start and end
CurveCurrencyParameterSensitivities sensStart = dzrdpStart.multipliedBy(pointSensitivity.getCurrency(), zrStartBar);
CurveCurrencyParameterSensitivities sensEnd = dzrdpEnd.multipliedBy(pointSensitivity.getCurrency(), zrEndBar);
return sensStart.combinedWith(sensEnd);
}
//-------------------------------------------------------------------------
// Applies the perturbation to the underlying curve and wraps the result.
@Override
public DiscountOvernightIndexRates applyPerturbation(Perturbation<Curve> perturbation) {
return withDiscountFactors(discountFactors.applyPerturbation(perturbation));
}
/**
 * Returns a new instance with different discount factors.
 * <p>
 * The index and time-series of this instance are retained unchanged.
 *
 * @param factors  the new discount factors
 * @return the new instance
 */
public DiscountOvernightIndexRates withDiscountFactors(DiscountFactors factors) {
return new DiscountOvernightIndexRates(index, timeSeries, factors);
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code DiscountOvernightIndexRates}.
* @return the meta-bean, not null
*/
public static DiscountOvernightIndexRates.Meta meta() {
return DiscountOvernightIndexRates.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(DiscountOvernightIndexRates.Meta.INSTANCE);
}
/**
* The serialization version id.
*/
private static final long serialVersionUID = 1L;
private DiscountOvernightIndexRates(
OvernightIndex index,
LocalDateDoubleTimeSeries timeSeries,
DiscountFactors discountFactors) {
JodaBeanUtils.notNull(index, "index");
JodaBeanUtils.notNull(timeSeries, "timeSeries");
JodaBeanUtils.notNull(discountFactors, "discountFactors");
this.index = index;
this.timeSeries = timeSeries;
this.discountFactors = discountFactors;
}
@Override
public DiscountOvernightIndexRates.Meta metaBean() {
return DiscountOvernightIndexRates.Meta.INSTANCE;
}
@Override
public <R> Property<R> property(String propertyName) {
return metaBean().<R>metaProperty(propertyName).createProperty(this);
}
@Override
public Set<String> propertyNames() {
return metaBean().metaPropertyMap().keySet();
}
//-----------------------------------------------------------------------
/**
* Gets the index that the rates are for.
* @return the value of the property, not null
*/
@Override
public OvernightIndex getIndex() {
return index;
}
//-----------------------------------------------------------------------
/**
* Gets the time-series.
* This covers known historical fixings and may be empty.
* @return the value of the property, not null
*/
@Override
public LocalDateDoubleTimeSeries getTimeSeries() {
return timeSeries;
}
//-----------------------------------------------------------------------
/**
* Gets the underlying discount factor curve.
* @return the value of the property, not null
*/
public DiscountFactors getDiscountFactors() {
return discountFactors;
}
//-----------------------------------------------------------------------
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
DiscountOvernightIndexRates other = (DiscountOvernightIndexRates) obj;
return JodaBeanUtils.equal(getIndex(), other.getIndex()) &&
JodaBeanUtils.equal(getTimeSeries(), other.getTimeSeries()) &&
JodaBeanUtils.equal(getDiscountFactors(), other.getDiscountFactors());
}
return false;
}
/**
 * Hash code combining all properties, consistent with {@link #equals(Object)}.
 */
@Override
public int hashCode() {
  int hash = getClass().hashCode();
  // fold each property into the hash with the same 31-multiplier scheme
  for (Object value : new Object[] {index, timeSeries, discountFactors}) {
    hash = hash * 31 + JodaBeanUtils.hashCode(value);
  }
  return hash;
}
/**
 * Returns a string listing every property of this bean.
 */
@Override
public String toString() {
  StringBuilder buf = new StringBuilder(128);
  buf.append("DiscountOvernightIndexRates{")
      .append("index").append('=').append(index).append(',').append(' ')
      .append("timeSeries").append('=').append(timeSeries).append(',').append(' ')
      .append("discountFactors").append('=').append(JodaBeanUtils.toString(discountFactors))
      .append('}');
  return buf.toString();
}
//-----------------------------------------------------------------------
/**
 * The meta-bean for {@code DiscountOvernightIndexRates}.
 * Generated by the Joda-Beans code generator; exposes the bean's properties
 * reflectively for the framework.
 */
public static final class Meta extends DirectMetaBean {
  /**
   * The singleton instance of the meta-bean.
   */
  static final Meta INSTANCE = new Meta();
  /**
   * The meta-property for the {@code index} property.
   */
  private final MetaProperty<OvernightIndex> index = DirectMetaProperty.ofImmutable(
      this, "index", DiscountOvernightIndexRates.class, OvernightIndex.class);
  /**
   * The meta-property for the {@code timeSeries} property.
   */
  private final MetaProperty<LocalDateDoubleTimeSeries> timeSeries = DirectMetaProperty.ofImmutable(
      this, "timeSeries", DiscountOvernightIndexRates.class, LocalDateDoubleTimeSeries.class);
  /**
   * The meta-property for the {@code discountFactors} property.
   */
  private final MetaProperty<DiscountFactors> discountFactors = DirectMetaProperty.ofImmutable(
      this, "discountFactors", DiscountOvernightIndexRates.class, DiscountFactors.class);
  /**
   * The meta-properties.
   */
  private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap(
      this, null,
      "index",
      "timeSeries",
      "discountFactors");
  /**
   * Restricted constructor.
   */
  private Meta() {
  }
  @Override
  protected MetaProperty<?> metaPropertyGet(String propertyName) {
    // The numeric cases are the precomputed String.hashCode() values of the
    // property names, emitted by the Joda-Beans generator.
    switch (propertyName.hashCode()) {
      case 100346066: // index
        return index;
      case 779431844: // timeSeries
        return timeSeries;
      case -91613053: // discountFactors
        return discountFactors;
    }
    return super.metaPropertyGet(propertyName);
  }
  @Override
  public BeanBuilder<? extends DiscountOvernightIndexRates> builder() {
    return new DiscountOvernightIndexRates.Builder();
  }
  @Override
  public Class<? extends DiscountOvernightIndexRates> beanType() {
    return DiscountOvernightIndexRates.class;
  }
  @Override
  public Map<String, MetaProperty<?>> metaPropertyMap() {
    return metaPropertyMap$;
  }
  //-----------------------------------------------------------------------
  /**
   * The meta-property for the {@code index} property.
   * @return the meta-property, not null
   */
  public MetaProperty<OvernightIndex> index() {
    return index;
  }
  /**
   * The meta-property for the {@code timeSeries} property.
   * @return the meta-property, not null
   */
  public MetaProperty<LocalDateDoubleTimeSeries> timeSeries() {
    return timeSeries;
  }
  /**
   * The meta-property for the {@code discountFactors} property.
   * @return the meta-property, not null
   */
  public MetaProperty<DiscountFactors> discountFactors() {
    return discountFactors;
  }
  //-----------------------------------------------------------------------
  @Override
  protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
    // Dispatch on the same precomputed name hash codes used in metaPropertyGet.
    switch (propertyName.hashCode()) {
      case 100346066: // index
        return ((DiscountOvernightIndexRates) bean).getIndex();
      case 779431844: // timeSeries
        return ((DiscountOvernightIndexRates) bean).getTimeSeries();
      case -91613053: // discountFactors
        return ((DiscountOvernightIndexRates) bean).getDiscountFactors();
    }
    return super.propertyGet(bean, propertyName, quiet);
  }
  @Override
  protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
    // The bean is immutable: validate the property name, then either quietly
    // ignore the write or fail loudly.
    metaProperty(propertyName);
    if (quiet) {
      return;
    }
    throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
  }
}
//-----------------------------------------------------------------------
/**
 * The bean-builder for {@code DiscountOvernightIndexRates}.
 * Generated by the Joda-Beans code generator; collects property values and
 * constructs the immutable bean via {@link #build()}.
 */
private static final class Builder extends DirectFieldsBeanBuilder<DiscountOvernightIndexRates> {
  // Mutable staging fields for the immutable bean's properties.
  private OvernightIndex index;
  private LocalDateDoubleTimeSeries timeSeries;
  private DiscountFactors discountFactors;
  /**
   * Restricted constructor.
   */
  private Builder() {
    // seed the builder with bean-declared defaults, if any
    applyDefaults(this);
  }
  //-----------------------------------------------------------------------
  @Override
  public Object get(String propertyName) {
    // Dispatch on the precomputed String.hashCode() of the property name.
    switch (propertyName.hashCode()) {
      case 100346066: // index
        return index;
      case 779431844: // timeSeries
        return timeSeries;
      case -91613053: // discountFactors
        return discountFactors;
      default:
        throw new NoSuchElementException("Unknown property: " + propertyName);
    }
  }
  @Override
  public Builder set(String propertyName, Object newValue) {
    switch (propertyName.hashCode()) {
      case 100346066: // index
        this.index = (OvernightIndex) newValue;
        break;
      case 779431844: // timeSeries
        this.timeSeries = (LocalDateDoubleTimeSeries) newValue;
        break;
      case -91613053: // discountFactors
        this.discountFactors = (DiscountFactors) newValue;
        break;
      default:
        throw new NoSuchElementException("Unknown property: " + propertyName);
    }
    return this;
  }
  @Override
  public Builder set(MetaProperty<?> property, Object value) {
    super.set(property, value);
    return this;
  }
  @Override
  public Builder setString(String propertyName, String value) {
    setString(meta().metaProperty(propertyName), value);
    return this;
  }
  @Override
  public Builder setString(MetaProperty<?> property, String value) {
    super.setString(property, value);
    return this;
  }
  @Override
  public Builder setAll(Map<String, ? extends Object> propertyValueMap) {
    super.setAll(propertyValueMap);
    return this;
  }
  @Override
  public DiscountOvernightIndexRates build() {
    // the private constructor performs the notNull validation
    return new DiscountOvernightIndexRates(
        index,
        timeSeries,
        discountFactors);
  }
  //-----------------------------------------------------------------------
  @Override
  public String toString() {
    StringBuilder buf = new StringBuilder(128);
    buf.append("DiscountOvernightIndexRates.Builder{");
    buf.append("index").append('=').append(JodaBeanUtils.toString(index)).append(',').append(' ');
    buf.append("timeSeries").append('=').append(JodaBeanUtils.toString(timeSeries)).append(',').append(' ');
    buf.append("discountFactors").append('=').append(JodaBeanUtils.toString(discountFactors));
    buf.append('}');
    return buf.toString();
  }
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
| |
/* JAI-Ext - OpenSource Java Advanced Image Extensions Library
* http://www.geo-solutions.it/
* Copyright 2014 GeoSolutions
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.geosolutions.jaiext.convolve;
import it.geosolutions.jaiext.border.BorderDescriptor;
import it.geosolutions.jaiext.iterators.RandomIterFactory;
import it.geosolutions.jaiext.range.Range;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.image.DataBuffer;
import java.awt.image.Raster;
import java.awt.image.RenderedImage;
import java.awt.image.WritableRaster;
import java.util.Arrays;
import javax.media.jai.AreaOpImage;
import javax.media.jai.BorderExtender;
import javax.media.jai.ImageLayout;
import javax.media.jai.IntegerSequence;
import javax.media.jai.KernelJAI;
import javax.media.jai.PlanarImage;
import javax.media.jai.ROI;
import javax.media.jai.ROIShape;
import javax.media.jai.RasterAccessor;
import javax.media.jai.RasterFormatTag;
import javax.media.jai.iterator.RandomIter;
import com.sun.media.jai.util.ImageUtil;
/**
 * Abstract {@link AreaOpImage} performing kernel convolution with optional ROI and
 * NoData handling. The per-data-type processing loops are supplied by subclasses.
 */
public abstract class ConvolveOpImage extends AreaOpImage {
    /** Constant indicating that the inner random iterators must pre-calculate an array of the image positions */
    public static final boolean ARRAY_CALC = true;
    /** Constant indicating that the inner random iterators must cache the current tile position */
    public static final boolean TILE_CACHED = true;
    /** Boolean indicating that NoData must be checked */
    protected final boolean hasNoData;
    /** NoData Range element */
    protected Range noData;
    /** LookupTable used for checking if an input byte sample is a NoData */
    protected boolean[] lut;
    /** Boolean indicating that ROI must be checked */
    protected final boolean hasROI;
    /** ROI element */
    protected ROI roi;
    /** Boolean indicating that no roi and no data check must be done */
    protected final boolean caseA;
    /** Boolean indicating that only roi check must be done */
    protected final boolean caseB;
    /** Boolean indicating that only no data check must be done */
    protected final boolean caseC;
    /** ROI bounds as a Shape */
    protected final Rectangle roiBounds;
    /** ROI related image, lazily created by {@link #getImage()} */
    protected PlanarImage roiImage;
    /** Destination No Data value for Byte sources */
    protected byte destNoDataByte;
    /** Destination No Data value for Short sources (also reused for UShort, with UShort clamping) */
    protected short destNoDataShort;
    /** Destination No Data value for Integer sources */
    protected int destNoDataInt;
    /** Destination No Data value for Float sources */
    protected float destNoDataFloat;
    /** Destination No Data value for Double sources */
    protected double destNoDataDouble;
    // Whether NoData samples are skipped; only stored here, consumed by subclasses.
    protected boolean skipNoData;
    // Source wrapped with a nodata-aware border, used only when an extender is present.
    protected RenderedImage extendedIMG;
    // Valid destination area: full bounds with an extender, shrunk by the paddings without.
    protected Rectangle destBounds;
    /** Convolution kernel; also defines the four paddings passed to the superclass. */
    protected KernelJAI kernel;
    // Kernel width, height and origin coordinates, cached from the kernel.
    protected int kw;
    protected int kh;
    protected int kx;
    protected int ky;
    /**
     * Builds the convolution image.
     *
     * @param source the source image to convolve
     * @param extender optional {@link BorderExtender}; when non-null the source is wrapped
     *        by {@link BorderDescriptor} so border pixels can be computed
     * @param hints rendering hints
     * @param l image layout
     * @param kernel the convolution kernel, never null (paddings are read from it)
     * @param roi optional ROI restricting computation
     * @param noData optional NoData range
     * @param destinationNoData output value used where no result can be computed,
     *        clamped to the source data type
     * @param skipNoData presumably controls whether NoData samples are skipped in the
     *        kernel accumulation — consumed by subclasses, confirm there
     */
    public ConvolveOpImage(RenderedImage source, BorderExtender extender, RenderingHints hints,
            ImageLayout l, KernelJAI kernel, ROI roi, Range noData, double destinationNoData, boolean skipNoData) {
        super(source, l, hints, true, extender, kernel.getLeftPadding(), kernel.getRightPadding(),
                kernel.getTopPadding(), kernel.getBottomPadding());
        this.kernel = kernel;
        kw = kernel.getWidth();
        kh = kernel.getHeight();
        kx = kernel.getXOrigin();
        ky = kernel.getYOrigin();
        // Check if ROI control must be done
        if (roi != null) {
            hasROI = true;
            // Roi object
            this.roi = roi;
            roiBounds = roi.getBounds();
        } else {
            hasROI = false;
            this.roi = null;
            roiBounds = null;
        }
        // Check if No Data control must be done
        if (noData != null) {
            hasNoData = true;
            this.noData = noData;
            this.skipNoData = skipNoData;
        } else {
            hasNoData = false;
            this.skipNoData = false;
        }
        // Getting datatype
        int dataType = source.getSampleModel().getDataType();
        // Destination No Data value is clamped to the image data type
        this.destNoDataDouble = destinationNoData;
        switch (dataType) {
        case DataBuffer.TYPE_BYTE:
            this.destNoDataByte = ImageUtil.clampRoundByte(destinationNoData);
            break;
        case DataBuffer.TYPE_USHORT:
            this.destNoDataShort = ImageUtil.clampRoundUShort(destinationNoData);
            break;
        case DataBuffer.TYPE_SHORT:
            this.destNoDataShort = ImageUtil.clampRoundShort(destinationNoData);
            break;
        case DataBuffer.TYPE_INT:
            this.destNoDataInt = ImageUtil.clampRoundInt(destinationNoData);
            break;
        case DataBuffer.TYPE_FLOAT:
            this.destNoDataFloat = ImageUtil.clampFloat(destinationNoData);
            break;
        case DataBuffer.TYPE_DOUBLE:
            // no clamping needed, destNoDataDouble already holds the value
            break;
        default:
            throw new IllegalArgumentException("Wrong image data type");
        }
        // Definition of the possible cases that can be found
        // caseA = no ROI nor No Data
        // caseB = ROI present but No Data not present
        // caseC = No Data present but ROI not present
        // Last case not defined = both ROI and No Data are present
        caseA = !hasNoData && !hasROI;
        caseB = !hasNoData && hasROI;
        caseC = hasNoData && !hasROI;
        // Byte samples have only 256 possible values: precompute a NoData lookup table.
        if (hasNoData && dataType == DataBuffer.TYPE_BYTE) {
            initBooleanNoDataTable();
        }
        if (this.extender != null) {
            // Wrap the source with a nodata-aware border so the whole image can be computed.
            extendedIMG = BorderDescriptor.create(source, leftPadding, rightPadding, topPadding,
                    bottomPadding, extender, noData, destinationNoData, hints);
            this.destBounds = getBounds();
        } else {
            // Without an extender the computable area shrinks by the kernel paddings.
            int x0 = getMinX() + leftPadding;
            int y0 = getMinY() + topPadding;
            int w = getWidth() - leftPadding - rightPadding;
            w = Math.max(w, 0);
            int h = getHeight() - topPadding - bottomPadding;
            h = Math.max(h, 0);
            this.destBounds = new Rectangle(x0, y0, w, h);
        }
    }
    /**
     * Precomputes a 256-entry table where {@code lut[i]} is true when sample value
     * {@code i} is valid (i.e. NOT contained in the NoData range).
     */
    private void initBooleanNoDataTable() {
        // Initialization of the boolean lookup table
        lut = new boolean[256];
        // Fill the lookuptable
        for (int i = 0; i < 256; i++) {
            boolean result = true;
            if (noData.contains((byte) i)) {
                result = false;
            }
            lut[i] = result;
        }
    }
    /**
     * Performs convolution on a specified rectangle. The sources are cobbled.
     *
     * @param sources an array of source Rasters, guaranteed to provide all necessary source data for computing the output.
     * @param dest a WritableRaster tile containing the area to be computed.
     * @param destRect the rectangle within dest to be processed.
     */
    protected void computeRect(Raster[] sources, WritableRaster dest, Rectangle destRect) {
        // Retrieve format tags.
        RasterFormatTag[] formatTags = getFormatTags();
        Raster source = sources[0];
        Rectangle srcRect = mapDestRect(destRect, 0);
        RasterAccessor src = new RasterAccessor(source, srcRect, formatTags[0], getSourceImage(0)
                .getColorModel());
        RasterAccessor dst = new RasterAccessor(dest, destRect, formatTags[1], getColorModel());
        // ROI fields
        ROI roiTile = null;
        RandomIter roiIter = null;
        boolean roiContainsTile = false;
        boolean roiDisjointTile = false;
        // ROI check
        if (hasROI) {
            Rectangle srcRectExpanded = mapDestRect(destRect, 0);
            // The tile dimension is extended for avoiding border errors
            srcRectExpanded.setRect(srcRectExpanded.getMinX() - 1, srcRectExpanded.getMinY() - 1,
                    srcRectExpanded.getWidth() + 2, srcRectExpanded.getHeight() + 2);
            roiTile = roi.intersect(new ROIShape(srcRectExpanded));
            // Classify the tile: fully outside, fully inside, or partially covered by the ROI.
            if (!roiBounds.intersects(srcRectExpanded)) {
                roiDisjointTile = true;
            } else {
                roiContainsTile = roiTile.contains(srcRectExpanded);
                if (!roiContainsTile) {
                    if (!roiTile.intersects(srcRectExpanded)) {
                        roiDisjointTile = true;
                    } else {
                        // Partial overlap: a per-pixel ROI iterator is required.
                        PlanarImage roiIMG = getImage();
                        roiIter = RandomIterFactory.create(roiIMG, null, TILE_CACHED, ARRAY_CALC);
                    }
                }
            }
        }
        if (!hasROI || !roiDisjointTile) {
            // Dispatch to the data-type-specific loop implemented by the subclass.
            switch (dst.getDataType()) {
            case DataBuffer.TYPE_BYTE:
                byteLoop(src, dst, roiIter, roiContainsTile);
                break;
            case DataBuffer.TYPE_USHORT:
                ushortLoop(src, dst, roiIter, roiContainsTile);
                break;
            case DataBuffer.TYPE_SHORT:
                shortLoop(src, dst, roiIter, roiContainsTile);
                break;
            case DataBuffer.TYPE_INT:
                intLoop(src, dst, roiIter, roiContainsTile);
                break;
            case DataBuffer.TYPE_FLOAT:
                floatLoop(src, dst, roiIter, roiContainsTile);
                break;
            case DataBuffer.TYPE_DOUBLE:
                doubleLoop(src, dst, roiIter, roiContainsTile);
                break;
            default:
                throw new IllegalArgumentException("Wrong Data Type defined");
            }
            // If the RasterAccessor object set up a temporary buffer for the
            // op to write to, tell the RasterAccessor to write that data
            // to the raster now that we're done with it.
            if (dst.isDataCopy()) {
                dst.clampDataArrays();
                dst.copyDataToRaster();
            }
        } else {
            // Tile entirely outside the ROI: fill the output with the NoData value.
            // Setting all as NoData
            double[] backgroundValues = new double[src.getNumBands()];
            Arrays.fill(backgroundValues, destNoDataDouble);
            ImageUtil.fillBackground(dest, destRect, backgroundValues);
        }
    }
    // Per-data-type convolution loops, implemented by concrete subclasses.
    protected abstract void byteLoop(RasterAccessor src, RasterAccessor dst, RandomIter roiIter,
            boolean roiContainsTile);
    protected abstract void ushortLoop(RasterAccessor src, RasterAccessor dst, RandomIter roiIter,
            boolean roiContainsTile);
    protected abstract void shortLoop(RasterAccessor src, RasterAccessor dst, RandomIter roiIter,
            boolean roiContainsTile);
    protected abstract void intLoop(RasterAccessor src, RasterAccessor dst, RandomIter roiIter,
            boolean roiContainsTile);
    protected abstract void floatLoop(RasterAccessor src, RasterAccessor dst, RandomIter roiIter,
            boolean roiContainsTile);
    protected abstract void doubleLoop(RasterAccessor src, RasterAccessor dst, RandomIter roiIter,
            boolean roiContainsTile);
    /**
     * Computes one destination tile, splitting it along source tile boundaries
     * (shifted by the kernel paddings) so that each sub-rectangle can be cobbled
     * from the padded source and processed by {@link #computeRect}.
     */
    public Raster computeTile(int tileX, int tileY) {
        if (!cobbleSources) {
            return super.computeTile(tileX, tileY);
        }
        // Special handling for Border Extender
        /* Create a new WritableRaster to represent this tile. */
        Point org = new Point(tileXToX(tileX), tileYToY(tileY));
        WritableRaster dest = createWritableRaster(sampleModel, org);
        /* Clip output rectangle to image bounds. */
        Rectangle rect = new Rectangle(org.x, org.y, sampleModel.getWidth(),
                sampleModel.getHeight());
        Rectangle destRect = rect.intersection(destBounds);
        if ((destRect.width <= 0) || (destRect.height <= 0)) {
            // Nothing computable inside this tile: return it untouched.
            return dest;
        }
        /* account for padding in srcRectangle */
        PlanarImage s = getSourceImage(0);
        // Fix 4639755: Area operations throw exception for
        // destination extending beyond source bounds
        // The default dest image area is the same as the source
        // image area. However, when an ImageLayout hint is set,
        // this might be not true. So the destRect should be the
        // intersection of the provided rectangle, the destination
        // bounds and the source bounds.
        destRect = destRect.intersection(s.getBounds());
        Rectangle srcRect = new Rectangle(destRect);
        srcRect.x -= getLeftPadding();
        srcRect.width += getLeftPadding() + getRightPadding();
        srcRect.y -= getTopPadding();
        srcRect.height += getTopPadding() + getBottomPadding();
        /*
         * The tileWidth and tileHeight of the source image may differ from this tileWidth and tileHeight.
         */
        IntegerSequence srcXSplits = new IntegerSequence();
        IntegerSequence srcYSplits = new IntegerSequence();
        // there is only one source for an AreaOpImage
        s.getSplits(srcXSplits, srcYSplits, srcRect);
        // Initialize new sequences of X splits.
        IntegerSequence xSplits = new IntegerSequence(destRect.x, destRect.x + destRect.width);
        xSplits.insert(destRect.x);
        xSplits.insert(destRect.x + destRect.width);
        srcXSplits.startEnumeration();
        while (srcXSplits.hasMoreElements()) {
            int xsplit = srcXSplits.nextElement();
            int lsplit = xsplit - getLeftPadding();
            int rsplit = xsplit + getRightPadding();
            xSplits.insert(lsplit);
            xSplits.insert(rsplit);
        }
        // Initialize new sequences of Y splits.
        IntegerSequence ySplits = new IntegerSequence(destRect.y, destRect.y + destRect.height);
        ySplits.insert(destRect.y);
        ySplits.insert(destRect.y + destRect.height);
        srcYSplits.startEnumeration();
        while (srcYSplits.hasMoreElements()) {
            int ysplit = srcYSplits.nextElement();
            // NOTE(review): top/bottom paddings are applied here in the opposite order
            // to the left/right paddings in the X loop above; this mirrors the upstream
            // JAI AreaOpImage implementation — confirm before "fixing".
            int tsplit = ysplit - getBottomPadding();
            int bsplit = ysplit + getTopPadding();
            ySplits.insert(tsplit);
            ySplits.insert(bsplit);
        }
        /*
         * Divide destRect into sub rectangles based on the source splits, and compute each sub rectangle separately.
         */
        int x1, x2, y1, y2;
        Raster[] sources = new Raster[1];
        ySplits.startEnumeration();
        for (y1 = ySplits.nextElement(); ySplits.hasMoreElements(); y1 = y2) {
            y2 = ySplits.nextElement();
            int h = y2 - y1;
            int py1 = y1 - getTopPadding();
            int py2 = y2 + getBottomPadding();
            int ph = py2 - py1;
            xSplits.startEnumeration();
            for (x1 = xSplits.nextElement(); xSplits.hasMoreElements(); x1 = x2) {
                x2 = xSplits.nextElement();
                int w = x2 - x1;
                int px1 = x1 - getLeftPadding();
                int px2 = x2 + getRightPadding();
                int pw = px2 - px1;
                // Fetch the padded src rectangle
                Rectangle srcSubRect = new Rectangle(px1, py1, pw, ph);
                sources[0] = extender != null ? extendedIMG.getData(srcSubRect) : s
                        .getData(srcSubRect);
                // Make a destRectangle
                Rectangle dstSubRect = new Rectangle(x1, y1, w, h);
                computeRect(sources, dest, dstSubRect);
                // Recycle the source tile
                if (s.overlapsMultipleTiles(srcSubRect)) {
                    recycleTile(sources[0]);
                }
            }
        }
        return dest;
    }
    /**
     * This method provides a lazy initialization of the image associated to the ROI. The method uses the Double-checked locking in order to maintain
     * thread-safety
     *
     * @return the ROI rendered as a {@link PlanarImage}
     */
    protected PlanarImage getImage() {
        PlanarImage img = roiImage;
        if (img == null) {
            synchronized (this) {
                img = roiImage;
                if (img == null) {
                    roiImage = img = roi.getAsImage();
                }
            }
        }
        return img;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.query.h2.database;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.NoSuchElementException;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteSystemProperties;
import org.apache.ignite.internal.pagemem.PageIdUtils;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.persistence.IgniteCacheDatabaseSharedManager;
import org.apache.ignite.internal.processors.cache.persistence.RootPage;
import org.apache.ignite.internal.processors.cache.persistence.tree.BPlusTree;
import org.apache.ignite.internal.processors.cache.persistence.tree.io.BPlusIO;
import org.apache.ignite.internal.processors.cache.persistence.tree.io.PageIO;
import org.apache.ignite.internal.processors.query.GridQueryTypeDescriptor;
import org.apache.ignite.internal.processors.query.h2.H2Cursor;
import org.apache.ignite.internal.processors.query.h2.H2RowCache;
import org.apache.ignite.internal.processors.query.h2.opt.GridH2IndexBase;
import org.apache.ignite.internal.processors.query.h2.database.io.H2RowLinkIO;
import org.apache.ignite.internal.processors.query.h2.opt.GridH2Row;
import org.apache.ignite.internal.processors.query.h2.opt.GridH2Table;
import org.apache.ignite.internal.util.IgniteTree;
import org.apache.ignite.internal.util.lang.GridCursor;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.spi.indexing.IndexingQueryFilter;
import org.apache.ignite.spi.indexing.IndexingQueryCacheFilter;
import org.h2.engine.Session;
import org.h2.index.Cursor;
import org.h2.index.IndexType;
import org.h2.index.SingleRowCursor;
import org.h2.message.DbException;
import org.h2.result.Row;
import org.h2.result.SearchRow;
import org.h2.result.SortOrder;
import org.h2.table.Column;
import org.h2.table.IndexColumn;
import org.h2.table.TableFilter;
import org.h2.value.Value;
import org.jetbrains.annotations.Nullable;
/**
* H2 Index over {@link BPlusTree}.
*/
@SuppressWarnings({"TypeMayBeWeakened", "unchecked"})
public class H2TreeIndex extends GridH2IndexBase {
    /** Default value for {@code IGNITE_MAX_INDEX_PAYLOAD_SIZE}, in bytes. */
    public static final int IGNITE_MAX_INDEX_PAYLOAD_SIZE_DEFAULT = 10;
    /** Index trees, one per segment; {@code null} on non-affinity (client) nodes. */
    private final H2Tree[] segments;
    /** Inline helpers for the prefix of columns that can be inlined; {@code null} on client nodes. */
    private final List<InlineIndexHelper> inlineIdxs;
    /** Cache context. */
    private final GridCacheContext<?, ?> cctx;
    /**
     * Creates the index and, on affinity nodes, one {@link H2Tree} per segment.
     * On client nodes the index exists for query planning only and holds no data.
     *
     * @param cctx Cache context.
     * @param rowCache Row cache, may be {@code null}.
     * @param tbl Table.
     * @param name Index name.
     * @param pk Primary key.
     * @param colsList Index columns.
     * @param inlineSize Inline size.
     * @param segmentsCnt Number of tree segments, must be positive.
     * @throws IgniteCheckedException If failed.
     */
    public H2TreeIndex(
        GridCacheContext<?, ?> cctx,
        @Nullable H2RowCache rowCache,
        GridH2Table tbl,
        String name,
        boolean pk,
        List<IndexColumn> colsList,
        int inlineSize,
        int segmentsCnt
    ) throws IgniteCheckedException {
        assert segmentsCnt > 0 : segmentsCnt;
        this.cctx = cctx;
        IndexColumn[] cols = colsList.toArray(new IndexColumn[colsList.size()]);
        IndexColumn.mapColumns(cols, tbl);
        initBaseIndex(tbl, 0, name, cols,
            pk ? IndexType.createPrimaryKey(false, false) : IndexType.createNonUnique(false, false, false));
        GridQueryTypeDescriptor typeDesc = tbl.rowDescriptor().type();
        int typeId = cctx.binaryMarshaller() ? typeDesc.typeId() : typeDesc.valueClass().hashCode();
        // NOTE(review): rowDescriptor() was already dereferenced two lines above, so this
        // null check can never be false here — confirm whether it is stale.
        name = (tbl.rowDescriptor() == null ? "" : typeId + "_") + name;
        name = BPlusTree.treeName(name, "H2Tree");
        if (cctx.affinityNode()) {
            inlineIdxs = getAvailableInlineColumns(cols);
            segments = new H2Tree[segmentsCnt];
            IgniteCacheDatabaseSharedManager db = cctx.shared().database();
            for (int i = 0; i < segments.length; i++) {
                // Tree creation touches pages, so it must happen under the checkpoint read lock.
                db.checkpointReadLock();
                try {
                    RootPage page = getMetaPage(name, i);
                    segments[i] = new H2Tree(
                        name,
                        cctx.offheap().reuseListForIndex(name),
                        cctx.groupId(),
                        cctx.dataRegion().pageMemory(),
                        cctx.shared().wal(),
                        cctx.offheap().globalRemoveId(),
                        tbl.rowFactory(),
                        page.pageId().pageId(),
                        page.isAllocated(),
                        cols,
                        inlineIdxs,
                        computeInlineSize(inlineIdxs, inlineSize),
                        rowCache) {
                        @Override public int compareValues(Value v1, Value v2) {
                            return v1 == v2 ? 0 : table.compareTypeSafe(v1, v2);
                        }
                    };
                }
                finally {
                    db.checkpointReadUnlock();
                }
            }
        }
        else {
            // We need indexes on the client node, but index will not contain any data.
            segments = null;
            inlineIdxs = null;
        }
        initDistributedJoinMessaging(tbl);
    }
/**
* @param cols Columns array.
* @return List of {@link InlineIndexHelper} objects.
*/
private List<InlineIndexHelper> getAvailableInlineColumns(IndexColumn[] cols) {
List<InlineIndexHelper> res = new ArrayList<>();
for (IndexColumn col : cols) {
if (!InlineIndexHelper.AVAILABLE_TYPES.contains(col.column.getType()))
break;
InlineIndexHelper idx = new InlineIndexHelper(
col.column.getType(),
col.column.getColumnId(),
col.sortType,
table.getCompareMode());
res.add(idx);
}
return res;
}
/** {@inheritDoc} */
@Override protected int segmentsCount() {
return segments.length;
}
/** {@inheritDoc} */
@Override public Cursor find(Session ses, SearchRow lower, SearchRow upper) {
try {
IndexingQueryCacheFilter filter = partitionFilter(threadLocalFilter());
int seg = threadLocalSegment();
H2Tree tree = treeForRead(seg);
if (indexType.isPrimaryKey() && lower != null && upper != null && tree.compareRows(lower, upper) == 0) {
GridH2Row row = tree.findOne(lower, filter);
return (row == null) ? EMPTY_CURSOR : new SingleRowCursor(row);
}
else {
GridCursor<GridH2Row> cursor = tree.find(lower, upper, filter);
return new H2Cursor(cursor);
}
}
catch (IgniteCheckedException e) {
throw DbException.convert(e);
}
}
/** {@inheritDoc} */
@Override public GridH2Row put(GridH2Row row) {
try {
InlineIndexHelper.setCurrentInlineIndexes(inlineIdxs);
int seg = segmentForRow(row);
H2Tree tree = treeForRead(seg);
assert cctx.shared().database().checkpointLockIsHeldByThread();
return tree.put(row);
}
catch (IgniteCheckedException e) {
throw DbException.convert(e);
}
finally {
InlineIndexHelper.clearCurrentInlineIndexes();
}
}
/** {@inheritDoc} */
@Override public boolean putx(GridH2Row row) {
try {
InlineIndexHelper.setCurrentInlineIndexes(inlineIdxs);
int seg = segmentForRow(row);
H2Tree tree = treeForRead(seg);
assert cctx.shared().database().checkpointLockIsHeldByThread();
return tree.putx(row);
}
catch (IgniteCheckedException e) {
throw DbException.convert(e);
}
finally {
InlineIndexHelper.clearCurrentInlineIndexes();
}
}
/** {@inheritDoc} */
@Override public GridH2Row remove(SearchRow row) {
try {
InlineIndexHelper.setCurrentInlineIndexes(inlineIdxs);
int seg = segmentForRow(row);
H2Tree tree = treeForRead(seg);
assert cctx.shared().database().checkpointLockIsHeldByThread();
return tree.remove(row);
}
catch (IgniteCheckedException e) {
throw DbException.convert(e);
}
finally {
InlineIndexHelper.clearCurrentInlineIndexes();
}
}
/** {@inheritDoc} */
@Override public boolean removex(SearchRow row) {
try {
InlineIndexHelper.setCurrentInlineIndexes(inlineIdxs);
int seg = segmentForRow(row);
H2Tree tree = treeForRead(seg);
assert cctx.shared().database().checkpointLockIsHeldByThread();
return tree.removex(row);
}
catch (IgniteCheckedException e) {
throw DbException.convert(e);
}
finally {
InlineIndexHelper.clearCurrentInlineIndexes();
}
}
/** {@inheritDoc} */
@Override public double getCost(Session ses, int[] masks, TableFilter[] filters, int filter, SortOrder sortOrder, HashSet<Column> allColumnsSet) {
long rowCnt = getRowCountApproximation();
double baseCost = getCostRangeIndex(masks, rowCnt, filters, filter, sortOrder, false, allColumnsSet);
int mul = getDistributedMultiplier(ses, filters, filter);
return mul * baseCost;
}
/** {@inheritDoc} */
@Override public long getRowCount(Session ses) {
try {
int seg = threadLocalSegment();
H2Tree tree = treeForRead(seg);
BPlusTree.TreeRowClosure<SearchRow, GridH2Row> filter = filterClosure();
return tree.size(filter);
}
catch (IgniteCheckedException e) {
throw DbException.convert(e);
}
}
/** {@inheritDoc} */
@Override public long getRowCountApproximation() {
return 10_000; // TODO
}
/** {@inheritDoc} */
@Override public boolean canGetFirstOrLast() {
return true;
}
/** {@inheritDoc} */
@Override public Cursor findFirstOrLast(Session session, boolean b) {
try {
int seg = threadLocalSegment();
H2Tree tree = treeForRead(seg);
GridH2Row row = b ? tree.findFirst(): tree.findLast();
return new SingleRowCursor(row);
}
catch (IgniteCheckedException e) {
throw DbException.convert(e);
}
}
/** {@inheritDoc} */
@Override public void destroy(boolean rmvIndex) {
try {
if (cctx.affinityNode() && rmvIndex) {
assert cctx.shared().database().checkpointLockIsHeldByThread();
for (int i = 0; i < segments.length; i++) {
H2Tree tree = segments[i];
tree.destroy();
dropMetaPage(tree.getName(), i);
}
}
}
catch (IgniteCheckedException e) {
throw new IgniteException(e);
}
finally {
super.destroy(rmvIndex);
}
}
/** {@inheritDoc} */
@Override protected H2Tree treeForRead(int segment) {
return segments[segment];
}
/** {@inheritDoc} */
@Override protected H2Cursor doFind0(
IgniteTree t,
@Nullable SearchRow first,
boolean includeFirst,
@Nullable SearchRow last,
IndexingQueryFilter filter) {
try {
IndexingQueryCacheFilter pf = partitionFilter(filter);
GridCursor<GridH2Row> range = t.find(first, last, pf);
if (range == null)
range = GridH2IndexBase.EMPTY_CURSOR;
return new H2Cursor(range);
}
catch (IgniteCheckedException e) {
throw DbException.convert(e);
}
}
/**
* @param inlineIdxs Inline index helpers.
* @param cfgInlineSize Inline size from cache config.
* @return Inline size.
*/
private int computeInlineSize(List<InlineIndexHelper> inlineIdxs, int cfgInlineSize) {
int confSize = cctx.config().getSqlIndexMaxInlineSize();
int propSize = confSize == -1 ? IgniteSystemProperties.getInteger(IgniteSystemProperties.IGNITE_MAX_INDEX_PAYLOAD_SIZE,
IGNITE_MAX_INDEX_PAYLOAD_SIZE_DEFAULT) : confSize;
if (cfgInlineSize == 0)
return 0;
if (F.isEmpty(inlineIdxs))
return 0;
if (cfgInlineSize == -1) {
if (propSize == 0)
return 0;
int size = 0;
for (InlineIndexHelper idxHelper : inlineIdxs) {
if (idxHelper.size() <= 0) {
size = propSize;
break;
}
// 1 byte type + size
size += idxHelper.size() + 1;
}
return Math.min(PageIO.MAX_PAYLOAD_SIZE, size);
}
else
return Math.min(PageIO.MAX_PAYLOAD_SIZE, cfgInlineSize);
}
/**
* @param name Name.
* @param segIdx Segment index.
* @return RootPage for meta page.
* @throws IgniteCheckedException If failed.
*/
private RootPage getMetaPage(String name, int segIdx) throws IgniteCheckedException {
return cctx.offheap().rootPageForIndex(cctx.cacheId(), name + "%" + segIdx);
}
/**
* @param name Name.
* @param segIdx Segment index.
* @throws IgniteCheckedException If failed.
*/
private void dropMetaPage(String name, int segIdx) throws IgniteCheckedException {
cctx.offheap().dropRootPageForIndex(cctx.cacheId(), name + "%" + segIdx);
}
/**
 * Returns a filter which returns true for entries belonging to a particular partition.
 *
 * @param qryFilter Factory that creates a predicate for filtering entries for a particular cache.
 * @return The filter or null if the filter is not needed (e.g., if the cache is not partitioned).
 */
@Nullable private IndexingQueryCacheFilter partitionFilter(@Nullable IndexingQueryFilter qryFilter) {
    // No factory means no filtering is required.
    return qryFilter == null ? null : qryFilter.forCache(getTable().cacheName());
}
/**
 * An adapter from {@link IndexingQueryCacheFilter} to {@link BPlusTree.TreeRowClosure} which
 * filters entries that belong to the current partition.
 */
private static class PartitionFilterTreeRowClosure implements BPlusTree.TreeRowClosure<SearchRow, GridH2Row> {
    /** Filter. */
    private final IndexingQueryCacheFilter filter;

    /**
     * Creates a {@link BPlusTree.TreeRowClosure} adapter based on the given partition filter.
     *
     * @param filter The partition filter.
     */
    public PartitionFilterTreeRowClosure(IndexingQueryCacheFilter filter) {
        this.filter = filter;
    }

    /** {@inheritDoc} */
    @Override public boolean apply(BPlusTree<SearchRow, GridH2Row> tree,
        BPlusIO<SearchRow> io, long pageAddr, int idx) throws IgniteCheckedException {
        // Extract the row link, derive its page id and then the partition id,
        // and let the cache filter decide whether the partition is relevant.
        long link = ((H2RowLinkIO)io).getLink(pageAddr, idx);

        long pageId = PageIdUtils.pageId(link);

        return filter.applyPartition(PageIdUtils.partId(pageId));
    }
}
/**
 * Returns a filter to apply to rows in the current index to obtain only the
 * ones owned by the this cache.
 *
 * @return The filter, which returns true for rows owned by this cache.
 */
@Nullable private BPlusTree.TreeRowClosure<SearchRow, GridH2Row> filterClosure() {
    final IndexingQueryCacheFilter cacheFilter = partitionFilter(threadLocalFilter());

    // No per-cache filter in effect: no closure needed.
    if (cacheFilter == null)
        return null;

    return new PartitionFilterTreeRowClosure(cacheFilter);
}
/** {@inheritDoc} */
@Override public void refreshColumnIds() {
    super.refreshColumnIds();

    // Nothing inlined for this index: no helpers to refresh.
    if (inlineIdxs == null)
        return;

    List<InlineIndexHelper> refreshed = getAvailableInlineColumns(indexColumns);

    assert inlineIdxs.size() == refreshed.size();

    // Replace each helper in place so the list identity stays unchanged.
    for (int i = 0; i < refreshed.size(); i++)
        inlineIdxs.set(i, refreshed.get(i));
}
/**
 * Empty cursor.
 */
public static final Cursor EMPTY_CURSOR = new Cursor() {
    /** {@inheritDoc} */
    @Override public boolean next() {
        // Nothing to iterate over.
        return false;
    }

    /** {@inheritDoc} */
    @Override public boolean previous() {
        // Nothing to iterate over.
        return false;
    }

    /** {@inheritDoc} */
    @Override public Row get() {
        throw noElement();
    }

    /** {@inheritDoc} */
    @Override public SearchRow getSearchRow() {
        throw noElement();
    }

    /**
     * @return Exception to throw when a row is requested from the empty cursor.
     */
    private DbException noElement() {
        return DbException.convert(new NoSuchElementException("Empty cursor"));
    }
};
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.cognitiveservices.vision.customvision.prediction;
import com.microsoft.azure.cognitiveservices.vision.customvision.prediction.models.PredictImageWithNoStoreOptionalParameter;
import com.microsoft.azure.cognitiveservices.vision.customvision.prediction.models.PredictImageUrlWithNoStoreOptionalParameter;
import com.microsoft.azure.cognitiveservices.vision.customvision.prediction.models.PredictImageOptionalParameter;
import com.microsoft.azure.cognitiveservices.vision.customvision.prediction.models.PredictImageUrlOptionalParameter;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.cognitiveservices.vision.customvision.prediction.models.ImagePrediction;
import java.util.UUID;
import rx.Observable;
/**
 * An instance of this class provides access to all the operations defined
 * in Predictions.
 */
public interface Predictions {
    /**
     * Predict an image without saving the result.
     *
     * @param projectId The project id.
     * @param imageData the binary image data to evaluate
     * @param predictImageWithNoStoreOptionalParameter the object representing the optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the ImagePrediction object if successful.
     * @deprecated use the fluent {@link #predictImageWithNoStore()} call instead
     */
    @Deprecated
    ImagePrediction predictImageWithNoStore(UUID projectId, byte[] imageData, PredictImageWithNoStoreOptionalParameter predictImageWithNoStoreOptionalParameter);

    /**
     * Predict an image without saving the result.
     *
     * @param projectId The project id.
     * @param imageData the binary image data to evaluate
     * @param predictImageWithNoStoreOptionalParameter the object representing the optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ImagePrediction object
     * @deprecated use the fluent {@link #predictImageWithNoStore()} call instead
     */
    @Deprecated
    Observable<ImagePrediction> predictImageWithNoStoreAsync(UUID projectId, byte[] imageData, PredictImageWithNoStoreOptionalParameter predictImageWithNoStoreOptionalParameter);

    /**
     * Predict an image without saving the result.
     *
     * @return the first stage of the predictImageWithNoStore call
     */
    PredictionsPredictImageWithNoStoreDefinitionStages.WithProjectId predictImageWithNoStore();

    /**
     * Grouping of predictImageWithNoStore definition stages.
     */
    interface PredictionsPredictImageWithNoStoreDefinitionStages {
        /**
         * The stage of the definition allowing to specify projectId.
         */
        interface WithProjectId {
            /**
             * The project id.
             *
             * @param projectId the project id
             * @return next definition stage
             */
            WithImageData withProjectId(UUID projectId);
        }

        /**
         * The stage of the definition allowing to specify imageData.
         */
        interface WithImageData {
            /**
             * Sets the binary image data to evaluate.
             *
             * @param imageData the binary image data
             * @return next definition stage
             */
            PredictionsPredictImageWithNoStoreDefinitionStages.WithExecute withImageData(byte[] imageData);
        }

        /**
         * The stage of the definition which allows for any other optional settings to be specified.
         */
        interface WithAllOptions {
            /**
             * Optional. Specifies the id of a particular iteration to evaluate against.
             * The default iteration for the project will be used when not specified.
             *
             * @param iterationId the iteration id
             * @return next definition stage
             */
            PredictionsPredictImageWithNoStoreDefinitionStages.WithExecute withIterationId(UUID iterationId);

            /**
             * Optional. Specifies the name of application using the endpoint.
             *
             * @param application the application name
             * @return next definition stage
             */
            PredictionsPredictImageWithNoStoreDefinitionStages.WithExecute withApplication(String application);
        }

        /**
         * The last stage of the definition which will make the operation call.
         */
        interface WithExecute extends PredictionsPredictImageWithNoStoreDefinitionStages.WithAllOptions {
            /**
             * Execute the request.
             *
             * @return the ImagePrediction object if successful.
             */
            ImagePrediction execute();

            /**
             * Execute the request asynchronously.
             *
             * @return the observable to the ImagePrediction object
             */
            Observable<ImagePrediction> executeAsync();
        }
    }

    /**
     * The entirety of predictImageWithNoStore definition.
     */
    interface PredictionsPredictImageWithNoStoreDefinition extends
        PredictionsPredictImageWithNoStoreDefinitionStages.WithProjectId,
        PredictionsPredictImageWithNoStoreDefinitionStages.WithImageData,
        PredictionsPredictImageWithNoStoreDefinitionStages.WithExecute {
    }

    /**
     * Predict an image url without saving the result.
     *
     * @param projectId The project id.
     * @param predictImageUrlWithNoStoreOptionalParameter the object representing the optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the ImagePrediction object if successful.
     * @deprecated use the fluent {@link #predictImageUrlWithNoStore()} call instead
     */
    @Deprecated
    ImagePrediction predictImageUrlWithNoStore(UUID projectId, PredictImageUrlWithNoStoreOptionalParameter predictImageUrlWithNoStoreOptionalParameter);

    /**
     * Predict an image url without saving the result.
     *
     * @param projectId The project id.
     * @param predictImageUrlWithNoStoreOptionalParameter the object representing the optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ImagePrediction object
     * @deprecated use the fluent {@link #predictImageUrlWithNoStore()} call instead
     */
    @Deprecated
    Observable<ImagePrediction> predictImageUrlWithNoStoreAsync(UUID projectId, PredictImageUrlWithNoStoreOptionalParameter predictImageUrlWithNoStoreOptionalParameter);

    /**
     * Predict an image url without saving the result.
     *
     * @return the first stage of the predictImageUrlWithNoStore call
     */
    PredictionsPredictImageUrlWithNoStoreDefinitionStages.WithProjectId predictImageUrlWithNoStore();

    /**
     * Grouping of predictImageUrlWithNoStore definition stages.
     */
    interface PredictionsPredictImageUrlWithNoStoreDefinitionStages {
        /**
         * The stage of the definition allowing to specify projectId.
         */
        interface WithProjectId {
            /**
             * The project id.
             *
             * @param projectId the project id
             * @return next definition stage
             */
            PredictionsPredictImageUrlWithNoStoreDefinitionStages.WithExecute withProjectId(UUID projectId);
        }

        /**
         * The stage of the definition which allows for any other optional settings to be specified.
         */
        interface WithAllOptions {
            /**
             * Optional. Specifies the id of a particular iteration to evaluate against.
             * The default iteration for the project will be used when not specified.
             *
             * @param iterationId the iteration id
             * @return next definition stage
             */
            PredictionsPredictImageUrlWithNoStoreDefinitionStages.WithExecute withIterationId(UUID iterationId);

            /**
             * Optional. Specifies the name of application using the endpoint.
             *
             * @param application the application name
             * @return next definition stage
             */
            PredictionsPredictImageUrlWithNoStoreDefinitionStages.WithExecute withApplication(String application);

            /**
             * Sets the url of the image to evaluate.
             *
             * @param url the image url
             * @return next definition stage
             */
            PredictionsPredictImageUrlWithNoStoreDefinitionStages.WithExecute withUrl(String url);
        }

        /**
         * The last stage of the definition which will make the operation call.
         */
        interface WithExecute extends PredictionsPredictImageUrlWithNoStoreDefinitionStages.WithAllOptions {
            /**
             * Execute the request.
             *
             * @return the ImagePrediction object if successful.
             */
            ImagePrediction execute();

            /**
             * Execute the request asynchronously.
             *
             * @return the observable to the ImagePrediction object
             */
            Observable<ImagePrediction> executeAsync();
        }
    }

    /**
     * The entirety of predictImageUrlWithNoStore definition.
     */
    interface PredictionsPredictImageUrlWithNoStoreDefinition extends
        PredictionsPredictImageUrlWithNoStoreDefinitionStages.WithProjectId,
        PredictionsPredictImageUrlWithNoStoreDefinitionStages.WithExecute {
    }

    /**
     * Predict an image and saves the result.
     *
     * @param projectId The project id.
     * @param imageData the binary image data to evaluate
     * @param predictImageOptionalParameter the object representing the optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the ImagePrediction object if successful.
     * @deprecated use the fluent {@link #predictImage()} call instead
     */
    @Deprecated
    ImagePrediction predictImage(UUID projectId, byte[] imageData, PredictImageOptionalParameter predictImageOptionalParameter);

    /**
     * Predict an image and saves the result.
     *
     * @param projectId The project id.
     * @param imageData the binary image data to evaluate
     * @param predictImageOptionalParameter the object representing the optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ImagePrediction object
     * @deprecated use the fluent {@link #predictImage()} call instead
     */
    @Deprecated
    Observable<ImagePrediction> predictImageAsync(UUID projectId, byte[] imageData, PredictImageOptionalParameter predictImageOptionalParameter);

    /**
     * Predict an image and saves the result.
     *
     * @return the first stage of the predictImage call
     */
    PredictionsPredictImageDefinitionStages.WithProjectId predictImage();

    /**
     * Grouping of predictImage definition stages.
     */
    interface PredictionsPredictImageDefinitionStages {
        /**
         * The stage of the definition allowing to specify projectId.
         */
        interface WithProjectId {
            /**
             * The project id.
             *
             * @param projectId the project id
             * @return next definition stage
             */
            WithImageData withProjectId(UUID projectId);
        }

        /**
         * The stage of the definition allowing to specify imageData.
         */
        interface WithImageData {
            /**
             * Sets the binary image data to evaluate.
             *
             * @param imageData the binary image data
             * @return next definition stage
             */
            PredictionsPredictImageDefinitionStages.WithExecute withImageData(byte[] imageData);
        }

        /**
         * The stage of the definition which allows for any other optional settings to be specified.
         */
        interface WithAllOptions {
            /**
             * Optional. Specifies the id of a particular iteration to evaluate against.
             * The default iteration for the project will be used when not specified.
             *
             * @param iterationId the iteration id
             * @return next definition stage
             */
            PredictionsPredictImageDefinitionStages.WithExecute withIterationId(UUID iterationId);

            /**
             * Optional. Specifies the name of application using the endpoint.
             *
             * @param application the application name
             * @return next definition stage
             */
            PredictionsPredictImageDefinitionStages.WithExecute withApplication(String application);
        }

        /**
         * The last stage of the definition which will make the operation call.
         */
        interface WithExecute extends PredictionsPredictImageDefinitionStages.WithAllOptions {
            /**
             * Execute the request.
             *
             * @return the ImagePrediction object if successful.
             */
            ImagePrediction execute();

            /**
             * Execute the request asynchronously.
             *
             * @return the observable to the ImagePrediction object
             */
            Observable<ImagePrediction> executeAsync();
        }
    }

    /**
     * The entirety of predictImage definition.
     */
    interface PredictionsPredictImageDefinition extends
        PredictionsPredictImageDefinitionStages.WithProjectId,
        PredictionsPredictImageDefinitionStages.WithImageData,
        PredictionsPredictImageDefinitionStages.WithExecute {
    }

    /**
     * Predict an image url and saves the result.
     *
     * @param projectId The project id.
     * @param predictImageUrlOptionalParameter the object representing the optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the ImagePrediction object if successful.
     * @deprecated use the fluent {@link #predictImageUrl()} call instead
     */
    @Deprecated
    ImagePrediction predictImageUrl(UUID projectId, PredictImageUrlOptionalParameter predictImageUrlOptionalParameter);

    /**
     * Predict an image url and saves the result.
     *
     * @param projectId The project id.
     * @param predictImageUrlOptionalParameter the object representing the optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ImagePrediction object
     * @deprecated use the fluent {@link #predictImageUrl()} call instead
     */
    @Deprecated
    Observable<ImagePrediction> predictImageUrlAsync(UUID projectId, PredictImageUrlOptionalParameter predictImageUrlOptionalParameter);

    /**
     * Predict an image url and saves the result.
     *
     * @return the first stage of the predictImageUrl call
     */
    PredictionsPredictImageUrlDefinitionStages.WithProjectId predictImageUrl();

    /**
     * Grouping of predictImageUrl definition stages.
     */
    interface PredictionsPredictImageUrlDefinitionStages {
        /**
         * The stage of the definition allowing to specify projectId.
         */
        interface WithProjectId {
            /**
             * The project id.
             *
             * @param projectId the project id
             * @return next definition stage
             */
            PredictionsPredictImageUrlDefinitionStages.WithExecute withProjectId(UUID projectId);
        }

        /**
         * The stage of the definition which allows for any other optional settings to be specified.
         */
        interface WithAllOptions {
            /**
             * Optional. Specifies the id of a particular iteration to evaluate against.
             * The default iteration for the project will be used when not specified.
             *
             * @param iterationId the iteration id
             * @return next definition stage
             */
            PredictionsPredictImageUrlDefinitionStages.WithExecute withIterationId(UUID iterationId);

            /**
             * Optional. Specifies the name of application using the endpoint.
             *
             * @param application the application name
             * @return next definition stage
             */
            PredictionsPredictImageUrlDefinitionStages.WithExecute withApplication(String application);

            /**
             * Sets the url of the image to evaluate.
             *
             * @param url the image url
             * @return next definition stage
             */
            PredictionsPredictImageUrlDefinitionStages.WithExecute withUrl(String url);
        }

        /**
         * The last stage of the definition which will make the operation call.
         */
        interface WithExecute extends PredictionsPredictImageUrlDefinitionStages.WithAllOptions {
            /**
             * Execute the request.
             *
             * @return the ImagePrediction object if successful.
             */
            ImagePrediction execute();

            /**
             * Execute the request asynchronously.
             *
             * @return the observable to the ImagePrediction object
             */
            Observable<ImagePrediction> executeAsync();
        }
    }

    /**
     * The entirety of predictImageUrl definition.
     */
    interface PredictionsPredictImageUrlDefinition extends
        PredictionsPredictImageUrlDefinitionStages.WithProjectId,
        PredictionsPredictImageUrlDefinitionStages.WithExecute {
    }
}
| |
/**
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "{}"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright {yyyy} {name of copyright owner}
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thesoftwarefactory.vertx.web.more.impl;
import java.nio.charset.Charset;
import java.util.Locale;
import java.util.Objects;
import com.thesoftwarefactory.vertx.web.model.I18n;
import com.thesoftwarefactory.vertx.web.more.DeviceInfo;
import com.thesoftwarefactory.vertx.web.more.Flash;
import com.thesoftwarefactory.vertx.web.more.UserContext;
import com.thesoftwarefactory.vertx.web.more.WebContext;
import io.vertx.core.http.HttpHeaders;
import io.vertx.ext.web.RoutingContext;
import io.vertx.ext.web.Session;
/**
 * Default {@link WebContext} implementation backed by a Vert.x {@link RoutingContext}.
 *
 * <p>All derived values (device info, flash, i18n, locale, referer, user context)
 * are resolved lazily on first access and cached for the lifetime of the request.
 * Instances are request-scoped and not intended to be shared across threads.
 */
public class WebContextImpl implements WebContext {
    // Lazily computed device info, derived from User-Agent/Accept headers.
    private DeviceInfo deviceInfo;

    // Lazily resolved flash scope; never null once flash() has been called.
    private Flash flash;

    // Lazily built i18n helper based on locale() and an optional charset hint.
    private I18n i18n;

    // Lazily resolved request locale; defaults to Locale.FRANCE.
    private Locale locale;

    // The wrapped routing context; immutable for the lifetime of this object.
    private final RoutingContext routingContext;

    // Lazily resolved referer; defaults to "/" when the header is absent.
    private String referer;

    // Lazily resolved user context; anonymous when no authenticated user.
    private UserContext userContext;

    /**
     * Creates a web context wrapping the given routing context.
     *
     * @param routingContext the current Vert.x routing context, must not be null
     * @throws NullPointerException if {@code routingContext} is null
     */
    public WebContextImpl(RoutingContext routingContext) {
        this.routingContext = Objects.requireNonNull(routingContext);
    }

    @Override
    public DeviceInfo deviceInfo() {
        if (deviceInfo == null) {
            String userAgent = routingContext.request().getHeader(HttpHeaders.USER_AGENT.toString());
            String httpAccept = routingContext.request().getHeader(HttpHeaders.ACCEPT.toString());
            deviceInfo = new DeviceInfoMobileEsp(userAgent, httpAccept);
        }
        return deviceInfo;
    }

    @Override
    public Flash flash() {
        if (flash == null) {
            flash = Flash.get(routingContext);
            // No flash attached to the request yet: create and register one so
            // callers never observe null.
            if (flash == null) {
                flash = Flash.create();
                Flash.set(flash, routingContext);
            }
        }
        return flash;
    }

    @Override
    public I18n i18n() {
        if (i18n == null) {
            // An upstream handler may have stored a charset hint in the context.
            String charset = routingContext().get(I18n.class.getName() + ".charset");
            if (charset != null) {
                i18n = new I18n(locale(), Charset.forName(charset));
            } else {
                i18n = new I18n(locale());
            }
        }
        return i18n;
    }

    @Override
    public Locale locale() {
        if (locale == null) {
            locale = routingContext.get("__locale");
            if (locale == null) {
                // Deliberate application default when no locale was negotiated.
                locale = Locale.FRANCE;
            }
        }
        return locale;
    }

    @Override
    public String referer() {
        if (referer == null) {
            referer = routingContext.request().getHeader(HttpHeaders.REFERER.toString());
            if (referer == null) {
                // Some browsers do not send the referer, in that case let's use
                // "/" as a fallback referer
                referer = "/";
            }
        }
        return referer;
    }

    /**
     * @return the wrapped routing context, for use by subclasses
     */
    protected RoutingContext routingContext() {
        return routingContext;
    }

    @Override
    public Session session() {
        return routingContext.session();
    }

    @Override
    public String uri() {
        return routingContext.request().uri();
    }

    @Override
    public UserContext userContext() {
        if (userContext == null) {
            if (routingContext.user() instanceof UserContext) {
                userContext = (UserContext) routingContext.user();
            }
            else {
                // Unauthenticated request (or a user type we do not manage).
                userContext = UserContextImpl.anonymous();
            }
        }
        return userContext;
    }
}
| |
/*******************************************************************************
* Copyright 2017 Vincenzo-Maria Cappelleri <vincenzo.cappelleri@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
/**
*
*/
package raw.dht.implementations;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.Arrays;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import raw.dht.DhtAddress;
/**
* Objects of this class implement {@link DhtAddress}
* wrapper for physical addresses used in the DHT.
*
* @author vic
*
*/
public class DefaultDhtAddress implements DhtAddress {
private InetAddress address;
private int udpPort;
private int tcpPort;
/**
* Generated random serial
*/
private static final long serialVersionUID = 7914143027648273175L;
/**
 * Build an instance of a {@link DhtAddress} object using the same port
 * for both TCP and UDP.
 *
 * @param address the {@link InetAddress} physical address
 * @param port the port (both TCP and UDP) used by this address
 * @throws IllegalArgumentException if <tt>port</tt> is not in range [0 - 65535]
 */
public DefaultDhtAddress(InetAddress address, int port) throws IllegalArgumentException{
    // Delegate to the two-port constructor with identical TCP/UDP ports.
    this(address, port, port);
}
/**
 * Build an instance of a {@link DhtAddress} object.
 *
 * @param address the {@link InetAddress} physical address
 * @param udpPort the UDP port used by this address
 * @param tcpPort the TCP port used by this address
 * @throws IllegalArgumentException if <tt>tcpPort</tt> or <tt>udpPort</tt> are not in range [0 - 65535]
 */
public DefaultDhtAddress(InetAddress address, int udpPort, int tcpPort) throws IllegalArgumentException{
    // Validate both ports before assigning any state (per the @throws contract,
    // checkPort rejects values outside [0 - 65535]).
    checkPort(udpPort);
    checkPort(tcpPort);
    this.address = address;
    this.udpPort = udpPort;
    this.tcpPort = tcpPort;
}
/**
* Build an instance of a {@link DhtAddress} object.
*
* @param address the raw IP address for this physical address
* @param port the port (both TCP and UPD) used by this address
* @throws UnknownHostException if IP address is of illegal length
* @throws IllegalArgumentException if <tt>port</tt> is not in range [0 - 65535]
*/
public DefaultDhtAddress(byte[] address, int port) throws UnknownHostException, IllegalArgumentException {
this(address, port, port);
}
/**
* Build an instance of a {@link DhtAddress} object.
*
* @param address the raw IP address for this physical address
* @param udpPort the UDP port used by this address
* @param tcpPort the TCP port used by this address
* @throws UnknownHostException if IP address is of illegal length
* @throws IllegalArgumentException if <tt>port</tt> is not in range [0 - 65535]
*/
public DefaultDhtAddress(byte[] address, int udpPort, int tcpPort) throws UnknownHostException, IllegalArgumentException {
InetAddress addr = InetAddress.getByAddress(address);
checkPort(udpPort);
checkPort(tcpPort);
this.address = addr;
this.udpPort = udpPort;
this.tcpPort = tcpPort;
}
/**
* Build an instance of a {@link DhtAddress} object.
*
* @param address the canonical IP address string representation for this physical address
* @param port the port (both TCP and UPD) used by this address
* @throws UnknownHostException if IP address is of illegal length
* @throws IllegalArgumentException if <tt>udpPort</tt> is not in range [0 - 65535]
*/
public DefaultDhtAddress(String address, int port) throws UnknownHostException, IllegalArgumentException {
this(address, port, port);
}
/**
* Build an instance of a {@link DhtAddress} object.
*
* @param address the canonical IP address string representation for this physical address
* @param udpPort the UDP port used by this address
* @param tcpPort the TCP port used by this address
* @throws UnknownHostException if IP address is of illegal length
* @throws IllegalArgumentException if <tt>udpPort</tt> is not in range [0 - 65535]
*/
public DefaultDhtAddress(String address, int udpPort, int tcpPort) throws UnknownHostException, IllegalArgumentException {
InetAddress addr = InetAddress.getByName(address);
checkPort(udpPort);
checkPort(tcpPort);
this.address = addr;
this.udpPort = udpPort;
this.tcpPort = tcpPort;
}
/**
* Rise exception if udpPort is invalid. To be used in constructors.
*
* @throws IllegalArgumentException if udpPort is less than 0 or more than 65535
*/
private void checkPort(int port) throws IllegalArgumentException{
if(port<0 || port >65535){
throw new IllegalArgumentException("Port number specified is not valid (got "+port+").");
}
}
/* (non-Javadoc)
* @see raw.dht.interfaces.DhtAddress#getAddress()
*/
@Override
public InetAddress getAddress() {
return address;
}
/* (non-Javadoc)
* @see raw.dht.interfaces.DhtAddress#getPort()
*/
@Override
public int getUdpPort() {
return udpPort;
}
/* (non-Javadoc)
* @see raw.dht.interfaces.DhtAddress#getSocketAddress()
*/
@Override
public InetSocketAddress getUdpSocketAddress() {
return new InetSocketAddress(address, udpPort);
}
/* (non-Javadoc)
* @see raw.dht.DhtAddress#getTcpPort()
*/
@Override
public int getTcpPort() {
return tcpPort;
}
/* (non-Javadoc)
* @see raw.dht.DhtAddress#getTcpSocketAddress()
*/
@Override
public InetSocketAddress getTcpSocketAddress() {
return new InetSocketAddress(address, tcpPort);
}
@Override
public boolean equals(Object obj) {
if(!(obj instanceof DhtAddress)){
return false;
}
DhtAddress other = (DhtAddress) obj;
if(!Arrays.equals(address.getAddress(), other.getAddress().getAddress())){
return false;
}
if(tcpPort != other.getTcpPort()){
return false;
}
return (udpPort == other.getUdpPort());
}
@Override
public String toString() {
return getUdpSocketAddress().getHostString()+":"+getUdpPort()+"+"+getTcpPort();
}
/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
HashCodeBuilder builder = new HashCodeBuilder();
builder.append(address).
append(udpPort).
append(tcpPort);
return builder.toHashCode();
}
}
| |
package org.jenkinsci.plugins.proxmox;
import static java.util.Collections.emptyList;
import static java.util.Optional.ofNullable;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import javax.security.auth.login.LoginException;
import org.jenkinsci.plugins.proxmox.VirtualMachineLauncher.RevertPolicy;
import org.jenkinsci.plugins.proxmox.pve2api.Connector;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import hudson.Extension;
import hudson.model.Computer;
import hudson.model.Descriptor;
import hudson.model.Slave;
import hudson.slaves.Cloud;
import hudson.slaves.ComputerLauncher;
import hudson.slaves.NodeProperty;
import hudson.slaves.RetentionStrategy;
import hudson.util.FormValidation;
import hudson.util.ListBoxModel;
import jenkins.model.Jenkins;
import us.monoid.json.JSONException;
/**
 * A Jenkins slave whose agent runs inside a virtual machine hosted on a
 * Proxmox datacenter. Launching is delegated to a {@link VirtualMachineLauncher}
 * that can start the VM and optionally revert it to a snapshot per the
 * configured {@link RevertPolicy}.
 */
public class VirtualMachineSlave extends Slave {

    private static final long serialVersionUID = 1L;

    /** Description identifying the Proxmox datacenter cloud this slave belongs to. */
    private String datacenterDescription;
    /** Name of the Proxmox node that hosts the virtual machine. */
    private String datacenterNode;
    /** Snapshot to roll back to, used according to {@link #revertPolicy}. */
    private String snapshotName;
    /** Proxmox (QEMU) id of the virtual machine backing this slave. */
    private Integer virtualMachineId;
    /** Whether the VM should be started when the slave is launched. */
    private Boolean startVM;
    /** Seconds to wait after starting the VM before connecting the agent. */
    private int startupWaitingPeriodSeconds;
    /** Policy deciding when the VM is reverted to {@link #snapshotName}. */
    private RevertPolicy revertPolicy;

    /**
     * Data-bound constructor invoked by Jenkins when the node is configured.
     * Wraps the user-chosen {@code delegateLauncher} in a
     * {@link VirtualMachineLauncher} that manages the VM lifecycle.
     */
    @DataBoundConstructor
    public VirtualMachineSlave(String name, String nodeDescription, String remoteFS, String numExecutors,
                               Mode mode, String labelString, ComputerLauncher delegateLauncher,
                               RetentionStrategy retentionStrategy, List<? extends NodeProperty<?>> nodeProperties,
                               String datacenterDescription, String datacenterNode, Integer virtualMachineId,
                               String snapshotName, Boolean startVM, int startupWaitingPeriodSeconds,
                               RevertPolicy revertPolicy)
            throws Descriptor.FormException, IOException {
        super(name, nodeDescription, remoteFS, numExecutors, mode, labelString,
                new VirtualMachineLauncher(delegateLauncher, datacenterDescription, datacenterNode, virtualMachineId,
                        snapshotName, startVM, startupWaitingPeriodSeconds, revertPolicy),
                retentionStrategy, ofNullable(nodeProperties).orElse(emptyList()));
        this.datacenterDescription = datacenterDescription;
        this.datacenterNode = datacenterNode;
        this.virtualMachineId = virtualMachineId;
        this.snapshotName = snapshotName;
        this.startVM = startVM;
        this.startupWaitingPeriodSeconds = startupWaitingPeriodSeconds;
        this.revertPolicy = revertPolicy;
    }

    public String getDatacenterDescription() {
        return datacenterDescription;
    }

    public String getDatacenterNode() {
        return datacenterNode;
    }

    public Integer getVirtualMachineId() {
        return virtualMachineId;
    }

    public String getSnapshotName() {
        return snapshotName;
    }

    public Boolean getStartVM() {
        return startVM;
    }

    public int getStartupWaitingPeriodSeconds() {
        return startupWaitingPeriodSeconds;
    }

    public RevertPolicy getRevertPolicy() {
        return revertPolicy;
    }

    /** Returns the launcher the VM launcher delegates to (e.g. SSH). */
    public ComputerLauncher getDelegateLauncher() {
        return ((VirtualMachineLauncher) getLauncher()).getLauncher();
    }

    @Override
    public Computer createComputer() {
        //TODO: Not sure if this is needed, could be able to use this to reset to snapshots
        //TODO: as a computer is required for a job.
        return new VirtualMachineSlaveComputer(this);
    }

    /**
     * Descriptor providing the configuration UI: combo-box fillers for
     * datacenter, node, VM id and snapshot, plus a rollback test button.
     */
    @Extension
    public static final class DescriptorImpl extends SlaveDescriptor {
        private String datacenterDescription;
        private String datacenterNode;
        private Integer virtualMachineId;
        private String snapshotName;
        private Boolean startVM;
        private RevertPolicy revertPolicy;

        public DescriptorImpl() {
            load();
        }

        @Override
        public String getDisplayName() {
            return "Slave virtual machine running on a Proxmox datacenter.";
        }

        @Override
        public boolean isInstantiable() {
            return true;
        }

        /** Lists all configured Proxmox {@link Datacenter} clouds. */
        public ListBoxModel doFillDatacenterDescriptionItems() {
            ListBoxModel items = new ListBoxModel();
            items.add("[Select]", "");
            for (Cloud cloud : Jenkins.get().clouds) {
                if (cloud instanceof Datacenter) {
                    Datacenter datacenter = (Datacenter) cloud;
                    //TODO: Possibly add the `datacenterDescription` as the `displayName` and `value` (http://javadoc.jenkins-ci.org/hudson/util/ListBoxModel.html)
                    //Add by `display name` and then the `value`
                    items.add(datacenter.getHostname(), datacenter.getDatacenterDescription());
                }
            }
            return items;
        }

        /** Lists the nodes of the selected datacenter. */
        public ListBoxModel doFillDatacenterNodeItems(@QueryParameter("datacenterDescription") String datacenterDescription) {
            ListBoxModel items = new ListBoxModel();
            items.add("[Select]", "");
            Datacenter datacenter = getDatacenterByDescription(datacenterDescription);
            if (datacenter != null) {
                for (String node : datacenter.getNodes()) {
                    items.add(node);
                }
            }
            return items;
        }

        /** Lists the QEMU VMs (name -> id) available on the selected node. */
        public ListBoxModel doFillVirtualMachineIdItems(@QueryParameter("datacenterDescription") String datacenterDescription, @QueryParameter("datacenterNode") String datacenterNode) {
            ListBoxModel items = new ListBoxModel();
            items.add("[Select]", "");
            Datacenter datacenter = getDatacenterByDescription(datacenterDescription);
            if (datacenter != null) {
                HashMap<String, Integer> machines = datacenter.getQemuMachines(datacenterNode);
                for (Map.Entry<String, Integer> me : machines.entrySet()) {
                    // Key is already a String; only the Integer id needs conversion.
                    items.add(me.getKey(), me.getValue().toString());
                }
            }
            return items;
        }

        /** Lists the snapshots of the selected virtual machine. */
        public ListBoxModel doFillSnapshotNameItems(@QueryParameter("datacenterDescription") String datacenterDescription, @QueryParameter("datacenterNode") String datacenterNode,
                                                    @QueryParameter("virtualMachineId") String virtualMachineId) {
            ListBoxModel items = new ListBoxModel();
            items.add("[Select]", "");
            Datacenter datacenter = getDatacenterByDescription(datacenterDescription);
            if (datacenter != null && virtualMachineId != null && !virtualMachineId.isEmpty()) {
                for (String snapshot : datacenter.getQemuMachineSnapshots(datacenterNode, Integer.parseInt(virtualMachineId))) {
                    items.add(snapshot);
                }
            }
            return items;
        }

        public String getDatacenterDescription() {
            return datacenterDescription;
        }

        // NOTE(review): method name contains a typo ("Datecenter"); kept as-is
        // because Jelly views / callers may bind to it by name.
        public String getDatecenterNode() {
            return datacenterNode;
        }

        public Integer getVirtualMachineId() {
            return virtualMachineId;
        }

        public String getSnapshotName() {
            return snapshotName;
        }

        public Boolean getStartVM() {
            return startVM;
        }

        public RevertPolicy getRevertPolicy() {
            return revertPolicy;
        }

        /**
         * "Test rollback" form button: performs a real snapshot rollback via
         * the Proxmox API and reports the task status or the failure cause.
         */
        public FormValidation doTestRollback(
                @QueryParameter String datacenterDescription, @QueryParameter String datacenterNode,
                @QueryParameter Integer virtualMachineId, @QueryParameter String snapshotName) {
            Datacenter datacenter = getDatacenterByDescription(datacenterDescription);
            if (datacenter == null)
                return FormValidation.error("Datacenter not found!");
            Connector pveApi = datacenter.proxmoxInstance();
            try {
                String taskStatus = pveApi.rollbackQemuMachineSnapshot(datacenterNode, virtualMachineId, snapshotName);
                return FormValidation.ok("Returned: " + taskStatus);
            } catch (IOException e) {
                return FormValidation.error("IO: " + e.getMessage());
            } catch (LoginException e) {
                return FormValidation.error("Login Failed: " + e.getMessage());
            } catch (JSONException e) {
                return FormValidation.error("JSON: " + e.getMessage());
            }
        }

        /** Finds the {@link Datacenter} cloud matching the description, or null. */
        private Datacenter getDatacenterByDescription(String datacenterDescription) {
            if (datacenterDescription != null && !datacenterDescription.isEmpty()) {
                for (Cloud cloud : Jenkins.get().clouds) {
                    if (cloud instanceof Datacenter && ((Datacenter) cloud).getDatacenterDescription().equals(datacenterDescription)) {
                        return (Datacenter) cloud;
                    }
                }
            }
            return null;
        }
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/logging/v2/logging_config.proto
package com.google.logging.v2;
/**
* <pre>
* The parameters to `ListSinks`.
* </pre>
*
* Protobuf type {@code google.logging.v2.ListSinksRequest}
*/
public final class ListSinksRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.logging.v2.ListSinksRequest)
ListSinksRequestOrBuilder {
// Use ListSinksRequest.newBuilder() to construct.
private ListSinksRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListSinksRequest() {
parent_ = "";
pageToken_ = "";
pageSize_ = 0;
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
  /**
   * Parses a serialized {@code ListSinksRequest} from {@code input}.
   *
   * <p>Note the unusual lexical ordering: the {@code default} label appears
   * before the field cases. Java {@code switch} dispatch does not depend on
   * label order, so tags 10/18/24 still reach their cases; {@code default}
   * only handles genuinely unknown tags (which are skipped, i.e. dropped).
   *
   * @throws com.google.protobuf.InvalidProtocolBufferException if the stream
   *         is malformed; plain IOExceptions are wrapped, and the partially
   *         parsed message is attached via {@code setUnfinishedMessage}
   */
  private ListSinksRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of stream.
            done = true;
            break;
          default: {
            // Unknown field: skip it; unknown fields are not retained.
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 10: { // field 1 (parent), wire type 2 (length-delimited)
            java.lang.String s = input.readStringRequireUtf8();
            parent_ = s;
            break;
          }
          case 18: { // field 2 (page_token), wire type 2 (length-delimited)
            java.lang.String s = input.readStringRequireUtf8();
            pageToken_ = s;
            break;
          }
          case 24: { // field 3 (page_size), wire type 0 (varint)
            pageSize_ = input.readInt32();
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      // Runs on both success and failure paths.
      makeExtensionsImmutable();
    }
  }
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.logging.v2.LoggingConfigProto.internal_static_google_logging_v2_ListSinksRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.logging.v2.LoggingConfigProto.internal_static_google_logging_v2_ListSinksRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.logging.v2.ListSinksRequest.class, com.google.logging.v2.ListSinksRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
private volatile java.lang.Object parent_;
/**
* <pre>
* Required. The parent resource whose sinks are to be listed:
* "projects/[PROJECT_ID]"
* "organizations/[ORGANIZATION_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]"
* "folders/[FOLDER_ID]"
* </pre>
*
* <code>string parent = 1;</code>
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
* <pre>
* Required. The parent resource whose sinks are to be listed:
* "projects/[PROJECT_ID]"
* "organizations/[ORGANIZATION_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]"
* "folders/[FOLDER_ID]"
* </pre>
*
* <code>string parent = 1;</code>
*/
public com.google.protobuf.ByteString
getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 2;
private volatile java.lang.Object pageToken_;
/**
* <pre>
* Optional. If present, then retrieve the next batch of results from the
* preceding call to this method. `pageToken` must be the value of
* `nextPageToken` from the previous response. The values of other method
* parameters should be identical to those in the previous call.
* </pre>
*
* <code>string page_token = 2;</code>
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
* <pre>
* Optional. If present, then retrieve the next batch of results from the
* preceding call to this method. `pageToken` must be the value of
* `nextPageToken` from the previous response. The values of other method
* parameters should be identical to those in the previous call.
* </pre>
*
* <code>string page_token = 2;</code>
*/
public com.google.protobuf.ByteString
getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 3;
private int pageSize_;
/**
* <pre>
* Optional. The maximum number of results to return from this request.
* Non-positive values are ignored. The presence of `nextPageToken` in the
* response indicates that more results might be available.
* </pre>
*
* <code>int32 page_size = 3;</code>
*/
public int getPageSize() {
return pageSize_;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  /**
   * Serializes this message to {@code output}, writing fields in
   * field-number order and omitting any field holding its proto3 default
   * value ("" for strings, 0 for page_size).
   */
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!getParentBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!getPageTokenBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, pageToken_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(3, pageSize_);
    }
  }
  /**
   * Returns the serialized byte size, memoized in {@code memoizedSize}
   * (-1 is the "not yet computed" sentinel). Must mirror {@link #writeTo}:
   * fields at their default value contribute nothing.
   */
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!getParentBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!getPageTokenBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, pageToken_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream
        .computeInt32Size(3, pageSize_);
    }
    memoizedSize = size;
    return size;
  }
private static final long serialVersionUID = 0L;
  /**
   * Value equality over all three fields (parent, page_token, page_size).
   * Non-{@code ListSinksRequest} objects fall back to {@code super.equals}.
   * The {@code result = result && ...} chaining is the generated idiom and
   * short-circuits like nested ifs.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.logging.v2.ListSinksRequest)) {
      return super.equals(obj);
    }
    com.google.logging.v2.ListSinksRequest other = (com.google.logging.v2.ListSinksRequest) obj;
    boolean result = true;
    result = result && getParent()
        .equals(other.getParent());
    result = result && getPageToken()
        .equals(other.getPageToken());
    result = result && (getPageSize()
        == other.getPageSize());
    return result;
  }
  /**
   * Hash over the descriptor and every field, memoized in
   * {@code memoizedHashCode}. 0 doubles as the "not yet computed" sentinel,
   * so a message whose true hash is 0 is recomputed on each call (harmless).
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.logging.v2.ListSinksRequest parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.logging.v2.ListSinksRequest parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.logging.v2.ListSinksRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.logging.v2.ListSinksRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.logging.v2.ListSinksRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.logging.v2.ListSinksRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.logging.v2.ListSinksRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.logging.v2.ListSinksRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.logging.v2.ListSinksRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.logging.v2.ListSinksRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.logging.v2.ListSinksRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.logging.v2.ListSinksRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.logging.v2.ListSinksRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* The parameters to `ListSinks`.
* </pre>
*
* Protobuf type {@code google.logging.v2.ListSinksRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.logging.v2.ListSinksRequest)
com.google.logging.v2.ListSinksRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.logging.v2.LoggingConfigProto.internal_static_google_logging_v2_ListSinksRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.logging.v2.LoggingConfigProto.internal_static_google_logging_v2_ListSinksRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.logging.v2.ListSinksRequest.class, com.google.logging.v2.ListSinksRequest.Builder.class);
}
// Construct using com.google.logging.v2.ListSinksRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
parent_ = "";
pageToken_ = "";
pageSize_ = 0;
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.logging.v2.LoggingConfigProto.internal_static_google_logging_v2_ListSinksRequest_descriptor;
}
public com.google.logging.v2.ListSinksRequest getDefaultInstanceForType() {
return com.google.logging.v2.ListSinksRequest.getDefaultInstance();
}
public com.google.logging.v2.ListSinksRequest build() {
com.google.logging.v2.ListSinksRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.google.logging.v2.ListSinksRequest buildPartial() {
com.google.logging.v2.ListSinksRequest result = new com.google.logging.v2.ListSinksRequest(this);
result.parent_ = parent_;
result.pageToken_ = pageToken_;
result.pageSize_ = pageSize_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.logging.v2.ListSinksRequest) {
return mergeFrom((com.google.logging.v2.ListSinksRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
    /**
     * Merges {@code other} into this builder, field by field.
     *
     * <p>Proto3 semantics: a string field is copied only when non-empty in
     * {@code other} (the default "" is not considered set); likewise
     * {@code page_size} is copied only when non-zero. The final
     * {@code onChanged()} fires unconditionally (generated idiom), even if
     * nothing above changed.
     */
    public Builder mergeFrom(com.google.logging.v2.ListSinksRequest other) {
      if (other == com.google.logging.v2.ListSinksRequest.getDefaultInstance()) return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        onChanged();
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      onChanged();
      return this;
    }
public final boolean isInitialized() {
return true;
}
    /**
     * Parses a message from {@code input} and merges it into this builder.
     *
     * <p>On a parse failure the partially-read message (recovered from the
     * exception) is still merged in the {@code finally} block before the
     * exception propagates, so fields read before the failure are kept.
     */
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.logging.v2.ListSinksRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.logging.v2.ListSinksRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
private java.lang.Object parent_ = "";
/**
* <pre>
* Required. The parent resource whose sinks are to be listed:
* "projects/[PROJECT_ID]"
* "organizations/[ORGANIZATION_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]"
* "folders/[FOLDER_ID]"
* </pre>
*
* <code>string parent = 1;</code>
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The parent resource whose sinks are to be listed:
* "projects/[PROJECT_ID]"
* "organizations/[ORGANIZATION_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]"
* "folders/[FOLDER_ID]"
* </pre>
*
* <code>string parent = 1;</code>
*/
public com.google.protobuf.ByteString
getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The parent resource whose sinks are to be listed:
* "projects/[PROJECT_ID]"
* "organizations/[ORGANIZATION_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]"
* "folders/[FOLDER_ID]"
* </pre>
*
* <code>string parent = 1;</code>
*/
public Builder setParent(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
onChanged();
return this;
}
/**
* <pre>
* Required. The parent resource whose sinks are to be listed:
* "projects/[PROJECT_ID]"
* "organizations/[ORGANIZATION_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]"
* "folders/[FOLDER_ID]"
* </pre>
*
* <code>string parent = 1;</code>
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
onChanged();
return this;
}
/**
* <pre>
* Required. The parent resource whose sinks are to be listed:
* "projects/[PROJECT_ID]"
* "organizations/[ORGANIZATION_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]"
* "folders/[FOLDER_ID]"
* </pre>
*
* <code>string parent = 1;</code>
*/
public Builder setParentBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
* <pre>
* Optional. If present, then retrieve the next batch of results from the
* preceding call to this method. `pageToken` must be the value of
* `nextPageToken` from the previous response. The values of other method
* parameters should be identical to those in the previous call.
* </pre>
*
* <code>string page_token = 2;</code>
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Optional. If present, then retrieve the next batch of results from the
* preceding call to this method. `pageToken` must be the value of
* `nextPageToken` from the previous response. The values of other method
* parameters should be identical to those in the previous call.
* </pre>
*
* <code>string page_token = 2;</code>
*/
public com.google.protobuf.ByteString
getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Optional. If present, then retrieve the next batch of results from the
* preceding call to this method. `pageToken` must be the value of
* `nextPageToken` from the previous response. The values of other method
* parameters should be identical to those in the previous call.
* </pre>
*
* <code>string page_token = 2;</code>
*/
public Builder setPageToken(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
onChanged();
return this;
}
/**
* <pre>
* Optional. If present, then retrieve the next batch of results from the
* preceding call to this method. `pageToken` must be the value of
* `nextPageToken` from the previous response. The values of other method
* parameters should be identical to those in the previous call.
* </pre>
*
* <code>string page_token = 2;</code>
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
onChanged();
return this;
}
/**
* <pre>
* Optional. If present, then retrieve the next batch of results from the
* preceding call to this method. `pageToken` must be the value of
* `nextPageToken` from the previous response. The values of other method
* parameters should be identical to those in the previous call.
* </pre>
*
* <code>string page_token = 2;</code>
*/
public Builder setPageTokenBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
onChanged();
return this;
}
// Backing field for page_size; proto3 default is 0 ("not set").
private int pageSize_ ;
/**
 * <pre>
 * Optional. The maximum number of results to return from this request.
 * Non-positive values are ignored. The presence of `nextPageToken` in the
 * response indicates that more results might be available.
 * </pre>
 *
 * <code>int32 page_size = 3;</code>
 *
 * @return the current page size (0 when unset)
 */
public int getPageSize() {
  return pageSize_;
}
/**
 * <pre>
 * Optional. The maximum number of results to return from this request.
 * Non-positive values are ignored. The presence of `nextPageToken` in the
 * response indicates that more results might be available.
 * </pre>
 *
 * <code>int32 page_size = 3;</code>
 *
 * @param value the page size to set
 * @return this builder, for call chaining
 */
public Builder setPageSize(int value) {
  pageSize_ = value;
  onChanged();
  return this;
}
/**
 * <pre>
 * Optional. The maximum number of results to return from this request.
 * Non-positive values are ignored. The presence of `nextPageToken` in the
 * response indicates that more results might be available.
 * </pre>
 *
 * <code>int32 page_size = 3;</code>
 *
 * @return this builder, for call chaining
 */
public Builder clearPageSize() {
  // Reset to the proto3 int32 default.
  pageSize_ = 0;
  onChanged();
  return this;
}
// NOTE: as generated here, both methods ignore the supplied UnknownFieldSet
// and return the builder unchanged — unknown fields are dropped rather than
// stored or merged.
public final Builder setUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return this;
}
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return this;
}
/**
 * Sets the {@code parent} field from a typed resource name.
 * A null value clears the field by setting it to the empty string.
 *
 * @param value the parent resource name, or null to clear
 * @return this builder, for call chaining
 */
public final Builder setParentWithParentNameOneof(com.google.logging.v2.ParentNameOneof value) {
  if (value == null) {
    return setParent("");
  }
  return setParent(value.toString());
}
/**
 * Returns the {@code parent} field parsed as a typed resource name,
 * or {@code null} when the field is empty.
 *
 * @return the parsed parent resource name, or null if unset
 */
public final com.google.logging.v2.ParentNameOneof getParentAsParentNameOneof() {
  java.lang.String str = getParent();
  if (str.isEmpty()) {
    return null;
  }
  return com.google.logging.v2.ParentNameOneof.parse(str);
}
// @@protoc_insertion_point(builder_scope:google.logging.v2.ListSinksRequest)
}
/**
 * Returns the {@code parent} field parsed as a typed resource name,
 * or {@code null} when the field is empty.
 * (Message-level counterpart of the builder helper with the same name.)
 *
 * @return the parsed parent resource name, or null if unset
 */
public final com.google.logging.v2.ParentNameOneof getParentAsParentNameOneof() {
  java.lang.String str = getParent();
  if (str.isEmpty()) {
    return null;
  }
  return com.google.logging.v2.ParentNameOneof.parse(str);
}
// @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksRequest)
// Shared immutable default instance and parser plumbing for ListSinksRequest.
private static final com.google.logging.v2.ListSinksRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.logging.v2.ListSinksRequest();
}
/** Returns the shared immutable default instance of {@code ListSinksRequest}. */
public static com.google.logging.v2.ListSinksRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListSinksRequest>
    PARSER = new com.google.protobuf.AbstractParser<ListSinksRequest>() {
  public ListSinksRequest parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new ListSinksRequest(input, extensionRegistry);
  }
};
/** Returns the static parser for {@code ListSinksRequest} messages. */
public static com.google.protobuf.Parser<ListSinksRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListSinksRequest> getParserForType() {
  return PARSER;
}
/** Returns the same object as {@link #getDefaultInstance()}. */
public com.google.logging.v2.ListSinksRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
package org.hive2hive.core.processes.share.read;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Random;
import java.util.Set;
import org.apache.commons.io.FileUtils;
import org.hive2hive.core.api.interfaces.IFileConfiguration;
import org.hive2hive.core.exceptions.GetFailedException;
import org.hive2hive.core.exceptions.NoPeerConnectionException;
import org.hive2hive.core.exceptions.NoSessionException;
import org.hive2hive.core.model.FileIndex;
import org.hive2hive.core.model.PermissionType;
import org.hive2hive.core.model.versioned.UserProfile;
import org.hive2hive.core.processes.ProcessFactory;
import org.hive2hive.core.processes.share.BaseShareReadWriteTest;
import org.hive2hive.core.security.HashUtil;
import org.hive2hive.core.utils.FileTestUtil;
import org.hive2hive.core.utils.H2HWaiter;
import org.hive2hive.core.utils.TestExecutionUtil;
import org.hive2hive.core.utils.TestFileConfiguration;
import org.hive2hive.core.utils.UseCaseTestUtil;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* A folder is shared with {@link PermissionType#READ} permission. Tests if updates get synchronized among
* two sharing users.
*
* @author Seppi
* @author Nico
*/
public class SharedFolderWithReadPermissionUpdateTest extends BaseShareReadWriteTest {

    /** Subfolder inside the shared folder, as seen by user A (the owner). */
    private static File subFolderA;
    /** The same subfolder, as synchronized to user B (read-only). */
    private static File subFolderB;
    private static IFileConfiguration fileConfig;

    @BeforeClass
    public static void printIdentifier() throws Exception {
        testClass = SharedFolderWithReadPermissionUpdateTest.class;
        beforeClass();
        setupNetwork();
    }

    /**
     * Shares a folder from A to B with READ permission, uploads a subfolder
     * from A and waits until it appears at B.
     */
    @Before
    public void initTest() throws Exception {
        setupShares(PermissionType.READ);
        subFolderA = new File(sharedFolderA, "subfolder");
        subFolderA.mkdir();
        logger.info("Upload a new subfolder '{}'.", rootA.toPath().relativize(subFolderA.toPath()).toString());
        UseCaseTestUtil.uploadNewFile(nodeA, subFolderA);
        subFolderB = new File(sharedFolderB, subFolderA.getName());
        waitTillSynchronizedAdding(subFolderB);
        fileConfig = new TestFileConfiguration();
    }

    /** A uploads a file, A updates it; the update must reach B. */
    @Test
    public void testSynchronizeAddFileFromAUpdateAtA() throws NoSessionException, NoPeerConnectionException, IOException,
            IllegalArgumentException, GetFailedException {
        File fileFromAAtA = FileTestUtil.createFileRandomContent("file1FromA", new Random().nextInt(MAX_NUM_CHUNKS) + 1,
                sharedFolderA);
        logger.info("Upload a new file '{}' from A.", fileFromAAtA.toString());
        UseCaseTestUtil.uploadNewFile(nodeA, fileFromAAtA);
        logger.info("Wait till new file '{}' gets synchronized with B.", fileFromAAtA.toString());
        File fileFromAAtB = new File(sharedFolderB, fileFromAAtA.getName());
        waitTillSynchronizedAdding(fileFromAAtB);
        logger.info("Update file '{}' at A.", fileFromAAtA.toString());
        long lastUpdated = fileFromAAtA.lastModified();
        FileUtils.write(fileFromAAtA, randomString(), false);
        byte[] newHash = HashUtil.hash(fileFromAAtA);
        UseCaseTestUtil.uploadNewVersion(nodeA, fileFromAAtA);
        logger.info("Wait till update of file '{}' gets synchronized with B.", fileFromAAtA.toString());
        waitTillSynchronizedUpdating(fileFromAAtB, lastUpdated);
        compareFiles(fileFromAAtA, fileFromAAtB);
        checkFileIndex(fileFromAAtA, fileFromAAtB, newHash);
    }

    /** A uploads a file; B (read-only) must NOT be able to upload an update. */
    @Test
    public void testSynchronizeAddFileFromATryToUpdateAtB() throws NoSessionException, NoPeerConnectionException,
            IOException, IllegalArgumentException, GetFailedException {
        File fileFromAAtA = FileTestUtil.createFileRandomContent("file2FromA", new Random().nextInt(MAX_NUM_CHUNKS) + 1,
                sharedFolderA);
        logger.info("Upload a new file '{}' from A.", fileFromAAtA.toString());
        UseCaseTestUtil.uploadNewFile(nodeA, fileFromAAtA);
        logger.info("Wait till new file '{}' gets synchronized with B.", fileFromAAtA.toString());
        File fileFromAAtB = new File(sharedFolderB, fileFromAAtA.getName());
        waitTillSynchronizedAdding(fileFromAAtB);
        logger.info("Try to update file '{}' at B.", fileFromAAtA.toString());
        FileUtils.write(fileFromAAtB, randomString(), false);
        // The update process at B must fail because B only has READ permission.
        TestExecutionUtil.executeProcessTillFailed(ProcessFactory.instance().createUpdateFileProcess(fileFromAAtB, nodeB,
                fileConfig));
        // The index hash must still match A's (unchanged) content.
        checkFileIndex(fileFromAAtA, fileFromAAtB, HashUtil.hash(fileFromAAtA));
    }

    /** Same as {@link #testSynchronizeAddFileFromAUpdateAtA()} but inside the subfolder. */
    @Test
    public void testSynchronizeAddSubfileFromAUpdateAtA() throws NoSessionException, NoPeerConnectionException, IOException,
            IllegalArgumentException, GetFailedException {
        File fileFromAAtA = FileTestUtil.createFileRandomContent("subfile1FromA", new Random().nextInt(MAX_NUM_CHUNKS) + 1,
                subFolderA);
        logger.info("Upload a new file '{}' from A.", fileFromAAtA.toString());
        UseCaseTestUtil.uploadNewFile(nodeA, fileFromAAtA);
        logger.info("Wait till new file '{}' gets synchronized with B.", fileFromAAtA.toString());
        File fileFromAAtB = new File(subFolderB, fileFromAAtA.getName());
        waitTillSynchronizedAdding(fileFromAAtB);
        logger.info("Update file '{}' at A.", fileFromAAtA.toString());
        long lastUpdated = fileFromAAtA.lastModified();
        FileUtils.write(fileFromAAtA, randomString(), false);
        byte[] newHash = HashUtil.hash(fileFromAAtA);
        UseCaseTestUtil.uploadNewVersion(nodeA, fileFromAAtA);
        logger.info("Wait till update of file '{}' gets synchronized with B.", fileFromAAtA.toString());
        waitTillSynchronizedUpdating(fileFromAAtB, lastUpdated);
        compareFiles(fileFromAAtA, fileFromAAtB);
        checkFileIndex(fileFromAAtA, fileFromAAtB, newHash);
    }

    /** Same as {@link #testSynchronizeAddFileFromATryToUpdateAtB()} but inside the subfolder. */
    @Test
    public void testSynchronizeAddSubfileFromATryToUpdateAtB() throws NoSessionException, NoPeerConnectionException,
            IOException, IllegalArgumentException, GetFailedException {
        File fileFromAAtA = FileTestUtil.createFileRandomContent("subfile2FromA", new Random().nextInt(MAX_NUM_CHUNKS) + 1,
                subFolderA);
        logger.info("Upload a new file '{}' from A.", fileFromAAtA.toString());
        UseCaseTestUtil.uploadNewFile(nodeA, fileFromAAtA);
        logger.info("Wait till new file '{}' gets synchronized with B.", fileFromAAtA.toString());
        File fileFromAAtB = new File(subFolderB, fileFromAAtA.getName());
        waitTillSynchronizedAdding(fileFromAAtB);
        logger.info("Try to update file '{}' at B.", fileFromAAtA.toString());
        FileUtils.write(fileFromAAtB, randomString(), false);
        TestExecutionUtil.executeProcessTillFailed(ProcessFactory.instance().createUpdateFileProcess(fileFromAAtB, nodeB,
                fileConfig));
        checkFileIndex(fileFromAAtA, fileFromAAtB, HashUtil.hash(fileFromAAtA));
    }

    /**
     * Blocks until the given file appears on disk (add synchronization),
     * polling once per second (at most 40 ticks of the waiter).
     *
     * @param synchronizingFile the file that is expected to appear
     */
    private static void waitTillSynchronizedAdding(File synchronizingFile) {
        H2HWaiter waiter = new H2HWaiter(40);
        do {
            waiter.tickASecond();
        } while (!synchronizingFile.exists());
    }

    /**
     * Blocks until the given file's last-modified timestamp changes from the
     * supplied value (update synchronization), polling once per second.
     *
     * @param updatingFile the file that is expected to be updated
     * @param lastModified the timestamp the file had before the update
     */
    private static void waitTillSynchronizedUpdating(File updatingFile, long lastModified) {
        H2HWaiter waiter = new H2HWaiter(40);
        do {
            waiter.tickASecond();
        } while (updatingFile.lastModified() == lastModified);
    }

    /**
     * Verifies the file index entries of both users: the index is a shared
     * file, A has write access and share-specific protection keys, B has
     * neither, and both see the expected content hash and user list.
     */
    private void checkFileIndex(File fileA, File fileB, byte[] hash) throws GetFailedException, NoSessionException {
        UserProfile userProfileA = nodeA.getSession().getProfileManager().readUserProfile();
        FileIndex indexA = (FileIndex) userProfileA.getFileByPath(fileA, nodeA.getSession().getRootFile());
        UserProfile userProfileB = nodeB.getSession().getProfileManager().readUserProfile();
        FileIndex indexB = (FileIndex) userProfileB.getFileByPath(fileB, nodeB.getSession().getRootFile());
        // check if index is file
        Assert.assertTrue(indexA.isFile());
        Assert.assertTrue(indexB.isFile());
        // check if isShared flag is set
        Assert.assertTrue(indexA.isShared());
        Assert.assertTrue(indexB.isShared());
        // check write access
        Assert.assertTrue(indexA.canWrite());
        // user B isn't allowed to write
        Assert.assertFalse(indexB.canWrite());
        // check if hash is the same
        Assert.assertTrue(Arrays.equals(indexA.getHash(), hash));
        Assert.assertTrue(Arrays.equals(indexB.getHash(), hash));
        // check if userA's content protection keys are other ones
        Assert.assertFalse(indexA.getProtectionKeys().getPrivate().equals(userProfileA.getProtectionKeys().getPrivate()));
        Assert.assertFalse(indexA.getProtectionKeys().getPublic().equals(userProfileA.getProtectionKeys().getPublic()));
        // check if user B has no content protection keys
        Assert.assertNull(indexB.getProtectionKeys());
        // check user permissions at A
        Set<String> usersA = indexA.getCalculatedUserList();
        Assert.assertEquals(2, usersA.size());
        Assert.assertTrue(usersA.contains(userA.getUserId()));
        Assert.assertTrue(usersA.contains(userB.getUserId()));
        // check user permissions at B
        Set<String> usersB = indexB.getCalculatedUserList();
        Assert.assertEquals(2, usersB.size());
        Assert.assertTrue(usersB.contains(userA.getUserId()));
        Assert.assertTrue(usersB.contains(userB.getUserId()));
    }
}
| |
package com.bzh.dytt.data.entity;
import android.arch.persistence.room.ColumnInfo;
import android.arch.persistence.room.Entity;
import android.support.annotation.NonNull;
import java.util.List;
import java.util.Objects;
/**
 * Room entity holding the parsed detail page of a single video/movie.
 *
 * <p>The table's primary key is the pair (category, link). Most fields are
 * plain strings scraped from the detail page; {@code mDirector} and
 * {@code mLeadingRole} are lists (persisted via a project type converter).
 */
@Entity(
        tableName = "video_detail",
        primaryKeys = {"category", "link"}
)
public class VideoDetail {

    @NonNull
    @ColumnInfo(name = "link")
    private String mDetailLink;

    @ColumnInfo(name = "name")
    private String mName;

    @ColumnInfo(name = "years")
    private String mYears;

    @ColumnInfo(name = "country")
    private String mCountry;

    @ColumnInfo(name = "type")
    private String mType;

    @ColumnInfo(name = "imdb_grade")
    private String mIMDBGrade;

    // NOTE(review): column name carries a historical typo ("userds" instead of
    // "users"); renaming it would change the on-disk schema and require a Room
    // migration, so it is kept as-is.
    @ColumnInfo(name = "imdb_grade_userds")
    private String mIMDBGradeUsers;

    @ColumnInfo(name = "douban_grade")
    private String mDoubanGrade;

    @ColumnInfo(name = "douban_grade_users")
    private String mDoubanGradeUsers;

    @ColumnInfo(name = "file_size")
    private String mFileSize;

    @ColumnInfo(name = "duration")
    private String mDuration;

    @ColumnInfo(name = "director")
    private List<String> mDirector;

    @ColumnInfo(name = "leading_role")
    private List<String> mLeadingRole;

    @ColumnInfo(name = "description")
    private String mDescription;

    @ColumnInfo(name = "cover_url")
    private String mCoverUrl;

    @ColumnInfo(name = "download_link")
    private String mDownloadLink;

    @ColumnInfo(name = "show_time")
    private String mShowTime;

    @ColumnInfo(name = "publish_time")
    private String mPublishTime;

    @ColumnInfo(name = "translation_name")
    private String mTranslationName;

    // True once the detail page has been successfully parsed (see updateValue).
    @ColumnInfo(name = "is_valid_video_item")
    private boolean mValidVideoItem;

    @NonNull
    @ColumnInfo(name = "category")
    private MovieCategory mCategory;

    // Serial number used for ordering within a category.
    @ColumnInfo(name = "serial_number")
    private int mSN;

    @ColumnInfo(name = "query")
    private String mQuery;

    public String getQuery() {
        return mQuery;
    }

    public void setQuery(String query) {
        mQuery = query;
    }

    public int getSN() {
        return mSN;
    }

    public void setSN(int SN) {
        mSN = SN;
    }

    /** Returns the primary name, falling back to the translated name when absent. */
    public String getName() {
        if (mName == null) {
            return mTranslationName;
        }
        return mName;
    }

    public void setName(@NonNull String name) {
        this.mName = name;
    }

    public String getYears() {
        return mYears;
    }

    public void setYears(@NonNull String years) {
        mYears = years;
    }

    public String getCountry() {
        return mCountry;
    }

    public void setCountry(@NonNull String country) {
        mCountry = country;
    }

    public String getType() {
        return mType;
    }

    public void setType(@NonNull String type) {
        mType = type;
    }

    public String getIMDBGrade() {
        return mIMDBGrade;
    }

    public void setIMDBGrade(@NonNull String IMDBGrade) {
        mIMDBGrade = IMDBGrade;
    }

    public String getFileSize() {
        return mFileSize;
    }

    public void setFileSize(@NonNull String fileSize) {
        mFileSize = fileSize;
    }

    public String getDuration() {
        return mDuration;
    }

    public void setDuration(@NonNull String duration) {
        mDuration = duration;
    }

    public List<String> getDirector() {
        return mDirector;
    }

    public void setDirector(@NonNull List<String> director) {
        mDirector = director;
    }

    public List<String> getLeadingRole() {
        return mLeadingRole;
    }

    public void setLeadingRole(@NonNull List<String> leadingRole) {
        mLeadingRole = leadingRole;
    }

    public String getDescription() {
        return mDescription;
    }

    public void setDescription(@NonNull String description) {
        mDescription = description;
    }

    @NonNull
    public String getDetailLink() {
        return mDetailLink;
    }

    public void setDetailLink(@NonNull String detailLink) {
        mDetailLink = detailLink;
    }

    public String getCoverUrl() {
        return mCoverUrl;
    }

    public void setCoverUrl(@NonNull String coverUrl) {
        mCoverUrl = coverUrl;
    }

    public String getDownloadLink() {
        return mDownloadLink;
    }

    public void setDownloadLink(@NonNull String downloadLink) {
        mDownloadLink = downloadLink;
    }

    public String getShowTime() {
        return mShowTime;
    }

    public void setShowTime(@NonNull String showTime) {
        mShowTime = showTime;
    }

    public String getPublishTime() {
        return mPublishTime;
    }

    public void setPublishTime(@NonNull String publishTime) {
        mPublishTime = publishTime;
    }

    public String getDoubanGrade() {
        return mDoubanGrade;
    }

    public void setDoubanGrade(@NonNull String doubanGrade) {
        mDoubanGrade = doubanGrade;
    }

    public String getDoubanGradeUsers() {
        return mDoubanGradeUsers;
    }

    public void setDoubanGradeUsers(String doubanGradeUsers) {
        mDoubanGradeUsers = doubanGradeUsers;
    }

    public String getIMDBGradeUsers() {
        return mIMDBGradeUsers;
    }

    public void setIMDBGradeUsers(String IMDBGradeUsers) {
        mIMDBGradeUsers = IMDBGradeUsers;
    }

    /** Returns the translated name, falling back to the primary name when absent. */
    public String getTranslationName() {
        if (mTranslationName == null)
            return mName;
        return mTranslationName;
    }

    public void setTranslationName(String translationName) {
        mTranslationName = translationName;
    }

    public boolean isValidVideoItem() {
        return mValidVideoItem;
    }

    public void setValidVideoItem(boolean validVideoItem) {
        mValidVideoItem = validVideoItem;
    }

    public MovieCategory getCategory() {
        return mCategory;
    }

    public void setCategory(MovieCategory category) {
        mCategory = category;
    }

    // NOTE(review): equality is based on (detail link, serial number), which
    // differs from the table's primary key (category, link) — confirm this is
    // intended before relying on it for de-duplication.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        VideoDetail that = (VideoDetail) o;
        return mSN == that.mSN &&
                Objects.equals(mDetailLink, that.mDetailLink);
    }

    @Override
    public int hashCode() {
        return Objects.hash(mDetailLink, mSN);
    }

    @Override
    public String toString() {
        return "VideoDetail{" +
                "mDetailLink='" + mDetailLink + '\'' +
                ", mName='" + mName + '\'' +
                '}';
    }

    /**
     * Copies the identifying fields from another detail instance and marks
     * this item as valid.
     *
     * @param videoDetail the source of query, SN, link and category
     * @return this instance, for call chaining
     */
    public VideoDetail updateValue(VideoDetail videoDetail) {
        setQuery(videoDetail.getQuery());
        setSN(videoDetail.getSN());
        setDetailLink(videoDetail.getDetailLink());
        setCategory(videoDetail.getCategory());
        // (fixed: setQuery was previously invoked twice with the same value)
        setValidVideoItem(true);
        return this;
    }

    /**
     * Populates the identifying fields from a category listing entry.
     *
     * @param category the listing entry to copy from
     * @return this instance, for call chaining
     */
    public VideoDetail updateValue(CategoryMap category) {
        setDetailLink(category.getLink());
        setSN(category.getSN());
        setCategory(category.getCategory());
        setQuery(category.getQuery());
        setName(category.getName());
        setPublishTime(category.getTime());
        return this;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.internal.jaxb.referencing;
import java.util.Map;
import java.util.List;
import java.util.LinkedHashMap;
import javax.xml.bind.annotation.XmlElement;
import org.opengis.parameter.ParameterDescriptor;
import org.opengis.parameter.ParameterDescriptorGroup;
import org.opengis.parameter.GeneralParameterDescriptor;
import org.apache.sis.internal.jaxb.gco.PropertyType;
import org.apache.sis.internal.referencing.Resources;
import org.apache.sis.parameter.DefaultParameterDescriptorGroup;
import org.apache.sis.util.CorruptedObjectException;
import org.apache.sis.util.collection.Containers;
/**
* JAXB adapter mapping implementing class to the GeoAPI interface. See
* package documentation for more information about JAXB and interface.
*
* @author Martin Desruisseaux (Geomatys)
* @version 0.6
* @since 0.6
* @module
*/
public final class CC_OperationParameterGroup extends PropertyType<CC_OperationParameterGroup,ParameterDescriptorGroup> {
    /**
     * Empty constructor for JAXB only.
     */
    public CC_OperationParameterGroup() {
    }

    /**
     * Returns the GeoAPI interface which is bound by this adapter.
     * This method is indirectly invoked by the private constructor
     * below, so it shall not depend on the state of this object.
     *
     * @return {@code ParameterDescriptorGroup.class}
     */
    @Override
    protected Class<ParameterDescriptorGroup> getBoundType() {
        return ParameterDescriptorGroup.class;
    }

    /**
     * Constructor for the {@link #wrap} method only.
     */
    private CC_OperationParameterGroup(final ParameterDescriptorGroup parameter) {
        super(parameter);
    }

    /**
     * Invoked by {@link PropertyType} at marshalling time for wrapping the given value
     * in a {@code <gml:OperationParameterGroup>} XML element.
     *
     * @param parameter the element to marshal.
     * @return a {@code PropertyType} wrapping the given the element.
     */
    @Override
    protected CC_OperationParameterGroup wrap(final ParameterDescriptorGroup parameter) {
        return new CC_OperationParameterGroup(parameter);
    }

    /**
     * Invoked by JAXB at marshalling time for getting the actual element to write
     * inside the {@code <gml:OperationParameter>} XML element.
     * This is the value or a copy of the value given in argument to the {@code wrap} method.
     *
     * @return the element to be marshalled.
     */
    @XmlElement(name = "OperationParameterGroup")
    public DefaultParameterDescriptorGroup getElement() {
        return DefaultParameterDescriptorGroup.castOrCopy(metadata);
    }

    /**
     * Invoked by JAXB at unmarshalling time for storing the result temporarily.
     *
     * @param parameter the unmarshalled element.
     */
    public void setElement(final DefaultParameterDescriptorGroup parameter) {
        metadata = parameter;
    }

    /**
     * Invoked by {@link DefaultParameterDescriptorGroup#setDescriptors(GeneralParameterDescriptor[])}
     * for merging into a single set the descriptors which are repeated twice in a GML document.
     *
     * <p>The {@code descriptors} argument gives the descriptors listed explicitly inside a
     * {@code <gml:OperationParameterGroup>} or {@code <gml:OperationMethod>} element. Those
     * descriptors are said "incomplete" (from SIS point of view) because they are missing the
     * {@link ParameterDescriptor#getValueClass()} property, which does not exist in GML but
     * is mandatory for us. However an exception to this "incompleteness" happen when SIS has
     * been able to match the {@code <gml:OperationMethod>} parent to one of the pre-defined
     * operations in the {@link org.apache.sis.internal.referencing.provider} package.</p>
     *
     * <p>The {@code fromValues} argument gives the descriptors declared in each {@code <gml:ParameterValue>}
     * instances of a {@code <gml:ParameterValueGroup>} or {@code <gml:AbstractSingleOperation>} element.
     * Contrarily to the {@code descriptors} argument, the {@code fromValues} instances should have non-null
     * {@link ParameterDescriptor#getValueClass()} property inferred by SIS from the parameter value.</p>
     *
     * <p>So the preferred descriptors from more complete to less complete are:</p>
     * <ol>
     *   <li>{@code descriptors} if and only if they contain pre-defined parameters inferred by SIS from the {@code <gml:OperationMethod>} name.</li>
     *   <li>{@code fromValues}, which contain the descriptors declared in the {@code <gml:ParameterValue>} instances.</li>
     *   <li>{@code descriptors}, which contain the descriptor listed in {@code <gml:OperationParameterGroup>} or {@code <gml:OperationMethod>}.</li>
     * </ol>
     *
     * <div class="note"><b>Note:</b>
     * this code is defined in this {@code CC_OperationParameterGroup} class instead of in the
     * {@link DefaultParameterDescriptorGroup} class in the hope to reduce the amount of code
     * processed by the JVM in the common case where JAXB (un)marshalling is not needed.</div>
     *
     * @param  descriptors  the descriptors declared in the {@code ParameterDescriptorGroup}.
     * @param  fromValues   the descriptors declared in the {@code ParameterValue} instances.
     *                      They are said "valid" because they contain the mandatory {@code valueClass} property.
     * @param  replacements an {@code IdentityHashMap} where to store the replacements that the caller needs to
     *                      apply in the {@code GeneralParameterValue} instances.
     * @return a sequence containing the merged set of parameter descriptors.
     *
     * @see <a href="http://issues.apache.org/jira/browse/SIS-290">SIS-290</a>
     */
    public static GeneralParameterDescriptor[] merge(
            final List<GeneralParameterDescriptor> descriptors,
            final GeneralParameterDescriptor[] fromValues,
            final Map<GeneralParameterDescriptor,GeneralParameterDescriptor> replacements)
    {
        if (descriptors.isEmpty()) {
            // Nothing declared in the group: the value descriptors are already the complete set.
            return fromValues;
        }
        // LinkedHashMap keyed by parameter name; insertion order defines the order of the result.
        final Map<String,GeneralParameterDescriptor> union =
                new LinkedHashMap<>(Containers.hashMapCapacity(descriptors.size()));
        /*
         * Collect the descriptors declared explicitly in the ParameterDescriptorGroup. We should never have
         * two descriptors of the same name since the DefaultParameterDescriptorGroup constructor checked for
         * name ambiguity. If a name collision is nevertheless detected, this would mean that a descriptor's
         * name mutated.
         */
        for (final GeneralParameterDescriptor p : descriptors) {
            final String name = p.getName().getCode();
            if (union.put(name, p) != null) {
                throw new CorruptedObjectException(name);
            }
        }
        /*
         * Verify if any descriptors found in the ParameterValue instances could replace the descriptors in the group.
         * We give precedence to the descriptors having a non-null 'valueClass' property, which normally appear in the
         * 'fromValues' array.
         */
        for (final GeneralParameterDescriptor valueDescriptor : fromValues) {
            final String name = valueDescriptor.getName().getCode();
            GeneralParameterDescriptor complete = valueDescriptor;
            GeneralParameterDescriptor previous = union.put(name, complete);
            if (previous != null) {
                if (previous instanceof ParameterDescriptor<?>) {
                    // Both descriptors must be of the same kind (single parameter vs. group).
                    verifyEquivalence(name, complete instanceof ParameterDescriptor<?>);
                    final Class<?> valueClass = ((ParameterDescriptor<?>) previous).getValueClass();
                    if (valueClass != null) {
                        /*
                         * This may happen if the 'descriptors' argument contain the parameters of a pre-defined
                         * method from the 'org.apache.sis.internal.referencing.provider' package instead of a
                         * descriptor from the GML file. In such case, presume that 'previous' is actually more
                         * complete than 'complete'.
                         *
                         * Note that 'r' should never be null unless JAXB unmarshalled the elements in reverse
                         * order (e.g. <gml:ParameterValue> before <gml:OperationMethod>). Since this behavior
                         * may depend on JAXB implementation, we are better to check for such case.
                         */
                        final Class<?> r = ((ParameterDescriptor<?>) complete).getValueClass();
                        if (r != null) {
                            verifyEquivalence(name, valueClass == r);
                        }
                        // Restore the previous value in the map and swap 'previous' with 'replacement'.
                        previous = union.put(name, complete = previous);
                    }
                } else if (previous instanceof ParameterDescriptorGroup) {
                    verifyEquivalence(name, complete instanceof ParameterDescriptorGroup);
                }
                /*
                 * Verify that the replacement contains at least all the information provided by the previous
                 * descriptor. The replacement is allowed to contain more information however.
                 */
                final GeneralParameterDescriptor replacement = CC_GeneralOperationParameter.merge(previous, complete);
                if (replacement != valueDescriptor) {
                    union.put(name, replacement);
                    // Record the substitution so the caller can update its GeneralParameterValue instances.
                    if (replacements.put(valueDescriptor, replacement) != null) {
                        // Should never happen, unless the parameter name changed during execution of this loop.
                        throw new CorruptedObjectException(name);
                    }
                }
            }
        }
        return union.values().toArray(new GeneralParameterDescriptor[union.size()]);
    }

    /**
     * Throws an exception for mismatched descriptor if a condition is false.
     * This is used for verifying that a descriptor has the expected properties.
     */
    private static void verifyEquivalence(final String name, final boolean condition) {
        if (!condition) {
            throw new IllegalArgumentException(Resources.format(Resources.Keys.MismatchedParameterDescriptor_1, name));
        }
    }
}
| |
/*
* Copyright 2008-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.griffon.runtime.core.event;
import griffon.core.CallableWithArgs;
import griffon.core.event.Event;
import griffon.core.event.EventRouter;
import griffon.util.GriffonClassUtils;
import griffon.util.MethodDescriptor;
import griffon.util.MethodUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import static griffon.util.GriffonClassUtils.convertToTypeArray;
import static griffon.util.GriffonNameUtils.capitalize;
import static griffon.util.GriffonNameUtils.requireNonBlank;
import static java.util.Arrays.asList;
import static java.util.Collections.EMPTY_LIST;
import static java.util.Collections.synchronizedList;
import static java.util.Objects.requireNonNull;
/**
* @author Andres Almiray
*/
public abstract class AbstractEventRouter implements EventRouter {
private static final String ERROR_EVENT_NAME_BLANK = "Argument 'eventName' must not be blank";
private static final String ERROR_EVENT_HANDLER_BLANK = "Argument 'eventHandler' must not be blank";
private static final String ERROR_MODE_BLANK = "Argument 'mode' must not be blank";
private static final String ERROR_LISTENER_NULL = "Argument 'listener' must not be null";
private static final String ERROR_EVENT_CLASS_NULL = "Argument 'eventClass' must not be null";
private static final String ERROR_EVENT_NULL = "Argument 'event' must not be null";
private static final String ERROR_CALLABLE_NULL = "Argument 'callable' must not be null";
private static final String ERROR_PARAMS_NULL = "Argument 'params' must not be null";
private static final String ERROR_INSTANCE_NULL = "Argument 'instance' must not be null";
private static final String ERROR_OWNER_NULL = "Argument 'owner' must not be null";
private static final Logger LOG = LoggerFactory.getLogger(AbstractEventRouter.class);
protected static final Object[] LOCK = new Object[0];
private boolean enabled = true;
protected final List<Object> listeners = synchronizedList(new ArrayList<>());
protected final Map<String, List<CallableWithArgs<?>>> callableListeners = new ConcurrentHashMap<>();
private final MethodCache methodCache = new MethodCache();
@Override
public boolean isEventPublishingEnabled() {
synchronized (LOCK) {
return this.enabled;
}
}
@Override
public void setEventPublishingEnabled(boolean enabled) {
synchronized (LOCK) {
this.enabled = enabled;
}
}
@Override
public void publishEvent(@Nonnull String eventName) {
publishEvent(eventName, EMPTY_LIST);
}
@Override
public void publishEvent(@Nonnull String eventName, @Nullable List<?> params) {
if (!isEventPublishingEnabled()) return;
requireNonBlank(eventName, ERROR_EVENT_NAME_BLANK);
if (params == null) params = EMPTY_LIST;
buildPublisher(eventName, params, "synchronously").run();
}
@Override
public void publishEventOutsideUI(@Nonnull String eventName) {
publishEventOutsideUI(eventName, EMPTY_LIST);
}
@Override
public void publishEventOutsideUI(@Nonnull String eventName, @Nullable List<?> params) {
if (!isEventPublishingEnabled()) return;
requireNonBlank(eventName, ERROR_EVENT_NAME_BLANK);
if (params == null) params = EMPTY_LIST;
final Runnable publisher = buildPublisher(eventName, params, "outside UI");
doPublishOutsideUI(publisher);
}
protected abstract void doPublishOutsideUI(@Nonnull Runnable publisher);
@Override
public void publishEventAsync(@Nonnull String eventName) {
publishEventAsync(eventName, EMPTY_LIST);
}
@Override
public void publishEventAsync(@Nonnull String eventName, @Nullable List<?> params) {
if (!isEventPublishingEnabled()) return;
requireNonBlank(eventName, ERROR_EVENT_NAME_BLANK);
if (params == null) params = EMPTY_LIST;
final Runnable publisher = buildPublisher(eventName, params, "asynchronously");
doPublishAsync(publisher);
}
// Dispatches the publisher on a background thread; implementation is toolkit-specific.
protected abstract void doPublishAsync(@Nonnull Runnable publisher);
@Override
public void publishEvent(@Nonnull Event event) {
    requireNonNull(event, ERROR_EVENT_NULL);
    // Typed events are published under their class' simple name, with the event as sole argument.
    final String eventName = event.getClass().getSimpleName();
    publishEvent(eventName, asList(event));
}
@Override
public void publishEventOutsideUI(@Nonnull Event event) {
    requireNonNull(event, ERROR_EVENT_NULL);
    // Typed events are published under their class' simple name, with the event as sole argument.
    final String eventName = event.getClass().getSimpleName();
    publishEventOutsideUI(eventName, asList(event));
}
@Override
public void publishEventAsync(@Nonnull Event event) {
    requireNonNull(event, ERROR_EVENT_NULL);
    // Typed events are published under their class' simple name, with the event as sole argument.
    final String eventName = event.getClass().getSimpleName();
    publishEventAsync(eventName, asList(event));
}
@Override
public <E extends Event> void removeEventListener(@Nonnull Class<E> eventClass, @Nonnull CallableWithArgs<?> listener) {
    requireNonNull(eventClass, ERROR_EVENT_CLASS_NULL);
    // Class-based registrations are keyed by the class' simple name.
    final String eventName = eventClass.getSimpleName();
    removeEventListener(eventName, listener);
}
/**
 * Invokes a callable listener with the event parameters spread as an argument array.
 */
protected void fireEvent(@Nonnull CallableWithArgs<?> callable, @Nonnull List<?> params) {
    requireNonNull(callable, ERROR_CALLABLE_NULL);
    requireNonNull(params, ERROR_PARAMS_NULL);
    final Object[] args = asArray(params);
    callable.call(args);
}
/**
 * Invokes the handler method named {@code eventHandler} on {@code instance} whose
 * signature matches the runtime types of {@code params}, if such a method exists.
 * Method lookup goes through {@link MethodCache}; silently a no-op when no match is found.
 */
protected void fireEvent(@Nonnull Object instance, @Nonnull String eventHandler, @Nonnull List<?> params) {
requireNonNull(instance, ERROR_INSTANCE_NULL);
requireNonBlank(eventHandler, ERROR_EVENT_HANDLER_BLANK);
requireNonNull(params, ERROR_PARAMS_NULL);
// Derive the formal parameter types from the actual argument values.
Class[] argTypes = convertToTypeArray(asArray(params));
MethodDescriptor target = new MethodDescriptor(eventHandler, argTypes);
Method method = methodCache.findMatchingMethodFor(instance.getClass(), target);
if (method != null) {
// NOTE(review): "invokeSafe" presumably handles invocation errors internally rather than propagating — confirm.
MethodUtils.invokeSafe(method, instance, asArray(params));
}
}
@Override
public <E extends Event> void addEventListener(@Nonnull Class<E> eventClass, @Nonnull CallableWithArgs<?> listener) {
    requireNonNull(eventClass, ERROR_EVENT_CLASS_NULL);
    // Class-based registrations are keyed by the class' simple name.
    final String eventName = eventClass.getSimpleName();
    addEventListener(eventName, listener);
}
@Override
@SuppressWarnings("unchecked")
public void addEventListener(@Nonnull Object listener) {
    requireNonNull(listener, ERROR_LISTENER_NULL);
    // Bare callables must be registered per event name; ignore them here.
    if (listener instanceof CallableWithArgs) {
        return;
    }
    // A map is treated as eventName -> callable registrations.
    if (listener instanceof Map) {
        addEventListener((Map) listener);
        return;
    }
    // Only track objects that actually expose event handler methods.
    if (!methodCache.isEventListener(listener.getClass())) {
        return;
    }
    synchronized (listeners) {
        if (listeners.contains(listener)) {
            return;
        }
        try {
            // toString() may be unsupported on some proxies; fall back to the class name.
            LOG.debug("Adding listener {}", listener);
        } catch (UnsupportedOperationException uoe) {
            LOG.debug("Adding listener {}", listener.getClass().getName());
        }
        listeners.add(listener);
    }
}
@Override
public void addEventListener(@Nonnull Map<String, CallableWithArgs<?>> listener) {
    requireNonNull(listener, ERROR_LISTENER_NULL);
    // Register each (eventName -> callable) pair individually.
    listener.forEach(this::addEventListener);
}
@Override
@SuppressWarnings("unchecked")
public void removeEventListener(@Nonnull Object listener) {
    requireNonNull(listener, ERROR_LISTENER_NULL);
    // Bare callables are only registered per event name; nothing to do here.
    if (listener instanceof CallableWithArgs) {
        return;
    }
    // A map is treated as eventName -> callable registrations.
    if (listener instanceof Map) {
        removeEventListener((Map) listener);
        return;
    }
    synchronized (listeners) {
        try {
            // toString() may be unsupported on some proxies; fall back to the class name.
            LOG.debug("Removing listener {}", listener);
        } catch (UnsupportedOperationException uoe) {
            LOG.debug("Removing listener {}", listener.getClass().getName());
        }
        listeners.remove(listener);
        // Also drop callable listeners implemented as inner classes of this instance.
        removeNestedListeners(listener);
    }
}
@Override
public void removeEventListener(@Nonnull Map<String, CallableWithArgs<?>> listener) {
    requireNonNull(listener, ERROR_LISTENER_NULL);
    // Deregister each (eventName -> callable) pair individually.
    listener.forEach(this::removeEventListener);
}
/**
 * Registers a callable listener for the given event name. Duplicate registrations
 * of the same listener on the same event are ignored.
 */
@Override
public void addEventListener(@Nonnull String eventName, @Nonnull CallableWithArgs<?> listener) {
    requireNonBlank(eventName, ERROR_EVENT_NAME_BLANK);
    requireNonNull(listener, ERROR_LISTENER_NULL);
    // Hoisted: the original recomputed capitalize(eventName) up to three times.
    final String name = capitalize(eventName);
    synchronized (callableListeners) {
        List<CallableWithArgs<?>> list = callableListeners.get(name);
        if (list == null) {
            list = new ArrayList<>();
            callableListeners.put(name, list);
        }
        if (list.contains(listener)) {
            return;
        }
        LOG.debug("Adding listener {} on {}", listener.getClass().getName(), name);
        list.add(listener);
    }
}
/**
 * Deregisters a callable listener from the given event name; no-op when the
 * event has no registered listeners.
 */
@Override
public void removeEventListener(@Nonnull String eventName, @Nonnull CallableWithArgs<?> listener) {
    requireNonBlank(eventName, ERROR_EVENT_NAME_BLANK);
    requireNonNull(listener, ERROR_LISTENER_NULL);
    // Hoisted: the original recomputed capitalize(eventName) up to three times.
    final String name = capitalize(eventName);
    synchronized (callableListeners) {
        List<CallableWithArgs<?>> list = callableListeners.get(name);
        if (list != null) {
            LOG.debug("Removing listener {} on {}", listener.getClass().getName(), name);
            list.remove(listener);
        }
    }
}
/**
 * Builds the Runnable that performs the actual event dispatch. The runnable
 * snapshots both listener collections, resolves the "onXxx" handler name, and
 * fires each listener in registration order.
 *
 * @param event  the (uncapitalized) event name
 * @param params event arguments passed to every listener
 * @param mode   human-readable dispatch mode, used only for logging
 */
protected Runnable buildPublisher(@Nonnull final String event, @Nonnull final List<?> params, @Nonnull final String mode) {
    requireNonNull(event, ERROR_EVENT_NULL);
    requireNonNull(params, ERROR_PARAMS_NULL);
    requireNonBlank(mode, ERROR_MODE_BLANK);
    return new Runnable() {
        public void run() {
            String eventName = capitalize(event);
            LOG.debug("Triggering event '{}' {}", eventName, mode);
            String eventHandler = "on" + eventName;
            // Defensive copy to avoid ConcurrentModificationException during dispatch (GRIFFON-224).
            List<Object> listenersCopy = new ArrayList<>();
            synchronized (listeners) {
                listenersCopy.addAll(listeners);
            }
            synchronized (callableListeners) {
                // Fix: use the properly typed list (the original declared a raw List)
                // and bulk addAll instead of a manual element-copy loop.
                List<CallableWithArgs<?>> list = callableListeners.get(eventName);
                if (list != null) {
                    listenersCopy.addAll(list);
                }
            }
            for (Object listener : listenersCopy) {
                if (listener instanceof CallableWithArgs) {
                    fireEvent((CallableWithArgs<?>) listener, params);
                } else {
                    fireEvent(listener, eventHandler, params);
                }
            }
        }
    };
}
/**
 * Removes every callable listener that is a non-static inner class instance
 * enclosed by {@code owner}, across all registered event names.
 */
protected void removeNestedListeners(@Nonnull Object owner) {
    requireNonNull(owner, ERROR_OWNER_NULL);
    synchronized (callableListeners) {
        for (Map.Entry<String, List<CallableWithArgs<?>>> entry : callableListeners.entrySet()) {
            final String eventName = entry.getKey();
            entry.getValue().removeIf(candidate -> {
                if (!isNestedListener(candidate, owner)) {
                    return false;
                }
                LOG.debug("Removing listener {} on {}", candidate.getClass().getName(), capitalize(eventName));
                return true;
            });
        }
    }
}
/**
 * Returns true when {@code listener} is an instance of a non-static inner class
 * whose enclosing instance is {@code owner}.
 */
protected boolean isNestedListener(@Nonnull CallableWithArgs<?> listener, @Nonnull Object owner) {
    requireNonNull(listener, ERROR_LISTENER_NULL);
    requireNonNull(owner, ERROR_OWNER_NULL);
    final Class<?> listenerClass = listener.getClass();
    if (!listenerClass.isMemberClass()) {
        return false;
    }
    if (!listenerClass.getEnclosingClass().equals(owner.getClass())) {
        return false;
    }
    // A non-static inner class keeps its enclosing instance in the synthetic this$0 field.
    return owner.equals(GriffonClassUtils.getFieldValue(listener, "this$0"));
}
/**
 * Converts the given parameter list to an {@code Object[]}.
 */
protected Object[] asArray(@Nonnull List<?> list) {
    // Idiom: passing a zero-length array lets the collection allocate the
    // correctly-sized result in one step (preferred over pre-sizing).
    return list.toArray(new Object[0]);
}
/**
 * Per-class cache of event handler methods. Classes that expose at least one
 * handler method get their metadata cached on first inspection; classes with
 * none are deliberately not cached and are re-scanned on each query.
 */
protected static class MethodCache {
    private final Map<Class<?>, Map<String, List<MethodInfo>>> methodMap = new ConcurrentHashMap<>();

    /**
     * Returns true if {@code klass} declares at least one event handler method,
     * caching the discovered metadata on first success.
     */
    public boolean isEventListener(@Nonnull Class<?> klass) {
        Map<String, List<MethodInfo>> methodMetadata = methodMap.get(klass);
        if (methodMetadata == null) {
            methodMetadata = fetchMethodMetadata(klass);
            if (!methodMetadata.isEmpty()) {
                methodMap.put(klass, methodMetadata);
            } else {
                methodMetadata = null;
            }
        }
        return methodMetadata != null;
    }

    /**
     * Finds a cached handler method of {@code klass} matching {@code target},
     * or null when none matches.
     */
    @Nullable
    public Method findMatchingMethodFor(@Nonnull Class<?> klass, @Nonnull MethodDescriptor target) {
        Map<String, List<MethodInfo>> methodMetadata = methodMap.get(klass);
        // Fix: the original dereferenced the cache entry unconditionally and threw
        // NullPointerException for classes that never went through isEventListener().
        if (methodMetadata == null) {
            return null;
        }
        List<MethodInfo> descriptors = methodMetadata.get(target.getName());
        if (descriptors != null) {
            for (MethodInfo info : descriptors) {
                if (info.descriptor.matches(target)) {
                    return info.method;
                }
            }
        }
        return null;
    }

    /**
     * Scans the public methods of {@code klass} and groups event handler
     * descriptors by method name.
     */
    private Map<String, List<MethodInfo>> fetchMethodMetadata(Class<?> klass) {
        Map<String, List<MethodInfo>> methodMetadata = new LinkedHashMap<>();
        for (Method method : klass.getMethods()) {
            MethodDescriptor descriptor = MethodDescriptor.forMethod(method);
            if (GriffonClassUtils.isEventHandler(descriptor)) {
                String methodName = method.getName();
                List<MethodInfo> descriptors = methodMetadata.get(methodName);
                if (descriptors == null) {
                    descriptors = new ArrayList<>();
                    methodMetadata.put(methodName, descriptors);
                }
                descriptors.add(new MethodInfo(descriptor, method));
            }
        }
        return methodMetadata;
    }
}
/**
 * Immutable pairing of a handler method with its descriptor.
 * Note: the fields are read directly (not via the getters) by MethodCache.
 */
protected static class MethodInfo {
private final MethodDescriptor descriptor;
private final Method method;
public MethodInfo(MethodDescriptor descriptor, Method method) {
this.descriptor = descriptor;
this.method = method;
}
public MethodDescriptor getDescriptor() {
return descriptor;
}
public Method getMethod() {
return method;
}
}
}
| |
/**
*/
package substationStandard.Dataclasses.impl;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import substationStandard.Dataclasses.DataclassesPackage;
import substationStandard.Dataclasses.ValWithTrans;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Val With Trans</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link substationStandard.Dataclasses.impl.ValWithTransImpl#getPosVal <em>Pos Val</em>}</li>
* <li>{@link substationStandard.Dataclasses.impl.ValWithTransImpl#isTransInd <em>Trans Ind</em>}</li>
* </ul>
*
* @generated
*/
public class ValWithTransImpl extends MinimalEObjectImpl.Container implements ValWithTrans {
    /**
     * The default value of the '{@link #getPosVal() <em>Pos Val</em>}' attribute.
     * @see #getPosVal()
     * @generated
     * @ordered
     */
    protected static final int POS_VAL_EDEFAULT = 0;

    /**
     * The cached value of the '{@link #getPosVal() <em>Pos Val</em>}' attribute.
     * @see #getPosVal()
     * @generated
     * @ordered
     */
    protected int posVal = POS_VAL_EDEFAULT;

    /**
     * The default value of the '{@link #isTransInd() <em>Trans Ind</em>}' attribute.
     * @see #isTransInd()
     * @generated
     * @ordered
     */
    protected static final boolean TRANS_IND_EDEFAULT = false;

    /**
     * The cached value of the '{@link #isTransInd() <em>Trans Ind</em>}' attribute.
     * @see #isTransInd()
     * @generated
     * @ordered
     */
    protected boolean transInd = TRANS_IND_EDEFAULT;

    /**
     * @generated
     */
    protected ValWithTransImpl() {
        super();
    }

    /**
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return DataclassesPackage.Literals.VAL_WITH_TRANS;
    }

    /**
     * Returns the cached position value.
     * @generated NOT -- added missing @Override on interface method
     */
    @Override
    public int getPosVal() {
        return posVal;
    }

    /**
     * Sets the position value and notifies adapters of the change.
     * @generated NOT -- added missing @Override on interface method
     */
    @Override
    public void setPosVal(int newPosVal) {
        int oldPosVal = posVal;
        posVal = newPosVal;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, DataclassesPackage.VAL_WITH_TRANS__POS_VAL, oldPosVal, posVal));
    }

    /**
     * Returns the cached transient-indicator flag.
     * @generated NOT -- added missing @Override on interface method
     */
    @Override
    public boolean isTransInd() {
        return transInd;
    }

    /**
     * Sets the transient-indicator flag and notifies adapters of the change.
     * @generated NOT -- added missing @Override on interface method
     */
    @Override
    public void setTransInd(boolean newTransInd) {
        boolean oldTransInd = transInd;
        transInd = newTransInd;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, DataclassesPackage.VAL_WITH_TRANS__TRANS_IND, oldTransInd, transInd));
    }

    /**
     * Reflective feature read.
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case DataclassesPackage.VAL_WITH_TRANS__POS_VAL:
                return getPosVal();
            case DataclassesPackage.VAL_WITH_TRANS__TRANS_IND:
                return isTransInd();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * Reflective feature write.
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case DataclassesPackage.VAL_WITH_TRANS__POS_VAL:
                setPosVal((Integer) newValue);
                return;
            case DataclassesPackage.VAL_WITH_TRANS__TRANS_IND:
                setTransInd((Boolean) newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * Reflective feature reset to defaults.
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case DataclassesPackage.VAL_WITH_TRANS__POS_VAL:
                setPosVal(POS_VAL_EDEFAULT);
                return;
            case DataclassesPackage.VAL_WITH_TRANS__TRANS_IND:
                setTransInd(TRANS_IND_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * Reflective "differs from default" check.
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case DataclassesPackage.VAL_WITH_TRANS__POS_VAL:
                return posVal != POS_VAL_EDEFAULT;
            case DataclassesPackage.VAL_WITH_TRANS__TRANS_IND:
                return transInd != TRANS_IND_EDEFAULT;
        }
        return super.eIsSet(featureID);
    }

    /**
     * @generated NOT -- StringBuffer replaced with StringBuilder: no cross-thread
     * sharing here, so the unsynchronized builder is the correct choice.
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();
        StringBuilder result = new StringBuilder(super.toString());
        result.append(" (posVal: ");
        result.append(posVal);
        result.append(", transInd: ");
        result.append(transInd);
        result.append(')');
        return result.toString();
    }
} //ValWithTransImpl
| |
package com.asteele.clrs.data_structures;
/**
* A binary search tree is an abstract data structure supporting the operations
* search, add, and remove in time proportional to the height of the tree. If
* the tree is balanced, these operations are supported in time proportional to
* log(n), where n is the number of nodes in the tree. If the tree is completely
* unbalanced, they are supported in O(n) time.
*
* The structure of a binary search tree depends on the order in which its
* composing elements were added. If added in one order, the tree may be
* completely balanced. If added in a different order, the tree may be quite
* unbalanced, even resembling a linked list. There are, in fact, N! possible
* different structures of a binary search tree composed of the same set of
* elements.
*
* Unbalanced tree structures can have a drastic negative impact on the speed of
* the fundamental operations. Balanced binary search trees
* (see {@link com.asteele.clrs.data_structures.AVLTree}) eliminate the
* possibility these asymptotic performance issues by guaranteeing tree balance.
*
* Duplicate nodes are allowed in this implementation.
*
* @author Alex Steele
*/
public class BinarySearchTree<K extends Comparable<? super K>> {
    // Root of the tree; null when the tree is empty.
    private BSTNode<K> root;
    // Number of nodes currently in the tree.
    private int size;

    /**
     * A tree node holding one element plus parent/child links.
     * Exposed to subclasses (e.g. balanced-tree variants) via the accessors.
     */
    protected static class BSTNode<E extends Comparable<? super E>> {
        private E data;
        private BSTNode<E> right;
        private BSTNode<E> left;
        private BSTNode<E> parent;

        protected BSTNode(E data) {
            this.data = data;
        }

        protected BSTNode<E> right() {
            return this.right;
        }

        protected BSTNode<E> left() {
            return this.left;
        }

        protected BSTNode<E> parent() {
            return this.parent;
        }
    }

    /**
     * Constructs an empty BinarySearchTree.
     */
    public BinarySearchTree() {
        this.size = 0;
        this.root = null;
    }

    /**
     * Adds the given element to the tree. Duplicates are allowed.
     *
     * @param element element to be added
     * @return true (insertion always succeeds)
     */
    public boolean add(K element) {
        return addNode(new BSTNode<K>(element));
    }

    /**
     * Inserts a pre-built node by walking down from the root to the correct
     * leaf position. Equal elements are always placed in the right subtree.
     *
     * @return true (insertion always succeeds)
     */
    protected boolean addNode(BSTNode<K> addedNode) {
        BSTNode<K> chaser = null;
        BSTNode<K> current = this.root;
        while (current != null) {
            chaser = current;
            if (addedNode.data.compareTo(current.data) < 0) {
                current = current.left;
            } else {
                current = current.right; // Equal nodes are always placed right.
            }
        }
        addedNode.parent = chaser;
        if (chaser == null) {
            this.root = addedNode; // Tree was empty.
        } else if (addedNode.data.compareTo(chaser.data) < 0) {
            chaser.left = addedNode;
        } else {
            chaser.right = addedNode;
        }
        this.size++;
        // Fix: the original returned 'addedNode != null', which was always true at
        // this point (addedNode was already dereferenced above). Return true directly.
        return true;
    }

    /**
     * Removes the given element from the tree, if present.
     *
     * @param element element to remove
     * @return true if found and removed, false otherwise
     */
    public boolean remove(K element) {
        return removeNode(search(element));
    }

    /**
     * Standard CLRS deletion: leaf/one-child nodes are spliced out; two-child
     * nodes are replaced by their in-order successor.
     */
    protected boolean removeNode(BSTNode<K> nodeToRemove) {
        if (nodeToRemove == null) return false; // The element was not found.
        if (nodeToRemove.left == null) {
            // Transplant accepts null arguments; this also handles the leaf case.
            transplant(nodeToRemove, nodeToRemove.right);
        } else if (nodeToRemove.right == null) {
            transplant(nodeToRemove, nodeToRemove.left);
        } else {
            BSTNode<K> successor = findTreeMinimum(nodeToRemove.right);
            if (successor.parent != nodeToRemove) {
                // Successor is deeper in the right subtree: splice it out first.
                transplant(successor, successor.right);
                successor.right = nodeToRemove.right;
                successor.right.parent = successor;
            }
            transplant(nodeToRemove, successor);
            successor.left = nodeToRemove.left;
            successor.left.parent = successor;
        }
        this.size--;
        return true;
    }

    /**
     * Finds the minimum node of the subtree rooted at the given node
     * (returns null when the subtree is empty).
     */
    private BSTNode<K> findTreeMinimum(BSTNode<K> parent) {
        BSTNode<K> chaser = null;
        BSTNode<K> current = parent;
        while (current != null) {
            chaser = current;
            current = current.left;
        }
        return chaser;
    }

    /**
     * Replaces the first given node (and its subtree position) with the second.
     * Does not touch nodeToReplaceWith's left/right subtrees — that is the
     * caller's responsibility.
     */
    private void transplant(BSTNode<K> nodeToBeReplaced, BSTNode<K> nodeToReplaceWith) {
        if (nodeToBeReplaced.parent == null) {
            this.root = nodeToReplaceWith;
        } else if (nodeToBeReplaced == nodeToBeReplaced.parent.left) {
            nodeToBeReplaced.parent.left = nodeToReplaceWith;
        } else {
            nodeToBeReplaced.parent.right = nodeToReplaceWith;
        }
        if (nodeToReplaceWith != null) {
            nodeToReplaceWith.parent = nodeToBeReplaced.parent;
        }
    }

    /**
     * Returns true if the given element exists in the tree, false otherwise.
     */
    public boolean contains(K element) {
        return search(element) != null;
    }

    /**
     * Finds the given element and returns its node, or null if absent.
     */
    protected BSTNode<K> search(K element) {
        BSTNode<K> current = this.root;
        while (current != null) {
            int comparison = current.data.compareTo(element);
            if (comparison < 0) {
                current = current.right;
            } else if (comparison > 0) {
                current = current.left;
            } else {
                return current;
            }
        }
        return null;
    }

    /**
     * Left-rotates at the given node so it becomes the left child of its
     * current right child. No-op when there is no right child.
     */
    protected void rotateLeft(BSTNode<K> topNode) {
        if (topNode.right != null) {
            BSTNode<K> rightNode = topNode.right;
            topNode.right = rightNode.left;
            if (rightNode.left != null) {
                rightNode.left.parent = topNode;
            }
            rightNode.parent = topNode.parent;
            if (topNode.parent == null) {
                this.root = rightNode;
            } else if (topNode == topNode.parent.left) {
                topNode.parent.left = rightNode;
            } else {
                topNode.parent.right = rightNode;
            }
            rightNode.left = topNode;
            topNode.parent = rightNode;
        }
    }

    /**
     * Right-rotates at the given node so it becomes the right child of its
     * current left child. No-op when there is no left child.
     */
    protected void rotateRight(BSTNode<K> topNode) {
        if (topNode.left != null) {
            BSTNode<K> leftNode = topNode.left;
            topNode.left = leftNode.right;
            if (leftNode.right != null) {
                leftNode.right.parent = topNode;
            }
            leftNode.parent = topNode.parent;
            if (topNode.parent == null) {
                this.root = leftNode;
            } else if (topNode == topNode.parent.left) {
                topNode.parent.left = leftNode;
            } else {
                topNode.parent.right = leftNode;
            }
            leftNode.right = topNode;
            topNode.parent = leftNode;
        }
    }

    /**
     * Returns the root node of the tree (convenience hook for subclasses
     * such as red-black trees).
     */
    protected BSTNode<K> root() {
        return this.root;
    }

    /**
     * Adds the elements to the given collection in pre-order (node, left, right).
     *
     * @param collection collection to which the elements will be added
     */
    public void preOrderTraversal(java.util.Collection<K> collection) {
        preOrderTraversal(this.root, collection);
    }

    private static <T extends Comparable<? super T>> void preOrderTraversal(
            BSTNode<T> root, java.util.Collection<T> collection) {
        if (root != null) {
            collection.add(root.data);
            preOrderTraversal(root.left(), collection);
            preOrderTraversal(root.right(), collection);
        }
    }

    /**
     * Adds the elements to the given collection in-order, i.e. in sorted order.
     *
     * @param collection collection to which the elements will be added
     */
    public void inOrderTraversal(java.util.Collection<K> collection) {
        inOrderTraversal(this.root, collection);
    }

    private static <T extends Comparable<? super T>> void inOrderTraversal(
            BSTNode<T> root, java.util.Collection<T> collection) {
        if (root != null) {
            inOrderTraversal(root.left(), collection);
            collection.add(root.data);
            inOrderTraversal(root.right(), collection);
        }
    }

    /**
     * Adds the elements to the given collection in post-order (left, right, node).
     *
     * @param collection collection to which the elements will be added
     */
    public void postOrderTraversal(java.util.Collection<K> collection) {
        postOrderTraversal(this.root, collection);
    }

    private static <T extends Comparable<? super T>> void postOrderTraversal(
            BSTNode<T> root, java.util.Collection<T> collection) {
        if (root != null) {
            postOrderTraversal(root.left(), collection);
            postOrderTraversal(root.right(), collection);
            collection.add(root.data);
        }
    }

    /**
     * Returns the number of elements in this tree.
     */
    public int size() {
        return this.size;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.arrow.vector.complex;
import static java.util.Collections.singletonList;
import static org.apache.arrow.memory.util.LargeMemoryUtil.capAtMaxInt;
import static org.apache.arrow.memory.util.LargeMemoryUtil.checkedCastToInt;
import static org.apache.arrow.util.Preconditions.checkNotNull;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.arrow.memory.ArrowBuf;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.OutOfMemoryException;
import org.apache.arrow.memory.util.ArrowBufPointer;
import org.apache.arrow.memory.util.ByteFunctionHelpers;
import org.apache.arrow.memory.util.CommonUtil;
import org.apache.arrow.memory.util.hash.ArrowBufHasher;
import org.apache.arrow.util.Preconditions;
import org.apache.arrow.vector.AddOrGetResult;
import org.apache.arrow.vector.BaseFixedWidthVector;
import org.apache.arrow.vector.BaseValueVector;
import org.apache.arrow.vector.BaseVariableWidthVector;
import org.apache.arrow.vector.BitVectorHelper;
import org.apache.arrow.vector.BufferBacked;
import org.apache.arrow.vector.DensityAwareVector;
import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.NullVector;
import org.apache.arrow.vector.UInt4Vector;
import org.apache.arrow.vector.ValueVector;
import org.apache.arrow.vector.ZeroVector;
import org.apache.arrow.vector.compare.VectorVisitor;
import org.apache.arrow.vector.complex.impl.ComplexCopier;
import org.apache.arrow.vector.complex.impl.UnionLargeListReader;
import org.apache.arrow.vector.complex.impl.UnionLargeListWriter;
import org.apache.arrow.vector.complex.reader.FieldReader;
import org.apache.arrow.vector.ipc.message.ArrowFieldNode;
import org.apache.arrow.vector.types.Types.MinorType;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.arrow.vector.util.CallBack;
import org.apache.arrow.vector.util.JsonStringArrayList;
import org.apache.arrow.vector.util.OversizedAllocationException;
import org.apache.arrow.vector.util.SchemaChangeRuntimeException;
import org.apache.arrow.vector.util.TransferPair;
/**
* A list vector contains lists of a specific type of elements. Its structure contains 3 elements.
* <ol>
* <li>A validity buffer.</li>
* <li> An offset buffer, that denotes lists boundaries. </li>
* <li> A child data vector that contains the elements of lists. </li>
* </ol>
*
* This is the LargeList variant of list, it has a 64-bit wide offset
*
* <p>
* WARNING: Currently Arrow in Java doesn't support 64-bit vectors. This class
* follows the expected behaviour of a LargeList but doesn't actually support allocating
* a 64-bit vector. It has little use until 64-bit vectors are supported and should be used
* with caution.
* todo review checkedCastToInt usage in this class.
* Once int64 indexed vectors are supported these checks aren't needed.
* </p>
*/
public class LargeListVector extends BaseValueVector implements RepeatedValueVector, FieldVector, PromotableVector {
/**
 * Creates an empty, nullable LargeListVector whose child element type is not
 * yet set (the data vector starts as the default ZeroVector).
 */
public static LargeListVector empty(String name, BufferAllocator allocator) {
return new LargeListVector(name, allocator, FieldType.nullable(ArrowType.LargeList.INSTANCE), null);
}
// Placeholder data vector used until a real child element vector is attached.
public static final FieldVector DEFAULT_DATA_VECTOR = ZeroVector.INSTANCE;
public static final String DATA_VECTOR_NAME = "$data$";
// LargeList offsets are 64-bit (8 bytes), vs 4 bytes for a regular ListVector.
public static final byte OFFSET_WIDTH = 8;
// int64 offsets demarcating each list's [start, end) range in the data vector.
protected ArrowBuf offsetBuffer;
// Child vector holding the flattened list elements.
protected FieldVector vector;
// Optional schema-change callback; may be null.
protected final CallBack callBack;
// Number of lists currently in this vector.
protected int valueCount;
protected long offsetAllocationSizeInBytes = INITIAL_VALUE_ALLOCATION * OFFSET_WIDTH;
private final String name;
protected String defaultDataVectorName = DATA_VECTOR_NAME;
// Bit-packed null bitmap: one validity bit per list entry.
protected ArrowBuf validityBuffer;
protected UnionLargeListReader reader;
private final FieldType fieldType;
private int validityAllocationSizeInBytes;
/**
* The maximum index that is actually set.
*/
private int lastSet;
/**
 * Constructs a new instance.
 *
 * @param name The name of the instance.
 * @param allocator The allocator to use for allocating/reallocating buffers.
 * @param fieldType The type of this list.
 * @param callBack A schema change callback.
 */
public LargeListVector(String name, BufferAllocator allocator, FieldType fieldType, CallBack callBack) {
    super(allocator);
    this.name = name;
    this.validityBuffer = allocator.getEmpty();
    this.fieldType = checkNotNull(fieldType);
    this.callBack = callBack;
    this.validityAllocationSizeInBytes = getValidityBufferSizeFromCount(INITIAL_VALUE_ALLOCATION);
    this.lastSet = -1;
    this.offsetBuffer = allocator.getEmpty();
    // Fix: the original wrote 'vector == null ? DEFAULT_DATA_VECTOR : vector', which
    // compares the field against itself; at this point the field is always null,
    // so the conditional always chose the default. Assign it directly.
    this.vector = DEFAULT_DATA_VECTOR;
    this.valueCount = 0;
}
/**
 * Creates the single child (element) vector from the given field definitions.
 * Fails if a child already exists or if more than one child is supplied.
 */
@Override
public void initializeChildrenFromFields(List<Field> children) {
    // A list vector has exactly one child: the element (data) vector.
    if (children.size() != 1) {
        throw new IllegalArgumentException("Lists have only one child. Found: " + children);
    }
    final Field childField = children.get(0);
    final AddOrGetResult<FieldVector> result = addOrGetVector(childField.getFieldType());
    if (!result.isCreated()) {
        throw new IllegalArgumentException("Child vector already existed: " + result.getVector());
    }
    result.getVector().initializeChildrenFromFields(childField.getChildren());
}
/**
 * Sizes the buffers that will be allocated for {@code numRecords} lists.
 * Fixed/variable width children get a default fan-out multiplier per record.
 */
@Override
public void setInitialCapacity(int numRecords) {
validityAllocationSizeInBytes = getValidityBufferSizeFromCount(numRecords);
// N lists need N+1 offset boundaries; widen to long before multiplying.
offsetAllocationSizeInBytes = (long) (numRecords + 1) * OFFSET_WIDTH;
if (vector instanceof BaseFixedWidthVector || vector instanceof BaseVariableWidthVector) {
vector.setInitialCapacity(numRecords * RepeatedValueVector.DEFAULT_REPEAT_PER_RECORD);
} else {
vector.setInitialCapacity(numRecords);
}
}
/**
 * Specialized version of setInitialCapacity() for ListVector. This is
 * used by callers who want to explicitly control memory allocated for the
 * inner data vector, e.g. under fixed per-record-batch memory budgets where
 * the default per-record multiplier would over-provision.
 *
 * @param numRecords value count
 * @param density average list length per position; e.g. 10 means every
 *                position holds a 10-element list, 0.1 means one in ten
 *                positions holds a single-element list and the rest are
 *                null or empty.
 */
@Override
public void setInitialCapacity(int numRecords, double density) {
    validityAllocationSizeInBytes = getValidityBufferSizeFromCount(numRecords);
    if ((numRecords * density) >= Integer.MAX_VALUE) {
        throw new OversizedAllocationException("Requested amount of memory is more than max allowed");
    }
    // Fix: widen to long before multiplying, matching setInitialCapacity(int);
    // the original's int multiplication could overflow for large record counts.
    offsetAllocationSizeInBytes = ((long) numRecords + 1) * OFFSET_WIDTH;
    // Always provision at least one element for the inner vector.
    final int innerValueCapacity = Math.max((int) (numRecords * density), 1);
    if (vector instanceof DensityAwareVector) {
        ((DensityAwareVector) vector).setInitialCapacity(innerValueCapacity, density);
    } else {
        vector.setInitialCapacity(innerValueCapacity);
    }
}
/**
 * Returns the density of this vector: the average number of child elements
 * per list position (0.0 for an empty vector).
 */
public double getDensity() {
    if (valueCount == 0) {
        return 0.0D;
    }
    // Total element count is the span between the first and last offsets.
    final long firstOffset = offsetBuffer.getLong(0L);
    final long lastOffset = offsetBuffer.getLong((long) valueCount * OFFSET_WIDTH);
    return (double) (lastOffset - firstOffset) / valueCount;
}
/**
 * Returns the single child of this list vector: the element data vector.
 */
@Override
public List<FieldVector> getChildrenFromFields() {
return singletonList(getDataVector());
}
/**
 * Load the buffers of this vector with provided source buffers.
 * The caller manages the source buffers and populates them before invoking
 * this method.
 * @param fieldNode the fieldNode indicating the value count
 * @param ownBuffers the buffers for this Field (own buffers only, children not included)
 */
@Override
public void loadFieldBuffers(ArrowFieldNode fieldNode, List<ArrowBuf> ownBuffers) {
if (ownBuffers.size() != 2) {
throw new IllegalArgumentException("Illegal buffer count, expected " + 2 + ", got: " + ownBuffers.size());
}
// Buffer order is fixed: validity first, then offsets.
ArrowBuf bitBuffer = ownBuffers.get(0);
ArrowBuf offBuffer = ownBuffers.get(1);
// Release the previously held buffers before adopting the new ones.
validityBuffer.getReferenceManager().release();
validityBuffer = BitVectorHelper.loadValidityBuffer(fieldNode, bitBuffer, allocator);
offsetBuffer.getReferenceManager().release();
// Retain so this vector shares ownership of the caller's offset buffer.
offsetBuffer = offBuffer.getReferenceManager().retain(offBuffer, allocator);
validityAllocationSizeInBytes = checkedCastToInt(validityBuffer.capacity());
offsetAllocationSizeInBytes = offsetBuffer.capacity();
lastSet = fieldNode.getLength() - 1;
valueCount = fieldNode.getLength();
}
/**
 * Returns this vector's own buffers (validity then offsets), with their
 * reader/writer indexes positioned to reflect the current value count.
 */
@Override
public List<ArrowBuf> getFieldBuffers() {
    setReaderAndWriterIndex();
    final List<ArrowBuf> buffers = new ArrayList<>(2);
    buffers.add(validityBuffer);
    buffers.add(offsetBuffer);
    return buffers;
}
/**
 * Positions the reader/writer indexes of the validity and offset buffers to
 * cover exactly the data for the current value count.
 */
private void setReaderAndWriterIndex() {
    validityBuffer.readerIndex(0);
    offsetBuffer.readerIndex(0);
    if (valueCount == 0) {
        validityBuffer.writerIndex(0);
        offsetBuffer.writerIndex(0);
    } else {
        validityBuffer.writerIndex(getValidityBufferSizeFromCount(valueCount));
        // Fix: widen before multiplying — the original's int expression
        // '(valueCount + 1) * OFFSET_WIDTH' overflowed for large value counts
        // even though writerIndex takes a long.
        offsetBuffer.writerIndex(((long) valueCount + 1) * OFFSET_WIDTH);
    }
}
/** @deprecated buffers are exposed directly; see {@link #getFieldBuffers()}. */
@Override
@Deprecated
public List<BufferBacked> getFieldInnerVectors() {
  final String message = "There are no inner vectors. Use getFieldBuffers";
  throw new UnsupportedOperationException(message);
}
/**
 * Same as {@link #allocateNewSafe()} but throws instead of returning false.
 */
@Override
public void allocateNew() throws OutOfMemoryException {
  final boolean allocated = allocateNewSafe();
  if (!allocated) {
    throw new OutOfMemoryException("Failure while allocating memory");
  }
}
/**
 * Allocate memory for the vector. We internally use a default value count
 * of 4096 to allocate memory for at least these many elements in the
 * vector.
 *
 * <p>Any {@link Exception} raised during allocation is treated as a failure:
 * partially allocated buffers are released and {@code false} is returned.</p>
 *
 * @return false if memory allocation fails, true otherwise.
 */
public boolean allocateNewSafe() {
  boolean success = false;
  try {
    /* we are doing a new allocation -- release the current buffers */
    clear();
    /* allocate validity buffer */
    allocateValidityBuffer(validityAllocationSizeInBytes);
    /* allocate offset and data buffer */
    allocateOffsetBuffer(offsetAllocationSizeInBytes);
    success = vector.allocateNewSafe();
  } catch (Exception e) {
    // best-effort contract: report failure via the return value rather than propagate.
    // (The original returned from inside `finally`, which silently discarded any
    // in-flight Throwable, and printed the stack trace to stderr.)
    success = false;
  } finally {
    if (!success) {
      clear();
    }
  }
  return success;
}
/**
 * Allocate a fresh, zeroed validity buffer of the given size in bytes.
 *
 * @param size requested size in bytes; must fit in an int
 */
private void allocateValidityBuffer(final long size) {
  // fail loudly on narrowing instead of silently truncating oversized requests
  // (consistent with the checkedCastToInt usage in loadFieldBuffers/reallocValidityBuffer)
  final int curSize = checkedCastToInt(size);
  validityBuffer = allocator.buffer(curSize);
  validityBuffer.readerIndex(0);
  validityAllocationSizeInBytes = curSize;
  validityBuffer.setZero(0, validityBuffer.capacity());
}
/** Allocate a fresh, zeroed offset buffer of the given size in bytes. */
protected void allocateOffsetBuffer(final long size) {
  final ArrowBuf fresh = allocator.buffer(size);
  fresh.readerIndex(0);
  fresh.setZero(0, fresh.capacity());
  offsetBuffer = fresh;
  offsetAllocationSizeInBytes = size;
}
/**
 * Resize the vector to increase the capacity. The internal behavior is to
 * double the current value capacity.
 */
@Override
public void reAlloc() {
  // grow our own buffers, then delegate growth of the child data vector
  reallocValidityBuffer();
  reallocOffsetBuffer();
  vector.reAlloc();
}
/** Double the capacity of both the offset and validity buffers. */
private void reallocValidityAndOffsetBuffers() {
reallocOffsetBuffer();
reallocValidityBuffer();
}
/**
 * Double the offset buffer (rounded to a power of two, bounded by
 * MAX_ALLOCATION_SIZE), copying existing offsets and zero-filling the tail.
 */
protected void reallocOffsetBuffer() {
final long currentBufferCapacity = offsetBuffer.capacity();
long newAllocationSize = currentBufferCapacity * 2;
if (newAllocationSize == 0) {
// nothing allocated yet: fall back to the remembered size or the default
if (offsetAllocationSizeInBytes > 0) {
newAllocationSize = offsetAllocationSizeInBytes;
} else {
newAllocationSize = INITIAL_VALUE_ALLOCATION * OFFSET_WIDTH * 2;
}
}
newAllocationSize = CommonUtil.nextPowerOfTwo(newAllocationSize);
// cap at the bytes needed to hold Integer.MAX_VALUE offsets
newAllocationSize = Math.min(newAllocationSize, (long) (OFFSET_WIDTH) * Integer.MAX_VALUE);
assert newAllocationSize >= 1;
// the <= capacity check fires when the cap above prevented any actual growth
if (newAllocationSize > MAX_ALLOCATION_SIZE || newAllocationSize <= offsetBuffer.capacity()) {
throw new OversizedAllocationException("Unable to expand the buffer");
}
final ArrowBuf newBuf = allocator.buffer(newAllocationSize);
newBuf.setBytes(0, offsetBuffer, 0, currentBufferCapacity);
// zero the newly added tail so unwritten offsets read as 0
newBuf.setZero(currentBufferCapacity, newBuf.capacity() - currentBufferCapacity);
offsetBuffer.getReferenceManager().release(1);
offsetBuffer = newBuf;
offsetAllocationSizeInBytes = newAllocationSize;
}
/**
 * Double the validity buffer (rounded to a power of two, bounded by
 * MAX_ALLOCATION_SIZE), copying existing bits and zero-filling the tail.
 */
private void reallocValidityBuffer() {
  final int currentBufferCapacity = checkedCastToInt(validityBuffer.capacity());
  // BUG FIX: multiply in long arithmetic; `currentBufferCapacity * 2` was evaluated
  // as 32-bit int and overflowed for capacities above 1 GiB before widening.
  long newAllocationSize = currentBufferCapacity * 2L;
  if (newAllocationSize == 0) {
    // nothing allocated yet: fall back to the remembered size or the default
    if (validityAllocationSizeInBytes > 0) {
      newAllocationSize = validityAllocationSizeInBytes;
    } else {
      newAllocationSize = getValidityBufferSizeFromCount(INITIAL_VALUE_ALLOCATION) * 2L;
    }
  }
  newAllocationSize = CommonUtil.nextPowerOfTwo(newAllocationSize);
  assert newAllocationSize >= 1;
  if (newAllocationSize > MAX_ALLOCATION_SIZE) {
    throw new OversizedAllocationException("Unable to expand the buffer");
  }
  final ArrowBuf newBuf = allocator.buffer((int) newAllocationSize);
  newBuf.setBytes(0, validityBuffer, 0, currentBufferCapacity);
  // zero the newly added tail so unwritten validity bits read as null
  newBuf.setZero(currentBufferCapacity, newBuf.capacity() - currentBufferCapacity);
  validityBuffer.getReferenceManager().release(1);
  validityBuffer = newBuf;
  validityAllocationSizeInBytes = (int) newAllocationSize;
}
/**
 * Same as {@link #copyFrom(int, int, ValueVector)} except that
 * it handles the case when the capacity of the vector needs to be expanded
 * before copy.
 * @param inIndex position to copy from in source vector
 * @param outIndex position to copy to in this vector
 * @param from source vector
 */
@Override
public void copyFromSafe(int inIndex, int outIndex, ValueVector from) {
  // copyFrom goes through the writer, which already expands capacity as needed
  this.copyFrom(inIndex, outIndex, from);
}
/**
 * Copy a cell value from a particular index in source vector to a particular
 * position in this vector.
 * @param inIndex position to copy from in source vector
 * @param outIndex position to copy to in this vector
 * @param from source vector
 */
@Override
public void copyFrom(int inIndex, int outIndex, ValueVector from) {
  Preconditions.checkArgument(this.getMinorType() == from.getMinorType());
  final FieldReader sourceReader = from.getReader();
  final UnionLargeListWriter targetWriter = getWriter();
  sourceReader.setPosition(inIndex);
  targetWriter.setPosition(outIndex);
  ComplexCopier.copy(sourceReader, targetWriter);
}
/** LargeListVector stores 64-bit offsets in a raw buffer, not a vector. */
@Override
public UInt4Vector getOffsetVector() {
  final String message = "There is no inner offset vector";
  throw new UnsupportedOperationException(message);
}
/**
 * Get the inner data vector for this list vector.
 * @return data vector
 */
@Override
public FieldVector getDataVector() {
  return this.vector;
}
/** Transfer pair with no schema-change callback. */
@Override
public TransferPair getTransferPair(String ref, BufferAllocator allocator) {
  return this.getTransferPair(ref, allocator, null);
}
/** Transfer pair targeting a freshly created vector with the given name. */
@Override
public TransferPair getTransferPair(String ref, BufferAllocator allocator, CallBack callBack) {
  final TransferImpl pair = new TransferImpl(ref, allocator, callBack);
  return pair;
}
/** Transfer pair targeting an existing LargeListVector. */
@Override
public TransferPair makeTransferPair(ValueVector target) {
  final LargeListVector targetVector = (LargeListVector) target;
  return new TransferImpl(targetVector);
}
/** Native memory address of the validity buffer. */
@Override
public long getValidityBufferAddress() {
  return this.validityBuffer.memoryAddress();
}
/** List vectors have no data buffer of their own; data lives in the child vector. */
@Override
public long getDataBufferAddress() {
  throw new UnsupportedOperationException();
}
/** Native memory address of the offset buffer. */
@Override
public long getOffsetBufferAddress() {
  return this.offsetBuffer.memoryAddress();
}
/** Validity (null-bitmap) buffer of this vector. */
@Override
public ArrowBuf getValidityBuffer() {
  return this.validityBuffer;
}
/** List vectors have no data buffer of their own; data lives in the child vector. */
@Override
public ArrowBuf getDataBuffer() {
  throw new UnsupportedOperationException();
}
/** Offset buffer holding 8-byte start/end positions per list. */
@Override
public ArrowBuf getOffsetBuffer() {
  return this.offsetBuffer;
}
/** Number of list values in this vector. */
@Override
public int getValueCount() {
  return this.valueCount;
}
/** Hash code of the value at {@code index} using the default hasher. */
@Override
public int hashCode(int index) {
  return this.hashCode(index, null);
}
/**
 * Hash code of the list at {@code index}: combines the hashes of all child
 * elements between the list's start and end offsets; null lists hash to
 * {@link ArrowBufPointer#NULL_HASH_CODE}.
 */
@Override
public int hashCode(int index, ArrowBufHasher hasher) {
  if (isSet(index) == 0) {
    return ArrowBufPointer.NULL_HASH_CODE;
  }
  final long first = offsetBuffer.getLong((long) index * OFFSET_WIDTH);
  final long last = offsetBuffer.getLong(((long) index + 1L) * OFFSET_WIDTH);
  int combined = 0;
  for (long pos = first; pos < last; pos++) {
    combined = ByteFunctionHelpers.combineHash(combined, vector.hashCode(checkedCastToInt(pos), hasher));
  }
  return combined;
}
/** Dispatch to the visitor's LargeListVector overload. */
@Override
public <OUT, IN> OUT accept(VectorVisitor<OUT, IN> visitor, IN value) {
return visitor.visit(this, value);
}
/** Create a fresh writer positioned over this vector. */
public UnionLargeListWriter getWriter() {
  final UnionLargeListWriter writer = new UnionLargeListWriter(this);
  return writer;
}
/** Swap in a new child data vector, releasing the old one's buffers first. */
protected void replaceDataVector(FieldVector v) {
vector.clear();
vector = v;
}
/**
 * Replace the child data vector with a UnionVector so heterogeneous element
 * types can be written, notifying the schema-change callback if present.
 * @return the new union data vector
 */
@Override
public UnionVector promoteToUnion() {
// NOTE: this local 'vector' shadows the data-vector field; replaceDataVector assigns it to the field
UnionVector vector = new UnionVector("$data$", allocator, callBack);
replaceDataVector(vector);
invalidateReader();
if (callBack != null) {
// tell the owner the schema changed
callBack.doWork();
}
return vector;
}
/**
 * TransferPair implementation that moves (or slices) this vector's validity
 * and offset buffers plus the child data into a target LargeListVector.
 */
private class TransferImpl implements TransferPair {
// target vector receiving the buffers
LargeListVector to;
// transfer pair for the child data vector
TransferPair dataTransferPair;
public TransferImpl(String name, BufferAllocator allocator, CallBack callBack) {
this(new LargeListVector(name, allocator, fieldType, callBack));
}
public TransferImpl(LargeListVector to) {
this.to = to;
// ensure the target has a child data vector of the same type before pairing
to.addOrGetVector(vector.getField().getFieldType());
if (to.getDataVector() instanceof ZeroVector) {
to.addOrGetVector(vector.getField().getFieldType());
}
dataTransferPair = getDataVector().makeTransferPair(to.getDataVector());
}
/**
 * Transfer this vector's data to another vector. The memory associated
 * with this vector is transferred to the allocator of target vector
 * for accounting and management purposes.
 */
@Override
public void transfer() {
to.clear();
dataTransferPair.transfer();
// move ownership of our buffers into the target's allocator
to.validityBuffer = transferBuffer(validityBuffer, to.allocator);
to.offsetBuffer = transferBuffer(offsetBuffer, to.allocator);
to.lastSet = lastSet;
if (valueCount > 0) {
to.setValueCount(valueCount);
}
clear();
}
/**
 * Slice this vector at desired index and length and transfer the
 * corresponding data to the target vector.
 * @param startIndex start position of the split in source vector.
 * @param length length of the split.
 */
@Override
public void splitAndTransfer(int startIndex, int length) {
Preconditions.checkArgument(startIndex >= 0 && length >= 0 && startIndex + length <= valueCount,
"Invalid parameters startIndex: %s, length: %s for valueCount: %s", startIndex, length, valueCount);
final long startPoint = offsetBuffer.getLong((long) startIndex * OFFSET_WIDTH);
final long sliceLength = offsetBuffer.getLong((long) (startIndex + length) * OFFSET_WIDTH) - startPoint;
to.clear();
to.allocateOffsetBuffer((length + 1) * OFFSET_WIDTH);
/* splitAndTransfer offset buffer: rebase each offset so the slice starts at 0 */
for (int i = 0; i < length + 1; i++) {
final long relativeOffset = offsetBuffer.getLong((long) (startIndex + i) * OFFSET_WIDTH) - startPoint;
to.offsetBuffer.setLong((long) i * OFFSET_WIDTH, relativeOffset);
}
/* splitAndTransfer validity buffer */
splitAndTransferValidityBuffer(startIndex, length, to);
/* splitAndTransfer data buffer */
dataTransferPair.splitAndTransfer(checkedCastToInt(startPoint), checkedCastToInt(sliceLength));
to.lastSet = length - 1;
to.setValueCount(length);
}
/*
 * Transfer the validity bits. When the slice is byte-aligned the source
 * buffer is shared (sliced); otherwise bits are re-packed byte by byte.
 */
private void splitAndTransferValidityBuffer(int startIndex, int length, LargeListVector target) {
int firstByteSource = BitVectorHelper.byteIndex(startIndex);
int lastByteSource = BitVectorHelper.byteIndex(valueCount - 1);
int byteSizeTarget = getValidityBufferSizeFromCount(length);
int offset = startIndex % 8;
if (length > 0) {
if (offset == 0) {
// slice
if (target.validityBuffer != null) {
target.validityBuffer.getReferenceManager().release();
}
target.validityBuffer = validityBuffer.slice(firstByteSource, byteSizeTarget);
target.validityBuffer.getReferenceManager().retain(1);
} else {
/* Copy data
 * When the first bit starts from the middle of a byte (offset != 0),
 * copy data from src BitVector.
 * Each byte in the target is composed by a part in i-th byte,
 * another part in (i+1)-th byte.
 */
target.allocateValidityBuffer(byteSizeTarget);
for (int i = 0; i < byteSizeTarget - 1; i++) {
byte b1 = BitVectorHelper.getBitsFromCurrentByte(validityBuffer, firstByteSource + i, offset);
byte b2 = BitVectorHelper.getBitsFromNextByte(validityBuffer, firstByteSource + i + 1, offset);
target.validityBuffer.setByte(i, (b1 + b2));
}
/* Copying the last piece is done in the following manner:
 * if the source vector has 1 or more bytes remaining, we copy
 * the last piece as a byte formed by shifting data
 * from the current byte and the next byte.
 *
 * if the source vector has no more bytes remaining
 * (we are at the last byte), we copy the last piece as a byte
 * by shifting data from the current byte.
 */
if ((firstByteSource + byteSizeTarget - 1) < lastByteSource) {
byte b1 = BitVectorHelper.getBitsFromCurrentByte(validityBuffer,
firstByteSource + byteSizeTarget - 1, offset);
byte b2 = BitVectorHelper.getBitsFromNextByte(validityBuffer,
firstByteSource + byteSizeTarget, offset);
target.validityBuffer.setByte(byteSizeTarget - 1, b1 + b2);
} else {
byte b1 = BitVectorHelper.getBitsFromCurrentByte(validityBuffer,
firstByteSource + byteSizeTarget - 1, offset);
target.validityBuffer.setByte(byteSizeTarget - 1, b1);
}
}
}
}
@Override
public ValueVector getTo() {
return to;
}
@Override
public void copyValueSafe(int from, int to) {
this.to.copyFrom(from, to, LargeListVector.this);
}
}
/** Lazily create and cache the reader for this vector. */
@Override
public UnionLargeListReader getReader() {
  UnionLargeListReader current = reader;
  if (current == null) {
    current = new UnionLargeListReader(this);
    reader = current;
  }
  return current;
}
/**
 * Initialize the data vector (and execute callback) if it hasn't already been done,
 * returns the data vector.
 * @param fieldType type of the child data vector to create or validate
 * @return the (possibly newly created) data vector plus a created flag
 * @throws SchemaChangeRuntimeException if an existing data vector has a different type
 */
public <T extends ValueVector> AddOrGetResult<T> addOrGetVector(FieldType fieldType) {
boolean created = false;
if (vector instanceof NullVector) {
vector = fieldType.createNewSingleVector(defaultDataVectorName, allocator, callBack);
// returned vector must have the same field
created = true;
if (callBack != null &&
// not a schema change if changing from ZeroVector to ZeroVector
(fieldType.getType().getTypeID() != ArrowType.ArrowTypeID.Null)) {
callBack.doWork();
}
}
if (vector.getField().getType().getTypeID() != fieldType.getType().getTypeID()) {
final String msg = String.format("Inner vector type mismatch. Requested type: [%s], actual type: [%s]",
fieldType.getType().getTypeID(), vector.getField().getType().getTypeID());
throw new SchemaChangeRuntimeException(msg);
}
invalidateReader();
// unchecked cast: callers are responsible for requesting a compatible T
return new AddOrGetResult<>((T) vector, created);
}
/**
 * Get the size (number of bytes) of underlying buffers used by this
 * vector.
 * @return size of underlying buffers.
 */
@Override
public int getBufferSize() {
  if (valueCount == 0) {
    return 0;
  }
  final int offsetsSize = (valueCount + 1) * OFFSET_WIDTH;
  final int validitySize = getValidityBufferSizeFromCount(valueCount);
  return offsetsSize + validitySize + vector.getBufferSize();
}
/**
 * Bytes needed to hold {@code valueCount} lists, including the child data
 * up to the current end offset.
 */
@Override
public int getBufferSizeFor(int valueCount) {
  if (valueCount == 0) {
    return 0;
  }
  final int validitySize = getValidityBufferSizeFromCount(valueCount);
  final long childCount = offsetBuffer.getLong((long) valueCount * OFFSET_WIDTH);
  final int offsetsSize = (valueCount + 1) * OFFSET_WIDTH;
  return offsetsSize + vector.getBufferSizeFor(checkedCastToInt(childCount)) + validitySize;
}
/** Field describing this vector, with the child data vector's field nested inside. */
@Override
public Field getField() {
  final Field childField = getDataVector().getField();
  return new Field(getName(), fieldType, Collections.singletonList(childField));
}
/** This vector always has the LARGELIST minor type. */
@Override
public MinorType getMinorType() {
return MinorType.LARGELIST;
}
/** Name this vector was created with. */
@Override
public String getName() {
  return this.name;
}
/** Release all buffers (own and child) and reset bookkeeping to the empty state. */
@Override
public void clear() {
offsetBuffer = releaseBuffer(offsetBuffer);
vector.clear();
valueCount = 0;
// super.clear() releases base-class state; our validity buffer is released after it
super.clear();
validityBuffer = releaseBuffer(validityBuffer);
lastSet = -1;
}
/** Zero the buffers and reset counters without releasing any memory. */
@Override
public void reset() {
  validityBuffer.setZero(0, validityBuffer.capacity());
  offsetBuffer.setZero(0, offsetBuffer.capacity());
  vector.reset();
  valueCount = 0;
  lastSet = -1;
}
/**
 * Return the underlying buffers associated with this vector. Note that this doesn't
 * impact the reference counts for this buffer so it only should be used for in-context
 * access. Also note that this buffer changes regularly thus
 * external classes shouldn't hold a reference to it (unless they change it).
 *
 * @param clear Whether to clear vector before returning; the buffers will still be refcounted
 * but the returned array will be the only reference to them
 * @return The underlying {@link ArrowBuf buffers} that is used by this
 * vector instance.
 */
@Override
public ArrowBuf[] getBuffers(boolean clear) {
  setReaderAndWriterIndex();
  final ArrowBuf[] buffers;
  if (getBufferSize() == 0) {
    buffers = new ArrowBuf[0];
  } else {
    final List<ArrowBuf> all = new ArrayList<>();
    all.add(offsetBuffer);
    all.add(validityBuffer);
    Collections.addAll(all, vector.getBuffers(false));
    buffers = all.toArray(new ArrowBuf[0]);
  }
  if (clear) {
    // retain on behalf of the caller, then drop our own references
    for (ArrowBuf buffer : buffers) {
      buffer.getReferenceManager().retain();
    }
    clear();
  }
  return buffers;
}
/** Drop the cached reader so the next getReader() rebuilds it against the current data vector. */
protected void invalidateReader() {
reader = null;
}
/**
 * Get the element in the list vector at a particular index.
 * @param index position of the element
 * @return the list at the given position as a Java List, or null if unset
 */
@Override
public Object getObject(int index) {
  if (isSet(index) == 0) {
    return null;
  }
  final long first = offsetBuffer.getLong((long) index * OFFSET_WIDTH);
  final long last = offsetBuffer.getLong(((long) index + 1L) * OFFSET_WIDTH);
  final ValueVector data = getDataVector();
  final List<Object> elements = new JsonStringArrayList<>();
  for (long pos = first; pos < last; pos++) {
    elements.add(data.getObject(checkedCastToInt(pos)));
  }
  return elements;
}
/**
 * Check if element at given index is null.
 *
 * @param index position of element
 * @return true if element at given index is null, false otherwise
 */
@Override
public boolean isNull(int index) {
  return isSet(index) == 0;
}
/**
 * Check if element at given index is empty list.
 * @param index position of element
 * @return true if element at given index is empty list or NULL, false otherwise
 */
public boolean isEmpty(int index) {
  if (isNull(index)) {
    return true;
  }
  final long first = offsetBuffer.getLong((long) index * OFFSET_WIDTH);
  final long last = offsetBuffer.getLong(((long) index + 1L) * OFFSET_WIDTH);
  return first == last;
}
/**
 * Read the validity bit for the given index.
 *
 * @param index position of element
 * @return 1 if element at given index is not null, 0 otherwise
 */
public int isSet(int index) {
  final int byteIndex = index >> 3;
  final int bitIndex = index & 7;
  final byte bits = validityBuffer.getByte(byteIndex);
  return (bits >> bitIndex) & 0x01;
}
/**
 * Get the number of elements that are null in the vector.
 *
 * @return the number of null elements.
 */
@Override
public int getNullCount() {
return BitVectorHelper.getNullCount(validityBuffer, valueCount);
}
/**
 * Get the current value capacity for the vector.
 * @return number of elements that vector can hold (limited by both the
 *     validity and offset buffers).
 */
@Override
public int getValueCapacity() {
return getValidityAndOffsetValueCapacity();
}
/** Number of 8-byte offset slots the offset buffer can hold. */
protected int getOffsetBufferValueCapacity() {
  final long slots = offsetBuffer.capacity() / OFFSET_WIDTH;
  return checkedCastToInt(slots);
}
/** Value capacity as bounded by both buffers (n values need n + 1 offsets). */
private int getValidityAndOffsetValueCapacity() {
  final int offsetCapacity = Math.max(getOffsetBufferValueCapacity() - 1, 0);
  final int validityCapacity = getValidityBufferValueCapacity();
  return Math.min(offsetCapacity, validityCapacity);
}
/** Number of validity bits the validity buffer can hold, capped at Integer.MAX_VALUE. */
private int getValidityBufferValueCapacity() {
  final long bits = validityBuffer.capacity() * 8;
  return capAtMaxInt(bits);
}
/**
 * Sets the list at index to be not-null. Reallocates validity buffer if index
 * is larger than current capacity.
 * @param index position to mark as set
 */
public void setNotNull(int index) {
while (index >= getValidityAndOffsetValueCapacity()) {
reallocValidityAndOffsetBuffers();
}
BitVectorHelper.setBit(validityBuffer, index);
// record the highest written position for hole-filling in setValueCount
lastSet = index;
}
/**
 * Start a new value in the list vector.
 *
 * @param index index of the value to start
 * @return the offset at which the new value's elements begin
 */
public long startNewValue(long index) {
while (index >= getValidityAndOffsetValueCapacity()) {
reallocValidityAndOffsetBuffers();
}
// back-fill skipped slots so each start offset equals the previous end offset (empty lists)
for (int i = lastSet + 1; i <= index; i++) {
final long currentOffset = offsetBuffer.getLong((long) i * OFFSET_WIDTH);
offsetBuffer.setLong(((long) i + 1L) * OFFSET_WIDTH, currentOffset);
}
BitVectorHelper.setBit(validityBuffer, index);
lastSet = checkedCastToInt(index);
return offsetBuffer.getLong(((long) lastSet + 1L) * OFFSET_WIDTH);
}
/**
 * End the current value.
 *
 * @param index index of the value to end
 * @param size number of elements in the list that was written
 */
public void endValue(int index, long size) {
  // advance the end offset of list `index` by the number of elements written
  final long offsetIndex = ((long) index + 1L) * OFFSET_WIDTH;
  final long currentOffset = offsetBuffer.getLong(offsetIndex);
  offsetBuffer.setLong(offsetIndex, currentOffset + size);
}
/**
 * Sets the value count for the vector.
 *
 * <p>
 * Important note: The underlying vector does not support 64-bit
 * allocations yet. This may throw if attempting to hold larger
 * than what a 32-bit vector can store.
 * </p>
 *
 * @param valueCount value count
 */
@Override
public void setValueCount(int valueCount) {
  this.valueCount = valueCount;
  if (valueCount > 0) {
    while (valueCount > getValidityAndOffsetValueCapacity()) {
      /* check if validity and offset buffers need to be re-allocated */
      reallocValidityAndOffsetBuffers();
    }
    for (int i = lastSet + 1; i < valueCount; i++) {
      /* fill the holes with offsets */
      final long currentOffset = offsetBuffer.getLong((long) i * OFFSET_WIDTH);
      offsetBuffer.setLong(((long) i + 1L) * OFFSET_WIDTH, currentOffset);
    }
  }
  /* valueCount for the data vector is the current end offset */
  final long childValueCount = (valueCount == 0) ? 0 :
      offsetBuffer.getLong(((long) lastSet + 1L) * OFFSET_WIDTH);
  /* set the value count of data vector and this will take care of
   * checking whether data buffer needs to be reallocated.
   * TODO: revisit when 64-bit vectors are supported
   */
  // BUG FIX: the original used '||', which made this range check a tautology
  // (every long is either <= MAX_VALUE or >= MIN_VALUE), so the narrowing cast
  // below could silently truncate. '&&' makes the guard actually fire.
  Preconditions.checkArgument(childValueCount <= Integer.MAX_VALUE && childValueCount >= Integer.MIN_VALUE,
      "LargeListVector doesn't yet support 64-bit allocations: %s", childValueCount);
  vector.setValueCount((int) childValueCount);
}
/** Set the index of the last written list (used for offset hole-filling). */
public void setLastSet(int value) {
  this.lastSet = value;
}
/** Index of the last written list, or -1 if nothing has been written. */
public int getLastSet() {
  return this.lastSet;
}
/** Start offset (into the data vector) of the list at {@code index}. */
public long getElementStartIndex(int index) {
  final long offsetIndex = (long) index * OFFSET_WIDTH;
  return offsetBuffer.getLong(offsetIndex);
}
/** End offset (exclusive, into the data vector) of the list at {@code index}. */
public long getElementEndIndex(int index) {
  final long offsetIndex = ((long) index + 1L) * OFFSET_WIDTH;
  return offsetBuffer.getLong(offsetIndex);
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3beta1/test_case.proto
package com.google.cloud.dialogflow.cx.v3beta1;
public final class TestCaseProto {
private TestCaseProto() {} // static holder for generated descriptors; never instantiated
// No extensions are declared in test_case.proto, so registration is a no-op.
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}
// Delegates to the lite-registry overload; a full registry is usable as a lite one.
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
}
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_TestCase_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_TestCase_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_TestCaseResult_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_TestCaseResult_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_TestConfig_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_TestConfig_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_UserInput_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_UserInput_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_VirtualAgentOutput_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_VirtualAgentOutput_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_TestRunDifference_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_TestRunDifference_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_TransitionNode_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_TransitionNode_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_Transition_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_Transition_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_Coverage_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_Coverage_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_Coverage_Transition_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_Coverage_Transition_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_IntentCoverage_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_IntentCoverage_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_IntentCoverage_Intent_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_IntentCoverage_Intent_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_CalculateCoverageRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_CalculateCoverageRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_CalculateCoverageResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_CalculateCoverageResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCasesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCasesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCasesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCasesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchDeleteTestCasesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchDeleteTestCasesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_CreateTestCaseRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_CreateTestCaseRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateTestCaseRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateTestCaseRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_GetTestCaseRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_GetTestCaseRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_RunTestCaseRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_RunTestCaseRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_RunTestCaseResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_RunTestCaseResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_RunTestCaseMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_RunTestCaseMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesRequest_fieldAccessorTable;
// Generated by the protocol buffer compiler (protoc) — DO NOT EDIT BY HAND.
// Each message type declared in test_case.proto gets a pair of package-private
// static fields here: its raw Descriptor and the FieldAccessorTable that maps
// proto field names to the generated Java accessors via reflection. Both are
// assigned exactly once in the static initializer below (after the
// FileDescriptor is built) and are read by the corresponding generated
// message classes' internalGetFieldAccessorTable() implementations.
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_TestError_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_TestError_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_TestCaseError_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_TestCaseError_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_ExportTestCasesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_ExportTestCasesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_ExportTestCasesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_ExportTestCasesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_ExportTestCasesMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_ExportTestCasesMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCaseResultsRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCaseResultsRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCaseResultsResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCaseResultsResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3beta1_GetTestCaseResultRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3beta1_GetTestCaseResultRequest_fieldAccessorTable;
/**
 * Returns the {@link com.google.protobuf.Descriptors.FileDescriptor} for
 * {@code google/cloud/dialogflow/cx/v3beta1/test_case.proto}.
 *
 * <p>The returned descriptor is built once in this class's static initializer
 * from the embedded serialized descriptor data, so it is non-null by the time
 * any caller can reach this method (class initialization happens-before use).
 */
public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
return descriptor;
}
// Backing FileDescriptor; assigned exactly once in the static initializer below.
private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
static {
java.lang.String[] descriptorData = {
"\n2google/cloud/dialogflow/cx/v3beta1/tes"
+ "t_case.proto\022\"google.cloud.dialogflow.cx"
+ ".v3beta1\032\034google/api/annotations.proto\032\027"
+ "google/api/client.proto\032\037google/api/fiel"
+ "d_behavior.proto\032\031google/api/resource.pr"
+ "oto\032-google/cloud/dialogflow/cx/v3beta1/"
+ "flow.proto\032/google/cloud/dialogflow/cx/v"
+ "3beta1/intent.proto\032-google/cloud/dialog"
+ "flow/cx/v3beta1/page.proto\0329google/cloud"
+ "/dialogflow/cx/v3beta1/response_message."
+ "proto\0320google/cloud/dialogflow/cx/v3beta"
+ "1/session.proto\032?google/cloud/dialogflow"
+ "/cx/v3beta1/transition_route_group.proto"
+ "\032#google/longrunning/operations.proto\032\033g"
+ "oogle/protobuf/empty.proto\032 google/proto"
+ "buf/field_mask.proto\032\034google/protobuf/st"
+ "ruct.proto\032\037google/protobuf/timestamp.pr"
+ "oto\032\027google/rpc/status.proto\"\356\003\n\010TestCas"
+ "e\022\014\n\004name\030\001 \001(\t\022\014\n\004tags\030\002 \003(\t\022\031\n\014display"
+ "_name\030\003 \001(\tB\003\340A\002\022\r\n\005notes\030\004 \001(\t\022C\n\013test_"
+ "config\030\r \001(\0132..google.cloud.dialogflow.c"
+ "x.v3beta1.TestConfig\022Z\n\034test_case_conver"
+ "sation_turns\030\005 \003(\01324.google.cloud.dialog"
+ "flow.cx.v3beta1.ConversationTurn\0226\n\rcrea"
+ "tion_time\030\n \001(\0132\032.google.protobuf.Timest"
+ "ampB\003\340A\003\022L\n\020last_test_result\030\014 \001(\01322.goo"
+ "gle.cloud.dialogflow.cx.v3beta1.TestCase"
+ "Result:u\352Ar\n\"dialogflow.googleapis.com/T"
+ "estCase\022Lprojects/{project}/locations/{l"
+ "ocation}/agents/{agent}/testCases/{test_"
+ "case}\"\265\003\n\016TestCaseResult\022\014\n\004name\030\001 \001(\t\022?"
+ "\n\013environment\030\002 \001(\tB*\372A\'\n%dialogflow.goo"
+ "gleapis.com/Environment\022P\n\022conversation_"
+ "turns\030\003 \003(\01324.google.cloud.dialogflow.cx"
+ ".v3beta1.ConversationTurn\022C\n\013test_result"
+ "\030\004 \001(\0162..google.cloud.dialogflow.cx.v3be"
+ "ta1.TestResult\022-\n\ttest_time\030\005 \001(\0132\032.goog"
+ "le.protobuf.Timestamp:\215\001\352A\211\001\n(dialogflow"
+ ".googleapis.com/TestCaseResult\022]projects"
+ "/{project}/locations/{location}/agents/{"
+ "agent}/testCases/{test_case}/results/{re"
+ "sult}\"\\\n\nTestConfig\022\033\n\023tracking_paramete"
+ "rs\030\001 \003(\t\0221\n\004flow\030\002 \001(\tB#\372A \n\036dialogflow."
+ "googleapis.com/Flow\"\342\006\n\020ConversationTurn"
+ "\022R\n\nuser_input\030\001 \001(\0132>.google.cloud.dial"
+ "ogflow.cx.v3beta1.ConversationTurn.UserI"
+ "nput\022e\n\024virtual_agent_output\030\002 \001(\0132G.goo"
+ "gle.cloud.dialogflow.cx.v3beta1.Conversa"
+ "tionTurn.VirtualAgentOutput\032\277\001\n\tUserInpu"
+ "t\022=\n\005input\030\005 \001(\0132..google.cloud.dialogfl"
+ "ow.cx.v3beta1.QueryInput\0224\n\023injected_par"
+ "ameters\030\002 \001(\0132\027.google.protobuf.Struct\022\032"
+ "\n\022is_webhook_enabled\030\003 \001(\010\022!\n\031enable_sen"
+ "timent_analysis\030\007 \001(\010\032\320\003\n\022VirtualAgentOu"
+ "tput\0223\n\022session_parameters\030\004 \001(\0132\027.googl"
+ "e.protobuf.Struct\022O\n\013differences\030\005 \003(\01325"
+ ".google.cloud.dialogflow.cx.v3beta1.Test"
+ "RunDifferenceB\003\340A\003\0228\n\017diagnostic_info\030\006 "
+ "\001(\0132\027.google.protobuf.StructB\006\340A\002\340A\004\022D\n\020"
+ "triggered_intent\030\007 \001(\0132*.google.cloud.di"
+ "alogflow.cx.v3beta1.Intent\022>\n\014current_pa"
+ "ge\030\010 \001(\0132(.google.cloud.dialogflow.cx.v3"
+ "beta1.Page\022P\n\016text_responses\030\t \003(\01328.goo"
+ "gle.cloud.dialogflow.cx.v3beta1.Response"
+ "Message.Text\022\"\n\006status\030\n \001(\0132\022.google.rp"
+ "c.Status\"\322\001\n\021TestRunDifference\022L\n\004type\030\001"
+ " \001(\0162>.google.cloud.dialogflow.cx.v3beta"
+ "1.TestRunDifference.DiffType\022\023\n\013descript"
+ "ion\030\002 \001(\t\"Z\n\010DiffType\022\031\n\025DIFF_TYPE_UNSPE"
+ "CIFIED\020\000\022\n\n\006INTENT\020\001\022\010\n\004PAGE\020\002\022\016\n\nPARAME"
+ "TERS\020\003\022\r\n\tUTTERANCE\020\004\"\226\005\n\022TransitionCove"
+ "rage\022V\n\013transitions\030\001 \003(\0132A.google.cloud"
+ ".dialogflow.cx.v3beta1.TransitionCoverag"
+ "e.Transition\022\026\n\016coverage_score\030\002 \001(\002\032\214\001\n"
+ "\016TransitionNode\0228\n\004page\030\001 \001(\0132(.google.c"
+ "loud.dialogflow.cx.v3beta1.PageH\000\0228\n\004flo"
+ "w\030\002 \001(\0132(.google.cloud.dialogflow.cx.v3b"
+ "eta1.FlowH\000B\006\n\004kind\032\200\003\n\nTransition\022U\n\006so"
+ "urce\030\001 \001(\0132E.google.cloud.dialogflow.cx."
+ "v3beta1.TransitionCoverage.TransitionNod"
+ "e\022\r\n\005index\030\004 \001(\005\022U\n\006target\030\002 \001(\0132E.googl"
+ "e.cloud.dialogflow.cx.v3beta1.Transition"
+ "Coverage.TransitionNode\022\017\n\007covered\030\003 \001(\010"
+ "\022O\n\020transition_route\030\005 \001(\01323.google.clou"
+ "d.dialogflow.cx.v3beta1.TransitionRouteH"
+ "\000\022I\n\revent_handler\030\006 \001(\01320.google.cloud."
+ "dialogflow.cx.v3beta1.EventHandlerH\000B\010\n\006"
+ "detail\"\341\003\n\034TransitionRouteGroupCoverage\022"
+ "\\\n\tcoverages\030\001 \003(\0132I.google.cloud.dialog"
+ "flow.cx.v3beta1.TransitionRouteGroupCove"
+ "rage.Coverage\022\026\n\016coverage_score\030\002 \001(\002\032\312\002"
+ "\n\010Coverage\022M\n\013route_group\030\001 \001(\01328.google"
+ ".cloud.dialogflow.cx.v3beta1.TransitionR"
+ "outeGroup\022i\n\013transitions\030\002 \003(\0132T.google."
+ "cloud.dialogflow.cx.v3beta1.TransitionRo"
+ "uteGroupCoverage.Coverage.Transition\022\026\n\016"
+ "coverage_score\030\003 \001(\002\032l\n\nTransition\022M\n\020tr"
+ "ansition_route\030\001 \001(\01323.google.cloud.dial"
+ "ogflow.cx.v3beta1.TransitionRoute\022\017\n\007cov"
+ "ered\030\002 \001(\010\"\306\001\n\016IntentCoverage\022J\n\007intents"
+ "\030\001 \003(\01329.google.cloud.dialogflow.cx.v3be"
+ "ta1.IntentCoverage.Intent\022\026\n\016coverage_sc"
+ "ore\030\002 \001(\002\032P\n\006Intent\0225\n\006intent\030\001 \001(\tB%\372A\""
+ "\n dialogflow.googleapis.com/Intent\022\017\n\007co"
+ "vered\030\002 \001(\010\"\234\002\n\030CalculateCoverageRequest"
+ "\0226\n\005agent\030\003 \001(\tB\'\340A\002\372A!\n\037dialogflow.goog"
+ "leapis.com/Agent\022\\\n\004type\030\002 \001(\0162I.google."
+ "cloud.dialogflow.cx.v3beta1.CalculateCov"
+ "erageRequest.CoverageTypeB\003\340A\002\"j\n\014Covera"
+ "geType\022\035\n\031COVERAGE_TYPE_UNSPECIFIED\020\000\022\n\n"
+ "\006INTENT\020\001\022\023\n\017PAGE_TRANSITION\020\002\022\032\n\026TRANSI"
+ "TION_ROUTE_GROUP\020\003\"\351\002\n\031CalculateCoverage"
+ "Response\0223\n\005agent\030\005 \001(\tB$\372A!\n\037dialogflow"
+ ".googleapis.com/Agent\022M\n\017intent_coverage"
+ "\030\002 \001(\01322.google.cloud.dialogflow.cx.v3be"
+ "ta1.IntentCoverageH\000\022U\n\023transition_cover"
+ "age\030\004 \001(\01326.google.cloud.dialogflow.cx.v"
+ "3beta1.TransitionCoverageH\000\022`\n\024route_gro"
+ "up_coverage\030\006 \001(\0132@.google.cloud.dialogf"
+ "low.cx.v3beta1.TransitionRouteGroupCover"
+ "ageH\000B\017\n\rcoverage_type\"\223\002\n\024ListTestCases"
+ "Request\022:\n\006parent\030\001 \001(\tB*\340A\002\372A$\022\"dialogf"
+ "low.googleapis.com/TestCase\022\021\n\tpage_size"
+ "\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\022S\n\004view\030\004 \001(\016"
+ "2E.google.cloud.dialogflow.cx.v3beta1.Li"
+ "stTestCasesRequest.TestCaseView\"C\n\014TestC"
+ "aseView\022\036\n\032TEST_CASE_VIEW_UNSPECIFIED\020\000\022"
+ "\t\n\005BASIC\020\001\022\010\n\004FULL\020\002\"r\n\025ListTestCasesRes"
+ "ponse\022@\n\ntest_cases\030\001 \003(\0132,.google.cloud"
+ ".dialogflow.cx.v3beta1.TestCase\022\027\n\017next_"
+ "page_token\030\002 \001(\t\"\224\001\n\033BatchDeleteTestCase"
+ "sRequest\022:\n\006parent\030\001 \001(\tB*\340A\002\372A$\022\"dialog"
+ "flow.googleapis.com/TestCase\0229\n\005names\030\003 "
+ "\003(\tB*\340A\002\372A$\n\"dialogflow.googleapis.com/T"
+ "estCase\"\231\001\n\025CreateTestCaseRequest\022:\n\006par"
+ "ent\030\001 \001(\tB*\340A\002\372A$\022\"dialogflow.googleapis"
+ ".com/TestCase\022D\n\ttest_case\030\002 \001(\0132,.googl"
+ "e.cloud.dialogflow.cx.v3beta1.TestCaseB\003"
+ "\340A\002\"\223\001\n\025UpdateTestCaseRequest\022D\n\ttest_ca"
+ "se\030\001 \001(\0132,.google.cloud.dialogflow.cx.v3"
+ "beta1.TestCaseB\003\340A\002\0224\n\013update_mask\030\002 \001(\013"
+ "2\032.google.protobuf.FieldMaskB\003\340A\002\"N\n\022Get"
+ "TestCaseRequest\0228\n\004name\030\001 \001(\tB*\340A\002\372A$\n\"d"
+ "ialogflow.googleapis.com/TestCase\"\222\001\n\022Ru"
+ "nTestCaseRequest\0228\n\004name\030\001 \001(\tB*\340A\002\372A$\n\""
+ "dialogflow.googleapis.com/TestCase\022B\n\013en"
+ "vironment\030\002 \001(\tB-\340A\001\372A\'\n%dialogflow.goog"
+ "leapis.com/Environment\"Y\n\023RunTestCaseRes"
+ "ponse\022B\n\006result\030\002 \001(\01322.google.cloud.dia"
+ "logflow.cx.v3beta1.TestCaseResult\"\025\n\023Run"
+ "TestCaseMetadata\"\332\001\n\030BatchRunTestCasesRe"
+ "quest\022:\n\006parent\030\001 \001(\tB*\340A\002\372A$\022\"dialogflo"
+ "w.googleapis.com/TestCase\022B\n\013environment"
+ "\030\002 \001(\tB-\340A\001\372A\'\n%dialogflow.googleapis.co"
+ "m/Environment\022>\n\ntest_cases\030\003 \003(\tB*\340A\002\372A"
+ "$\n\"dialogflow.googleapis.com/TestCase\"`\n"
+ "\031BatchRunTestCasesResponse\022C\n\007results\030\001 "
+ "\003(\01322.google.cloud.dialogflow.cx.v3beta1"
+ ".TestCaseResult\"Z\n\031BatchRunTestCasesMeta"
+ "data\022=\n\006errors\030\001 \003(\0132-.google.cloud.dial"
+ "ogflow.cx.v3beta1.TestError\"\232\001\n\tTestErro"
+ "r\022:\n\ttest_case\030\001 \001(\tB\'\372A$\n\"dialogflow.go"
+ "ogleapis.com/TestCase\022\"\n\006status\030\002 \001(\0132\022."
+ "google.rpc.Status\022-\n\ttest_time\030\003 \001(\0132\032.g"
+ "oogle.protobuf.Timestamp\"\204\001\n\026ImportTestC"
+ "asesRequest\022:\n\006parent\030\001 \001(\tB*\340A\002\372A$\022\"dia"
+ "logflow.googleapis.com/TestCase\022\021\n\007gcs_u"
+ "ri\030\002 \001(\tH\000\022\021\n\007content\030\003 \001(\014H\000B\010\n\006source\""
+ "Q\n\027ImportTestCasesResponse\0226\n\005names\030\001 \003("
+ "\tB\'\372A$\n\"dialogflow.googleapis.com/TestCa"
+ "se\"\\\n\027ImportTestCasesMetadata\022A\n\006errors\030"
+ "\001 \003(\01321.google.cloud.dialogflow.cx.v3bet"
+ "a1.TestCaseError\"t\n\rTestCaseError\022?\n\ttes"
+ "t_case\030\001 \001(\0132,.google.cloud.dialogflow.c"
+ "x.v3beta1.TestCase\022\"\n\006status\030\002 \001(\0132\022.goo"
+ "gle.rpc.Status\"\241\002\n\026ExportTestCasesReques"
+ "t\022:\n\006parent\030\001 \001(\tB*\340A\002\372A$\022\"dialogflow.go"
+ "ogleapis.com/TestCase\022\021\n\007gcs_uri\030\002 \001(\tH\000"
+ "\022Z\n\013data_format\030\003 \001(\0162E.google.cloud.dia"
+ "logflow.cx.v3beta1.ExportTestCasesReques"
+ "t.DataFormat\022\016\n\006filter\030\004 \001(\t\"=\n\nDataForm"
+ "at\022\033\n\027DATA_FORMAT_UNSPECIFIED\020\000\022\010\n\004BLOB\020"
+ "\001\022\010\n\004JSON\020\002B\r\n\013destination\"N\n\027ExportTest"
+ "CasesResponse\022\021\n\007gcs_uri\030\001 \001(\tH\000\022\021\n\007cont"
+ "ent\030\002 \001(\014H\000B\r\n\013destination\"\031\n\027ExportTest"
+ "CasesMetadata\"\225\001\n\032ListTestCaseResultsReq"
+ "uest\022@\n\006parent\030\001 \001(\tB0\340A\002\372A*\022(dialogflow"
+ ".googleapis.com/TestCaseResult\022\021\n\tpage_s"
+ "ize\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\022\016\n\006filter\030"
+ "\004 \001(\t\"\205\001\n\033ListTestCaseResultsResponse\022M\n"
+ "\021test_case_results\030\001 \003(\01322.google.cloud."
+ "dialogflow.cx.v3beta1.TestCaseResult\022\027\n\017"
+ "next_page_token\030\002 \001(\t\"Z\n\030GetTestCaseResu"
+ "ltRequest\022>\n\004name\030\001 \001(\tB0\340A\002\372A*\n(dialogf"
+ "low.googleapis.com/TestCaseResult*A\n\nTes"
+ "tResult\022\033\n\027TEST_RESULT_UNSPECIFIED\020\000\022\n\n\006"
+ "PASSED\020\001\022\n\n\006FAILED\020\0022\266\026\n\tTestCases\022\322\001\n\rL"
+ "istTestCases\0228.google.cloud.dialogflow.c"
+ "x.v3beta1.ListTestCasesRequest\0329.google."
+ "cloud.dialogflow.cx.v3beta1.ListTestCase"
+ "sResponse\"L\202\323\344\223\002=\022;/v3beta1/{parent=proj"
+ "ects/*/locations/*/agents/*}/testCases\332A"
+ "\006parent\022\314\001\n\024BatchDeleteTestCases\022?.googl"
+ "e.cloud.dialogflow.cx.v3beta1.BatchDelet"
+ "eTestCasesRequest\032\026.google.protobuf.Empt"
+ "y\"[\202\323\344\223\002L\"G/v3beta1/{parent=projects/*/l"
+ "ocations/*/agents/*}/testCases:batchDele"
+ "te:\001*\332A\006parent\022\277\001\n\013GetTestCase\0226.google."
+ "cloud.dialogflow.cx.v3beta1.GetTestCaseR"
+ "equest\032,.google.cloud.dialogflow.cx.v3be"
+ "ta1.TestCase\"J\202\323\344\223\002=\022;/v3beta1/{name=pro"
+ "jects/*/locations/*/agents/*/testCases/*"
+ "}\332A\004name\022\334\001\n\016CreateTestCase\0229.google.clo"
+ "ud.dialogflow.cx.v3beta1.CreateTestCaseR"
+ "equest\032,.google.cloud.dialogflow.cx.v3be"
+ "ta1.TestCase\"a\202\323\344\223\002H\";/v3beta1/{parent=p"
+ "rojects/*/locations/*/agents/*}/testCase"
+ "s:\ttest_case\332A\020parent,test_case\022\353\001\n\016Upda"
+ "teTestCase\0229.google.cloud.dialogflow.cx."
+ "v3beta1.UpdateTestCaseRequest\032,.google.c"
+ "loud.dialogflow.cx.v3beta1.TestCase\"p\202\323\344"
+ "\223\002R2E/v3beta1/{test_case.name=projects/*"
+ "/locations/*/agents/*/testCases/*}:\ttest"
+ "_case\332A\025test_case,update_mask\022\335\001\n\013RunTes"
+ "tCase\0226.google.cloud.dialogflow.cx.v3bet"
+ "a1.RunTestCaseRequest\032\035.google.longrunni"
+ "ng.Operation\"w\202\323\344\223\002D\"?/v3beta1/{name=pro"
+ "jects/*/locations/*/agents/*/testCases/*"
+ "}:run:\001*\312A*\n\023RunTestCaseResponse\022\023RunTes"
+ "tCaseMetadata\022\373\001\n\021BatchRunTestCases\022<.go"
+ "ogle.cloud.dialogflow.cx.v3beta1.BatchRu"
+ "nTestCasesRequest\032\035.google.longrunning.O"
+ "peration\"\210\001\202\323\344\223\002I\"D/v3beta1/{parent=proj"
+ "ects/*/locations/*/agents/*}/testCases:b"
+ "atchRun:\001*\312A6\n\031BatchRunTestCasesResponse"
+ "\022\031BatchRunTestCasesMetadata\022\346\001\n\021Calculat"
+ "eCoverage\022<.google.cloud.dialogflow.cx.v"
+ "3beta1.CalculateCoverageRequest\032=.google"
+ ".cloud.dialogflow.cx.v3beta1.CalculateCo"
+ "verageResponse\"T\202\323\344\223\002N\022L/v3beta1/{agent="
+ "projects/*/locations/*/agents/*}/testCas"
+ "es:calculateCoverage\022\361\001\n\017ImportTestCases"
+ "\022:.google.cloud.dialogflow.cx.v3beta1.Im"
+ "portTestCasesRequest\032\035.google.longrunnin"
+ "g.Operation\"\202\001\202\323\344\223\002G\"B/v3beta1/{parent=p"
+ "rojects/*/locations/*/agents/*}/testCase"
+ "s:import:\001*\312A2\n\027ImportTestCasesResponse\022"
+ "\027ImportTestCasesMetadata\022\361\001\n\017ExportTestC"
+ "ases\022:.google.cloud.dialogflow.cx.v3beta"
+ "1.ExportTestCasesRequest\032\035.google.longru"
+ "nning.Operation\"\202\001\202\323\344\223\002G\"B/v3beta1/{pare"
+ "nt=projects/*/locations/*/agents/*}/test"
+ "Cases:export:\001*\312A2\n\027ExportTestCasesRespo"
+ "nse\022\027ExportTestCasesMetadata\022\356\001\n\023ListTes"
+ "tCaseResults\022>.google.cloud.dialogflow.c"
+ "x.v3beta1.ListTestCaseResultsRequest\032?.g"
+ "oogle.cloud.dialogflow.cx.v3beta1.ListTe"
+ "stCaseResultsResponse\"V\202\323\344\223\002G\022E/v3beta1/"
+ "{parent=projects/*/locations/*/agents/*/"
+ "testCases/*}/results\332A\006parent\022\333\001\n\021GetTes"
+ "tCaseResult\022<.google.cloud.dialogflow.cx"
+ ".v3beta1.GetTestCaseResultRequest\0322.goog"
+ "le.cloud.dialogflow.cx.v3beta1.TestCaseR"
+ "esult\"T\202\323\344\223\002G\022E/v3beta1/{name=projects/*"
+ "/locations/*/agents/*/testCases/*/result"
+ "s/*}\332A\004name\032x\312A\031dialogflow.googleapis.co"
+ "m\322AYhttps://www.googleapis.com/auth/clou"
+ "d-platform,https://www.googleapis.com/au"
+ "th/dialogflowB\325\001\n&com.google.cloud.dialo"
+ "gflow.cx.v3beta1B\rTestCaseProtoP\001ZDgoogl"
+ "e.golang.org/genproto/googleapis/cloud/d"
+ "ialogflow/cx/v3beta1;cx\370\001\001\242\002\002DF\252\002\"Google"
+ ".Cloud.Dialogflow.Cx.V3Beta1\352\002&Google::C"
+ "loud::Dialogflow::CX::V3beta1b\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.AnnotationsProto.getDescriptor(),
com.google.api.ClientProto.getDescriptor(),
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.ResourceProto.getDescriptor(),
com.google.cloud.dialogflow.cx.v3beta1.FlowProto.getDescriptor(),
com.google.cloud.dialogflow.cx.v3beta1.IntentProto.getDescriptor(),
com.google.cloud.dialogflow.cx.v3beta1.PageProto.getDescriptor(),
com.google.cloud.dialogflow.cx.v3beta1.ResponseMessageProto.getDescriptor(),
com.google.cloud.dialogflow.cx.v3beta1.SessionProto.getDescriptor(),
com.google.cloud.dialogflow.cx.v3beta1.TransitionRouteGroupProto.getDescriptor(),
com.google.longrunning.OperationsProto.getDescriptor(),
com.google.protobuf.EmptyProto.getDescriptor(),
com.google.protobuf.FieldMaskProto.getDescriptor(),
com.google.protobuf.StructProto.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
com.google.rpc.StatusProto.getDescriptor(),
});
internal_static_google_cloud_dialogflow_cx_v3beta1_TestCase_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_google_cloud_dialogflow_cx_v3beta1_TestCase_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_TestCase_descriptor,
new java.lang.String[] {
"Name",
"Tags",
"DisplayName",
"Notes",
"TestConfig",
"TestCaseConversationTurns",
"CreationTime",
"LastTestResult",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_TestCaseResult_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_google_cloud_dialogflow_cx_v3beta1_TestCaseResult_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_TestCaseResult_descriptor,
new java.lang.String[] {
"Name", "Environment", "ConversationTurns", "TestResult", "TestTime",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_TestConfig_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_google_cloud_dialogflow_cx_v3beta1_TestConfig_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_TestConfig_descriptor,
new java.lang.String[] {
"TrackingParameters", "Flow",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_descriptor,
new java.lang.String[] {
"UserInput", "VirtualAgentOutput",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_UserInput_descriptor =
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_UserInput_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_UserInput_descriptor,
new java.lang.String[] {
"Input", "InjectedParameters", "IsWebhookEnabled", "EnableSentimentAnalysis",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_VirtualAgentOutput_descriptor =
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_descriptor
.getNestedTypes()
.get(1);
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_VirtualAgentOutput_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_ConversationTurn_VirtualAgentOutput_descriptor,
new java.lang.String[] {
"SessionParameters",
"Differences",
"DiagnosticInfo",
"TriggeredIntent",
"CurrentPage",
"TextResponses",
"Status",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_TestRunDifference_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_google_cloud_dialogflow_cx_v3beta1_TestRunDifference_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_TestRunDifference_descriptor,
new java.lang.String[] {
"Type", "Description",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_descriptor,
new java.lang.String[] {
"Transitions", "CoverageScore",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_TransitionNode_descriptor =
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_TransitionNode_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_TransitionNode_descriptor,
new java.lang.String[] {
"Page", "Flow", "Kind",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_Transition_descriptor =
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_descriptor
.getNestedTypes()
.get(1);
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_Transition_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionCoverage_Transition_descriptor,
new java.lang.String[] {
"Source", "Index", "Target", "Covered", "TransitionRoute", "EventHandler", "Detail",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_descriptor,
new java.lang.String[] {
"Coverages", "CoverageScore",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_Coverage_descriptor =
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_Coverage_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_Coverage_descriptor,
new java.lang.String[] {
"RouteGroup", "Transitions", "CoverageScore",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_Coverage_Transition_descriptor =
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_Coverage_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_Coverage_Transition_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_TransitionRouteGroupCoverage_Coverage_Transition_descriptor,
new java.lang.String[] {
"TransitionRoute", "Covered",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_IntentCoverage_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_google_cloud_dialogflow_cx_v3beta1_IntentCoverage_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_IntentCoverage_descriptor,
new java.lang.String[] {
"Intents", "CoverageScore",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_IntentCoverage_Intent_descriptor =
internal_static_google_cloud_dialogflow_cx_v3beta1_IntentCoverage_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_dialogflow_cx_v3beta1_IntentCoverage_Intent_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_IntentCoverage_Intent_descriptor,
new java.lang.String[] {
"Intent", "Covered",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_CalculateCoverageRequest_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_google_cloud_dialogflow_cx_v3beta1_CalculateCoverageRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_CalculateCoverageRequest_descriptor,
new java.lang.String[] {
"Agent", "Type",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_CalculateCoverageResponse_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_google_cloud_dialogflow_cx_v3beta1_CalculateCoverageResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_CalculateCoverageResponse_descriptor,
new java.lang.String[] {
"Agent", "IntentCoverage", "TransitionCoverage", "RouteGroupCoverage", "CoverageType",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCasesRequest_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCasesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCasesRequest_descriptor,
new java.lang.String[] {
"Parent", "PageSize", "PageToken", "View",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCasesResponse_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCasesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCasesResponse_descriptor,
new java.lang.String[] {
"TestCases", "NextPageToken",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchDeleteTestCasesRequest_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchDeleteTestCasesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchDeleteTestCasesRequest_descriptor,
new java.lang.String[] {
"Parent", "Names",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_CreateTestCaseRequest_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_google_cloud_dialogflow_cx_v3beta1_CreateTestCaseRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_CreateTestCaseRequest_descriptor,
new java.lang.String[] {
"Parent", "TestCase",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateTestCaseRequest_descriptor =
getDescriptor().getMessageTypes().get(14);
internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateTestCaseRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateTestCaseRequest_descriptor,
new java.lang.String[] {
"TestCase", "UpdateMask",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_GetTestCaseRequest_descriptor =
getDescriptor().getMessageTypes().get(15);
internal_static_google_cloud_dialogflow_cx_v3beta1_GetTestCaseRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_GetTestCaseRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_RunTestCaseRequest_descriptor =
getDescriptor().getMessageTypes().get(16);
internal_static_google_cloud_dialogflow_cx_v3beta1_RunTestCaseRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_RunTestCaseRequest_descriptor,
new java.lang.String[] {
"Name", "Environment",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_RunTestCaseResponse_descriptor =
getDescriptor().getMessageTypes().get(17);
internal_static_google_cloud_dialogflow_cx_v3beta1_RunTestCaseResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_RunTestCaseResponse_descriptor,
new java.lang.String[] {
"Result",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_RunTestCaseMetadata_descriptor =
getDescriptor().getMessageTypes().get(18);
internal_static_google_cloud_dialogflow_cx_v3beta1_RunTestCaseMetadata_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_RunTestCaseMetadata_descriptor,
new java.lang.String[] {});
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesRequest_descriptor =
getDescriptor().getMessageTypes().get(19);
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesRequest_descriptor,
new java.lang.String[] {
"Parent", "Environment", "TestCases",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesResponse_descriptor =
getDescriptor().getMessageTypes().get(20);
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesResponse_descriptor,
new java.lang.String[] {
"Results",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesMetadata_descriptor =
getDescriptor().getMessageTypes().get(21);
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesMetadata_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesMetadata_descriptor,
new java.lang.String[] {
"Errors",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_TestError_descriptor =
getDescriptor().getMessageTypes().get(22);
internal_static_google_cloud_dialogflow_cx_v3beta1_TestError_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_TestError_descriptor,
new java.lang.String[] {
"TestCase", "Status", "TestTime",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesRequest_descriptor =
getDescriptor().getMessageTypes().get(23);
internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesRequest_descriptor,
new java.lang.String[] {
"Parent", "GcsUri", "Content", "Source",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesResponse_descriptor =
getDescriptor().getMessageTypes().get(24);
internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesResponse_descriptor,
new java.lang.String[] {
"Names",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesMetadata_descriptor =
getDescriptor().getMessageTypes().get(25);
internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesMetadata_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesMetadata_descriptor,
new java.lang.String[] {
"Errors",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_TestCaseError_descriptor =
getDescriptor().getMessageTypes().get(26);
internal_static_google_cloud_dialogflow_cx_v3beta1_TestCaseError_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_TestCaseError_descriptor,
new java.lang.String[] {
"TestCase", "Status",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_ExportTestCasesRequest_descriptor =
getDescriptor().getMessageTypes().get(27);
internal_static_google_cloud_dialogflow_cx_v3beta1_ExportTestCasesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_ExportTestCasesRequest_descriptor,
new java.lang.String[] {
"Parent", "GcsUri", "DataFormat", "Filter", "Destination",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_ExportTestCasesResponse_descriptor =
getDescriptor().getMessageTypes().get(28);
internal_static_google_cloud_dialogflow_cx_v3beta1_ExportTestCasesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_ExportTestCasesResponse_descriptor,
new java.lang.String[] {
"GcsUri", "Content", "Destination",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_ExportTestCasesMetadata_descriptor =
getDescriptor().getMessageTypes().get(29);
internal_static_google_cloud_dialogflow_cx_v3beta1_ExportTestCasesMetadata_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_ExportTestCasesMetadata_descriptor,
new java.lang.String[] {});
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCaseResultsRequest_descriptor =
getDescriptor().getMessageTypes().get(30);
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCaseResultsRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCaseResultsRequest_descriptor,
new java.lang.String[] {
"Parent", "PageSize", "PageToken", "Filter",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCaseResultsResponse_descriptor =
getDescriptor().getMessageTypes().get(31);
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCaseResultsResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_ListTestCaseResultsResponse_descriptor,
new java.lang.String[] {
"TestCaseResults", "NextPageToken",
});
internal_static_google_cloud_dialogflow_cx_v3beta1_GetTestCaseResultRequest_descriptor =
getDescriptor().getMessageTypes().get(32);
internal_static_google_cloud_dialogflow_cx_v3beta1_GetTestCaseResultRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3beta1_GetTestCaseResultRequest_descriptor,
new java.lang.String[] {
"Name",
});
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.ClientProto.defaultHost);
registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
registry.add(com.google.api.AnnotationsProto.http);
registry.add(com.google.api.ClientProto.methodSignature);
registry.add(com.google.api.ClientProto.oauthScopes);
registry.add(com.google.api.ResourceProto.resource);
registry.add(com.google.api.ResourceProto.resourceReference);
registry.add(com.google.longrunning.OperationsProto.operationInfo);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
com.google.api.AnnotationsProto.getDescriptor();
com.google.api.ClientProto.getDescriptor();
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.ResourceProto.getDescriptor();
com.google.cloud.dialogflow.cx.v3beta1.FlowProto.getDescriptor();
com.google.cloud.dialogflow.cx.v3beta1.IntentProto.getDescriptor();
com.google.cloud.dialogflow.cx.v3beta1.PageProto.getDescriptor();
com.google.cloud.dialogflow.cx.v3beta1.ResponseMessageProto.getDescriptor();
com.google.cloud.dialogflow.cx.v3beta1.SessionProto.getDescriptor();
com.google.cloud.dialogflow.cx.v3beta1.TransitionRouteGroupProto.getDescriptor();
com.google.longrunning.OperationsProto.getDescriptor();
com.google.protobuf.EmptyProto.getDescriptor();
com.google.protobuf.FieldMaskProto.getDescriptor();
com.google.protobuf.StructProto.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
com.google.rpc.StatusProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
| |
package ar.com.larreta.reports;
import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import ar.com.larreta.annotations.Log;
import net.sf.jasperreports.engine.DefaultJasperReportsContext;
import net.sf.jasperreports.engine.JRAbstractExporter;
import net.sf.jasperreports.engine.JRDataSource;
import net.sf.jasperreports.engine.JRException;
import net.sf.jasperreports.engine.JRExporterParameter;
import net.sf.jasperreports.engine.JRParameter;
import net.sf.jasperreports.engine.JasperCompileManager;
import net.sf.jasperreports.engine.JasperFillManager;
import net.sf.jasperreports.engine.JasperPrint;
import net.sf.jasperreports.engine.JasperReport;
import net.sf.jasperreports.engine.JasperReportsContext;
import net.sf.jasperreports.engine.data.JRBeanCollectionDataSource;
import net.sf.jasperreports.engine.fill.JRFileVirtualizer;
import net.sf.jasperreports.engine.util.JRLoader;
import net.sf.jasperreports.repo.RepositoryUtil;
/**
 * Base class for JasperReports-backed reports.
 *
 * <p>Subclasses choose the output format by supplying a concrete
 * {@link JRAbstractExporter} type via {@link #getExporterType()} and a matching
 * content type via {@link #getContentType()}. Filling is done through a
 * {@link JRFileVirtualizer} so large reports swap pages to disk instead of
 * exhausting the heap.
 */
public abstract class Report {

	/** Content type used when a report is served as a raw binary stream. */
	public static final String BINARY_CONTENT_TYPE = "application/octet-stream";

	private static @Log Logger LOG;

	/** Virtualizer page threshold, injected as text from configuration. */
	@Value("${report.virtualizer.size}")
	private String virtualizerSize;

	/** Directory where the virtualizer swaps report pages to disk. */
	@Value("${report.virtualizer.directory}")
	private String virtualizerDirectory;

	/** Parameters handed to JasperReports when the report is filled. */
	protected Map<String, Object> parameters = new HashMap<String, Object>();

	/** Format-specific exporter instantiated from {@link #getExporterType()}. */
	protected JRAbstractExporter exporter;

	/**
	 * @return the concrete {@link JRAbstractExporter} class for this report's
	 *         output format, or {@code null} if no exporter should be created.
	 */
	protected abstract Class getExporterType();

	public Report() {
		try {
			Class exporterType = getExporterType();
			if (exporterType != null) {
				// Class.newInstance() is deprecated (it propagates checked
				// exceptions unwrapped); use the no-arg constructor reflectively.
				exporter = (JRAbstractExporter) exporterType.getDeclaredConstructor().newInstance();
			}
		} catch (Exception e) {
			LOG.error("Ocurrio un error generando reporte", e);
			//FIXME: Lanzar excepcion
		}
	}

	/**
	 * Loads a pre-compiled report template ({@code .jasper}) from the classpath.
	 *
	 * @param reportTemplatePath classpath location of the compiled template
	 * @throws JRException if the serialized template cannot be loaded
	 */
	public JasperReport getJasperReport(String reportTemplatePath) throws FileNotFoundException, JRException {
		return (JasperReport) JRLoader.loadObject(getClass().getClassLoader().getResourceAsStream(reportTemplatePath));
	}

	/**
	 * Compiles a JRXML template read from the given resource.
	 *
	 * @throws JRException if compilation fails
	 * @throws IOException if the resource cannot be read
	 */
	public JasperReport getJasperReport(Resource resource) throws JRException, IOException {
		return JasperCompileManager.compileReport(resource.getInputStream());
	}

	/**
	 * Adds a parameter that will be taken into account when the report is built.
	 *
	 * @param key   parameter name referenced by the template
	 * @param value parameter value
	 */
	public void addParameter(String key, Object value) {
		parameters.put(key, value);
	}

	/**
	 * Creates the file virtualizer and registers it in the parameter map.
	 * Shared by every {@code getPrint} overload (was previously triplicated).
	 */
	private void registerVirtualizer() {
		// Integer.parseInt replaces the deprecated new Integer(String) boxing.
		JRFileVirtualizer virtualizer =
				new JRFileVirtualizer(Integer.parseInt(virtualizerSize), virtualizerDirectory);
		parameters.put(JRParameter.REPORT_VIRTUALIZER, virtualizer);
	}

	/**
	 * Fills the template at the given classpath location with the data source.
	 *
	 * @return the filled print, or {@code null} if filling failed (the error is
	 *         logged; see FIXME below about surfacing it as an exception)
	 */
	protected JasperPrint getPrint(String reportTemplatePath, JRDataSource dataSource) throws IOException {
		JasperPrint print = null;
		try {
			JasperReport reporte = getJasperReport(reportTemplatePath);
			registerVirtualizer();
			print = JasperFillManager.fillReport(reporte, parameters, dataSource);
		} catch (JRException e) {
			LOG.error("Ocurrio un error generando reporte", e);
			//FIXME: Lanzar excepcion
		}
		return print;
	}

	/**
	 * Compiles and fills the template from the given resource with the data source.
	 *
	 * @return the filled print, or {@code null} if filling failed
	 */
	protected JasperPrint getPrint(Resource resource, JRDataSource dataSource) throws IOException {
		JasperPrint print = null;
		try {
			JasperReport reporte = getJasperReport(resource);
			registerVirtualizer();
			print = JasperFillManager.fillReport(reporte, parameters, dataSource);
		} catch (JRException e) {
			LOG.error("Ocurrio un error generando reporte", e);
			//FIXME: Lanzar excepcion
		}
		return print;
	}

	/**
	 * Compiles and fills the template from the given resource using only the
	 * parameter map (no data source).
	 *
	 * @return the filled print, or {@code null} if filling failed
	 */
	protected JasperPrint getPrint(Resource resource) throws IOException {
		JasperPrint print = null;
		try {
			JasperReport reporte = getJasperReport(resource);
			registerVirtualizer();
			print = JasperFillManager.fillReport(reporte, parameters);
		} catch (JRException e) {
			LOG.error("Ocurrio un error generando reporte", e);
			//FIXME: Lanzar excepcion
		}
		return print;
	}

	/**
	 * Runs the configured exporter over the given print into a byte stream.
	 * Shared by the {@code getOutputStream} overloads (was previously duplicated).
	 *
	 * @return the exported bytes; empty if exporting failed (the error is logged)
	 */
	private ByteArrayOutputStream export(JasperPrint print) {
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		try {
			setStandardExporterParams(print, baos);
			setSpecificExporterParams();
			exporter.exportReport();
		} catch (JRException e) {
			LOG.error("Ocurrio un error generando reporte", e);
			//FIXME: Lanzar excepcion
		}
		return baos;
	}

	/**
	 * Fills the classpath template with the data source and exports it.
	 */
	public ByteArrayOutputStream getOutputStream(String reportTemplatePath, JRDataSource dataSource) throws IOException {
		return export(getPrint(reportTemplatePath, dataSource));
	}

	/**
	 * Fills the resource template with the data source and exports it.
	 */
	public ByteArrayOutputStream getOutputStream(Resource resource, JRDataSource dataSource) throws IOException {
		return export(getPrint(resource, dataSource));
	}

	/** Wires the print and the output stream into the exporter. */
	protected void setStandardExporterParams(JasperPrint print,
			ByteArrayOutputStream baos) {
		exporter.setParameter(JRExporterParameter.JASPER_PRINT, print);
		exporter.setParameter(JRExporterParameter.OUTPUT_STREAM, baos);
	}

	/** Convenience overload wrapping the collection in a bean data source. */
	public ByteArrayOutputStream getOutputStream(String reportTemplatePath, Collection dataSource) throws IOException {
		return getOutputStream(reportTemplatePath, new JRBeanCollectionDataSource(dataSource));
	}

	/** Convenience overload wrapping the collection in a bean data source. */
	public ByteArrayOutputStream getOutputStream(Resource resource, Collection dataSource) throws IOException {
		return getOutputStream(resource, new JRBeanCollectionDataSource(dataSource));
	}

	/**
	 * Exports the template with a one-element dummy data source; the template
	 * is presumably expected to ignore it — TODO confirm against templates.
	 */
	public ByteArrayOutputStream getOutputStream(String reportTemplatePath) throws IOException {
		return getOutputStream(reportTemplatePath, Arrays.asList(1));
	}

	/** See {@link #getOutputStream(String)}: dummy one-element data source. */
	public ByteArrayOutputStream getOutputStream(Resource resource) throws IOException {
		return getOutputStream(resource, Arrays.asList(1));
	}

	/** Default implementation: subclasses that support it must override. */
	public ByteArrayOutputStream getOutputStream() throws IOException {
		throw new IOException("Not implemented method.");
	}

	/** @return the MIME content type of the exported report. */
	public abstract String getContentType();

	/** Hook for subclasses to set format-specific exporter parameters. */
	protected abstract void setSpecificExporterParams();
}
| |
/*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.rules.keys;
import static org.junit.Assert.assertEquals;
import com.facebook.buck.io.ArchiveMemberPath;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.BuildRuleType;
import com.facebook.buck.rules.DefaultBuildTargetSourcePath;
import com.facebook.buck.rules.RuleKey;
import com.facebook.buck.rules.SourceRoot;
import com.facebook.buck.util.sha1.Sha1HashCode;
import com.google.common.hash.HashCode;
import com.google.common.hash.Hashing;
import org.junit.Test;
import java.nio.file.Paths;
import java.util.regex.Pattern;
/**
 * Tests for {@code CountingRuleKeyHasher}, which wraps another {@code RuleKeyHasher}.
 * Two properties are verified: every {@code put*} call is forwarded to the wrapped
 * hasher unchanged (same final hash as an unwrapped hasher fed the same sequence),
 * and every {@code put*} call increments the counter by exactly one.
 */
public class CountingRuleKeyHasherTest {

  // Two distinct, well-formed 40-hex-digit SHA-1 rule keys used as sample inputs.
  private static final RuleKey RULE_KEY_1 = new RuleKey("a002b39af204cdfaa5fdb67816b13867c32ac52c");
  private static final RuleKey RULE_KEY_2 = new RuleKey("b67816b13867c32ac52ca002b39af204cdfaa5fd");
  // Same base target without and with a flavor, so the flavor affects the hash input.
  private static final BuildTarget TARGET_1 =
      BuildTargetFactory.newInstance(Paths.get("/root"), "//example/base:one");
  private static final BuildTarget TARGET_2 =
      BuildTargetFactory.newInstance(Paths.get("/root"), "//example/base:one#flavor");

  /**
   * For each {@code put*} overload and several representative values (empty, "42",
   * split sequences), a counting hasher wrapping a fresh Guava hasher must yield
   * exactly the same hash as a bare Guava hasher given the same calls.
   */
  @Test
  public void testForwarding() {
    // Baseline: empty hashers agree.
    assertEquals(
        newGuavaHasher().hash(),
        newCountHasher().hash());
    assertEquals(
        newGuavaHasher().putKey("").hash(),
        newCountHasher().putKey("").hash());
    assertEquals(
        newGuavaHasher().putKey("42").hash(),
        newCountHasher().putKey("42").hash());
    assertEquals(
        newGuavaHasher().putKey("4").putKey("2").hash(),
        newCountHasher().putKey("4").putKey("2").hash());
    assertEquals(
        newGuavaHasher().putNull().hash(),
        newCountHasher().putNull().hash());
    assertEquals(
        newGuavaHasher().putBoolean(true).hash(),
        newCountHasher().putBoolean(true).hash());
    assertEquals(
        newGuavaHasher().putBoolean(false).hash(),
        newCountHasher().putBoolean(false).hash());
    // Numbers are exercised once per primitive width (int, long, short, byte,
    // float, double) since putNumber dispatches on the runtime type.
    assertEquals(
        newGuavaHasher().putNumber(0).hash(),
        newCountHasher().putNumber(0).hash());
    assertEquals(
        newGuavaHasher().putNumber(42).hash(),
        newCountHasher().putNumber(42).hash());
    assertEquals(
        newGuavaHasher().putNumber((long) 0).hash(),
        newCountHasher().putNumber((long) 0).hash());
    assertEquals(
        newGuavaHasher().putNumber((long) 42).hash(),
        newCountHasher().putNumber((long) 42).hash());
    assertEquals(
        newGuavaHasher().putNumber((short) 0).hash(),
        newCountHasher().putNumber((short) 0).hash());
    assertEquals(
        newGuavaHasher().putNumber((short) 42).hash(),
        newCountHasher().putNumber((short) 42).hash());
    assertEquals(
        newGuavaHasher().putNumber((byte) 0).hash(),
        newCountHasher().putNumber((byte) 0).hash());
    assertEquals(
        newGuavaHasher().putNumber((byte) 42).hash(),
        newCountHasher().putNumber((byte) 42).hash());
    assertEquals(
        newGuavaHasher().putNumber((float) 0).hash(),
        newCountHasher().putNumber((float) 0).hash());
    assertEquals(
        newGuavaHasher().putNumber((float) 42).hash(),
        newCountHasher().putNumber((float) 42).hash());
    assertEquals(
        newGuavaHasher().putNumber((double) 0).hash(),
        newCountHasher().putNumber((double) 0).hash());
    assertEquals(
        newGuavaHasher().putNumber((double) 42).hash(),
        newCountHasher().putNumber((double) 42).hash());
    assertEquals(
        newGuavaHasher().putString("").hash(),
        newCountHasher().putString("").hash());
    assertEquals(
        newGuavaHasher().putString("42").hash(),
        newCountHasher().putString("42").hash());
    assertEquals(
        newGuavaHasher().putString("4").putString("2").hash(),
        newCountHasher().putString("4").putString("2").hash());
    assertEquals(
        newGuavaHasher().putBytes(new byte[0]).hash(),
        newCountHasher().putBytes(new byte[0]).hash());
    assertEquals(
        newGuavaHasher().putBytes(new byte[] {42}).hash(),
        newCountHasher().putBytes(new byte[] {42}).hash());
    assertEquals(
        newGuavaHasher().putBytes(new byte[] {42, 42}).hash(),
        newCountHasher().putBytes(new byte[] {42, 42}).hash());
    assertEquals(
        newGuavaHasher().putPattern(Pattern.compile("")).hash(),
        newCountHasher().putPattern(Pattern.compile("")).hash());
    assertEquals(
        newGuavaHasher().putPattern(Pattern.compile("42")).hash(),
        newCountHasher().putPattern(Pattern.compile("42")).hash());
    assertEquals(
        newGuavaHasher().putPattern(Pattern.compile("4")).putPattern(Pattern.compile("2")).hash(),
        newCountHasher().putPattern(Pattern.compile("4")).putPattern(Pattern.compile("2")).hash());
    assertEquals(
        newGuavaHasher()
            .putSha1(Sha1HashCode.of("a002b39af204cdfaa5fdb67816b13867c32ac52c")).hash(),
        newCountHasher()
            .putSha1(Sha1HashCode.of("a002b39af204cdfaa5fdb67816b13867c32ac52c")).hash());
    assertEquals(
        newGuavaHasher()
            .putSha1(Sha1HashCode.of("b67816b13867c32ac52ca002b39af204cdfaa5fd")).hash(),
        newCountHasher()
            .putSha1(Sha1HashCode.of("b67816b13867c32ac52ca002b39af204cdfaa5fd")).hash());
    // Paths are hashed together with a content hash; both parts are varied.
    assertEquals(
        newGuavaHasher().putPath(Paths.get(""), HashCode.fromInt(0)).hash(),
        newCountHasher().putPath(Paths.get(""), HashCode.fromInt(0)).hash());
    assertEquals(
        newGuavaHasher().putPath(Paths.get(""), HashCode.fromInt(42)).hash(),
        newCountHasher().putPath(Paths.get(""), HashCode.fromInt(42)).hash());
    assertEquals(
        newGuavaHasher().putPath(Paths.get("42"), HashCode.fromInt(0)).hash(),
        newCountHasher().putPath(Paths.get("42"), HashCode.fromInt(0)).hash());
    assertEquals(
        newGuavaHasher().putPath(Paths.get("42"), HashCode.fromInt(42)).hash(),
        newCountHasher().putPath(Paths.get("42"), HashCode.fromInt(42)).hash());
    assertEquals(
        newGuavaHasher().putPath(Paths.get("42/42"), HashCode.fromInt(42)).hash(),
        newCountHasher().putPath(Paths.get("42/42"), HashCode.fromInt(42)).hash());
    assertEquals(
        newGuavaHasher().putArchiveMemberPath(
            newArchiveMember("", ""), HashCode.fromInt(0)).hash(),
        newCountHasher().putArchiveMemberPath(
            newArchiveMember("", ""), HashCode.fromInt(0)).hash());
    assertEquals(
        newGuavaHasher().putArchiveMemberPath(
            newArchiveMember("", ""), HashCode.fromInt(42)).hash(),
        newCountHasher().putArchiveMemberPath(
            newArchiveMember("", ""), HashCode.fromInt(42)).hash());
    assertEquals(
        newGuavaHasher().putArchiveMemberPath(
            newArchiveMember("42", "42"), HashCode.fromInt(0)).hash(),
        newCountHasher().putArchiveMemberPath(
            newArchiveMember("42", "42"), HashCode.fromInt(0)).hash());
    assertEquals(
        newGuavaHasher().putArchiveMemberPath(
            newArchiveMember("42", "42"), HashCode.fromInt(42)).hash(),
        newCountHasher().putArchiveMemberPath(
            newArchiveMember("42", "42"), HashCode.fromInt(42)).hash());
    assertEquals(
        newGuavaHasher().putArchiveMemberPath(
            newArchiveMember("42/42", "42/42"), HashCode.fromInt(42)).hash(),
        newCountHasher().putArchiveMemberPath(
            newArchiveMember("42/42", "42/42"), HashCode.fromInt(42)).hash());
    assertEquals(
        newGuavaHasher().putNonHashingPath("").hash(),
        newCountHasher().putNonHashingPath("").hash());
    assertEquals(
        newGuavaHasher().putNonHashingPath("42").hash(),
        newCountHasher().putNonHashingPath("42").hash());
    assertEquals(
        newGuavaHasher().putNonHashingPath("4").putNonHashingPath("2").hash(),
        newCountHasher().putNonHashingPath("4").putNonHashingPath("2").hash());
    assertEquals(
        newGuavaHasher().putSourceRoot(new SourceRoot("")).hash(),
        newCountHasher().putSourceRoot(new SourceRoot("")).hash());
    assertEquals(
        newGuavaHasher().putSourceRoot(new SourceRoot("42")).hash(),
        newCountHasher().putSourceRoot(new SourceRoot("42")).hash());
    assertEquals(
        newGuavaHasher()
            .putSourceRoot(new SourceRoot("4")).putSourceRoot(new SourceRoot("2")).hash(),
        newCountHasher()
            .putSourceRoot(new SourceRoot("4")).putSourceRoot(new SourceRoot("2")).hash());
    assertEquals(
        newGuavaHasher().putRuleKey(RULE_KEY_1).hash(),
        newCountHasher().putRuleKey(RULE_KEY_1).hash());
    assertEquals(
        newGuavaHasher().putRuleKey(RULE_KEY_2).hash(),
        newCountHasher().putRuleKey(RULE_KEY_2).hash());
    assertEquals(
        newGuavaHasher().putBuildRuleType(BuildRuleType.of("")).hash(),
        newCountHasher().putBuildRuleType(BuildRuleType.of("")).hash());
    assertEquals(
        newGuavaHasher().putBuildRuleType(BuildRuleType.of("42")).hash(),
        newCountHasher().putBuildRuleType(BuildRuleType.of("42")).hash());
    assertEquals(
        newGuavaHasher().putBuildRuleType(BuildRuleType.of("4"))
            .putBuildRuleType(BuildRuleType.of("2")).hash(),
        newCountHasher().putBuildRuleType(BuildRuleType.of("4"))
            .putBuildRuleType(BuildRuleType.of("2")).hash());
    assertEquals(
        newGuavaHasher().putBuildTarget(TARGET_1).hash(),
        newCountHasher().putBuildTarget(TARGET_1).hash());
    assertEquals(
        newGuavaHasher().putBuildTarget(TARGET_2).hash(),
        newCountHasher().putBuildTarget(TARGET_2).hash());
    assertEquals(
        newGuavaHasher().putBuildTargetSourcePath(new DefaultBuildTargetSourcePath(TARGET_1))
            .hash(),
        newCountHasher().putBuildTargetSourcePath(new DefaultBuildTargetSourcePath(TARGET_1))
            .hash());
    assertEquals(
        newGuavaHasher().putBuildTargetSourcePath(new DefaultBuildTargetSourcePath(TARGET_2))
            .hash(),
        newCountHasher().putBuildTargetSourcePath(new DefaultBuildTargetSourcePath(TARGET_2))
            .hash());
    // Container markers carry an element count; wrappers are plain markers.
    assertEquals(
        newGuavaHasher().putContainer(RuleKeyHasher.Container.LIST, 0).hash(),
        newCountHasher().putContainer(RuleKeyHasher.Container.LIST, 0).hash());
    assertEquals(
        newGuavaHasher().putContainer(RuleKeyHasher.Container.LIST, 42).hash(),
        newCountHasher().putContainer(RuleKeyHasher.Container.LIST, 42).hash());
    assertEquals(
        newGuavaHasher().putContainer(RuleKeyHasher.Container.MAP, 0).hash(),
        newCountHasher().putContainer(RuleKeyHasher.Container.MAP, 0).hash());
    assertEquals(
        newGuavaHasher().putContainer(RuleKeyHasher.Container.MAP, 42).hash(),
        newCountHasher().putContainer(RuleKeyHasher.Container.MAP, 42).hash());
    assertEquals(
        newGuavaHasher().putWrapper(RuleKeyHasher.Wrapper.SUPPLIER).hash(),
        newCountHasher().putWrapper(RuleKeyHasher.Wrapper.SUPPLIER).hash());
    assertEquals(
        newGuavaHasher().putWrapper(RuleKeyHasher.Wrapper.OPTIONAL).hash(),
        newCountHasher().putWrapper(RuleKeyHasher.Wrapper.OPTIONAL).hash());
    assertEquals(
        newGuavaHasher().putWrapper(RuleKeyHasher.Wrapper.EITHER_LEFT).hash(),
        newCountHasher().putWrapper(RuleKeyHasher.Wrapper.EITHER_LEFT).hash());
    assertEquals(
        newGuavaHasher().putWrapper(RuleKeyHasher.Wrapper.EITHER_RIGHT).hash(),
        newCountHasher().putWrapper(RuleKeyHasher.Wrapper.EITHER_RIGHT).hash());
    assertEquals(
        newGuavaHasher().putWrapper(RuleKeyHasher.Wrapper.BUILD_RULE).hash(),
        newCountHasher().putWrapper(RuleKeyHasher.Wrapper.BUILD_RULE).hash());
    assertEquals(
        newGuavaHasher().putWrapper(RuleKeyHasher.Wrapper.APPENDABLE).hash(),
        newCountHasher().putWrapper(RuleKeyHasher.Wrapper.APPENDABLE).hash());
  }

  /**
   * Feeds one hasher a long sequence of {@code put*} calls, asserting after each
   * group that the counter advanced by exactly the number of calls made — i.e.
   * each call counts as one regardless of payload size or type.
   */
  @Test
  public void testCounting() {
    CountingRuleKeyHasher<HashCode> hasher = newCountHasher();
    int count = 0;
    assertEquals(count, hasher.getCount());
    hasher.putKey("");
    assertEquals(++count, hasher.getCount());
    hasher.putKey("42").putKey("43").putKey("44");
    assertEquals(count += 3, hasher.getCount());
    hasher.putNull();
    assertEquals(++count, hasher.getCount());
    hasher.putBoolean(true);
    assertEquals(++count, hasher.getCount());
    hasher.putBoolean(false).putBoolean(true);
    assertEquals(count += 2, hasher.getCount());
    hasher.putNumber(0);
    assertEquals(++count, hasher.getCount());
    hasher.putNumber(42).putNumber(43);
    assertEquals(count += 2, hasher.getCount());
    hasher.putNumber((long) 0);
    assertEquals(++count, hasher.getCount());
    hasher.putNumber((long) 42).putNumber((long) 43);
    assertEquals(count += 2, hasher.getCount());
    hasher.putNumber((short) 0);
    assertEquals(++count, hasher.getCount());
    hasher.putNumber((short) 42).putNumber((short) 43);
    assertEquals(count += 2, hasher.getCount());
    hasher.putNumber((byte) 0);
    assertEquals(++count, hasher.getCount());
    hasher.putNumber((byte) 42).putNumber((byte) 43);
    assertEquals(count += 2, hasher.getCount());
    hasher.putNumber((float) 0);
    assertEquals(++count, hasher.getCount());
    hasher.putNumber((float) 42).putNumber((float) 43);
    assertEquals(count += 2, hasher.getCount());
    hasher.putNumber((double) 0);
    assertEquals(++count, hasher.getCount());
    hasher.putNumber((double) 42).putNumber((double) 43);
    assertEquals(count += 2, hasher.getCount());
    hasher.putString("");
    assertEquals(++count, hasher.getCount());
    hasher.putString("42").putString("43");
    assertEquals(count += 2, hasher.getCount());
    hasher.putBytes(new byte[0]);
    assertEquals(++count, hasher.getCount());
    hasher.putBytes(new byte[] {42});
    assertEquals(++count, hasher.getCount());
    hasher.putBytes(new byte[] {42, 42}).putBytes(new byte[] {43});
    assertEquals(count += 2, hasher.getCount());
    hasher.putPattern(Pattern.compile(""));
    assertEquals(++count, hasher.getCount());
    hasher.putPattern(Pattern.compile("42")).putPattern(Pattern.compile("43"));
    assertEquals(count += 2, hasher.getCount());
    hasher.putSha1(Sha1HashCode.of("a002b39af204cdfaa5fdb67816b13867c32ac52c"));
    assertEquals(++count, hasher.getCount());
    hasher.putSha1(Sha1HashCode.of("b67816b13867c32ac52ca002b39af204cdfaa5fd"))
        .putSha1(Sha1HashCode.of("a002b39af204cdfaa5fdb67816b13867c32ac52c"));
    assertEquals(count += 2, hasher.getCount());
    hasher.putPath(Paths.get(""), HashCode.fromInt(0));
    assertEquals(++count, hasher.getCount());
    hasher.putPath(Paths.get(""), HashCode.fromInt(42));
    assertEquals(++count, hasher.getCount());
    hasher.putPath(Paths.get("42"), HashCode.fromInt(0));
    assertEquals(++count, hasher.getCount());
    hasher.putPath(Paths.get("42"), HashCode.fromInt(42));
    assertEquals(++count, hasher.getCount());
    hasher
        .putPath(Paths.get("42/42"), HashCode.fromInt(42))
        .putPath(Paths.get("43"), HashCode.fromInt(43));
    assertEquals(count += 2, hasher.getCount());
    hasher.putArchiveMemberPath(newArchiveMember("", ""), HashCode.fromInt(0));
    assertEquals(++count, hasher.getCount());
    hasher.putArchiveMemberPath(newArchiveMember("", ""), HashCode.fromInt(42));
    assertEquals(++count, hasher.getCount());
    hasher.putArchiveMemberPath(newArchiveMember("42", "42"), HashCode.fromInt(0));
    assertEquals(++count, hasher.getCount());
    hasher.putArchiveMemberPath(newArchiveMember("42", "42"), HashCode.fromInt(42));
    assertEquals(++count, hasher.getCount());
    hasher.putArchiveMemberPath(newArchiveMember("42/42", "42/42"), HashCode.fromInt(42))
        .putArchiveMemberPath(newArchiveMember("43/43", "43/43"), HashCode.fromInt(43));
    assertEquals(count += 2, hasher.getCount());
    hasher.putNonHashingPath("");
    assertEquals(++count, hasher.getCount());
    hasher.putNonHashingPath("42").putNonHashingPath("43");
    assertEquals(count += 2, hasher.getCount());
    hasher.putSourceRoot(new SourceRoot(""));
    assertEquals(++count, hasher.getCount());
    hasher.putSourceRoot(new SourceRoot("42")).putSourceRoot(new SourceRoot("43"));
    assertEquals(count += 2, hasher.getCount());
    hasher.putRuleKey(RULE_KEY_1);
    assertEquals(++count, hasher.getCount());
    hasher.putRuleKey(RULE_KEY_2).putRuleKey(RULE_KEY_1);
    assertEquals(count += 2, hasher.getCount());
    hasher.putBuildRuleType(BuildRuleType.of(""));
    assertEquals(++count, hasher.getCount());
    hasher.putBuildRuleType(BuildRuleType.of("42")).putBuildRuleType(BuildRuleType.of("43"));
    assertEquals(count += 2, hasher.getCount());
    hasher.putBuildTarget(TARGET_1);
    assertEquals(++count, hasher.getCount());
    hasher.putBuildTarget(TARGET_2).putBuildTarget(TARGET_1);
    assertEquals(count += 2, hasher.getCount());
    hasher.putBuildTargetSourcePath(new DefaultBuildTargetSourcePath(TARGET_1));
    assertEquals(++count, hasher.getCount());
    hasher.putBuildTargetSourcePath(new DefaultBuildTargetSourcePath(TARGET_2))
        .putBuildTargetSourcePath(new DefaultBuildTargetSourcePath(TARGET_1));
    assertEquals(count += 2, hasher.getCount());
    hasher.putContainer(RuleKeyHasher.Container.LIST, 0);
    assertEquals(++count, hasher.getCount());
    hasher.putContainer(RuleKeyHasher.Container.LIST, 42)
        .putContainer(RuleKeyHasher.Container.LIST, 43);
    assertEquals(count += 2, hasher.getCount());
    hasher.putContainer(RuleKeyHasher.Container.MAP, 0);
    assertEquals(++count, hasher.getCount());
    hasher.putContainer(RuleKeyHasher.Container.MAP, 42)
        .putContainer(RuleKeyHasher.Container.MAP, 43);
    assertEquals(count += 2, hasher.getCount());
    hasher.putWrapper(RuleKeyHasher.Wrapper.SUPPLIER);
    assertEquals(++count, hasher.getCount());
    hasher.putWrapper(RuleKeyHasher.Wrapper.OPTIONAL);
    assertEquals(++count, hasher.getCount());
    hasher.putWrapper(RuleKeyHasher.Wrapper.EITHER_LEFT);
    assertEquals(++count, hasher.getCount());
    hasher.putWrapper(RuleKeyHasher.Wrapper.EITHER_RIGHT);
    assertEquals(++count, hasher.getCount());
    // NOTE(review): EITHER_RIGHT is put twice in a row here — looks like a
    // copy/paste duplicate, though it still validly exercises the counter;
    // confirm intent before "fixing".
    hasher.putWrapper(RuleKeyHasher.Wrapper.EITHER_RIGHT);
    assertEquals(++count, hasher.getCount());
    hasher.putWrapper(RuleKeyHasher.Wrapper.BUILD_RULE);
    assertEquals(++count, hasher.getCount());
    hasher.putWrapper(RuleKeyHasher.Wrapper.APPENDABLE);
    assertEquals(++count, hasher.getCount());
    hasher.putWrapper(RuleKeyHasher.Wrapper.SUPPLIER)
        .putWrapper(RuleKeyHasher.Wrapper.OPTIONAL)
        .putWrapper(RuleKeyHasher.Wrapper.EITHER_LEFT)
        .putWrapper(RuleKeyHasher.Wrapper.EITHER_RIGHT)
        .putWrapper(RuleKeyHasher.Wrapper.BUILD_RULE)
        .putWrapper(RuleKeyHasher.Wrapper.APPENDABLE)
        .putWrapper(RuleKeyHasher.Wrapper.OPTIONAL);
    assertEquals(count += 7, hasher.getCount());
    // A mixed sequence: key + container marker + three elements = 5 calls.
    hasher.putKey("key")
        .putContainer(RuleKeyHasher.Container.LIST, 3)
        .putString("a").putNumber(1).putNull();
    assertEquals(count += 5, hasher.getCount());
  }

  /** Builds an ArchiveMemberPath from an archive path and a member path. */
  private ArchiveMemberPath newArchiveMember(String archivePath, String memberPath) {
    return ArchiveMemberPath.of(Paths.get(archivePath), Paths.get(memberPath));
  }

  /** A counting hasher wrapping a fresh SHA-1 Guava hasher. */
  private CountingRuleKeyHasher<HashCode> newCountHasher() {
    return new CountingRuleKeyHasher<>(newGuavaHasher());
  }

  /** A bare Guava-backed hasher over a fresh SHA-1 hasher (the reference). */
  private GuavaRuleKeyHasher newGuavaHasher() {
    return new GuavaRuleKeyHasher(Hashing.sha1().newHasher());
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.processor.internals;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.processor.Processor;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.StateStore;
import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.SessionStore;
import org.apache.kafka.streams.state.TimestampedKeyValueStore;
import org.apache.kafka.streams.state.TimestampedWindowStore;
import org.apache.kafka.streams.state.ValueAndTimestamp;
import org.apache.kafka.streams.state.WindowStore;
import org.apache.kafka.streams.state.WindowStoreIterator;
import org.apache.kafka.streams.state.internals.ThreadCache;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.function.Consumer;
import static java.util.Arrays.asList;
import static org.easymock.EasyMock.anyLong;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.anyString;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.mock;
import static org.easymock.EasyMock.replay;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Tests that {@code ProcessorContextImpl} hands out the correct store facades:
 * global stores must be read-only (mutators throw {@link UnsupportedOperationException}),
 * while local stores forward mutating calls to the underlying store but still
 * forbid {@code init()} and {@code close()} from user code.
 */
public class ProcessorContextImplTest {
    private ProcessorContextImpl context;

    private static final String KEY = "key";
    private static final long VALUE = 42L;
    private static final ValueAndTimestamp<Long> VALUE_AND_TIMESTAMP = ValueAndTimestamp.make(42L, 21L);
    private static final String STORE_NAME = "underlying-store";

    // Flags flipped by the store mocks' andAnswer() callbacks so each test can
    // assert that the facade forwarded the mutating call to the underlying store.
    private boolean flushExecuted;
    private boolean putExecuted;
    private boolean putWithTimestampExecuted;
    private boolean putIfAbsentExecuted;
    private boolean putAllExecuted;
    private boolean deleteExecuted;
    private boolean removeExecuted;

    private KeyValueIterator<String, Long> rangeIter;
    private KeyValueIterator<String, ValueAndTimestamp<Long>> timestampedRangeIter;
    private KeyValueIterator<String, Long> allIter;
    private KeyValueIterator<String, ValueAndTimestamp<Long>> timestampedAllIter;

    private final List<KeyValueIterator<Windowed<String>, Long>> iters = new ArrayList<>(7);
    private final List<KeyValueIterator<Windowed<String>, ValueAndTimestamp<Long>>> timestampedIters = new ArrayList<>(7);
    private WindowStoreIterator windowStoreIter;

    @Before
    public void setup() {
        flushExecuted = false;
        putExecuted = false;
        // CONSISTENCY FIX: this flag was the only one not reset here. JUnit 4
        // creates a fresh test instance per method, so this is defensive, but
        // every other execution flag is reset explicitly and this one should be too.
        putWithTimestampExecuted = false;
        putIfAbsentExecuted = false;
        putAllExecuted = false;
        deleteExecuted = false;
        removeExecuted = false;

        rangeIter = mock(KeyValueIterator.class);
        timestampedRangeIter = mock(KeyValueIterator.class);
        allIter = mock(KeyValueIterator.class);
        timestampedAllIter = mock(KeyValueIterator.class);
        windowStoreIter = mock(WindowStoreIterator.class);

        for (int i = 0; i < 7; i++) {
            iters.add(i, mock(KeyValueIterator.class));
            timestampedIters.add(i, mock(KeyValueIterator.class));
        }

        final StreamsConfig streamsConfig = mock(StreamsConfig.class);
        expect(streamsConfig.getString(StreamsConfig.APPLICATION_ID_CONFIG)).andReturn("add-id");
        expect(streamsConfig.defaultValueSerde()).andReturn(Serdes.ByteArray());
        expect(streamsConfig.defaultKeySerde()).andReturn(Serdes.ByteArray());
        replay(streamsConfig);

        final ProcessorStateManager stateManager = mock(ProcessorStateManager.class);
        expect(stateManager.getGlobalStore("GlobalKeyValueStore")).andReturn(keyValueStoreMock());
        expect(stateManager.getGlobalStore("GlobalTimestampedKeyValueStore")).andReturn(timestampedKeyValueStoreMock());
        expect(stateManager.getGlobalStore("GlobalWindowStore")).andReturn(windowStoreMock());
        expect(stateManager.getGlobalStore("GlobalTimestampedWindowStore")).andReturn(timestampedWindowStoreMock());
        expect(stateManager.getGlobalStore("GlobalSessionStore")).andReturn(sessionStoreMock());
        // Any name not matched above resolves to "no such global store".
        expect(stateManager.getGlobalStore(anyString())).andReturn(null);
        expect(stateManager.getStore("LocalKeyValueStore")).andReturn(keyValueStoreMock());
        expect(stateManager.getStore("LocalTimestampedKeyValueStore")).andReturn(timestampedKeyValueStoreMock());
        expect(stateManager.getStore("LocalWindowStore")).andReturn(windowStoreMock());
        expect(stateManager.getStore("LocalTimestampedWindowStore")).andReturn(timestampedWindowStoreMock());
        expect(stateManager.getStore("LocalSessionStore")).andReturn(sessionStoreMock());
        replay(stateManager);

        context = new ProcessorContextImpl(
            mock(TaskId.class),
            mock(StreamTask.class),
            streamsConfig,
            mock(RecordCollector.class),
            stateManager,
            mock(StreamsMetricsImpl.class),
            mock(ThreadCache.class)
        );

        // Only these local stores are "connected" to the current processor node.
        context.setCurrentNode(new ProcessorNode<String, Long>("fake", null,
            new HashSet<>(asList(
                "LocalKeyValueStore",
                "LocalTimestampedKeyValueStore",
                "LocalWindowStore",
                "LocalTimestampedWindowStore",
                "LocalSessionStore"))));
    }

    @Test
    public void globalKeyValueStoreShouldBeReadOnly() {
        doTest("GlobalKeyValueStore", (Consumer<KeyValueStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            checkThrowsUnsupportedOperation(store::flush, "flush()");
            checkThrowsUnsupportedOperation(() -> store.put("1", 1L), "put()");
            checkThrowsUnsupportedOperation(() -> store.putIfAbsent("1", 1L), "putIfAbsent()");
            checkThrowsUnsupportedOperation(() -> store.putAll(Collections.emptyList()), "putAll()");
            checkThrowsUnsupportedOperation(() -> store.delete("1"), "delete()");

            assertEquals((Long) VALUE, store.get(KEY));
            assertEquals(rangeIter, store.range("one", "two"));
            assertEquals(allIter, store.all());
            assertEquals(VALUE, store.approximateNumEntries());
        });
    }

    @Test
    public void globalTimestampedKeyValueStoreShouldBeReadOnly() {
        doTest("GlobalTimestampedKeyValueStore", (Consumer<TimestampedKeyValueStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            checkThrowsUnsupportedOperation(store::flush, "flush()");
            checkThrowsUnsupportedOperation(() -> store.put("1", ValueAndTimestamp.make(1L, 2L)), "put()");
            checkThrowsUnsupportedOperation(() -> store.putIfAbsent("1", ValueAndTimestamp.make(1L, 2L)), "putIfAbsent()");
            checkThrowsUnsupportedOperation(() -> store.putAll(Collections.emptyList()), "putAll()");
            checkThrowsUnsupportedOperation(() -> store.delete("1"), "delete()");

            assertEquals(VALUE_AND_TIMESTAMP, store.get(KEY));
            assertEquals(timestampedRangeIter, store.range("one", "two"));
            assertEquals(timestampedAllIter, store.all());
            assertEquals(VALUE, store.approximateNumEntries());
        });
    }

    @Test
    @SuppressWarnings("deprecation")
    public void globalWindowStoreShouldBeReadOnly() {
        doTest("GlobalWindowStore", (Consumer<WindowStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            checkThrowsUnsupportedOperation(store::flush, "flush()");
            checkThrowsUnsupportedOperation(() -> store.put("1", 1L, 1L), "put()");
            checkThrowsUnsupportedOperation(() -> store.put("1", 1L), "put()");

            assertEquals(iters.get(0), store.fetchAll(0L, 0L));
            assertEquals(windowStoreIter, store.fetch(KEY, 0L, 1L));
            assertEquals(iters.get(1), store.fetch(KEY, KEY, 0L, 1L));
            assertEquals((Long) VALUE, store.fetch(KEY, 1L));
            assertEquals(iters.get(2), store.all());
        });
    }

    @Test
    @SuppressWarnings("deprecation")
    public void globalTimestampedWindowStoreShouldBeReadOnly() {
        doTest("GlobalTimestampedWindowStore", (Consumer<TimestampedWindowStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            checkThrowsUnsupportedOperation(store::flush, "flush()");
            checkThrowsUnsupportedOperation(() -> store.put("1", ValueAndTimestamp.make(1L, 1L), 1L), "put() [with timestamp]");
            checkThrowsUnsupportedOperation(() -> store.put("1", ValueAndTimestamp.make(1L, 1L)), "put() [no timestamp]");

            assertEquals(timestampedIters.get(0), store.fetchAll(0L, 0L));
            assertEquals(windowStoreIter, store.fetch(KEY, 0L, 1L));
            assertEquals(timestampedIters.get(1), store.fetch(KEY, KEY, 0L, 1L));
            assertEquals(VALUE_AND_TIMESTAMP, store.fetch(KEY, 1L));
            assertEquals(timestampedIters.get(2), store.all());
        });
    }

    @Test
    public void globalSessionStoreShouldBeReadOnly() {
        doTest("GlobalSessionStore", (Consumer<SessionStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            checkThrowsUnsupportedOperation(store::flush, "flush()");
            checkThrowsUnsupportedOperation(() -> store.remove(null), "remove()");
            checkThrowsUnsupportedOperation(() -> store.put(null, null), "put()");

            assertEquals(iters.get(3), store.findSessions(KEY, 1L, 2L));
            assertEquals(iters.get(4), store.findSessions(KEY, KEY, 1L, 2L));
            assertEquals(iters.get(5), store.fetch(KEY));
            assertEquals(iters.get(6), store.fetch(KEY, KEY));
        });
    }

    @Test
    public void localKeyValueStoreShouldNotAllowInitOrClose() {
        doTest("LocalKeyValueStore", (Consumer<KeyValueStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            store.flush();
            assertTrue(flushExecuted);

            store.put("1", 1L);
            assertTrue(putExecuted);

            store.putIfAbsent("1", 1L);
            assertTrue(putIfAbsentExecuted);

            store.putAll(Collections.emptyList());
            assertTrue(putAllExecuted);

            store.delete("1");
            assertTrue(deleteExecuted);

            assertEquals((Long) VALUE, store.get(KEY));
            assertEquals(rangeIter, store.range("one", "two"));
            assertEquals(allIter, store.all());
            assertEquals(VALUE, store.approximateNumEntries());
        });
    }

    @Test
    public void localTimestampedKeyValueStoreShouldNotAllowInitOrClose() {
        doTest("LocalTimestampedKeyValueStore", (Consumer<TimestampedKeyValueStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            store.flush();
            assertTrue(flushExecuted);

            store.put("1", ValueAndTimestamp.make(1L, 2L));
            assertTrue(putExecuted);

            store.putIfAbsent("1", ValueAndTimestamp.make(1L, 2L));
            assertTrue(putIfAbsentExecuted);

            store.putAll(Collections.emptyList());
            assertTrue(putAllExecuted);

            store.delete("1");
            assertTrue(deleteExecuted);

            assertEquals(VALUE_AND_TIMESTAMP, store.get(KEY));
            assertEquals(timestampedRangeIter, store.range("one", "two"));
            assertEquals(timestampedAllIter, store.all());
            assertEquals(VALUE, store.approximateNumEntries());
        });
    }

    @Test
    @SuppressWarnings("deprecation")
    public void localWindowStoreShouldNotAllowInitOrClose() {
        doTest("LocalWindowStore", (Consumer<WindowStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            store.flush();
            assertTrue(flushExecuted);

            store.put("1", 1L);
            assertTrue(putExecuted);

            assertEquals(iters.get(0), store.fetchAll(0L, 0L));
            assertEquals(windowStoreIter, store.fetch(KEY, 0L, 1L));
            assertEquals(iters.get(1), store.fetch(KEY, KEY, 0L, 1L));
            assertEquals((Long) VALUE, store.fetch(KEY, 1L));
            assertEquals(iters.get(2), store.all());
        });
    }

    @Test
    @SuppressWarnings("deprecation")
    public void localTimestampedWindowStoreShouldNotAllowInitOrClose() {
        doTest("LocalTimestampedWindowStore", (Consumer<TimestampedWindowStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            store.flush();
            assertTrue(flushExecuted);

            store.put("1", ValueAndTimestamp.make(1L, 1L));
            assertTrue(putExecuted);

            store.put("1", ValueAndTimestamp.make(1L, 1L), 1L);
            assertTrue(putWithTimestampExecuted);

            assertEquals(timestampedIters.get(0), store.fetchAll(0L, 0L));
            assertEquals(windowStoreIter, store.fetch(KEY, 0L, 1L));
            assertEquals(timestampedIters.get(1), store.fetch(KEY, KEY, 0L, 1L));
            assertEquals(VALUE_AND_TIMESTAMP, store.fetch(KEY, 1L));
            assertEquals(timestampedIters.get(2), store.all());
        });
    }

    @Test
    public void localSessionStoreShouldNotAllowInitOrClose() {
        doTest("LocalSessionStore", (Consumer<SessionStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            store.flush();
            assertTrue(flushExecuted);

            store.remove(null);
            assertTrue(removeExecuted);

            store.put(null, null);
            assertTrue(putExecuted);

            assertEquals(iters.get(3), store.findSessions(KEY, 1L, 2L));
            assertEquals(iters.get(4), store.findSessions(KEY, KEY, 1L, 2L));
            assertEquals(iters.get(5), store.fetch(KEY));
            assertEquals(iters.get(6), store.fetch(KEY, KEY));
        });
    }

    /** Mock key-value store: reads return canned values, writes flip the matching flag. */
    @SuppressWarnings("unchecked")
    private KeyValueStore<String, Long> keyValueStoreMock() {
        final KeyValueStore<String, Long> keyValueStoreMock = mock(KeyValueStore.class);
        initStateStoreMock(keyValueStoreMock);

        expect(keyValueStoreMock.get(KEY)).andReturn(VALUE);
        expect(keyValueStoreMock.approximateNumEntries()).andReturn(VALUE);
        expect(keyValueStoreMock.range("one", "two")).andReturn(rangeIter);
        expect(keyValueStoreMock.all()).andReturn(allIter);

        keyValueStoreMock.put(anyString(), anyLong());
        expectLastCall().andAnswer(() -> {
            putExecuted = true;
            return null;
        });

        keyValueStoreMock.putIfAbsent(anyString(), anyLong());
        expectLastCall().andAnswer(() -> {
            putIfAbsentExecuted = true;
            return null;
        });

        keyValueStoreMock.putAll(anyObject(List.class));
        expectLastCall().andAnswer(() -> {
            putAllExecuted = true;
            return null;
        });

        keyValueStoreMock.delete(anyString());
        expectLastCall().andAnswer(() -> {
            deleteExecuted = true;
            return null;
        });

        replay(keyValueStoreMock);
        return keyValueStoreMock;
    }

    /** Mock timestamped key-value store; mirrors {@link #keyValueStoreMock()}. */
    @SuppressWarnings("unchecked")
    private TimestampedKeyValueStore<String, Long> timestampedKeyValueStoreMock() {
        final TimestampedKeyValueStore<String, Long> timestampedKeyValueStoreMock = mock(TimestampedKeyValueStore.class);
        initStateStoreMock(timestampedKeyValueStoreMock);

        expect(timestampedKeyValueStoreMock.get(KEY)).andReturn(VALUE_AND_TIMESTAMP);
        expect(timestampedKeyValueStoreMock.approximateNumEntries()).andReturn(VALUE);
        expect(timestampedKeyValueStoreMock.range("one", "two")).andReturn(timestampedRangeIter);
        expect(timestampedKeyValueStoreMock.all()).andReturn(timestampedAllIter);

        timestampedKeyValueStoreMock.put(anyString(), anyObject(ValueAndTimestamp.class));
        expectLastCall().andAnswer(() -> {
            putExecuted = true;
            return null;
        });

        timestampedKeyValueStoreMock.putIfAbsent(anyString(), anyObject(ValueAndTimestamp.class));
        expectLastCall().andAnswer(() -> {
            putIfAbsentExecuted = true;
            return null;
        });

        timestampedKeyValueStoreMock.putAll(anyObject(List.class));
        expectLastCall().andAnswer(() -> {
            putAllExecuted = true;
            return null;
        });

        timestampedKeyValueStoreMock.delete(anyString());
        expectLastCall().andAnswer(() -> {
            deleteExecuted = true;
            return null;
        });

        replay(timestampedKeyValueStoreMock);
        return timestampedKeyValueStoreMock;
    }

    /** Mock window store: fetches return the shared iterators, put() flips {@code putExecuted}. */
    @SuppressWarnings({"unchecked", "deprecation"})
    private WindowStore<String, Long> windowStoreMock() {
        final WindowStore<String, Long> windowStore = mock(WindowStore.class);
        initStateStoreMock(windowStore);

        expect(windowStore.fetchAll(anyLong(), anyLong())).andReturn(iters.get(0));
        expect(windowStore.fetch(anyString(), anyString(), anyLong(), anyLong())).andReturn(iters.get(1));
        expect(windowStore.fetch(anyString(), anyLong(), anyLong())).andReturn(windowStoreIter);
        expect(windowStore.fetch(anyString(), anyLong())).andReturn(VALUE);
        expect(windowStore.all()).andReturn(iters.get(2));

        windowStore.put(anyString(), anyLong());
        expectLastCall().andAnswer(() -> {
            putExecuted = true;
            return null;
        });

        replay(windowStore);
        return windowStore;
    }

    /** Mock timestamped window store; both put() overloads flip their own flag. */
    @SuppressWarnings({"unchecked", "deprecation"})
    private TimestampedWindowStore<String, Long> timestampedWindowStoreMock() {
        final TimestampedWindowStore<String, Long> windowStore = mock(TimestampedWindowStore.class);
        initStateStoreMock(windowStore);

        expect(windowStore.fetchAll(anyLong(), anyLong())).andReturn(timestampedIters.get(0));
        expect(windowStore.fetch(anyString(), anyString(), anyLong(), anyLong())).andReturn(timestampedIters.get(1));
        expect(windowStore.fetch(anyString(), anyLong(), anyLong())).andReturn(windowStoreIter);
        expect(windowStore.fetch(anyString(), anyLong())).andReturn(VALUE_AND_TIMESTAMP);
        expect(windowStore.all()).andReturn(timestampedIters.get(2));

        windowStore.put(anyString(), anyObject(ValueAndTimestamp.class));
        expectLastCall().andAnswer(() -> {
            putExecuted = true;
            return null;
        });

        windowStore.put(anyString(), anyObject(ValueAndTimestamp.class), anyLong());
        expectLastCall().andAnswer(() -> {
            putWithTimestampExecuted = true;
            return null;
        });

        replay(windowStore);
        return windowStore;
    }

    /** Mock session store: find/fetch return the shared iterators, put()/remove() flip flags. */
    @SuppressWarnings("unchecked")
    private SessionStore<String, Long> sessionStoreMock() {
        final SessionStore<String, Long> sessionStore = mock(SessionStore.class);
        initStateStoreMock(sessionStore);

        expect(sessionStore.findSessions(anyString(), anyLong(), anyLong())).andReturn(iters.get(3));
        expect(sessionStore.findSessions(anyString(), anyString(), anyLong(), anyLong())).andReturn(iters.get(4));
        expect(sessionStore.fetch(anyString())).andReturn(iters.get(5));
        expect(sessionStore.fetch(anyString(), anyString())).andReturn(iters.get(6));

        sessionStore.put(anyObject(Windowed.class), anyLong());
        expectLastCall().andAnswer(() -> {
            putExecuted = true;
            return null;
        });

        sessionStore.remove(anyObject(Windowed.class));
        expectLastCall().andAnswer(() -> {
            removeExecuted = true;
            return null;
        });

        replay(sessionStore);
        return sessionStore;
    }

    /** Records the StateStore expectations shared by every mocked store (name/persistent/isOpen/flush). */
    private void initStateStoreMock(final StateStore stateStore) {
        expect(stateStore.name()).andReturn(STORE_NAME);
        expect(stateStore.persistent()).andReturn(true);
        expect(stateStore.isOpen()).andReturn(true);

        stateStore.flush();
        expectLastCall().andAnswer(() -> {
            flushExecuted = true;
            return null;
        });
    }

    /**
     * Runs {@code checker} against the store named {@code name}, resolved through a
     * dummy processor's context so the lookup goes through the facade under test.
     */
    private <T extends StateStore> void doTest(final String name, final Consumer<T> checker) {
        final Processor processor = new Processor<String, Long>() {
            @Override
            @SuppressWarnings("unchecked")
            public void init(final ProcessorContext context) {
                final T store = (T) context.getStateStore(name);
                checker.accept(store);
            }

            @Override
            public void process(final String k, final Long v) {
                //No-op.
            }

            @Override
            public void close() {
                //No-op.
            }
        };

        processor.init(context);
    }

    /** Asserts read-only metadata works but init()/close() are rejected by the facade. */
    private void verifyStoreCannotBeInitializedOrClosed(final StateStore store) {
        assertEquals(STORE_NAME, store.name());
        assertTrue(store.persistent());
        assertTrue(store.isOpen());

        checkThrowsUnsupportedOperation(() -> store.init(null, null), "init()");
        checkThrowsUnsupportedOperation(store::close, "close()");
    }

    /** Fails unless {@code check} throws {@link UnsupportedOperationException}. */
    private void checkThrowsUnsupportedOperation(final Runnable check, final String name) {
        try {
            check.run();
            fail(name + " should throw exception");
        } catch (final UnsupportedOperationException e) {
            //ignore.
        }
    }
}
| |
/***************************************************************************
* Copyright 2022 Kieker Project (http://kieker-monitoring.net)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************/
package kieker.common.record.flow.trace.operation;
import java.nio.BufferOverflowException;
import kieker.common.exception.RecordInstantiationException;
import kieker.common.record.flow.trace.operation.AfterOperationEvent;
import kieker.common.record.io.IValueDeserializer;
import kieker.common.record.io.IValueSerializer;
import kieker.common.record.flow.IExceptionRecord;
/**
* @author Jan Waller
* API compatibility: Kieker 1.15.0
*
* @since 1.5
*/
/**
 * Monitoring record for a failed operation exit: an {@code AfterOperationEvent}
 * extended with the exception cause that terminated the operation.
 */
public class AfterOperationFailedEvent extends AfterOperationEvent implements IExceptionRecord {

    private static final long serialVersionUID = -235912152331253573L;

    /** Descriptive definition of the serialization size of the record. */
    public static final int SIZE = TYPE_SIZE_LONG // IEventRecord.timestamp
            + TYPE_SIZE_LONG // ITraceRecord.traceId
            + TYPE_SIZE_INT // ITraceRecord.orderIndex
            + TYPE_SIZE_STRING // IOperationSignature.operationSignature
            + TYPE_SIZE_STRING // IClassSignature.classSignature
            + TYPE_SIZE_STRING; // IExceptionRecord.cause

    /** Property types in serialization order. */
    public static final Class<?>[] TYPES = {
        long.class, // IEventRecord.timestamp
        long.class, // ITraceRecord.traceId
        int.class, // ITraceRecord.orderIndex
        String.class, // IOperationSignature.operationSignature
        String.class, // IClassSignature.classSignature
        String.class, // IExceptionRecord.cause
    };

    /** Property names in serialization order. */
    public static final String[] VALUE_NAMES = {
        "timestamp",
        "traceId",
        "orderIndex",
        "operationSignature",
        "classSignature",
        "cause",
    };

    /** Default value substituted when no cause is supplied. */
    public static final String CAUSE = "";

    /** The exception description; never {@code null}. */
    private final String cause;

    /**
     * Creates a new instance of this class using the given parameters.
     *
     * @param timestamp
     *            timestamp
     * @param traceId
     *            traceId
     * @param orderIndex
     *            orderIndex
     * @param operationSignature
     *            operationSignature
     * @param classSignature
     *            classSignature
     * @param cause
     *            cause; replaced by {@link #CAUSE} if {@code null}
     */
    public AfterOperationFailedEvent(final long timestamp, final long traceId, final int orderIndex, final String operationSignature, final String classSignature, final String cause) {
        super(timestamp, traceId, orderIndex, operationSignature, classSignature);
        this.cause = cause != null ? cause : CAUSE;
    }

    /**
     * Reconstructs a record from its serialized form.
     *
     * @param deserializer
     *            The deserializer to use
     * @throws RecordInstantiationException
     *             when the record could not be deserialized
     */
    public AfterOperationFailedEvent(final IValueDeserializer deserializer) throws RecordInstantiationException {
        super(deserializer);
        this.cause = deserializer.getString();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void serialize(final IValueSerializer serializer) throws BufferOverflowException {
        // Field order must match TYPES / VALUE_NAMES.
        serializer.putLong(this.getTimestamp());
        serializer.putLong(this.getTraceId());
        serializer.putInt(this.getOrderIndex());
        serializer.putString(this.getOperationSignature());
        serializer.putString(this.getClassSignature());
        serializer.putString(this.getCause());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Class<?>[] getValueTypes() {
        return TYPES; // NOPMD
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String[] getValueNames() {
        return VALUE_NAMES; // NOPMD
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int getSize() {
        return SIZE;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean equals(final Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || obj.getClass() != this.getClass()) {
            return false;
        }
        final AfterOperationFailedEvent other = (AfterOperationFailedEvent) obj;
        return this.getLoggingTimestamp() == other.getLoggingTimestamp()
                && this.getTimestamp() == other.getTimestamp()
                && this.getTraceId() == other.getTraceId()
                && this.getOrderIndex() == other.getOrderIndex()
                && this.getOperationSignature().equals(other.getOperationSignature())
                && this.getClassSignature().equals(other.getClassSignature())
                && this.getCause().equals(other.getCause());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int hashCode() {
        int code = 0;
        code += (int) this.getTimestamp();
        code += (int) this.getTraceId();
        code += this.getOrderIndex();
        code += this.getOperationSignature().hashCode();
        code += this.getClassSignature().hashCode();
        code += this.getCause().hashCode();
        return code;
    }

    public final String getCause() {
        return this.cause;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString() {
        final StringBuilder text = new StringBuilder("AfterOperationFailedEvent: ");
        text.append("timestamp = ").append(this.getTimestamp()).append(", ");
        text.append("traceId = ").append(this.getTraceId()).append(", ");
        text.append("orderIndex = ").append(this.getOrderIndex()).append(", ");
        text.append("operationSignature = ").append(this.getOperationSignature()).append(", ");
        text.append("classSignature = ").append(this.getClassSignature()).append(", ");
        text.append("cause = ").append(this.getCause()).append(", ");
        return text.toString();
    }
}
| |
/*
* $Id: JSONCleaner.java 1099157 2011-05-03 17:53:55Z jogep $
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts2.json;
import java.util.Iterator;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.HashMap;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import com.opensymphony.xwork2.util.TextParseUtil;
import com.opensymphony.xwork2.util.WildcardUtil;
import com.opensymphony.xwork2.util.logging.Logger;
import com.opensymphony.xwork2.util.logging.LoggerFactory;
/**
* <p>Isolate the process of cleaning JSON data from the Interceptor class
* itself.</p>
*
* <p>The allowed and blocked wildcard patterns, combined with
* defaultBlock, let you filter out values that should not be injected, in
* the same way that ParameterFilterInterceptor does. Note that you can
* only remove values from a Map. Removing values from a List is dangerous
* because it could change the meaning of the data!</p>
*/
/**
 * <p>Isolate the process of cleaning JSON data from the Interceptor class
 * itself.</p>
 *
 * <p>The allowed and blocked wildcard patterns, combined with
 * defaultBlock, let you filter out values that should not be injected, in
 * the same way that ParameterFilterInterceptor does. Note that you can
 * only remove values from a Map. Removing values from a List is dangerous
 * because it could change the meaning of the data!</p>
 */
public abstract class JSONCleaner {

    private static final Logger LOG = LoggerFactory.getLogger(JSONCleaner.class);

    /** A compiled wildcard pattern together with its allow/block verdict. */
    public static class Filter
    {
        public Pattern pattern;
        public boolean allow;

        public Filter(String pattern, boolean allow)
        {
            this.pattern = WildcardUtil.compileWildcardPattern(pattern);
            this.allow = allow;
        }
    }

    private boolean defaultBlock = false;
    private Collection<String> allowed;
    private Collection<String> blocked;
    private Map<String, Filter> includesExcludesMap;

    /**
     * Recursively cleans a decoded JSON value: lists and maps are traversed,
     * leaves are delegated to {@link #cleanValue(String, Object)}.
     *
     * @param ognlPrefix OGNL path of {@code data} within the root object
     * @param data the value to clean; may be null
     * @return the cleaned value (possibly the same instance, mutated in place)
     * @throws JSONException if a leaf value is rejected by {@code cleanValue}
     */
    public Object clean(String ognlPrefix, Object data) throws JSONException {
        if (data == null) {
            return null;
        } else if (data instanceof List) {
            return cleanList(ognlPrefix, data);
        } else if (data instanceof Map) {
            return cleanMap(ognlPrefix, data);
        } else {
            return cleanValue(ognlPrefix, data);
        }
    }

    /**
     * Cleans each element of a list in place. Elements are never removed,
     * only replaced, because removal would shift the meaning of the data.
     */
    protected Object cleanList(String ognlPrefix, Object data) throws JSONException {
        List list = (List) data;
        int count = list.size();
        for (int i = 0; i < count; i++) {
            list.set(i, clean(ognlPrefix + "[" + i + "]", list.get(i)));
        }
        return list;
    }

    /**
     * Cleans a map in place: blocked keys are removed entirely, allowed
     * values are cleaned recursively.
     */
    protected Object cleanMap(String ognlPrefix, Object data) throws JSONException {
        Map map = (Map) data;
        Iterator iter = map.entrySet().iterator();
        while (iter.hasNext()) {
            Map.Entry e = (Map.Entry) iter.next();
            String key = (ognlPrefix.length() > 0 ? ognlPrefix + "." : "") + e.getKey();
            if (allow(key)) {
                e.setValue(clean(key, e.getValue()));
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("blocked: " + key);
                }
                iter.remove();
            }
        }
        return map;
    }

    /**
     * Hook for subclasses to clean (or reject) a single leaf value.
     *
     * @param ognlName OGNL path of the value within the root object
     * @param data the leaf value
     * @return the cleaned value
     * @throws JSONException if the value must be rejected
     */
    protected abstract Object cleanValue(String ognlName, Object data) throws JSONException;

    /**
     * Decides whether the given OGNL path may be injected. The last matching
     * filter wins (the map is a TreeMap, so more specific patterns, which
     * sort later, override more general ones).
     */
    private boolean allow(String ognl) {
        Map<String, Filter> includesExcludesMap = getIncludesExcludesMap();
        boolean allow = !isDefaultBlock();
        if (includesExcludesMap != null) {
            // Iterate entries directly instead of keySet() + a get() per key.
            for (Map.Entry<String, Filter> entry : includesExcludesMap.entrySet()) {
                Filter f = entry.getValue();
                if (f.pattern.matcher(ognl).matches()) {
                    allow = f.allow;
                }
            }
        }
        return allow;
    }

    /**
     * @return the compiled list of includes and excludes
     */
    public Map<String, Filter> getIncludesExcludesMap() {
        if (allowed == null && blocked == null) {
            return includesExcludesMap;
        }
        if (includesExcludesMap == null) {
            includesExcludesMap = new TreeMap<String, Filter>();

            Map<String, Boolean> existingExpr = new HashMap<String, Boolean>();
            Map<String, Map<String, String>> includePatternData = JSONUtil.getIncludePatternData();
            String splitPattern = includePatternData.get(JSONUtil.SPLIT_PATTERN).get(JSONUtil.WILDCARD_PATTERN);
            String joinString = includePatternData.get(JSONUtil.JOIN_STRING).get(JSONUtil.WILDCARD_PATTERN);
            String arrayBegin = includePatternData.get(JSONUtil.ARRAY_BEGIN_STRING).get(JSONUtil.WILDCARD_PATTERN);
            String arrayEnd = includePatternData.get(JSONUtil.ARRAY_END_STRING).get(JSONUtil.WILDCARD_PATTERN);

            if (allowed != null) {
                for (String a : allowed) {
                    // Compile a pattern for each level of the object hierarchy
                    // so cleanMap() won't short-circuit too early.
                    // Use a StringBuilder instead of String += inside the loop.
                    StringBuilder expr = new StringBuilder();
                    for (String piece : a.split(splitPattern)) {
                        if (expr.length() > 0) {
                            expr.append(joinString);
                        }
                        expr.append(piece);

                        String exprStr = expr.toString();
                        if (!existingExpr.containsKey(exprStr)) {
                            existingExpr.put(exprStr, Boolean.TRUE);

                            String s = exprStr;
                            if (piece.endsWith(arrayEnd)) {
                                s = exprStr.substring(0, exprStr.lastIndexOf(arrayBegin));
                            }

                            if (s.length() > 0) {
                                includesExcludesMap.put(s, new Filter(s, true));
                                if (LOG.isDebugEnabled()) {
                                    LOG.debug("Adding include wildcard expression: " + s);
                                }
                            }
                        }
                    }
                }
            }

            if (blocked != null) {
                for (String b : blocked) {
                    includesExcludesMap.put(b, new Filter(b, false));
                }
            }
        }

        return includesExcludesMap;
    }

    /**
     * Allow external caching of the compiled result.
     *
     * @param map the compiled list of includes and excludes
     */
    public void setIncludesExcludesMap(Map<String, Filter> map) {
        includesExcludesMap = map;
    }

    /**
     * @return value of defaultBlock
     */
    public boolean isDefaultBlock() {
        return defaultBlock;
    }

    /**
     * @param defaultExclude The defaultExclude to set.
     */
    public void setDefaultBlock(boolean defaultExclude) {
        this.defaultBlock = defaultExclude;
    }

    /**
     * @return list of blocked wildcard patterns
     */
    public Collection<String> getBlockedCollection() {
        return blocked;
    }

    /**
     * @param blocked The blocked to set.
     */
    public void setBlockedCollection(Collection<String> blocked) {
        this.blocked = blocked;
    }

    /**
     * @param blocked The blocked paramters as comma separated String.
     */
    public void setBlocked(String blocked) {
        setBlockedCollection(asCollection(blocked));
    }

    /**
     * @return list of allowed wildcard patterns
     */
    public Collection<String> getAllowedCollection() {
        return allowed;
    }

    /**
     * @param allowed The allowed to set.
     */
    public void setAllowedCollection(Collection<String> allowed) {
        this.allowed = allowed;
    }

    /**
     * @param allowed The allowed paramters as comma separated String.
     */
    public void setAllowed(String allowed) {
        setAllowedCollection(asCollection(allowed));
    }

    /**
     * Return a collection from the comma delimited String.
     *
     * @param commaDelim the comma delimited String.
     * @return A collection from the comma delimited String. Returns <tt>null</tt> if the string is empty.
     */
    private Collection<String> asCollection(String commaDelim) {
        if (commaDelim == null || commaDelim.trim().length() == 0) {
            return null;
        }
        return TextParseUtil.commaDelimitedStringToSet(commaDelim);
    }
}
| |
/*
* Copyright 2002 Felix Pahl. All rights reserved.
* Use is subject to license terms.
*/
package info.joriki.awt.image.jpeg;
// Forward and inverse 2-D DCT used by the JPEG codec.
// NOTE(review): DCTlength is inherited (presumably from JPEGSpeaker and
// presumably 8, the JPEG block size) -- confirm against that interface.
class JPEGCosineTransform implements JPEGSpeaker
{
// Cosine constants for the factored DCT butterflies: cos(k*pi/DCTlength)
// for k = 1, 2, 3 (named after the scaled-coefficient convention c2/c4/c6).
final static float c2 = (float) Math.cos (1*Math.PI/DCTlength);
final static float c4 = (float) Math.cos (2*Math.PI/DCTlength);
final static float c6 = (float) Math.cos (3*Math.PI/DCTlength);
// Forward 2-D DCT, in place: pass 0 transforms the rows (step = DCTlength,
// consecutive offsets), pass 1 the columns (step = 1, offsets DCTlength apart).
// The per-row flowgraph appears to follow the factored float DCT used in
// jpeglib's jfdctflt.c (the "fig 4-8" comment refers to that algorithm's
// source text) -- even and odd halves are processed separately, then
// renormalize() rescales the coefficients.
static void transform (float [] data)
{
float tmp0,tmp1,tmp2,tmp3,tmp4,tmp5,tmp6,tmp7;
float tmp10,tmp11,tmp12,tmp13;
float z1,z2,z3,z4,z5;
float z11,z13;
int i0,i1,i2,i3,i4,i5,i6,i7;
for (int pass = 0;pass < 2;pass++)
{
int step = pass == 0 ? DCTlength : 1;
int incr = pass == 0 ? 1 : DCTlength;
int lim = DCTlength * incr;
for (int off = 0;off < lim;off += incr)
{
// i0..i7 index the 8 samples of the current row/column, spaced by `step`.
i7 = step +
(i6 = step +
(i5 = step +
(i4 = step +
(i3 = step +
(i2 = step +
(i1 = step +
(i0 = off)))))));
// Initial butterfly: sums into tmp0..3, differences into tmp4..7.
tmp0 = data [i0] + data [i7];
tmp7 = data [i0] - data [i7];
tmp1 = data [i1] + data [i6];
tmp6 = data [i1] - data [i6];
tmp2 = data [i2] + data [i5];
tmp5 = data [i2] - data [i5];
tmp3 = data [i3] + data [i4];
tmp4 = data [i3] - data [i4];
/* Even part */
tmp10 = tmp0 + tmp3;/* phase 2 */
tmp13 = tmp0 - tmp3;
tmp11 = tmp1 + tmp2;
tmp12 = tmp1 - tmp2;
data [i0] = tmp10 + tmp11; /* phase 3 */
data [i4] = tmp10 - tmp11;
z1 = c4 * (tmp12 + tmp13);
data [i2] = tmp13 + z1;/* phase 5 */
data [i6] = tmp13 - z1;
/* Odd part */
tmp10 = tmp4 + tmp5;/* phase 2 */
tmp11 = tmp5 + tmp6;
tmp12 = tmp6 + tmp7;
/* The rotator is modified from fig 4-8 to avoid extra negations. */
z5 = c6 * (tmp10 - tmp12);
z2 = (c2 - c6) * tmp10 + z5;
z4 = (c2 + c6) * tmp12 + z5;
z3 = c4 * tmp11;
z11 = tmp7 + z3;/* phase 5 */
z13 = tmp7 - z3;
data [i5] = z13 + z2;/* phase 6 */
data [i3] = z13 - z2;
data [i1] = z11 + z4;
data [i7] = z11 - z4;
}
}
// Undo the scaling implied by the factored algorithm (defined elsewhere in this class).
renormalize (data);
}
static void inverseTransform (float [] data)
{
float tmp0,tmp1,tmp2,tmp3,tmp4,tmp5,tmp6,tmp7;
float tmp10,tmp11,tmp12,tmp13;
float z10,z11,z12,z13;
float delta;
int i0,i1,i2,i3,i4,i5,i6,i7;
for (int pass = 0;pass < 2;pass++)
{
int step = pass == 0 ? DCTlength : 1;
int incr = pass == 0 ? 1 : DCTlength;
int lim = DCTlength * incr;
for (int off = 0;off < lim;off += incr)
{
i7 = step +
(i6 = step +
(i5 = step +
(i4 = step +
(i3 = step +
(i2 = step +
(i1 = step +
(i0 = off)))))));
/* Even part */
tmp0 = data [i0];
tmp1 = data [i2];
tmp2 = data [i4];
tmp3 = data [i6];
tmp10 = tmp0 + tmp2;/* phase 3 */
tmp11 = tmp0 - tmp2;
tmp13 = tmp1 + tmp3;/* phases 5-3 */
tmp12 = (2*c4) * (tmp1 - tmp3) - tmp13;
tmp0 = tmp10 + tmp13;/* phase 2 */
tmp3 = tmp10 - tmp13;
tmp1 = tmp11 + tmp12;
tmp2 = tmp11 - tmp12;
/* Odd part */
tmp4 = data [i1];
tmp5 = data [i3];
tmp6 = data [i5];
tmp7 = data [i7];
z13 = tmp6 + tmp5;/* phase 6 */
z10 = tmp6 - tmp5;
z11 = tmp4 + tmp7;
z12 = tmp4 - tmp7;
tmp7 = z11 + z13;/* phase 5 */
tmp11 = (2*c4) * (z11 - z13);
delta = (2*c2) * (z10 + z12);
tmp10 = (2*(c2 - c6)) * z12 - delta;
tmp12 = (-2*(c2+c6)) * z10 + delta;
tmp6 = tmp12 - tmp7;/* phase 2 */
tmp5 = tmp11 - tmp6;
tmp4 = tmp10 + tmp5;
data [i0] = tmp0 + tmp7;
data [i7] = tmp0 - tmp7;
data [i1] = tmp1 + tmp6;
data [i6] = tmp1 - tmp6;
data [i2] = tmp2 + tmp5;
data [i5] = tmp2 - tmp5;
data [i4] = tmp3 + tmp4;
data [i3] = tmp3 - tmp4;
}
}
renormalize (data);
}
static void transform (float [] inData,float [] outData,int inOff,int inScan)
{
float tmp0,tmp1,tmp2,tmp3,tmp4,tmp5,tmp6,tmp7;
float tmp10,tmp11,tmp12,tmp13;
float z1,z2,z3,z4,z5;
float z11,z13;
int i0,i1,i2,i3,i4,i5,i6,i7;
for (int pass = 0;pass < 2;pass++)
{
float [] srcData = inData;
float [] destData = pass == 0 ? inData : outData;
int step = pass == 0 ? inScan : 1;
int inIncr = pass == 0 ? 1 : inScan;
int outIncr = pass == 0 ? 1 : DCTlength;
int diff = pass == 0 ? 0 : -inOff;
int diffIncr = outIncr - inIncr;
int lim = inOff + DCTlength * inIncr;
for (int off = inOff;off < lim;off += inIncr,diff += diffIncr)
{
i7 = step +
(i6 = step +
(i5 = step +
(i4 = step +
(i3 = step +
(i2 = step +
(i1 = step +
(i0 = off)))))));
tmp0 = srcData [i0] + srcData [i7];
tmp7 = srcData [i0] - srcData [i7];
tmp1 = srcData [i1] + srcData [i6];
tmp6 = srcData [i1] - srcData [i6];
tmp2 = srcData [i2] + srcData [i5];
tmp5 = srcData [i2] - srcData [i5];
tmp3 = srcData [i3] + srcData [i4];
tmp4 = srcData [i3] - srcData [i4];
/* Even part */
tmp10 = tmp0 + tmp3;/* phase 2 */
tmp13 = tmp0 - tmp3;
tmp11 = tmp1 + tmp2;
tmp12 = tmp1 - tmp2;
destData [i0 + diff] = tmp10 + tmp11; /* phase 3 */
destData [i4 + diff] = tmp10 - tmp11;
z1 = c4 * (tmp12 + tmp13);
destData [i2 + diff] = tmp13 + z1;/* phase 5 */
destData [i6 + diff] = tmp13 - z1;
/* Odd part */
tmp10 = tmp4 + tmp5;/* phase 2 */
tmp11 = tmp5 + tmp6;
tmp12 = tmp6 + tmp7;
/* The rotator is modified from fig 4-8 to avoid extra negations. */
z5 = c6 * (tmp10 - tmp12);
z2 = (c2 - c6) * tmp10 + z5;
z4 = (c2 + c6) * tmp12 + z5;
z3 = c4 * tmp11;
z11 = tmp7 + z3;/* phase 5 */
z13 = tmp7 - z3;
destData [i5 + diff] = z13 + z2;/* phase 6 */
destData [i3 + diff] = z13 - z2;
destData [i1 + diff] = z11 + z4;
destData [i7 + diff] = z11 - z4;
}
}
renormalize (outData);
}
static void inverseTransform (float [] inData,float [] outData,int outOff,int outScan)
{
renormalize (inData);
float tmp0,tmp1,tmp2,tmp3,tmp4,tmp5,tmp6,tmp7;
float tmp10,tmp11,tmp12,tmp13;
float z10,z11,z12,z13;
float delta;
int i0,i1,i2,i3,i4,i5,i6,i7;
for (int pass = 0;pass < 2;pass++)
{
float [] srcData = inData;
float [] destData = pass == 0 ? inData : outData;
int step = pass == 0 ? DCTlength : 1;
int inIncr = pass == 0 ? 1 : DCTlength;
int outIncr = pass == 0 ? 1 : outScan;
int diff = pass == 0 ? 0 : outOff;
int diffIncr = outIncr - inIncr;
int lim = DCTlength * inIncr;
for (int off = 0;off < lim;off += inIncr,diff += diffIncr)
{
i7 = step +
(i6 = step +
(i5 = step +
(i4 = step +
(i3 = step +
(i2 = step +
(i1 = step +
(i0 = off)))))));
/* Even part */
tmp0 = srcData [i0];
tmp1 = srcData [i2];
tmp2 = srcData [i4];
tmp3 = srcData [i6];
tmp10 = tmp0 + tmp2;/* phase 3 */
tmp11 = tmp0 - tmp2;
tmp13 = tmp1 + tmp3;/* phases 5-3 */
tmp12 = (2*c4) * (tmp1 - tmp3) - tmp13;
tmp0 = tmp10 + tmp13;/* phase 2 */
tmp3 = tmp10 - tmp13;
tmp1 = tmp11 + tmp12;
tmp2 = tmp11 - tmp12;
/* Odd part */
tmp4 = srcData [i1];
tmp5 = srcData [i3];
tmp6 = srcData [i5];
tmp7 = srcData [i7];
z13 = tmp6 + tmp5;/* phase 6 */
z10 = tmp6 - tmp5;
z11 = tmp4 + tmp7;
z12 = tmp4 - tmp7;
tmp7 = z11 + z13;/* phase 5 */
tmp11 = (2*c4) * (z11 - z13);
delta = (2*c2) * (z10 + z12);
tmp10 = (2*(c2 - c6)) * z12 - delta;
tmp12 = (-2*(c2+c6)) * z10 + delta;
tmp6 = tmp12 - tmp7;/* phase 2 */
tmp5 = tmp11 - tmp6;
tmp4 = tmp10 + tmp5;
destData [i0 + diff] = tmp0 + tmp7;
destData [i7 + diff] = tmp0 - tmp7;
destData [i1 + diff] = tmp1 + tmp6;
destData [i6 + diff] = tmp1 - tmp6;
destData [i2 + diff] = tmp2 + tmp5;
destData [i5 + diff] = tmp2 - tmp5;
destData [i4 + diff] = tmp3 + tmp4;
destData [i3 + diff] = tmp3 - tmp4;
}
}
}
final static double norm = 1. / DCTlength;
final static void renormalize (float [] data)
{
for (int i = 0;i < DCTsize;i++)
data [i] *= norm;
}
}
| |
package exm.stc.frontend;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map.Entry;
import exm.stc.common.Logging;
import exm.stc.common.exceptions.STCRuntimeError;
import exm.stc.common.lang.Arg;
import exm.stc.common.lang.RefCounting;
import exm.stc.common.lang.TaskProp.TaskPropKey;
import exm.stc.common.lang.TaskProp.TaskProps;
import exm.stc.common.lang.Types;
import exm.stc.common.lang.Types.FunctionType;
import exm.stc.common.lang.Types.RefType;
import exm.stc.common.lang.Types.StructType;
import exm.stc.common.lang.Types.StructType.StructField;
import exm.stc.common.lang.Types.Type;
import exm.stc.common.lang.Types.Typed;
import exm.stc.common.lang.Var;
import exm.stc.common.lang.Var.DefType;
import exm.stc.common.lang.WaitVar;
/**
* This class contains logic for translating frontend variables to
* middle/backend variables. For example, frontend variables will have
* additional logical type information that is not relevant to the
* actual implementation or optimisation. We also have the option
* of representing the same logical variable in different ways.
*/
public class VarRepr {
/**
 * Cache of frontend type -> backend type conversion results, to avoid
 * recomputing.
 * NOTE(review): plain static HashMap with no synchronization -- assumes
 * single-threaded compilation; confirm before sharing across threads.
 */
private static HashMap<Type, Type> conversionCache
= new HashMap<Type, Type>();
/**
 * Convert a frontend variable into its backend representation.
 * @param frontendVar non-null frontend variable
 * @return the same variable with its type replaced by the backend type
 */
public static Var backendVar(Var frontendVar) {
assert(frontendVar != null);
Type backendT = backendType(frontendVar.type(), true);
return frontendVar.substituteType(backendT);
}
/** Varargs convenience wrapper around {@link #backendVars(List)}. */
public static List<Var> backendVars(Var ...frontendVars) {
return backendVars(Arrays.asList(frontendVars));
}
/** Convert a list of frontend variables to backend variables. */
public static List<Var> backendVars(List<Var> frontendVars) {
ArrayList<Var> result = new ArrayList<Var>(frontendVars.size());
for (Var v: frontendVars) {
result.add(backendVar(v));
}
return result;
}
/** Convert a list of frontend wait variables to backend wait variables. */
public static List<WaitVar> backendWaitVars(List<WaitVar> frontendVars) {
ArrayList<WaitVar> result = new ArrayList<WaitVar>(frontendVars.size());
for (WaitVar v: frontendVars) {
result.add(backendWaitVar(v));
}
return result;
}
/** Convert a frontend variable and wrap the result as an Arg. */
public static Arg backendArg(Var frontendVar) {
return backendVar(frontendVar).asArg();
}
/** Convert a wait variable, preserving its explicitness flag. */
public static WaitVar backendWaitVar(WaitVar frontendVar) {
return new WaitVar(backendVar(frontendVar.var),
frontendVar.explicit);
}
/**
 * Convert an argument to backend representation; a null argument is an
 * error here (see the two-argument overload to pass nulls through).
 */
public static Arg backendArg(Arg frontendArg) {
return backendArg(frontendArg, false);
}
/**
 * Convert an argument to backend representation.
 * @param frontendArg the argument to convert (variables are converted;
 *        constants pass through unchanged)
 * @param passThroughNulls if true, null input yields null output
 * @throws STCRuntimeError if frontendArg is null and passThroughNulls
 *         is false
 */
public static Arg backendArg(Arg frontendArg,
boolean passThroughNulls) {
if (frontendArg == null) {
if (passThroughNulls) {
return null;
} else {
throw new STCRuntimeError("argument was null");
}
}
if (frontendArg.isVar()) {
return backendArg(frontendArg.getVar());
} else {
// Constants don't change
return frontendArg;
}
}
/** Varargs convenience wrapper around {@link #backendArgs(List)}. */
public static List<Arg> backendArgs(Arg ...frontendArgs) {
return backendArgs(Arrays.asList(frontendArgs));
}
/** Convert a list of arguments to backend representation. */
public static List<Arg> backendArgs(List<Arg> frontendArgs) {
ArrayList<Arg> result = new ArrayList<Arg>(frontendArgs.size());
for (Arg v: frontendArgs) {
result.add(backendArg(v));
}
return result;
}
/** Convert all argument values of a task property map. */
public static TaskProps backendProps(TaskProps props) {
TaskProps res = new TaskProps();
for (Entry<TaskPropKey, Arg> e: props.entrySet()) {
res.put(e.getKey(), backendArg(e.getValue()));
}
return res;
}
/**
 * Convert a frontend logical type used for typechecking and user-facing
 * messages to a backend type used for implementation
 * @param type
 * @param checkInstantiate if true, expect to be able to instantiate type
 * @return
 */
public static Type backendType(Type type, boolean checkInstantiate) {
// Remove any subtype info, etc
return backendTypeInternal(type.getImplType(), checkInstantiate);
}
/**
 * Internal backend type
 * @param type a type that has had implType applied already
 * @param checkInstantiate if true, expect to be able to instantiate type
 * @return
 */
private static Type backendTypeInternal(Type type,
boolean checkInstantiate) {
Type originalType = type;
Type lookup = conversionCache.get(type);
if (lookup != null) {
return lookup;
}
if (Types.isContainer(type) || Types.isContainerRef(type) ||
Types.isContainerLocal(type)) {
Type frontendElemType = Types.containerElemType(type);
Type backendElemType = backendTypeInternal(frontendElemType,
checkInstantiate);
// NOTE(review): if the element type changed but does not need to be
// stored as a ref, the converted element type is discarded here --
// unlike the Ref branch below, which substitutes on any change.
// Presumably an invariant makes that case impossible; confirm.
if (storeRefInContainer(backendElemType)) {
type = Types.substituteElemType(type, new RefType(backendElemType, true));
}
} else if (Types.isRef(type)) {
Type frontendDerefT = type.memberType();
Type backendDerefT = backendTypeInternal(frontendDerefT,
checkInstantiate);
if (!frontendDerefT.equals(backendDerefT)) {
type = new RefType(backendDerefT, Types.isMutableRef(type));
}
} else if (Types.isStruct(type) || Types.isStructLocal(type)) {
type = backendStructType((StructType)type, checkInstantiate);
}
assert(!checkInstantiate || type.isConcrete()) :
"Cannot instantiate type " + type;
// Guard so the message string is not concatenated on every conversion
// when trace logging is off -- this is a hot path (the cache above
// exists for that reason).
if (Logging.getSTCLogger().isTraceEnabled()) {
Logging.getSTCLogger().trace("Type conversion frontend => backend: " +
originalType + " to backend " + type);
}
conversionCache.put(originalType, type);
return type;
}
/**
 * Convert struct type.
 * Retain name, but convert types of fields
 * @param frontend
 * @return
 */
private static Type backendStructType(StructType frontend,
boolean checkInstantiate) {
List<StructField> backendFields = new ArrayList<StructField>();
for (StructField frontendF: frontend.fields()) {
Type fieldT = backendTypeInternal(frontendF.type().getImplType(),
checkInstantiate);
if (storeRefInStruct(fieldT)) {
// Need to store as ref to separate data
fieldT = new RefType(fieldT, true);
}
backendFields.add(new StructField(fieldT, frontendF.name()));
}
return new StructType(frontend.isLocal(), frontend.typeName(),
backendFields);
}
/**
 * Whether to store a container value as a reference to data elsewhere
 * @param type
 * @return
 */
public static boolean storeRefInContainer(Typed type) {
return storeRefInCompound(type, CompoundType.CONTAINER);
}
/** Whether to store a struct field as a reference to data elsewhere. */
public static boolean storeRefInStruct(Typed type) {
return storeRefInCompound(type, CompoundType.STRUCT);
}
/** Kind of compound data structure a member is being stored into. */
private static enum CompoundType {
STRUCT,
CONTAINER,
}
/**
 * Decide whether a member of the given compound must be stored as a
 * reference to a separate datum rather than inline.
 */
private static boolean storeRefInCompound(Typed type,
CompoundType compound) {
if (Types.isFile(type) || Types.isStruct(type)) {
if (compound == CompoundType.CONTAINER) {
// TODO: would make sense to store directly, but don't have capacity to
// init/modify individual fields currently in container
return true;
} else {
assert(compound == CompoundType.STRUCT);
// Store structs, etc directly in struct
return false;
}
} else if (compound == CompoundType.CONTAINER && isBig(type)) {
// Want to be able to distribute data if many copies stored
return true;
} else if (RefCounting.trackWriteRefCount(type.type(), DefType.LOCAL_USER)) {
// Need to track write refcount separately to manage closing,
// so must store as ref to separate datum
return true;
} else {
return false;
}
}
/**
 * Data that is likely to be big
 * @param type
 */
private static boolean isBig(Typed type) {
return Types.isBlob(type) || Types.isContainer(type);
}
/** Convert a function type by converting its input and output types. */
public static FunctionType backendFnType(FunctionType frontendType) {
Type lookup = conversionCache.get(frontendType);
if (lookup != null) {
return (FunctionType)lookup;
}
// translate input and output arg types
List<Type> backendInputs = new ArrayList<Type>();
List<Type> backendOutputs = new ArrayList<Type>();
for (Type in: frontendType.getInputs()) {
backendInputs.add(backendType(in, false));
}
for (Type out: frontendType.getOutputs()) {
backendOutputs.add(backendType(out, false));
}
FunctionType result = new FunctionType(backendInputs, backendOutputs,
frontendType.hasVarargs(), frontendType.getTypeVars());
conversionCache.put(frontendType, result);
return result;
}
/**
 * Storage representation of a member inside the given kind of compound:
 * either the member type itself, or a reference to it.
 */
public static Type elemRepr(Type memberType, CompoundType c,
boolean mutable) {
if (storeRefInCompound(memberType, c)) {
return new RefType(memberType, mutable);
} else {
return memberType;
}
}
/**
 * The type of internal storage used for a Swift type when stored in a
 * container.
 *
 * @param memberType
 * the type of array members for the array being dereferenced
 * @return
 */
public static Type containerElemRepr(Type memberType, boolean mutable) {
return elemRepr(memberType, CompoundType.CONTAINER, mutable);
}
/**
 * The type of internal storage used for a Swift type when stored in a
 * struct.
 *
 * @param memberType the type of struct member
 * @return
 */
public static Type structElemRepr(Type memberType, boolean mutable) {
return elemRepr(memberType, CompoundType.STRUCT, mutable);
}
}
| |
/**
* File MCSMGMSolver.java
*
* Copyright 2014 Coen van Leeuwen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package nl.coenvl.sam.solvers;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import nl.coenvl.sam.MailMan;
import nl.coenvl.sam.agents.Agent;
import nl.coenvl.sam.messages.HashMessage;
import nl.coenvl.sam.messages.Message;
import nl.coenvl.sam.variables.AssignmentMap;
import nl.coenvl.sam.variables.CostMap;
import nl.coenvl.sam.variables.DiscreteVariable;
/**
* MCSMGMSolver
*
* @author leeuwencjv
* @version 0.1
* @since 17 okt. 2014
*
*/
public class MCSMGMSolver<V> extends MGMSolver<V> {
// The four phases of one MCSMGM round; tick() advances one phase per call.
private enum State {
SENDVALUE,
SENDIMPACT,
SENDGAIN,
PICKVALUE
}
// Message type tag for impact ("delta") messages exchanged with neighbors.
private static final String IMPACT_VALUE = "MCSMGM:ImpactValue";
// Add keeping track of neighbor impacts
private State algoState;
// Latest impact value received from each constrained neighbor variable.
private final CostMap<UUID> neighborImpacts;
// Local/remote cost bookkeeping keyed by (neighbor, myValue, hisValue).
private final Map<ConstraintKey, ConstraintCost> constraintChanges;
public MCSMGMSolver(final Agent<DiscreteVariable<V>, V> agent) {
super(agent);
this.neighborImpacts = new CostMap<>();
this.constraintChanges = new HashMap<>();
this.algoState = State.SENDVALUE;
}
/**
 * Handles regular MGM messages via the superclass and additionally
 * records impact ("delta") values sent by neighbors.
 */
@Override
public synchronized void push(final Message m) {
super.push(m);
if (m.getType().equals(MCSMGMSolver.IMPACT_VALUE)) {
this.neighborImpacts.put(m.getSource(), (Double) m.get("delta"));
}
}
/**
 * Advances the algorithm one phase per tick:
 * SENDVALUE -> SENDIMPACT -> SENDGAIN -> PICKVALUE -> SENDVALUE -> ...
 */
@Override
public synchronized void tick() {
switch (this.algoState) {
case SENDIMPACT:
this.sendImpact();
this.algoState = State.SENDGAIN;
break;
case SENDGAIN:
this.sendGain();
this.algoState = State.PICKVALUE;
break;
case PICKVALUE:
this.pickValue();
this.algoState = State.SENDVALUE;
break;
default:
case SENDVALUE:
this.sendValue();
this.algoState = State.SENDIMPACT;
break;
}
}
/**
 * Compute the impact of the newly received messages
 */
private void sendImpact() {
for (final UUID target : this.parent.getConstrainedVariableIds()) {
final AssignmentMap<V> pa = new AssignmentMap<>();
pa.setAssignment(this.myVariable, this.myVariable.getValue());
pa.put(target, this.myProblemContext.get(target));
// Compute the cost INCREASE due to the update
final double delta = this.parent.getConstraintForAgent(target).getCostIf(this.myVariable, pa);
// double delta = this.parent.getLocalCostIf(pa);
// See if we have any modified problem
final ConstraintKey key = new ConstraintKey(target,
this.myVariable.getValue(),
this.myProblemContext.get(target));
if (!this.constraintChanges.containsKey(key)) {
this.constraintChanges.put(key, new ConstraintCost());
}
final ConstraintCost r = this.constraintChanges.get(key);
if (r.localCost == null) {
r.localCost = delta;
}
// Inform the neighbors
final HashMessage m = new HashMessage(this.myVariable.getID(), MCSMGMSolver.IMPACT_VALUE);
// Only add delta if we want to propagate back our cost
if (delta > 0) {
m.put("delta", delta);
r.localCost = 0.;
} else {
m.put("delta", 0.0);
}
this.constraintChanges.put(key, r);
MailMan.sendMessage(target, m);
}
}
/**
 * Folds the received neighbor impacts into the local cost, then computes
 * the best local reduction achievable by changing this variable's value
 * and broadcasts it to the neighbors as an MGM LOCAL_REDUCTION message.
 */
private void sendGain() {
// Get current costs (without impact)
this.myProblemContext.setAssignment(this.myVariable, this.myVariable.getValue());
double before = this.parent.getLocalCostIf(this.myProblemContext);
// First process all the received impact messages
for (final UUID a : this.parent.getConstrainedVariableIds()) {
// Can we indeed assume that myProblemContext is still up to date?
final ConstraintKey key = new ConstraintKey(a, this.myVariable.getValue(), this.myProblemContext.get(a));
if (!this.constraintChanges.containsKey(key)) {
this.constraintChanges.put(key, new ConstraintCost());
}
// The remote impact should be in the neighborImpact list
// NOTE(review): if no impact message from 'a' arrived this round,
// neighborImpacts.get(a) may be null and the unboxing below would
// NPE -- confirm message delivery guarantees this cannot happen.
this.constraintChanges.get(key).remoteCost = this.neighborImpacts.get(a);
before += this.neighborImpacts.get(a);
}
// Now compute the best local reduction
double bestCost = before;
V bestAssignment = null;
final AssignmentMap<V> temp = this.myProblemContext.clone();
for (final V assignment : this.myVariable) {
temp.setAssignment(this.myVariable, assignment);
double localCost = 0;
// for (Agent a : this.parent.getNeighborhood()) {
for (final UUID a : this.parent.getConstrainedVariableIds()) {
final ConstraintKey key = new ConstraintKey(a, assignment, temp.get(a));
if (this.constraintChanges.containsKey(key)) {
// Add remote cost of neighbor)
final ConstraintCost r = this.constraintChanges.get(key);
localCost += (r.remoteCost == null ? 0 : r.remoteCost);
}
}
// Although this will influence less and less since temp will become empty
localCost += this.parent.getLocalCostIf(temp);
if (localCost < bestCost) {
bestCost = localCost;
bestAssignment = assignment;
}
}
// Get the REDUCTION after changing the value
this.bestLocalReduction = before - bestCost;
this.bestLocalAssignment = bestAssignment;
final Message lrMsg = new HashMessage(this.myVariable.getID(), MGMSolver.LOCAL_REDUCTION);
lrMsg.put("LR", this.bestLocalReduction);
this.sendToNeighbors(lrMsg);
}
/** Clears all MCSMGM state and restarts the phase cycle at SENDVALUE. */
@Override
public void reset() {
super.reset();
this.neighborImpacts.clear();
this.constraintChanges.clear();
this.algoState = State.SENDVALUE;
}
/**
 * Immutable key identifying one constraint evaluation: the neighbor's id
 * plus both sides' assignments. Used as a HashMap key, hence the
 * hashCode/equals pair below.
 */
private class ConstraintKey {
public final V myValue;
public final V hisValue;
public final UUID neighbor;
public ConstraintKey(final UUID neighbor, final V myValue, final V hisValue) {
this.neighbor = neighbor;
this.myValue = myValue;
this.hisValue = hisValue;
}
/*
 * (non-Javadoc)
 *
 * @see java.lang.Object#hashCode()
 */
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = (prime * result) + ((this.hisValue == null) ? 0 : this.hisValue.hashCode());
result = (prime * result) + ((this.myValue == null) ? 0 : this.myValue.hashCode());
result = (prime * result) + ((this.neighbor == null) ? 0 : this.neighbor.hashCode());
return result;
}
/*
 * (non-Javadoc)
 *
 * @see java.lang.Object#equals(java.lang.Object)
 */
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (this.getClass() != obj.getClass()) {
return false;
}
@SuppressWarnings("unchecked")
final ConstraintKey other = (ConstraintKey) obj;
if (this.hisValue == null) {
if (other.hisValue != null) {
return false;
}
} else if (!this.hisValue.equals(other.hisValue)) {
return false;
}
if (this.myValue == null) {
if (other.myValue != null) {
return false;
}
} else if (!this.myValue.equals(other.myValue)) {
return false;
}
if (this.neighbor == null) {
if (other.neighbor != null) {
return false;
}
} else if (!this.neighbor.equals(other.neighbor)) {
return false;
}
return true;
}
@Override
public String toString() {
return this.myValue + "&" + this.neighbor + "=" + this.hisValue;
}
}
/**
 * Mutable pair of local/remote cost for one ConstraintKey; fields are
 * boxed Doubles so "not yet known" (null) can be distinguished from zero.
 */
private class ConstraintCost {
public Double remoteCost; // Double because I want to check for null
public Double localCost;
public ConstraintCost() {
this.remoteCost = null;
this.localCost = null;
}
@Override
public String toString() {
return "l" + this.localCost + "r" + this.remoteCost;
}
}
@Override
public String toString() {
return this.getClass().getSimpleName() + " for Agent " + this.parent.getName();
}
}
| |
package io.indexr.segment.storage;
import com.google.common.base.Preconditions;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import io.indexr.segment.ColumnType;
import io.indexr.segment.Row;
import io.indexr.segment.RowTraversal;
import io.indexr.segment.SegmentMode;
import io.indexr.segment.SegmentSchema;
import io.indexr.segment.storage.itg.IntegratedSegment;
import io.indexr.util.JsonUtil;
/**
* DPSegment is a segment which only resides on local disk. It is column based, and supports row traversal, ingestion, and merge.
*
* Usually DPSegment is used as an intermediate format. You can create a new one, ingest rows into it, merge it with other segments,
* and maybe check its correctness by iterating over it. After you are done with it, transform it into {@link IntegratedSegment}.
*
* Dir structure:
* <pre>
* .../segment/path/
* metadata.json
* schema.json
* colName0.pack
* colName0.dpn
* colName0.index
* colName1.pack
* colName1.dpn
* colName1.index
* ...
* </pre>
*
* An updating DPSegment cannot be queried, i.e. those methods like {@link #rowTraversal()},
* {@link #column(int)} will return <code>null</code> while {@link #isUpdate()} returns <code>true</code>.
*
* This class is <b>NOT</b> multi-thread safe.
*/
public class DPSegment extends StorageSegment<DPColumn> {
private static final Logger logger = LoggerFactory.getLogger(DPSegment.class);
// Root directory holding metadata.json, schema.json and per-column files.
private final Path segmentPath;
// True while the segment is open for ingestion; query methods return
// nothing in that state (see isColumned()/column()/rowTraversal()).
private boolean update;
// Constructs the segment and wires a factory that creates each DPColumn
// backed by files under segmentPath.
DPSegment(Metadata metadata, Path segmentPath, String name, SegmentSchema schema) throws IOException {
super(metadata.version, metadata.mode, name, schema, metadata.rowCount,
(ci, sc, rc) -> new DPColumn(metadata.version, metadata.mode, ci, sc.name, sc.sqlType, sc.isIndexed, rc, segmentPath));
this.segmentPath = segmentPath;
}
/**
 * A simple structure used to pack metadata of DPSegment, convenient for JSON ser/desr.
 */
private static class Metadata {
@JsonProperty("version")
public int version;
@JsonProperty("mode")
public String modeName;
@JsonIgnore
public SegmentMode mode;
@JsonProperty("rowCount")
public long rowCount;
@JsonCreator
public Metadata(@JsonProperty("version") int version,
@JsonProperty("mode") String mode,
@JsonProperty("rowCount") long rowCount) {
this.version = version;
this.mode = SegmentMode.fromName(mode);
this.modeName = this.mode.name();
this.rowCount = rowCount;
}
}
/** Open an existing segment by path string; metadata must already exist on disk. */
public static DPSegment open(String path) throws IOException {
return open(Paths.get(path));
}
/** Open an existing segment by path; metadata must already exist on disk. */
public static DPSegment open(Path path) throws IOException {
return open(-1, null, path, null, null);
}
/**
 * Open a DPSegment.
 *
 * The newly open segment cannot be updated.
 * You can use {@link #update()} to enable update.
 *
 * @param version The segment version, check {@link Version}.
 * @param mode The segment mode.
 * @param path The path of segment.
 * @param name The unique segment identifier in the whole system.
 * @param schema The schema of segment.
 * @param options The open options.
 * @return A segment resides on the path.
 */
public static DPSegment open(int version, SegmentMode mode, Path path, String name, SegmentSchema schema, OpenOption... options) throws IOException {
//Preconditions.checkArgument(name != null);
Preconditions.checkArgument(path != null);
if (OpenOption.Overwrite.in(options)) {
FileUtils.deleteDirectory(path.toFile());
}
if (!Files.exists(path)) {
Files.createDirectories(path);
}
Metadata metadata;
SegmentSchema segmentSchema;
Path metadataPath = path.resolve("metadata.json");
Path schemaPath = path.resolve("schema.json");
if (Files.exists(metadataPath) && Files.exists(schemaPath)) {
// Existing segment: load persisted info; a provided schema must match it.
metadata = JsonUtil.loadWithRE(metadataPath, Metadata.class);
segmentSchema = JsonUtil.loadWithRE(schemaPath, SegmentSchema.class);
if (schema != null) {
Preconditions.checkState(segmentSchema.equals(schema), "Provided segment schema and the current one not match!");
}
} else {
// New segment: caller must supply schema and a valid version; persist both files.
Preconditions.checkArgument(schema != null, "Segment schema should be provided while not exists yet!");
Preconditions.checkArgument(Version.fromId(version) != null, "Illegal version!");
mode = mode == null ? SegmentMode.DEFAULT : mode;
metadata = new Metadata(version, mode.name(), 0);
segmentSchema = schema;
saveInfo(path, metadata, segmentSchema);
}
return new DPSegment(metadata, path, name, segmentSchema);
}
// Persists metadata.json and schema.json into the segment directory.
private static void saveInfo(Path segmentPath, Metadata metadata, SegmentSchema schema) {
JsonUtil.saveWithRE(segmentPath.resolve("metadata.json"), metadata);
JsonUtil.saveWithRE(segmentPath.resolve("schema.json"), schema);
}
/** @return the segment's root directory. */
public Path path() {
return segmentPath;
}
@Override
public boolean isColumned() {
return !update;
}
@Override
public int packCount() {
if (update) {
return 0;
}
return super.packCount();
}
@Override
public ColumnNode columnNode(int colId) throws IOException {
if (update) {
return null;
}
return super.columnNode(colId);
}
@Override
public DPColumn column(int colId) {
if (update) {
return null;
}
return super.column(colId);
}
@Override
public RowTraversal rowTraversal(long offset, long count) {
if (update) {
return null;
}
return super.rowTraversal(offset, count);
}
// ------------------------------------------------------------------
// Update stuff
// ------------------------------------------------------------------
/**
 * Ingest one row.
 * The segment must be in update mode and under the MAX_ROW_COUNT limit;
 * each field is appended to its column based on the schema's data type.
 */
public void add(Row row) throws IOException {
Preconditions.checkState(rowCount < MAX_ROW_COUNT,
"Try to ingest too many rows into one segment, limit is %s, ingested %s",
MAX_ROW_COUNT, rowCount);
Preconditions.checkState(update, "This segment is immutable for now!");
for (int colId = 0; colId < columns.size(); colId++) {
DPColumn column = columns.get(colId);
switch (segmentSchema.columns.get(colId).getDataType()) {
case ColumnType.INT:
column.add(row.getInt(colId));
break;
case ColumnType.LONG:
column.add(row.getLong(colId));
break;
case ColumnType.FLOAT:
column.add(row.getFloat(colId));
break;
case ColumnType.DOUBLE:
column.add(row.getDouble(colId));
break;
case ColumnType.STRING:
// NOTE(review): the clone suggests getString() returns a reusable
// buffer that must not be retained as-is -- TODO confirm.
column.add(row.getString(colId).clone());
break;
default:
throw new IllegalStateException();
}
}
rowCount++;
}
/** @return whether the segment is currently open for ingestion. */
public boolean isUpdate() {
return update;
}
/**
 * Indicate that this segment can be updated now.
 * It will initialize the resources like open files, set up caches.
 */
public DPSegment update() throws IOException {
if (update) {
return this;
}
// Remove outdate info.
for (int i = 0; i < columnNodes.length; i++) {
columnNodes[i] = null;
}
for (DPColumn col : columns) {
col.initUpdate();
}
update = true;
return this;
}
/**
 * End up update this Segment. Flush the cache to file and close the open files.
 * After this call, any ingesting operation will raise exceptions.
 */
public void seal() throws IOException {
if (!update) {
return;
}
saveInfo(segmentPath, new Metadata(version(), mode.name(), rowCount), segmentSchema);
for (DPColumn col : columns) {
col.seal();
}
update = false;
}
@Override
public void close() throws IOException {
seal();
super.close();
}
// ------------------------------------------------------------------
// Merge stuff
// ------------------------------------------------------------------
/**
 * Appends the contents of the given segments into this one, column by
 * column. All source segments must contain a column matching (by name and
 * data type) every column of this segment; the segment is sealed at the end.
 */
public void merge(List<StorageSegment> segments) throws IOException {
if (segments.isEmpty()) {
return;
}
long rowCount = this.rowCount();
// Validate first: every local column needs a same-name, same-type match
// in each source segment, otherwise the schemas are incompatible.
for (StorageSegment segment : segments) {
for (DPColumn column : columns) {
boolean ok = false;
for (Object oc : segment.columns) {
StorageColumn otherCol = (StorageColumn) oc;
if (StringUtils.equals(column.name(), otherCol.name()) && column.dataType() == otherCol.dataType()) {
ok = true;
}
}
if (!ok) {
throw new IllegalStateException(String.format("segment with %s cannot merge into %s", segment.schema(), segmentSchema));
}
}
rowCount += segment.rowCount();
}
update();
// For each local column, collect the matching column from every source
// segment (order preserved) and merge them in one pass.
for (DPColumn column : columns) {
List<StorageColumn> toMerge = new ArrayList<>(segments.size());
for (StorageSegment otherSeg : segments) {
StorageColumn match = null;
for (Object oc : otherSeg.columns) {
StorageColumn otherCol = (StorageColumn) oc;
if (StringUtils.equals(column.name(), otherCol.name()) && column.dataType() == otherCol.dataType()) {
match = otherCol;
}
}
toMerge.add(match);
}
column.merge(toMerge);
}
this.rowCount = rowCount;
seal();
}
}
| |
/*
* Copyright 2009 Martin Grotzke
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package de.javakaffee.web.msm;
import static de.javakaffee.web.msm.MemcachedUtil.toMemcachedExpiration;
import static de.javakaffee.web.msm.Statistics.StatsType.ATTRIBUTES_SERIALIZATION;
import static de.javakaffee.web.msm.Statistics.StatsType.BACKUP;
import static de.javakaffee.web.msm.Statistics.StatsType.MEMCACHED_UPDATE;
import static de.javakaffee.web.msm.Statistics.StatsType.RELEASE_LOCK;
import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import net.spy.memcached.MemcachedClient;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import de.javakaffee.web.msm.BackupSessionTask.BackupResult;
/**
 * Stores the provided session in memcached if the session was modified
 * or if the session needs to be relocated (pass <code>sessionIdChanged</code>
 * as <code>true</code> to the constructor to force the store).
 *
 * @author <a href="mailto:martin.grotzke@javakaffee.de">Martin Grotzke</a>
 */
public class BackupSessionTask implements Callable<BackupResult> {

    private static final Log _log = LogFactory.getLog( BackupSessionTask.class );

    // Session to back up plus the collaborators used to serialize and store it.
    private final MemcachedBackupSession _session;
    // When true the session is stored unconditionally (e.g. after relocation).
    private final boolean _force;
    private final TranscoderService _transcoderService;
    private final boolean _sessionBackupAsync;
    private final int _sessionBackupTimeout;
    private final MemcachedClient _memcached;
    private final MemcachedNodesManager _memcachedNodesManager;
    private final Statistics _statistics;

    /**
     * @param session
     *            the session to save
     * @param sessionIdChanged
     *            specifies, if the session needs to be saved by all means, e.g.
     *            as it has to be relocated to another memcached
     *            node (the session id had been changed before in this case).
     * @param transcoderService
     *            serializes the session and its attributes
     * @param sessionBackupAsync
     *            if <code>true</code>, the memcached result is not awaited
     * @param sessionBackupTimeout
     *            time in millis to wait for the memcached result in synchronous mode
     * @param memcached
     *            the client used to store the session
     * @param memcachedNodesManager
     *            provides storage-key/lock-name formats and node availability handling
     * @param statistics
     *            collects timing and result statistics
     */
    public BackupSessionTask( final MemcachedBackupSession session,
            final boolean sessionIdChanged,
            final TranscoderService transcoderService,
            final boolean sessionBackupAsync,
            final int sessionBackupTimeout,
            final MemcachedClient memcached,
            final MemcachedNodesManager memcachedNodesManager,
            final Statistics statistics ) {
        _session = session;
        _force = sessionIdChanged;
        _transcoderService = transcoderService;
        _sessionBackupAsync = sessionBackupAsync;
        _sessionBackupTimeout = sessionBackupTimeout;
        _memcached = memcached;
        _memcachedNodesManager = memcachedNodesManager;
        _statistics = statistics;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public BackupResult call() throws Exception {
        if ( _log.isDebugEnabled() ) {
            _log.debug( "Starting for session id " + _session.getId() );
        }
        _session.setBackupRunning( true );
        try {
            final long startBackup = System.currentTimeMillis();

            final Map<String, Object> attributes = _session.getAttributesFiltered();
            final byte[] attributesData = serializeAttributes( _session, attributes );
            final int hashCode = Arrays.hashCode( attributesData );
            final BackupResult result;
            // Store only if the serialized attributes changed (hash differs),
            // the backup is forced, or authentication data changed.
            if ( _session.getDataHashCode() != hashCode
                    || _force
                    || _session.authenticationChanged() ) {
                _session.setLastBackupTime( System.currentTimeMillis() );
                final byte[] data = _transcoderService.serialize( _session, attributesData );

                result = doBackupSession( _session, data, attributesData );
                if ( result.isSuccess() ) {
                    // Remember the hash so an unchanged session is skipped next time.
                    _session.setDataHashCode( hashCode );
                }
            } else {
                result = new BackupResult( BackupResultStatus.SKIPPED );
            }

            // Update statistics and session bookkeeping depending on the outcome.
            switch ( result.getStatus() ) {
                case FAILURE:
                    _statistics.requestWithBackupFailure();
                    _session.backupFailed();
                    break;
                case SKIPPED:
                    _statistics.requestWithoutSessionModification();
                    _session.storeThisAccessedTimeFromLastBackupCheck();
                    break;
                case SUCCESS:
                    _statistics.registerSince( BACKUP, startBackup );
                    _session.storeThisAccessedTimeFromLastBackupCheck();
                    _session.backupFinished();
                    break;
            }

            if ( _log.isDebugEnabled() ) {
                _log.debug( "Finished for session id " + _session.getId() +
                        ", returning status " + result.getStatus() );
            }

            return result;
        } catch (Exception e) {
            _log.warn("FAILED for session id " + _session.getId(), e);
            throw e;
        }
        // Always clear the running flag and release the lock, even on failure.
        finally {
            _session.setBackupRunning( false );
            releaseLock();
        }
    }

    /**
     * Deletes this session's lock entry from memcached if the session is
     * currently locked. Failures are only logged (best effort).
     */
    private void releaseLock() {
        if ( _session.isLocked() ) {
            try {
                if ( _log.isDebugEnabled() ) {
                    _log.debug( "Releasing lock for session " + _session.getIdInternal() );
                }
                final long start = System.currentTimeMillis();
                _memcached.delete( _memcachedNodesManager.getSessionIdFormat().createLockName( _session.getIdInternal() ) ).get();
                _statistics.registerSince( RELEASE_LOCK, start );
                _session.releaseLock();
            } catch( final Exception e ) {
                _log.warn( "Caught exception when trying to release lock for session " + _session.getIdInternal(), e );
            }
        }
    }

    /**
     * Serializes the given attributes via the transcoder service and registers
     * the elapsed time under ATTRIBUTES_SERIALIZATION.
     */
    private byte[] serializeAttributes( final MemcachedBackupSession session, final Map<String, Object> attributes ) {
        final long start = System.currentTimeMillis();
        final byte[] attributesData = _transcoderService.serializeAttributes( session, attributes );
        _statistics.registerSince( ATTRIBUTES_SERIALIZATION, start );
        return attributesData;
    }

    /**
     * Store the provided session in memcached.
     * @param session the session to backup
     * @param data the serialized session data (session fields and session attributes).
     * @param attributesData just the serialized session attributes.
     *
     * @return the {@link BackupResultStatus}
     * @throws InterruptedException if waiting for the memcached result is interrupted
     */
    BackupResult doBackupSession( final MemcachedBackupSession session, final byte[] data, final byte[] attributesData ) throws InterruptedException {
        if ( _log.isDebugEnabled() ) {
            _log.debug( "Trying to store session in memcached: " + session.getId() );
        }
        try {
            storeSessionInMemcached( session, data );
            return new BackupResult( BackupResultStatus.SUCCESS, data, attributesData );
        } catch (final ExecutionException e) {
            handleException(session, e);
            return new BackupResult(BackupResultStatus.FAILURE, data, null);
        } catch (final TimeoutException e) {
            handleException(session, e);
            return new BackupResult(BackupResultStatus.FAILURE, data, null);
        }
    }

    /**
     * Logs the store failure and marks the memcached node of this session id
     * as unavailable via the nodes manager.
     */
    private void handleException(final MemcachedBackupSession session, final Exception e) {
        String msg = "Could not store session " + session.getId() + " in memcached.";
        if ( _force ) {
            msg += "\nNote that this session was relocated to this node because the" +
                    " original node was not available.";
        }
        _log.warn(msg, e);
        _memcachedNodesManager.setNodeAvailableForSessionId(session.getId(), false);
    }

    /**
     * Writes the serialized session to memcached and, unless async mode is
     * enabled, waits up to the configured timeout for the result.
     */
    private void storeSessionInMemcached( final MemcachedBackupSession session, final byte[] data) throws InterruptedException, ExecutionException, TimeoutException {

        /* calculate the expiration time (instead of using just maxInactiveInterval), as
         * this is relevant for the update of the expiration time: if we would just use
         * maxInactiveInterval, the session would exist longer in memcached than it would
         * be valid in tomcat
         */
        final int expirationTime = session.getMemcachedExpirationTimeToSet();
        final long start = System.currentTimeMillis();
        try {
            final Future<Boolean> future = _memcached.set(
                    _memcachedNodesManager.getStorageKeyFormat().format(session.getId()),
                    toMemcachedExpiration(expirationTime), data );

            if ( !_sessionBackupAsync ) {
                // Synchronous mode: wait (bounded) for memcached to acknowledge the write.
                future.get( _sessionBackupTimeout, TimeUnit.MILLISECONDS );
                session.setLastMemcachedExpirationTime( expirationTime );
                session.setLastBackupTime( System.currentTimeMillis() );
            }
            else {
                /* in async mode, we assume the session was stored successfully
                 */
                session.setLastMemcachedExpirationTime( expirationTime );
                session.setLastBackupTime( System.currentTimeMillis() );
            }
        } finally {
            _statistics.registerSince( MEMCACHED_UPDATE, start );
        }
    }

    /**
     * Immutable result of a backup attempt: the status plus (optionally) the
     * serialized session data and attributes that were stored.
     */
    static final class BackupResult {

        public static final BackupResult SKIPPED = new BackupResult( BackupResultStatus.SKIPPED );
        public static final BackupResult FAILURE = new BackupResult( BackupResultStatus.FAILURE );

        private final BackupResultStatus _status;
        private final byte[] _data;
        private final byte[] _attributesData;

        public BackupResult( @Nonnull final BackupResultStatus status ) {
            this( status, null, null );
        }

        public BackupResult( @Nonnull final BackupResultStatus status, @Nullable final byte[] data, @Nullable final byte[] attributesData ) {
            _status = status;
            _data = data;
            _attributesData = attributesData;
        }

        /**
         * The status/result of the backup operation.
         * @return the status
         */
        @Nonnull
        BackupResultStatus getStatus() {
            return _status;
        }

        /**
         * The serialized session data (session fields and session attributes).
         * This can be <code>null</code> (if {@link #getStatus()} is {@link BackupResultStatus#SKIPPED}).
         *
         * @return the session data
         */
        @CheckForNull
        byte[] getData() {
            return _data;
        }

        /**
         * The serialized attributes that were actually stored in memcached with the
         * full serialized session data. This can be <code>null</code>, e.g. if
         * {@link #getStatus()} is {@link BackupResultStatus#FAILURE} or {@link BackupResultStatus#SKIPPED}.
         *
         * @return the attributesData
         */
        @CheckForNull
        byte[] getAttributesData() {
            return _attributesData;
        }

        /**
         * Specifies if the backup was performed successfully.
         *
         * @return <code>true</code> if the status is {@link BackupResultStatus#SUCCESS},
         *         otherwise <code>false</code>.
         */
        public boolean isSuccess() {
            return _status == BackupResultStatus.SUCCESS;
        }

        @Override
        public String toString() {
            return "BackupResult [_status=" + _status + ", _data="
                    + (_data != null ? "byte[" + _data.length + "]" : "null") + ", _attributesData="
                    + (_attributesData != null ? "byte[" + _attributesData.length + "]" : "null") + "]";
        }
    }
}
| |
/*
* (C) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hp.ov.sdk.dto.servers.serverhardwaretype;
import java.util.List;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import com.google.gson.annotations.Since;
import com.google.gson.annotations.Until;
import com.hp.ov.sdk.dto.BaseModelResource;
/**
 * Model describing a server hardware type resource: its adapters, BIOS
 * settings, boot configuration, physical characteristics and storage
 * capabilities.
 */
public class ServerHardwareType extends BaseModelResource {

    private static final long serialVersionUID = 1L;

    private List<Adapter> adapters;
    private List<BiosSettings> biosSettings;
    private List<String> bootCapabilities;
    private List<BootMode> bootModes;
    private List<String> capabilities;
    @Since(300)
    private String family;
    private String formFactor;
    @Until(199)
    private String id;
    private String model;
    private List<PxeBootPolicy> pxeBootPolicies;

    /**
     * This field has a special treatment when deserialization occurs. Since the
     * {@link ServerHardwareType} is never serialized, there is no need to deal
     * with the serialization.
     *
     * @see com.hp.ov.sdk.adaptors.StorageCapabilitiesDeserializer
     */
    private StorageCapabilities storageCapabilities;

    /** @return the adapters */
    public List<Adapter> getAdapters() {
        return adapters;
    }

    /** @param adapters the adapters to set */
    public void setAdapters(List<Adapter> adapters) {
        this.adapters = adapters;
    }

    /** @return the biosSettings */
    public List<BiosSettings> getBiosSettings() {
        return biosSettings;
    }

    /** @param biosSettings the biosSettings to set */
    public void setBiosSettings(List<BiosSettings> biosSettings) {
        this.biosSettings = biosSettings;
    }

    /** @return the bootCapabilities */
    public List<String> getBootCapabilities() {
        return bootCapabilities;
    }

    /** @param bootCapabilities the bootCapabilities to set */
    public void setBootCapabilities(List<String> bootCapabilities) {
        this.bootCapabilities = bootCapabilities;
    }

    /** @return the bootModes */
    public List<BootMode> getBootModes() {
        return bootModes;
    }

    /** @param bootModes the bootModes to set */
    public void setBootModes(List<BootMode> bootModes) {
        this.bootModes = bootModes;
    }

    /** @return the capabilities */
    public List<String> getCapabilities() {
        return capabilities;
    }

    /** @param capabilities the capabilities to set */
    public void setCapabilities(List<String> capabilities) {
        this.capabilities = capabilities;
    }

    /** @return the family (available since API version 300) */
    public String getFamily() {
        return family;
    }

    /** @param family the family to set */
    public void setFamily(String family) {
        this.family = family;
    }

    /** @return the formFactor */
    public String getFormFactor() {
        return formFactor;
    }

    /** @param formFactor the formFactor to set */
    public void setFormFactor(String formFactor) {
        this.formFactor = formFactor;
    }

    /** @return the id (only present up to API version 199) */
    public String getId() {
        return id;
    }

    /** @param id the id to set */
    public void setId(String id) {
        this.id = id;
    }

    /** @return the model */
    public String getModel() {
        return model;
    }

    /** @param model the model to set */
    public void setModel(String model) {
        this.model = model;
    }

    /** @return the pxeBootPolicies */
    public List<PxeBootPolicy> getPxeBootPolicies() {
        return pxeBootPolicies;
    }

    /** @param pxeBootPolicies the pxeBootPolicies to set */
    public void setPxeBootPolicies(List<PxeBootPolicy> pxeBootPolicies) {
        this.pxeBootPolicies = pxeBootPolicies;
    }

    /** @return the storageCapabilities */
    public StorageCapabilities getStorageCapabilities() {
        return storageCapabilities;
    }

    /** @param storageCapabilities the storageCapabilities to set */
    public void setStorageCapabilities(StorageCapabilities storageCapabilities) {
        this.storageCapabilities = storageCapabilities;
    }

    @Override
    public final boolean canEqual(Object obj) {
        return (obj instanceof ServerHardwareType);
    }

    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this);
    }

    @Override
    public final boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ServerHardwareType)) {
            return false;
        }
        ServerHardwareType other = (ServerHardwareType) obj;
        return other.canEqual(this) && new EqualsBuilder()
                .appendSuper(super.equals(obj))
                .append(adapters, other.adapters)
                .append(biosSettings, other.biosSettings)
                .append(bootCapabilities, other.bootCapabilities)
                .append(bootModes, other.bootModes)
                .append(capabilities, other.capabilities)
                .append(family, other.family)
                .append(formFactor, other.formFactor)
                .append(id, other.id)
                .append(model, other.model)
                .append(pxeBootPolicies, other.pxeBootPolicies)
                .append(storageCapabilities, other.storageCapabilities)
                .isEquals();
    }

    @Override
    public final int hashCode() {
        // Append order is kept identical to the original so hash values match.
        return new HashCodeBuilder()
                .appendSuper(super.hashCode())
                .append(adapters)
                .append(biosSettings)
                .append(bootCapabilities)
                .append(bootModes)
                .append(capabilities)
                .append(family)
                .append(formFactor)
                .append(id)
                .append(model)
                .append(pxeBootPolicies)
                .append(storageCapabilities)
                .toHashCode();
    }
}
| |
package apincer.android.uamp.utils;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Outline;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffColorFilter;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Typeface;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.GradientDrawable;
import android.os.Build;
import android.os.IBinder;
import android.support.annotation.ColorInt;
import android.support.annotation.DrawableRes;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.graphics.drawable.DrawableCompat;
import android.support.v7.graphics.Palette;
import android.support.v7.view.menu.MenuPopupHelper;
import android.support.v7.widget.PopupMenu;
import android.support.v7.widget.Toolbar;
import android.text.Spannable;
import android.text.style.BackgroundColorSpan;
import android.text.style.ForegroundColorSpan;
import android.text.style.StyleSpan;
import android.util.DisplayMetrics;
import android.util.Log;
import android.util.TypedValue;
import android.view.Display;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewOutlineProvider;
import android.view.WindowManager;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.SearchView;
import android.widget.TextView;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Locale;
import apincer.android.uamp.R;
/**
* Created by e1022387 on 6/4/2017.
*/
public class UIUtils {
public static final int INVALID_COLOR = -1;
public static int colorAccent = INVALID_COLOR;
/**
 * Looks up the height of the system status bar from the platform resources.
 *
 * @param context used to resolve the internal "status_bar_height" dimension
 * @return the status bar height in pixels, or 0 if the resource is missing
 */
public static int getStatusBarHeight(Context context) {
    final Resources res = context.getResources();
    final int resId = res.getIdentifier("status_bar_height", "dimen", "android");
    return resId > 0 ? res.getDimensionPixelSize(resId) : 0;
}
/**
 * Convert a dp float value to pixels.
 *
 * @param context used to obtain the current display metrics
 * @param dp float value in dps to convert
 * @return DP value converted to whole pixels (rounded)
 */
public static int dp2px(Context context, float dp) {
    final DisplayMetrics metrics = context.getResources().getDisplayMetrics();
    return Math.round(TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dp, metrics));
}
/**
 * Sizes {@code view} to fit {@code itemCount} rows of {@code itemHeight}
 * pixels each, capped at {@code maxHeight}.
 */
public static void setHeight(View view, int itemHeight, int itemCount, int maxHeight) {
    view.getLayoutParams().height = Math.min(itemHeight * itemCount, maxHeight);
}
@SuppressLint("RestrictedApi")
/**
 * Forces the given popup menu to render its item icons by flipping the
 * private {@code mPopup} helper's flag via reflection. Best effort: if the
 * support-library internals change, the popup simply shows without icons.
 */
public static void makePopForceShowIcon(PopupMenu popupMenu) {
    try {
        Field popupField = popupMenu.getClass().getDeclaredField("mPopup");
        popupField.setAccessible(true);
        MenuPopupHelper helper = (MenuPopupHelper) popupField.get(popupMenu);
        helper.setForceShowIcon(true);
    } catch (Exception e) {
        // Previously swallowed silently; log so reflection breakage is visible.
        Log.w("UIUtils", "Unable to force popup menu icons to show", e);
    }
}
/**
 * Tints the toolbar's overflow (three-dot) icon.
 *
 * @return true if an overflow icon was present and tinted, false otherwise
 */
public static boolean colorizeToolbarOverflowButton(@NonNull Toolbar toolbar, @ColorInt int toolbarIconsColor) {
    Drawable icon = toolbar.getOverflowIcon();
    if (icon == null) {
        return false;
    }
    toolbar.setOverflowIcon(getTintedDrawable(icon, toolbarIconsColor));
    return true;
}
/**
 * Returns a copy of {@code inputDrawable} tinted with {@code color}
 * (SRC_IN mode) via the compat wrapper.
 */
public static Drawable getTintedDrawable(@NonNull Drawable inputDrawable, @ColorInt int color) {
    Drawable tinted = DrawableCompat.wrap(inputDrawable);
    DrawableCompat.setTint(tinted, color);
    DrawableCompat.setTintMode(tinted, PorterDuff.Mode.SRC_IN);
    return tinted;
}
/**
 * Tints the drawable of {@code btn} in place with {@code color} (SRC_IN mode).
 */
public static void getTintedDrawable(@NonNull ImageButton btn, @ColorInt int color) {
    Drawable tinted = DrawableCompat.wrap(btn.getDrawable());
    DrawableCompat.setTint(tinted, color);
    DrawableCompat.setTintMode(tinted, PorterDuff.Mode.SRC_IN);
}
/**
 * Tints the SearchView's search button icon with {@code color}. Uses
 * reflection on the private {@code mSearchButton} field; if the support
 * library layout changes, the call degrades to a logged no-op.
 */
public static void getTintedDrawable(@NonNull SearchView searchView, @ColorInt int color) {
    try {
        Field searchField = SearchView.class.getDeclaredField("mSearchButton");
        searchField.setAccessible(true);
        ImageView searchBtn = (ImageView) searchField.get(searchView);
        Drawable wrapDrawable = DrawableCompat.wrap(searchBtn.getDrawable());
        DrawableCompat.setTint(wrapDrawable, color);
        DrawableCompat.setTintMode(wrapDrawable, PorterDuff.Mode.SRC_IN);
    } catch (NoSuchFieldException | IllegalAccessException e) {
        // Previously swallowed silently; log so reflection breakage is visible.
        Log.w("UIUtils", "Unable to tint SearchView search button", e);
    }
}
/**
 * Sets the text and hint colors of the SearchView's inner text field. Uses
 * reflection on the private {@code mSearchSrcTextView} field; if the support
 * library layout changes, the call degrades to a logged no-op.
 */
public static void setSearchViewTextColor(@NonNull SearchView searchView, @ColorInt int color, @ColorInt int hintColor) {
    try {
        Field searchField = SearchView.class.getDeclaredField("mSearchSrcTextView");
        searchField.setAccessible(true);
        TextView searchText = (TextView) searchField.get(searchView);
        searchText.setTextColor(color);
        searchText.setHintTextColor(hintColor);
    } catch (NoSuchFieldException | IllegalAccessException e) {
        // Previously swallowed silently; log so reflection breakage is visible.
        Log.w("UIUtils", "Unable to recolor SearchView text", e);
    }
}
/**
 * Loads the drawable resource and returns it tinted with {@code color}
 * (SRC_IN mode). Delegates to {@link #getTintedDrawable(Drawable, int)}.
 */
public static Drawable getTintedDrawable(@NonNull Context context, @DrawableRes int drawableId, @ColorInt int color) {
    return getTintedDrawable(context.getDrawable(drawableId), color);
}
/**
 * Get a color value from a theme attribute.
 * @param context used for getting the color.
 * @param attribute theme attribute.
 * @param defaultColor default to use when the attribute is unset.
 * @return the resolved color, or 0 if the package context cannot be created
 */
public static int getThemeColor(Context context, int attribute, int defaultColor) {
    String packageName = context.getPackageName();
    try {
        Context packageContext = context.createPackageContext(packageName, 0);
        ApplicationInfo applicationInfo =
                context.getPackageManager().getApplicationInfo(packageName, 0);
        packageContext.setTheme(applicationInfo.theme);
        TypedArray ta = packageContext.getTheme().obtainStyledAttributes(new int[] {attribute});
        int resolved = ta.getColor(0, defaultColor);
        ta.recycle();
        return resolved;
    } catch (PackageManager.NameNotFoundException e) {
        e.printStackTrace();
        return 0; // mirrors the historical behavior when the lookup fails
    }
}
/**
 * Returns a new ARGB_8888 bitmap containing {@code bitmap} drawn through a
 * SRC_IN color filter of {@code color}. The source bitmap is not modified.
 */
public static Bitmap tintImage(Bitmap bitmap, int color) {
    Bitmap result = Bitmap.createBitmap(bitmap.getWidth(), bitmap.getHeight(), Bitmap.Config.ARGB_8888);
    Paint tintPaint = new Paint();
    tintPaint.setColorFilter(new PorterDuffColorFilter(color, PorterDuff.Mode.SRC_IN));
    new Canvas(result).drawBitmap(bitmap, 0, 0, tintPaint);
    return result;
}
/**
 * Applies a SRC_ATOP color filter to the menu item's icon, if it has one.
 * The icon is mutated so shared drawable state is not affected.
 */
public static void setColorFilter(MenuItem item, int color) {
    Drawable icon = item.getIcon();
    if (icon == null) {
        return;
    }
    icon.mutate();
    icon.setColorFilter(color, PorterDuff.Mode.SRC_ATOP);
}
/** Shorthand for the current display metrics of the given context. */
public static DisplayMetrics getDisplayMetrics(Context context) {
    return context.getResources().getDisplayMetrics();
}

/** Converts a dp value to rounded pixels using the display density. */
public static float dpToPx(Context context, float dp) {
    return Math.round(dp * getDisplayMetrics(context).density);
}
/**
 * Resolves the theme's {@code android.R.attr.colorAccent}, caching it in the
 * static {@link #colorAccent} field after the first lookup.
 * NOTE(review): the cache is a plain static field, not thread-safe — confirm
 * callers only use this from the main thread.
 */
public static int fetchAccentColor(Context context, @ColorInt int defColor) {
    if (colorAccent != INVALID_COLOR) {
        return colorAccent;
    }
    TypedArray ta = context.getTheme().obtainStyledAttributes(new int[]{android.R.attr.colorAccent});
    colorAccent = ta.getColor(0, defColor);
    ta.recycle();
    return colorAccent;
}
/**
 * Builds a rectangle drawable filled with {@code backgroundColor}, rounded on
 * the top corners only (radii array is TL, TL, TR, TR, BR, BR, BL, BL), with
 * a thin light-blue stroke.
 */
public static GradientDrawable createGradient(int backgroundColor) {
    GradientDrawable rect = new GradientDrawable();
    rect.setShape(GradientDrawable.RECTANGLE);
    rect.setCornerRadii(new float[]{8, 8, 8, 8, 0, 0, 0, 0});
    rect.setColor(backgroundColor);
    rect.setStroke(3, Color.parseColor("#d4e2ff"));
    return rect;
}
/**
 * Shows {@code originalText} in {@code textView}, highlighting the first
 * case-insensitive occurrence of {@code constraint} with a background color
 * and black foreground. Falls back to plain text when there is no match.
 */
public static void highlightSearchKeyword(@NonNull TextView textView,
        @Nullable String originalText, @Nullable String constraint) {
    int color = textView.getContext().getColor(R.color.search_bar_color_end);
    originalText = StringUtils.trimToEmpty(originalText);
    constraint = StringUtils.trimToEmpty(constraint);
    int start = originalText.toLowerCase(Locale.getDefault())
            .indexOf(constraint.toLowerCase(Locale.getDefault()));
    if (!StringUtils.isEmpty(originalText) && start != -1) {
        Spannable spanText = Spannable.Factory.getInstance().newSpannable(originalText);
        // An empty constraint matches at index 0 but must highlight nothing.
        // (The original code re-checked start != -1 here, which is always true.)
        if (!StringUtils.isEmpty(constraint)) {
            spanText.setSpan(new BackgroundColorSpan(color), start, start + constraint.length(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
            spanText.setSpan(new ForegroundColorSpan(Color.BLACK), start, start + constraint.length(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
        }
        textView.setText(spanText, TextView.BufferType.SPANNABLE);
    } else {
        textView.setText(originalText, TextView.BufferType.NORMAL);
    }
}
/**
 * Shows {@code originalText} in {@code textView}, coloring the first
 * case-insensitive occurrence of {@code constraint} with the search-bar start
 * color. Falls back to plain text when there is no match.
 */
public static void highlightSearchKeywordOnTitle(@NonNull TextView textView,
        @Nullable String originalText, @Nullable String constraint) {
    int color = textView.getContext().getColor(R.color.search_bar_color_start);
    originalText = StringUtils.trimToEmpty(originalText);
    constraint = StringUtils.trimToEmpty(constraint);
    int start = originalText.toLowerCase(Locale.getDefault())
            .indexOf(constraint.toLowerCase(Locale.getDefault()));
    if (!StringUtils.isEmpty(originalText) && start != -1) {
        Spannable spanText = Spannable.Factory.getInstance().newSpannable(originalText);
        // An empty constraint matches at index 0 but must highlight nothing.
        // (The original code re-checked start != -1 here, which is always true.)
        if (!StringUtils.isEmpty(constraint)) {
            spanText.setSpan(new ForegroundColorSpan(color), start, start + constraint.length(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
        }
        textView.setText(spanText, TextView.BufferType.SPANNABLE);
    } else {
        textView.setText(originalText, TextView.BufferType.NORMAL);
    }
}
/**
 * Shows {@code originalText} in {@code textView}, coloring and bolding the
 * first case-insensitive occurrence of each of the two constraints. If
 * neither constraint matches (or the text is empty), plain text is shown.
 */
public static void highlightText(@NonNull TextView textView, @Nullable String originalText,
        @Nullable String constraint, String constraint2, @ColorInt int color) {
    originalText = StringUtils.trimToEmpty(originalText);
    constraint = StringUtils.trimToEmpty(constraint);
    constraint2 = StringUtils.trimToEmpty(constraint2);
    String haystack = originalText.toLowerCase(Locale.getDefault());
    int first = haystack.indexOf(constraint.toLowerCase(Locale.getDefault()));
    int second = haystack.indexOf(constraint2.toLowerCase(Locale.getDefault()));
    if (StringUtils.isEmpty(originalText) || (first == -1 && second == -1)) {
        textView.setText(originalText, TextView.BufferType.NORMAL);
        return;
    }
    Spannable spanText = Spannable.Factory.getInstance().newSpannable(originalText);
    if (first != -1 && !StringUtils.isEmpty(constraint)) {
        spanText.setSpan(new ForegroundColorSpan(color), first, first + constraint.length(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
        spanText.setSpan(new StyleSpan(Typeface.BOLD), first, first + constraint.length(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
    }
    if (second != -1 && !StringUtils.isEmpty(constraint2)) {
        spanText.setSpan(new ForegroundColorSpan(color), second, second + constraint2.length(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
        spanText.setSpan(new StyleSpan(Typeface.BOLD), second, second + constraint2.length(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
    }
    textView.setText(spanText, TextView.BufferType.SPANNABLE);
}
/**
 * Decides whether a color is perceptually dark using ITU-R BT.601 luma:
 * 0.299 R + 0.587 G + 0.114 B. (The previous code used 0.144 for the blue
 * coefficient, so the weights did not sum to 1 and blue was overweighted.)
 *
 * @return true if the color is dark, false if it is light
 */
public static boolean isColorDark(int color) {
    double darkness = 1 - (0.299 * Color.red(color)
            + 0.587 * Color.green(color)
            + 0.114 * Color.blue(color)) / 255;
    return darkness >= 0.5;
}
/**
 * Lightens each RGB channel of {@code color} by {@code fraction} of its
 * current value (capped at 255); alpha is preserved.
 */
public static int lighten(int color, double fraction) {
    return Color.argb(
            Color.alpha(color),
            lightenColor(Color.red(color), fraction),
            lightenColor(Color.green(color), fraction),
            lightenColor(Color.blue(color), fraction));
}
/**
 * Darkens each RGB channel of {@code color} by {@code fraction} of its
 * current value (floored at 0); alpha is preserved.
 */
public static int darken(int color, double fraction) {
    return Color.argb(
            Color.alpha(color),
            darkenColor(Color.red(color), fraction),
            darkenColor(Color.green(color), fraction),
            darkenColor(Color.blue(color), fraction));
}
/** Increases a single 0-255 channel by {@code fraction} of itself, capped at 255. */
private static int lightenColor(int color, double fraction) {
    return (int) Math.min(color + (color * fraction), 255);
}

/** Decreases a single 0-255 channel by {@code fraction} of itself, floored at 0. */
private static int darkenColor(int color, double fraction) {
    return (int) Math.max(color - (color * fraction), 0);
}
/**
 * Hides the soft keyboard if the view's root window token is available.
 */
public static void hideKeyboard(Context context, View view) {
    InputMethodManager imm = (InputMethodManager) context.getSystemService(Context.INPUT_METHOD_SERVICE);
    IBinder token = view.getRootView().getWindowToken();
    if (imm != null && token != null) {
        imm.hideSoftInputFromWindow(token, InputMethodManager.HIDE_NOT_ALWAYS);
    }
}
/**
 * Focuses {@code view} and requests the soft keyboard. Failures are logged
 * rather than thrown.
 */
public static void showKeyboard(Context context, View view) {
    try {
        view.requestFocus();
        InputMethodManager imm = (InputMethodManager) context.getSystemService(Context.INPUT_METHOD_SERVICE);
        // Null-check for consistency with hideKeyboard(); the service can be absent.
        if (imm != null) {
            imm.showSoftInput(view, InputMethodManager.SHOW_IMPLICIT);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * A helper class for providing a shadow on sheets: an outline provider that
 * reports a plain rectangle of the given size.
 */
@TargetApi(21)
public static class ShadowOutline extends ViewOutlineProvider {
    // Outline dimensions in pixels.
    int width;
    int height;

    public ShadowOutline(int width, int height) {
        this.width = width;
        this.height = height;
    }

    /** Sets the outline to the rectangle (0, 0, width, height). */
    @Override
    public void getOutline(View view, Outline outline) {
        outline.setRect(0, 0, width, height);
    }
}
/**
 * Builds a sweep-gradient drawable derived from {@code bgColor}: the HSV
 * value channel is shifted down and up to obtain darker/lighter end colors.
 *
 * NOTE(review): value is first lowered by .6 and then raised by .8, so the
 * "lighter" end sits 0.2 above the original value — confirm this contrast is
 * intended.
 *
 * @param bgColor base color (its alpha is reused for both end colors)
 * @return the gradient drawable, or null if construction fails
 */
public static Drawable buildGradientBackground(int bgColor) {
    try {
        // convert to HSV to lighten and darken
        int alpha = Color.alpha(bgColor);
        float[] hsv = new float[3];
        Color.colorToHSV(bgColor, hsv);
        hsv[2] -= .6;
        int darker = Color.HSVToColor(alpha, hsv);
        hsv[2] += .8;
        int lighter = Color.HSVToColor(alpha, hsv);
        // create gradient using lighter and darker colors
        GradientDrawable gd = new GradientDrawable(
                GradientDrawable.Orientation.BOTTOM_TOP, new int[] { darker, lighter});
        gd.setGradientType(GradientDrawable.SWEEP_GRADIENT);
        return gd;
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}
/**
 * Renders a rounded-corner sweep-gradient bitmap derived from {@code bgColor}
 * (value channel shifted -0.1/+0.3 for the two end colors). Width/height are
 * given in dp and scaled by the display density.
 *
 * @param topRight/topLeft/bottomLeft/bottomRight corner radii in pixels
 * @return the rendered bitmap, or null if rendering fails
 */
public static Bitmap buildGradientBitmap( Context context, int bgColor, int width, int height,int topRight, int topLeft,int bottomLeft, int bottomRight) {
    try {
        // convert to HSV to lighten and darken
        int alpha = Color.alpha(bgColor);
        float[] hsv = new float[3];
        Color.colorToHSV(bgColor, hsv);
        hsv[2] -= .1;
        int darker = Color.HSVToColor(alpha, hsv);
        hsv[2] += .3;
        int lighter = Color.HSVToColor(alpha, hsv);
        // create gradient using lighter and darker colors
        GradientDrawable gd = new GradientDrawable(
                GradientDrawable.Orientation.LEFT_RIGHT, new int[] { darker, lighter});
        gd.setGradientType(GradientDrawable.SWEEP_GRADIENT);
        // set corner size: radii pairs ordered top-left, top-right,
        // bottom-right, bottom-left (two values per corner)
        gd.setCornerRadii(new float[] {topLeft,topLeft,topRight,topRight,bottomRight,bottomRight,bottomLeft,bottomLeft});
        // get density to scale bitmap for device
        float dp = context.getResources().getDisplayMetrics().density;
        // create bitmap based on width and height of widget
        Bitmap bitmap = Bitmap.createBitmap(Math.round(width * dp), Math.round(height * dp),
                Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(bitmap);
        gd.setBounds(0, 0, canvas.getWidth(), canvas.getHeight());
        gd.draw(canvas);
        return bitmap;
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}
/**
 * Draws {@code source} inset by {@code border} pixels and overlays a
 * rounded-corner sweep gradient whose colors are derived (via Palette's muted
 * color) from the source image itself.
 *
 * NOTE(review): the gradient is drawn AFTER the source bitmap, so it covers
 * it; Math.round(width)/Math.round(height) on ints are no-ops — confirm the
 * intended layering.
 *
 * @return the composed bitmap, or null if rendering fails
 */
public static Bitmap buildGradientBitmap( Context context, Bitmap source, int width, int height, int border, int corner) {
    try {
        Palette palette = Palette.from(source).generate();
        int bgColor = context.getColor(R.color.grey200);
        // bgColor = palette.getDominantColor(bgColor);
        bgColor = palette.getMutedColor(bgColor);
        // convert to HSV to lighten and darken
        int alpha = Color.alpha(bgColor);
        float[] hsv = new float[3];
        Color.colorToHSV(bgColor, hsv);
        hsv[2] -= .1;
        int darker = Color.HSVToColor(alpha, hsv);
        hsv[2] += .3;
        int lighter = Color.HSVToColor(alpha, hsv);
        // create gradient using lighter and darker colors
        GradientDrawable gd = new GradientDrawable(
                GradientDrawable.Orientation.LEFT_RIGHT, new int[] { darker, lighter});
        gd.setGradientType(GradientDrawable.SWEEP_GRADIENT);
        // set corner size: same radius applied to all four corners
        gd.setCornerRadii(new float[] {corner,corner,corner,corner,corner,corner,corner,corner});
        // get density to scale bitmap for device
        float dp = context.getResources().getDisplayMetrics().density;
        // create bitmap based on width and height of widget
        Bitmap bitmap = Bitmap.createBitmap(Math.round(width), Math.round(height),
                Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(bitmap);
        Paint paint = new Paint(Paint.FILTER_BITMAP_FLAG);
        canvas.drawBitmap(source, border, border, paint);
        gd.setBounds(0, 0, canvas.getWidth(), canvas.getHeight());
        gd.draw(canvas);
        return bitmap;
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}
/**
 * Returns a copy of {@code srcBitmap} enlarged by {@code borderWidth} pixels on every
 * side, with the added margin (and the background) filled with {@code borderColor}.
 *
 * @param srcBitmap   bitmap to frame; must not be null
 * @param borderWidth border thickness in pixels, added on all four sides
 * @param borderColor ARGB color used for the border fill
 * @return a new ARGB_8888 bitmap of size (w + 2*borderWidth) x (h + 2*borderWidth)
 */
public static Bitmap addBorderToBitmap(Bitmap srcBitmap, int borderWidth, int borderColor){
    // Destination bitmap large enough to hold the source plus the border on every side.
    Bitmap dstBitmap = Bitmap.createBitmap(
            srcBitmap.getWidth() + borderWidth*2,
            srcBitmap.getHeight() + borderWidth*2,
            Bitmap.Config.ARGB_8888
    );
    Canvas canvas = new Canvas(dstBitmap);
    // Fill the whole canvas with the border color; the source drawn below covers the
    // center, so only the margin remains visible as the border.
    // NOTE(review): the original also stroked a circle here (with nonsensical
    // center/radius arguments) in the same color immediately before drawColor()
    // painted over the entire canvas — that dead draw, and the Paint/Rect that
    // existed only to support it, have been removed.
    canvas.drawColor(borderColor);
    // Draw the source inset by the border width; null Paint = default blending.
    canvas.drawBitmap(srcBitmap, borderWidth, borderWidth, null);
    return dstBitmap;
}
/**
 * Returns a copy of {@code bitmap} whose corners are clipped with a 12-pixel corner
 * radius; clipped areas are fully transparent.
 */
public static Bitmap getRoundedCornerBitmap(Bitmap bitmap) {
    // NOTE(review): the radius is a fixed pixel value, not density-scaled — confirm
    // this is intentional for the targeted screens.
    final float cornerRadiusPx = 12;
    // Any opaque color works for the mask; only its alpha matters for SRC_IN.
    final int maskColor = 0xff424242;
    int w = bitmap.getWidth();
    int h = bitmap.getHeight();
    Bitmap output = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
    Canvas canvas = new Canvas(output);
    // Start from a fully transparent canvas.
    canvas.drawARGB(0, 0, 0, 0);
    final Paint paint = new Paint();
    paint.setAntiAlias(true);
    paint.setColor(maskColor);
    final Rect bounds = new Rect(0, 0, w, h);
    // Paint the rounded-rect mask first...
    canvas.drawRoundRect(new RectF(bounds), cornerRadiusPx, cornerRadiusPx, paint);
    // ...then keep source pixels only where the mask was drawn.
    paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_IN));
    canvas.drawBitmap(bitmap, bounds, bounds, paint);
    return output;
}
/**
 * Returns {@code null} if this couldn't be determined.
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
@SuppressLint("PrivateApi")
public static Boolean hasNavigationBar() {
    try {
        // Reflectively reach the hidden window-manager service:
        // ServiceManager.getService("window") yields its binder, and
        // IWindowManager.Stub.asInterface() wraps it in a callable proxy.
        Class<?> serviceManager = Class.forName("android.os.ServiceManager");
        IBinder serviceBinder = (IBinder)serviceManager.getMethod("getService", String.class).invoke(serviceManager, "window");
        Class<?> stub = Class.forName("android.view.IWindowManager$Stub");
        Object windowManagerService = stub.getMethod("asInterface", IBinder.class).invoke(stub, serviceBinder);
        // hasNavigationBar() is a hidden API. NOTE(review): non-SDK reflection is
        // restricted on newer Android releases — confirm on all targeted API levels.
        Method hasNavigationBar = windowManagerService.getClass().getMethod("hasNavigationBar");
        return (boolean)hasNavigationBar.invoke(windowManagerService);
    } catch (ClassNotFoundException | ClassCastException | NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
        // Any reflection failure means "unknown", per the method contract.
        // NOTE(review): "YOUR_TAG_HERE" looks like a leftover placeholder log tag.
        Log.w("YOUR_TAG_HERE", "Couldn't determine whether the device has a navigation bar", e);
        return null;
    }
}
/**
 * Heuristic for on-screen (software) navigation keys: compares the display's real
 * pixel size with the size available to applications; any difference implies system
 * bars are rendered in the gap.
 */
public static boolean hasSoftKeys(WindowManager windowManager){
    Display display = windowManager.getDefaultDisplay();
    DisplayMetrics real = new DisplayMetrics();
    DisplayMetrics usable = new DisplayMetrics();
    display.getRealMetrics(real);
    display.getMetrics(usable);
    boolean widthDiffers = real.widthPixels - usable.widthPixels > 0;
    boolean heightDiffers = real.heightPixels - usable.heightPixels > 0;
    return widthDiffers || heightDiffers;
}
}
| |
/*
* Copyright 2014-2017 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.inventory.handlers;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.GenericType;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.hawkular.inventory.Resources;
import org.hawkular.inventory.api.model.Inventory;
import org.hawkular.inventory.api.model.RawResource;
import org.hawkular.inventory.api.model.Resource;
import org.hawkular.inventory.api.model.ResourceNode;
import org.hawkular.inventory.api.model.ResourceType;
import org.hawkular.inventory.api.model.ResultSet;
import org.jboss.arquillian.container.test.api.RunAsClient;
import org.jboss.arquillian.junit.Arquillian;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.MethodSorters;
/**
* @author Jay Shaughnessy
* @author Lucas Ponce
*/
@RunWith(Arquillian.class)
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class InventoryRestTest extends AbstractInventoryITest {
@Test
@RunAsClient
public void test0000_statusIsUp() {
    // Sanity check: the status endpoint answers 200 once the service is deployed.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("status")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
    } finally {
        // JAX-RS clients hold pooled connections; the original never released them.
        client.close();
    }
}
@Test
@RunAsClient
public void test0001_clean() {
    // Start from an empty inventory: wipe all resources and all resource types.
    // One client serves both requests (the original leaked two anonymous clients).
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("resources")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .delete();
        assertEquals(200, response.getStatus());
        response = client.target(baseUrl.toString()).path("types")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .delete();
        assertEquals(200, response.getStatus());
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
public void test001_importResources() {
    // Seed the inventory with the static test fixture used by the following tests.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("import")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .post(Entity.entity(Resources.INVENTORY, MediaType.APPLICATION_JSON_TYPE));
        assertEquals(200, response.getStatus());
    } finally {
        client.close(); // release pooled connections
    }
}
@Test
@RunAsClient
public void test002_shouldFindResourcesById() {
    // Fetching known imported resources by id returns 200 with the expected payloads.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("resources/EAP-1")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        Resource resource = response.readEntity(Resource.class);
        assertEquals("EAP-1", resource.getId());
        assertEquals("EAP", resource.getType().getId());
        response = client.target(baseUrl.toString()).path("resources/EAP-2")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        assertEquals("EAP-2", response.readEntity(Resource.class).getId());
        response = client.target(baseUrl.toString()).path("resources/child-1")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        assertEquals("child-1", response.readEntity(Resource.class).getId());
    } finally {
        client.close(); // the original never closed the client
    }
}
@Test
@RunAsClient
public void test003_shouldNotFindResourcesById() {
    // An unknown resource id must yield 404.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("resources/nada")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(404, response.getStatus());
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
@SuppressWarnings("unchecked") // ResultSet.getResults() is untyped on the wire
public void test004_shouldGetTopResources() {
    // root=true must return only the two imported top-level servers.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("resources")
                .queryParam("root", true)
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        List<Resource> resources = (List<Resource>) response.readEntity(ResultSet.class).getResults();
        assertThat(resources)
                .extracting(Resource::getId)
                .containsOnly("EAP-1", "EAP-2");
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
@SuppressWarnings("unchecked") // ResultSet.getResults() is untyped on the wire
public void test005_shouldGetResourceTypes() {
    // All imported resource types must be listed.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("types")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        assertThat((List<ResourceType>) response.readEntity(ResultSet.class).getResults())
                .extracting(ResourceType::getId)
                .containsOnly("EAP", "FOO", "BAR");
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
@SuppressWarnings("unchecked") // ResultSet.getResults() is untyped on the wire
public void test006_shouldGetAllEAPs() {
    // Filtering by typeId=EAP must return both EAP servers.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("resources")
                .queryParam("typeId", "EAP")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        assertThat((List<Resource>) response.readEntity(ResultSet.class).getResults())
                .extracting(Resource::getId)
                .containsOnly("EAP-1", "EAP-2");
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
@SuppressWarnings("unchecked") // ResultSet.getResults() is untyped on the wire
public void test007_shouldGetAllFOOs() {
    // Filtering by typeId=FOO must return exactly the two FOO children.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("resources")
                .queryParam("typeId", "FOO")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        assertThat((List<Resource>) response.readEntity(ResultSet.class).getResults())
                .extracting(Resource::getId)
                .containsOnly("child-1", "child-3");
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
public void test008_shouldGetNoNada() {
    // An unknown typeId filter yields an empty (but successful) result set.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("resources")
                .queryParam("typeId", "nada")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        assertThat(response.readEntity(ResultSet.class).getResults()).isEmpty();
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
public void test009_shouldGetChildren() {
    // The tree endpoint must expose EAP-1's direct children.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("resources/EAP-1/tree")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        ResourceNode tree = response.readEntity(ResourceNode.class);
        assertThat(tree.getChildren())
                .extracting(ResourceNode::getId)
                .containsOnly("child-1", "child-2");
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
public void test010_shouldGetEmptyChildren() {
    // A leaf resource's tree has no children.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("resources/child-1/tree")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        ResourceNode tree = response.readEntity(ResourceNode.class);
        assertThat(tree.getChildren()).isEmpty();
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
public void test011_shouldNotGetTree() {
    // Requesting the tree of an unknown resource must yield 404.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("resources/nada/tree")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(404, response.getStatus());
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
@SuppressWarnings("unchecked") // ResultSet.getResults() is untyped on the wire
public void test012_shouldGetOnlyChildren() {
    // The children endpoint returns only direct children (no grandchildren, no root).
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("resources/EAP-1/children")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        assertThat((List<Resource>) response.readEntity(ResultSet.class).getResults())
                .extracting(Resource::getId)
                .containsOnly("child-1", "child-2");
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
public void test015_shouldFailOnDetectedCycle() {
    // Import two resources that name each other as parents, then verify that building
    // the tree detects the cycle and fails with 500 and an explicit error message.
    RawResource corruptedParent = new RawResource("CP", "CP", "feedX", "FOO", "CC",
            new ArrayList<>(), new HashMap<>(), new HashMap<>());
    RawResource corruptedChild = new RawResource("CC", "CC", "feedX", "BAR", "CP",
            new ArrayList<>(), new HashMap<>(), new HashMap<>());
    Inventory corruptedInventory = new Inventory(Arrays.asList(corruptedParent, corruptedChild), null);
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("import")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .post(Entity.entity(corruptedInventory, MediaType.APPLICATION_JSON_TYPE));
        // The import itself succeeds; the cycle is only detected when the tree is built.
        assertEquals(200, response.getStatus());
        response = client.target(baseUrl.toString()).path("resources/CP/tree")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(500, response.getStatus());
        assertEquals("java.lang.IllegalStateException: Cycle detected in the tree with id CP; aborting operation. The inventory is invalid.", response.readEntity(Map.class).get("errorMsg"));
    } finally {
        // The original closed its first client mid-test but leaked the second one.
        client.close();
    }
}
@Test
@RunAsClient
public void test016_shouldGetAgentConfig() {
    // Both agent and jmx-exporter config templates must be served as plain text.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("get-inventory-config/test")
                .request(MediaType.TEXT_PLAIN)
                .get();
        assertEquals(200, response.getStatus());
        assertThat(response.readEntity(new GenericType<String>() {}))
                .contains("AGENT CONFIG TEST");
        response = client.target(baseUrl.toString()).path("get-jmx-exporter-config/WF10")
                .request(MediaType.TEXT_PLAIN)
                .get();
        assertEquals(200, response.getStatus());
        assertThat(response.readEntity(new GenericType<String>() {}))
                .contains("- pattern:");
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
public void test017_shouldNotGetAgentConfig() {
    // An unknown config template name must yield 404.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("get-inventory-config/nada")
                .request(MediaType.TEXT_PLAIN)
                .get();
        assertEquals(404, response.getStatus());
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
@SuppressWarnings("unchecked") // ResultSet.getResults() is untyped on the wire
public void test020_shouldGetAllEAPsPerFeed() {
    // Filtering by feedId+typeId must partition the EAP servers per feed.
    // One client serves both queries (the original leaked two).
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("resources")
                .queryParam("feedId", "feed1")
                .queryParam("typeId", "EAP")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        assertThat((List<Resource>) response.readEntity(ResultSet.class).getResults())
                .extracting(Resource::getId)
                .containsOnly("EAP-1");
        response = client.target(baseUrl.toString()).path("resources")
                .queryParam("feedId", "feed2")
                .queryParam("typeId", "EAP")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        assertThat((List<Resource>) response.readEntity(ResultSet.class).getResults())
                .extracting(Resource::getId)
                .containsOnly("EAP-2");
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
public void test021_shouldGetExport() {
    // The export endpoint must return the full inventory: all resources and all types.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("export")
                .request(MediaType.APPLICATION_JSON)
                .get();
        assertThat(response.getStatus()).isEqualTo(200);
        Inventory imp = response.readEntity(Inventory.class);
        assertThat(imp).isNotNull();
        assertThat(imp.getResources()).extracting(RawResource::getId).containsOnly("EAP-1", "EAP-2", "child-1",
                "child-2", "child-3", "child-4", "CC", "CP");
        assertThat(imp.getTypes()).extracting(ResourceType::getId).containsOnly("EAP", "FOO", "BAR");
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
public void test022_shouldGetOneResourceType() {
    // A single resource type can be fetched by id.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("types/EAP")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        ResourceType rt = response.readEntity(ResourceType.class);
        assertThat(rt).isNotNull();
        assertThat(rt.getId()).isEqualTo("EAP");
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
public void test023_shouldGetParent() {
    // The parent endpoint resolves a child's direct parent.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("resources/child-1/parent")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertThat(response.getStatus()).isEqualTo(200);
        Resource parent = response.readEntity(Resource.class);
        assertThat(parent).isNotNull();
        assertThat(parent.getId()).isEqualTo("EAP-1");
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
public void test024_shouldNotGetParentForRoot() {
    // A root resource has no parent: expect 204 No Content with an empty body.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("resources/EAP-1/parent")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertThat(response.getStatus()).isEqualTo(204);
        assertThat(response.hasEntity()).isFalse();
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
@SuppressWarnings("unchecked") // ResultSet.getResults() is untyped on the wire
public void test100_shouldDeleteSeveralResources() {
    // Deleting an explicit id list removes exactly those resources and nothing else.
    // One client serves all requests (the original leaked four).
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("resources")
                .queryParam("ids", "CC")
                .queryParam("ids", "CP")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .delete();
        assertEquals(200, response.getStatus());
        response = client.target(baseUrl.toString()).path("resources/CC")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(404, response.getStatus());
        response = client.target(baseUrl.toString()).path("resources/CP")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(404, response.getStatus());
        // Check that not everything was deleted
        response = client.target(baseUrl.toString()).path("resources")
                .queryParam("root", true)
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        List<Resource> resources = (List<Resource>) response.readEntity(ResultSet.class).getResults();
        assertThat(resources)
                .extracting(Resource::getId)
                .containsOnly("EAP-1", "EAP-2");
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
@SuppressWarnings("unchecked") // ResultSet.getResults() is untyped on the wire
public void test101_shouldDeleteAllResources() {
    // Deleting without an id filter wipes every resource.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("resources")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .delete();
        assertEquals(200, response.getStatus());
        response = client.target(baseUrl.toString()).path("resources")
                .queryParam("root", true)
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        List<Resource> resources = (List<Resource>) response.readEntity(ResultSet.class).getResults();
        assertThat(resources).isEmpty();
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
@SuppressWarnings("unchecked") // ResultSet.getResults() is untyped on the wire
public void test102_shouldDeleteSeveralTypes() {
    // Deleting an explicit type-id list removes exactly those types and nothing else.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("types")
                .queryParam("typeIds", "FOO")
                .queryParam("typeIds", "BAR")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .delete();
        assertEquals(200, response.getStatus());
        response = client.target(baseUrl.toString()).path("types/FOO")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(404, response.getStatus());
        response = client.target(baseUrl.toString()).path("types/BAR")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(404, response.getStatus());
        // Check that not everything was deleted
        response = client.target(baseUrl.toString()).path("types")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        List<ResourceType> resources = (List<ResourceType>) response.readEntity(ResultSet.class).getResults();
        assertThat(resources)
                .extracting(ResourceType::getId)
                .containsOnly("EAP");
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
@SuppressWarnings("unchecked") // ResultSet.getResults() is untyped on the wire
public void test103_shouldDeleteAllTypes() {
    // Deleting without a type-id filter wipes every resource type.
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUrl.toString()).path("types")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .delete();
        assertEquals(200, response.getStatus());
        response = client.target(baseUrl.toString()).path("types")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .get();
        assertEquals(200, response.getStatus());
        List<ResourceType> types = (List<ResourceType>) response.readEntity(ResultSet.class).getResults();
        assertThat(types).isEmpty();
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
@SuppressWarnings("unchecked") // ResultSet.getResults() is untyped on the wire
public void test104_shouldDeleteAResourceAndCheckIsNotIndexed() {
    // Repeatedly import, find, delete and re-query a resource to verify that the
    // delete also removes it from the feed and feed+type indexes.
    String idXaDs = "itest-rest-feed~Local DMR~/subsystem=datasources/xa-data-source=testXaDs";
    String typeIdXaDs = "XA Datasource";
    String parentIdXaDs = "itest-rest-feed~Local DMR~~";
    String feedId = "itest-rest-feed";
    int numIterations = 1000;
    // A single client is reused for the whole loop; the original created several
    // clients per iteration and never closed any (thousands of leaked connections).
    Client client = ClientBuilder.newClient();
    try {
        ResourceType xaDsType = ResourceType.builder().id(typeIdXaDs).build();
        Inventory types = new Inventory(null, Arrays.asList(xaDsType));
        Response response = client.target(baseUrl.toString()).path("import")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .post(Entity.entity(types, MediaType.APPLICATION_JSON_TYPE));
        assertEquals(200, response.getStatus());
        for (int i = 0; i < numIterations; i++) {
            String idXaDsX = idXaDs + "-" + i;
            // Import one XA datasource resource...
            RawResource xaDs = RawResource.builder().id(idXaDsX)
                    .typeId(typeIdXaDs)
                    .parentId(parentIdXaDs)
                    .feedId(feedId)
                    .build();
            Inventory inventory = new Inventory(Arrays.asList(xaDs), null);
            response = client.target(baseUrl.toString()).path("import")
                    .request(MediaType.APPLICATION_JSON_TYPE)
                    .post(Entity.entity(inventory, MediaType.APPLICATION_JSON_TYPE));
            assertEquals(200, response.getStatus());
            // ...verify it is indexed by feed...
            response = client.target(baseUrl.toString()).path("resources")
                    .queryParam("feedId", feedId)
                    .request(MediaType.APPLICATION_JSON_TYPE)
                    .get();
            assertEquals(200, response.getStatus());
            List<Resource> resources = (List<Resource>) response.readEntity(ResultSet.class).getResults();
            assertThat(resources)
                    .extracting(Resource::getId)
                    .contains(idXaDsX);
            // ...delete it...
            response = client.target(baseUrl.toString()).path("resources")
                    .queryParam("ids", idXaDsX)
                    .request(MediaType.APPLICATION_JSON_TYPE)
                    .delete();
            assertEquals(200, response.getStatus());
            // ...and verify the indexes no longer return it.
            // NOTE(review): the original asserted doesNotContain(idXaDs) — the
            // un-suffixed id, which is never present — so the check was vacuous;
            // the imported/deleted id idXaDsX is what must be absent.
            response = client.target(baseUrl.toString()).path("resources")
                    .queryParam("feedId", feedId)
                    .request(MediaType.APPLICATION_JSON_TYPE)
                    .get();
            assertEquals(200, response.getStatus());
            resources = (List<Resource>) response.readEntity(ResultSet.class).getResults();
            assertThat(resources)
                    .extracting(Resource::getId)
                    .doesNotContain(idXaDsX);
            response = client.target(baseUrl.toString()).path("resources")
                    .queryParam("feedId", feedId)
                    .queryParam("typeId", typeIdXaDs)
                    .request(MediaType.APPLICATION_JSON_TYPE)
                    .get();
            assertEquals(200, response.getStatus());
            resources = (List<Resource>) response.readEntity(ResultSet.class).getResults();
            assertThat(resources)
                    .extracting(Resource::getId)
                    .doesNotContain(idXaDsX);
        }
    } finally {
        client.close();
    }
}
@Test
@RunAsClient
public void test105_shouldCreateAPrometheusJsonConfig() {
    // Importing a Java Agent resource must trigger generation of a per-feed
    // Prometheus scrape-config JSON file on disk.
    String id = "my-test-agent";
    String feedId = "my-test-feed";
    String type = "Hawkular Java Agent WF10";
    int numIterations = 1000;
    String testPrometheusConfig = System.getProperty("test.prometheus.config");
    // One client for all iterations; the original leaked a new client per iteration.
    Client client = ClientBuilder.newClient();
    try {
        for (int i = 0; i < numIterations; i++) {
            RawResource agent = RawResource.builder()
                    .id(id + "-" + i)
                    .feedId(feedId + "-" + i)
                    .typeId(type)
                    .config("Metrics Endpoints", "localhost:1234")
                    .build();
            Inventory inventory = new Inventory(Arrays.asList(agent), null);
            Response response = client.target(baseUrl.toString()).path("import")
                    .request(MediaType.APPLICATION_JSON_TYPE)
                    .post(Entity.entity(inventory, MediaType.APPLICATION_JSON_TYPE));
            assertEquals(200, response.getStatus());
            assertTrue(new File(testPrometheusConfig, feedId + "-" + i + ".json").exists());
        }
    } finally {
        client.close();
    }
}
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.daemon.impl.quickfix;
import com.google.common.collect.Lists;
import com.intellij.codeInsight.ExceptionUtil;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.codeInsight.daemon.QuickFixBundle;
import com.intellij.codeInsight.intention.PsiElementBaseIntentionAction;
import com.intellij.codeInspection.util.IntentionName;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pass;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.IntroduceTargetChooser;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.SmartList;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.stream.Collectors;
public class AddExceptionToExistingCatchFix extends PsiElementBaseIntentionAction {
// Element at which the unhandled exception is reported (e.g. a call or `throw`).
private final PsiElement myErrorElement;
public AddExceptionToExistingCatchFix(PsiElement errorElement) {myErrorElement = errorElement;}
@Override
public void invoke(@NotNull Project project, Editor editor, @NotNull PsiElement element) throws IncorrectOperationException {
    // Ensure the file is writable before attempting any PSI modification.
    if (!FileModificationService.getInstance().preparePsiElementsForWrite(myErrorElement)) return;
    // Recompute the context: the PSI may have changed since isAvailable() ran.
    Context context = Context.from(myErrorElement);
    if (context == null) return;
    List<? extends PsiClassType> unhandledExceptions = context.myExceptions;
    List<? extends PsiCatchSection> catches = context.myCatches;
    if (catches.size() == 1) {
        // Single candidate catch section: apply the fix directly, no chooser needed.
        PsiCatchSection selectedSection = catches.get(0);
        addTypeToCatch(unhandledExceptions, selectedSection);
    }
    else {
        // Several candidates: let the user pick the target catch section in a popup.
        IntroduceTargetChooser.showChooser(
            editor,
            catches,
            new Pass<PsiCatchSection>() {
                @Override
                public void pass(PsiCatchSection section) {
                    addTypeToCatch(unhandledExceptions, section);
                }
            },
            // Popup entries are labeled with the section's caught type.
            section -> Objects.requireNonNull(section.getCatchType()).getPresentableText(),
            QuickFixBundle.message("add.exception.to.existing.catch.chooser.title"),
            // Highlight the catch parameter when an entry is hovered.
            catchSection -> Objects.requireNonNull(((PsiCatchSection)catchSection).getParameter()).getTextRange()
        );
    }
}
/**
 * Walks the catch sections from last to first and returns the suffix of sections to
 * which a new exception type could be added. Walking stops (inclusively) at the first
 * section whose caught type is a supertype of one of the exceptions, because adding
 * the exception to any earlier section would make a later catch unreachable.
 */
private static List<PsiCatchSection> findSuitableSections(List<? extends PsiCatchSection> sections, @NotNull List<? extends PsiClassType> exceptionTypes, boolean isJava7OrHigher) {
    List<PsiCatchSection> finalSections = new ArrayList<>();
    for (PsiCatchSection section : Lists.reverse(sections)) {
        finalSections.add(section);
        PsiType sectionType = section.getCatchType();
        if (sectionType == null) continue;
        for (PsiType exceptionType : exceptionTypes) {
            if (exceptionType.isAssignableFrom(sectionType)) {
                return finalSections;
                // adding type to any upper leads to compilation error
            }
        }
    }
    if (!isJava7OrHigher) {
        // Without Java 7 multi-catch we could only widen an existing type, not add one.
        // if we get to this point, this means, that we can't generify any catch clause, so we can't suggest a fix
        return Collections.emptyList();
    }
    return finalSections;
}
/**
 * Rewrites the parameter of {@code catchSection} into a multi-catch that also covers
 * {@code exceptionsToAdd}, then shortens class references and reformats the result.
 * Runs inside a write command; silently bails out if the PSI became invalid.
 */
private static void addTypeToCatch(@NotNull List<? extends PsiClassType> exceptionsToAdd, @NotNull PsiCatchSection catchSection) {
    Project project = catchSection.getProject();
    WriteCommandAction.runWriteCommandAction(project, QuickFixBundle.message("add.exception.to.existing.catch.family"), null, () -> {
        // The action may run after the PSI changed under us (e.g. chooser popup delay).
        if (!catchSection.isValid() || !exceptionsToAdd.stream().allMatch(type -> type.isValid())) return;
        PsiParameter parameter = catchSection.getParameter();
        if (parameter == null) return;
        PsiTypeElement typeElement = parameter.getTypeElement();
        if (typeElement == null) return;
        PsiType parameterType = parameter.getType();
        PsiElementFactory factory = JavaPsiFacade.getElementFactory(project);
        // Build the flattened, de-duplicated "A | B | C" type text and swap it in.
        String flattenText = getTypeText(exceptionsToAdd, parameter, parameterType, factory);
        PsiElement newTypeElement = typeElement.replace(factory.createTypeElementFromText(flattenText, parameter));
        CodeStyleManager.getInstance(project).reformat(JavaCodeStyleManager.getInstance(project).shortenClassReferences(newTypeElement));
    });
}
/**
 * Builds the disjunction type text ("A | B | C") for the catch parameter after
 * appending {@code exceptionsToAdd} to the existing {@code parameterType}, with
 * duplicate components removed.
 */
private static String getTypeText(@NotNull List<? extends PsiClassType> exceptionsToAdd,
                                  PsiParameter parameter,
                                  PsiType parameterType,
                                  PsiElementFactory factory) {
    StringJoiner combinedText = new StringJoiner(" | ");
    combinedText.add(parameterType.getCanonicalText());
    for (PsiClassType exception : exceptionsToAdd) {
        combinedText.add(exception.getCanonicalText());
    }
    // Round-trip through a PSI type element so the disjunction can be flattened
    // and de-duplicated by the platform.
    PsiTypeElement combined = factory.createTypeElementFromText(combinedText.toString(), parameter);
    List<PsiType> components = PsiDisjunctionType.flattenAndRemoveDuplicates(((PsiDisjunctionType)combined.getType()).getDisjunctions());
    return components.stream()
        .map(PsiType::getCanonicalText)
        .collect(Collectors.joining(" | "));
}
/**
 * The fix is offered whenever a context (candidate catch sections plus unhandled
 * exceptions) can still be computed for the error element; the action text is
 * refreshed from that context as a side effect.
 */
@Override
public boolean isAvailable(@NotNull Project project, Editor editor, @NotNull PsiElement element) {
    Context context = Context.from(myErrorElement);
    if (context == null) return false;
    setText(context.getMessage());
    return true;
}
@Nls
@NotNull
@Override
public String getFamilyName() {
    // Family name groups this action with its variants in the intention settings UI.
    return QuickFixBundle.message("add.exception.to.existing.catch.family");
}
private static final class Context {
// Candidate catch sections the unhandled exceptions could be added to.
private final List<? extends PsiCatchSection> myCatches;
// Exception types currently unhandled at the error element.
private final List<? extends PsiClassType> myExceptions;
private Context(List<? extends PsiCatchSection> catches, List<? extends PsiClassType> exceptions) {
    myCatches = catches;
    myExceptions = exceptions;
}
@Nullable
static Context from(@NotNull PsiElement element) {
if (!element.isValid() || element instanceof PsiMethodReferenceExpression) return null;
boolean isJava7OrHigher = PsiUtil.isLanguageLevel7OrHigher(element);
List<PsiClassType> unhandledExceptions = new ArrayList<>(ExceptionUtil.getOwnUnhandledExceptions(element));
if (unhandledExceptions.isEmpty()) return null;
List<PsiTryStatement> tryStatements = getTryStatements(element);
List<PsiCatchSection> sections =
tryStatements.stream()
.flatMap(stmt -> findSuitableSections(Arrays.asList(stmt.getCatchSections()), unhandledExceptions, isJava7OrHigher).stream())
.filter(catchSection -> {
PsiParameter parameter = catchSection.getParameter();
if (parameter == null) return false;
return parameter.getTypeElement() != null;
})
.collect(Collectors.toList());
if (sections.isEmpty()) return null;
return new Context(sections, unhandledExceptions);
}
@NotNull
private static List<PsiTryStatement> getTryStatements(@NotNull PsiElement element) {
PsiElement current = element;
PsiElement parent = element.getParent();
List<PsiTryStatement> parents = new SmartList<>();
while (parent != null) {
if (parent instanceof PsiLambdaExpression || parent instanceof PsiMember || parent instanceof PsiFile) break;
if (parent instanceof PsiTryStatement) {
PsiTryStatement tryStatement = (PsiTryStatement)parent;
if (tryStatement.getFinallyBlock() != current && !(current instanceof PsiCatchSection)) {
parents.add((PsiTryStatement)parent);
}
}
current = parent;
parent = parent.getParent();
}
return parents;
}
private @IntentionName String getMessage() {
if (myCatches.size() == 1 && myExceptions.size() == 1) {
PsiClassType exceptionType = myExceptions.get(0);
PsiCatchSection catchSection = myCatches.get(0);
PsiParameter parameter = catchSection.getParameter();
assert parameter != null;
PsiType catchType = parameter.getType();
if (replacementNeeded(exceptionType, catchType)) {
return QuickFixBundle.message("add.exception.to.existing.catch.replacement", catchType.getPresentableText(), exceptionType.getPresentableText());
}
else {
return QuickFixBundle.message("add.exception.to.existing.catch.no.replacement", catchType.getPresentableText(), exceptionType.getPresentableText());
}
}
return QuickFixBundle.message("add.exception.to.existing.catch.generic");
}
}
/**
 * True when {@code newException} is a supertype of the existing catch type (or of any
 * member of a disjunction), i.e. it would subsume the narrower type rather than be
 * appended next to it.
 */
private static boolean replacementNeeded(@NotNull PsiClassType newException, @NotNull PsiType catchType) {
  if (!(catchType instanceof PsiDisjunctionType)) {
    return newException.isAssignableFrom(catchType);
  }
  return ((PsiDisjunctionType)catchType).getDisjunctions().stream()
    .anyMatch(newException::isAssignableFrom);
}
/** The fix runs its own write command (see the WriteCommandAction in the apply logic). */
@Override
public boolean startInWriteAction() {
  return false;
}
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.toschu.laboraufgabe1.neuronalnetwork;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Vector;
import org.toschu.laboraufgabe1.framework.Concept;
import org.toschu.laboraufgabe1.framework.FeatureVector;
import org.toschu.laboraufgabe1.framework.Learner;
import org.toschu.laboraufgabe1.neuronalnetwork.networks.Mapping3;
import org.toschu.laboraufgabe1.neuronalnetwork.networks.Mapping6;
import org.toschu.laboraufgabe1.neuronalnetwork.networks.OnlyLeft;
import org.toschu.laboraufgabe1.neuronalnetwork.networks.OnlyRight;
import org.toschu.laboraufgabe1.neuronalnetwork.networks.OnlyStop;
import org.toschu.laboraufgabe1.neuronalnetwork.networks.OnlyVorfahrtGewaehren;
import org.toschu.laboraufgabe1.neuronalnetwork.networks.OnlyVorfahrtStrasse;
import org.toschu.laboraufgabe1.neuronalnetwork.networks.OnlyVorfahrtvonRechts;
import org.toschu.repositoryapi.api.Implemented.JSONRepository;
import org.toschu.repositoryapi.api.Repository;
/**
*
* @author toschu
*/
public class PerzeptronNetworkEvaluator {

    /**
     * The percentage (between 0 and 100) of vectors from the data to be used
     * for training; the remainder is used for the test.
     */
    private int testRate = 50;

    /** Number of shuffle/train/test rounds whose results are averaged. */
    private int numberOfTests = 100;

    public PerzeptronNetworkEvaluator() {
    }

    /**
     * Runs a complete evaluation: reads the serialized feature vectors from
     * {@code filename}, then repeatedly shuffles, splits, trains and tests the
     * given network, and prints the averaged correct/unknown/wrong rates.
     *
     * @param filename      file containing a serialized {@code List<FeatureVector>}
     * @param network       the perzeptron network to train and evaluate
     * @param testRate      percentage (0-100) of the data used as training set
     * @param numberOfTests number of independent rounds to average over
     * @param learingRate   learning rate handed to the {@link PerzeptronLearner}
     */
    public PerzeptronNetworkEvaluator(String filename,
            PerzeptronNetwork network,
            int testRate, int numberOfTests, int learingRate) {
        List<FeatureVector> vectors = readData(filename);
        float success = 0;
        float unknown = 0;
        float wrong = 0;
        this.numberOfTests = numberOfTests;
        this.testRate = testRate;
        System.out.println(network.getName());
        int i = 0;
        do {
            // A fresh learner per round so earlier rounds do not influence later ones.
            Learner learner
                    = new PerzeptronLearner(network,
                            network.getMappingConceptToPerzeptron(),
                            learingRate);
            vectors = mixData(vectors);
            List<List<FeatureVector>> sets = extractTrainingData(vectors);
            learner.learn(sets.get(0));
            Vector<Integer> result = evaluate(sets.get(1), learner);
            int testSize = sets.get(1).size();
            success += result.get(0) / (float) testSize;
            unknown += result.get(1) / (float) testSize;
            wrong += result.get(2) / (float) testSize;
            i++;
            System.out.println("");
        } while (i < numberOfTests);
        System.out.println("Result after "
                + numberOfTests + " Test with "
                + vectors.size() + " FeatureVectors:");
        System.out.println("Learning result: \n correct: "
                + (success / numberOfTests) * 100f + "%\n unknown: "
                + (unknown / numberOfTests) * 100f + "%\n wrong: "
                + (wrong / numberOfTests) * 100f + "%");
        // NOTE(review): this repository is created but never written to or read.
        // Presumably the trained network was meant to be persisted here - confirm
        // before removing, since the constructor may have filesystem side effects.
        Repository<PerzeptronNetwork> repo = new JSONRepository<>(
                new File(network.getClass().getSimpleName()),
                PerzeptronNetwork.class);
    }

    /**
     * Prints the raw result counts from a test run for further consideration.
     *
     * @param result a Vector containing 3 values: a) right classification by the
     * used learner, b) learner could not decide or c) learner found the wrong
     * concept
     */
    private void evalResult(Vector<Integer> result) {
        // TODO hier muss mehr Auswertung passieren, insbes: Vertrauensintervalle etc
        System.out.println("Learning result: \n correct: "
                + result.get(0) + "\n unknown: "
                + result.get(1) + "\n wrong: " + result.get(2));
    }

    /**
     * Evaluates the learner with a given test set.
     *
     * @param list the set of test examples containing the correct concept
     * @param learner the learner to be tested
     *
     * @return a vector containing the test results: success, unknown, false
     */
    private Vector<Integer> evaluate(List<FeatureVector> list, Learner learner) {
        int success = 0;
        int unknown = 0;
        int fault = 0;
        for (FeatureVector fv : list) {
            Concept c = learner.classify(fv);
            if (c.equals(Concept.Unknown)) {
                unknown++;
            } else if (c.equals(fv.getConcept())) {
                success++;
            } else {
                fault++;
            }
        }
        Vector<Integer> res = new Vector<>();
        res.add(0, success);
        res.add(1, unknown);
        res.add(2, fault);
        return res;
    }

    /**
     * Shuffles the given vectors in place.
     *
     * @param vectors a list of vectors
     * @return the same list instance, (usually) in a different order
     */
    private List<FeatureVector> mixData(List<FeatureVector> vectors) {
        Collections.shuffle(vectors);
        return vectors;
    }

    /**
     * Splits the set of feature vectors into a training set and a test set.
     * For representative results it is essential to shuffle the vectors before
     * splitting the set.
     *
     * @param vectors a list of feature vectors we can use for the test
     * @return a list containing two lists: first the training data, second the
     * test data; they are disjoint subsets covering all of {@code vectors}
     */
    private List<List<FeatureVector>> extractTrainingData(
            List<FeatureVector> vectors) {
        List<List<FeatureVector>> result = new LinkedList<>();
        // Use floating-point division: 'testRate / 100' in int arithmetic is 0 for
        // every rate below 100, which left the training set empty.
        int cut = (int) ((testRate / 100.0) * vectors.size());
        // Start the test set at 'cut' (not 'cut + 1'), otherwise the element at
        // index 'cut' would be silently dropped from both sets.
        List<FeatureVector> trainingData = new LinkedList<>(vectors.subList(0, cut));
        List<FeatureVector> testData = new LinkedList<>(vectors.subList(cut, vectors.size()));
        result.add(trainingData);
        result.add(testData);
        return result;
    }

    /**
     * Reads the data from a file.
     *
     * @param filename the file with this name should contain a serialized
     * {@code List<FeatureVector>} containing all the data
     * @return all the data; the process exits when the file cannot be read
     */
    private List<FeatureVector> readData(String filename) {
        List<FeatureVector> vectors = null;
        // try-with-resources closes the stream even when readObject() throws.
        try (ObjectInputStream in = new ObjectInputStream(
                new BufferedInputStream(
                        new FileInputStream(filename)))) {
            @SuppressWarnings("unchecked") // the file is expected to hold a serialized List<FeatureVector>
            List<FeatureVector> read = (List<FeatureVector>) in.readObject();
            vectors = read;
        } catch (IOException | ClassNotFoundException t) {
            System.out.println("Could not read Data from file: " + filename);
            System.exit(1);
        }
        return vectors;
    }

    /**
     * Runs the program over a grid of test rates and learning terms, with the
     * training set provided in the file named by the first parameter.
     *
     * @param args 1. filename of a serialized {@code List<FeatureVector>}
     */
    public static void main(String[] args) {
        String filename = null;
        if (args.length == 0) {
            // Message fixed to name the file that is actually used as the fallback.
            System.out.println("No data file provided, using dummy data: TomsFeatureVectors.dat");
            filename = "TomsFeatureVectors.dat";
        } else {
            filename = args[0];
        }
        int[] testRates = {10, 20, 30, 40, 50, 60, 70, 80};
        // NOTE(review): the second entry (10) breaks the otherwise increasing
        // sequence 50, 100, 150, ... - possibly a typo for 100; confirm intent.
        int[] learingTerms = {50, 10, 150, 200, 250, 300, 400, 500};
        for (int testRate : testRates) {
            for (int learingTerm : learingTerms) {
                tests(filename, testRate, learingTerm);
            }
        }
    }

    /**
     * Evaluates every network variant once with the given settings. The
     * evaluator prints its results from its constructor; network order matches
     * the original hand-written sequence.
     */
    public static void tests(String filename, int testRate, int learingTerms) {
        System.out.println("First Evaluation run:");
        System.out.println("numberof Tests:\t100");
        System.out.println("Testrate:\t" + testRate);
        System.out.println("LearingTerms:\t" + learingTerms);
        PerzeptronNetwork[] networks = {
                new OnlyVorfahrtStrasse(), new OnlyRight(), new OnlyLeft(),
                new OnlyStop(), new OnlyVorfahrtGewaehren(),
                new OnlyVorfahrtvonRechts(), new Mapping6(), new Mapping3()};
        for (PerzeptronNetwork network : networks) {
            new PerzeptronNetworkEvaluator(filename, network, testRate, 100, learingTerms);
            System.out.println("");
        }
    }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.eas.client.forms.components.rt;
import com.eas.script.Scripts;
import java.awt.event.FocusEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import javax.swing.JFormattedTextField;
import javax.swing.text.DefaultFormatterFactory;
import jdk.nashorn.api.scripting.JSObject;
import jdk.nashorn.internal.runtime.JSType;
/**
*
* @author mg
*/
public abstract class VFormattedField extends JFormattedTextField implements HasValue<Object>, HasEmptyText, HasEditable {

    // Value-type selectors: choose which underlying format is used by
    // PolymorphFormatter when converting between text and value.
    public static final int NUMBER = 0;
    public static final int DATE = 1;
    public static final int TIME = 2;
    public static final int PERCENT = 3;
    public static final int CURRENCY = 4;
    public static final int MASK = 5;
    public static final int REGEXP = 6;

    /**
     * A single formatter that keeps a mask formatter, a date format, a number
     * format and a regular expression all configured from one pattern string,
     * and dispatches to the one selected by the field's current valueType.
     */
    public class PolymorphFormatter extends AbstractFormatter {

        // Raw pattern text, applied to every sub-format in setPattern().
        private String pattern;
        private final OptimisticMaskFormatter maskFormatter = new OptimisticMaskFormatter();
        private final SimpleDateFormat dateFormat = new SimpleDateFormat();
        private final DecimalFormat numberFormat = new DecimalFormat();
        private Pattern regexp;

        public PolymorphFormatter() {
            super();
            maskFormatter.setValueContainsLiteralCharacters(true);
        }

        public String getPattern() {
            return pattern;
        }

        /**
         * Applies the pattern to all sub-formats. A pattern valid for one format
         * may be invalid for another, so each failure is only logged as a warning:
         * only the sub-format selected by valueType is consulted at runtime.
         */
        public void setPattern(String aValue) {
            if (pattern == null ? aValue != null : !pattern.equals(aValue)) {
                pattern = aValue;
                try {
                    maskFormatter.setMask(pattern);
                } catch (ParseException ex) {
                    Logger.getLogger(VFormattedField.class.getName()).log(Level.WARNING, ex.getMessage());
                }
                try {
                    dateFormat.applyPattern(pattern);
                } catch (Exception ex) {
                    Logger.getLogger(VFormattedField.class.getName()).log(Level.WARNING, ex.getMessage());
                }
                try {
                    numberFormat.applyPattern(pattern);
                } catch (Exception ex) {
                    Logger.getLogger(VFormattedField.class.getName()).log(Level.WARNING, ex.getMessage());
                }
                try {
                    regexp = aValue != null && !aValue.isEmpty() ? Pattern.compile(aValue) : null;
                } catch (Exception ex) {
                    Logger.getLogger(VFormattedField.class.getName()).log(Level.WARNING, ex.getMessage());
                }
            }
        }

        /**
         * Parses text into a value. When an onParse script handler is set it takes
         * precedence; any failure in the handler is reported as a ParseException.
         * Otherwise dispatches on valueType; a REGEXP field returns the text
         * unchanged when it matches (or when no regexp is configured).
         */
        @Override
        public Object stringToValue(String text) throws ParseException {
            if (onParse != null) {
                JSObject jsEvent = Scripts.getSpace().makeObj();
                jsEvent.setMember("source", getPublished());
                jsEvent.setMember("text", text);
                try {
                    return Scripts.getSpace().toJava(onParse.call(getPublished(), new Object[]{jsEvent}));
                } catch (Throwable t) {
                    throw new ParseException(text, 0);
                }
            } else {
                switch (valueType) {
                    case DATE:
                        return dateFormat.parse(text);
                    case TIME:
                        return dateFormat.parse(text);
                    case NUMBER:
                        return numberFormat.parse(text);
                    case PERCENT:
                        return numberFormat.parse(text);
                    case CURRENCY:
                        return numberFormat.parse(text);
                    case MASK:
                        return maskFormatter.stringToValue(text);
                    case REGEXP:
                        // No configured regexp means any text is accepted.
                        boolean matches = regexp != null ? regexp.matcher(text).matches() : true;
                        if (matches) {
                            return text;
                        } else {
                            throw new ParseException(text, 0);
                        }
                    default:
                        return text;
                }
            }
        }

        /**
         * Formats a value into display text. When an onFormat script handler is
         * set it takes precedence; otherwise dispatches on valueType, falling back
         * to toString() (empty string for null) for REGEXP and unknown types.
         */
        @Override
        public String valueToString(Object value) throws ParseException {
            if (onFormat != null) {
                JSObject jsEvent = Scripts.getSpace().makeObj();
                jsEvent.setMember("source", getPublished());
                jsEvent.setMember("value", Scripts.getSpace().toJs(value));
                try {
                    return JSType.toString(onFormat.call(getPublished(), new Object[]{jsEvent}));
                } catch (Throwable t) {
                    throw new ParseException(t.getMessage(), 0);
                }
            } else {
                try {
                    switch (valueType) {
                        case DATE:
                            return dateFormat.format(value);
                        case TIME:
                            return dateFormat.format(value);
                        case NUMBER:
                            return numberFormat.format(value);
                        case PERCENT:
                            return numberFormat.format(value);
                        case CURRENCY:
                            return numberFormat.format(value);
                        case MASK:
                            return maskFormatter.valueToString(value);
                        case REGEXP:
                            return value != null ? value.toString() : "";
                        default:
                            return value != null ? value.toString() : "";
                    }
                } catch (IllegalArgumentException ex) {
                    throw new ParseException(ex.getMessage(), 0);
                }
            }
        }
    }

    protected PolymorphFormatter formatter = new PolymorphFormatter();
    protected int valueType = REGEXP;
    // Optional script callbacks overriding format/parse (see PolymorphFormatter).
    protected JSObject onFormat;
    protected JSObject onParse;
    // Tracks an explicitly-null value; getValue() reports null while this is set.
    protected boolean valueIsNull = true;
    // Any value change reported by the underlying field clears the null flag.
    protected PropertyChangeListener valueIsNullClearer = (PropertyChangeEvent pce) -> {
        valueIsNull = false;
    };

    public VFormattedField(Object aValue) {
        super();
        setFormatterFactory(new DefaultFormatterFactory(formatter));
        setValue(aValue);
    }

    public VFormattedField() {
        this(null);
    }

    /** The scripting-facing wrapper of this component, supplied by subclasses. */
    protected abstract JSObject getPublished();

    public int getValueType() {
        return valueType;
    }

    /**
     * Stores the new value type; note that existing text is not re-parsed or
     * re-formatted here.
     */
    public void setValueType(int aValue) {
        if (valueType != aValue) {
            valueType = aValue;
        }
    }

    public String getFormat() {
        return formatter.getPattern();
    }

    public void setFormat(String aValue) {
        formatter.setPattern(aValue);
    }

    public JSObject getOnFormat() {
        return onFormat;
    }

    public void setOnFormat(JSObject aValue) {
        onFormat = aValue;
    }

    public JSObject getOnParse() {
        return onParse;
    }

    public void setOnParse(JSObject aValue) {
        onParse = aValue;
    }

    /** Never returns null; an absent text is reported as the empty string. */
    @Override
    public String getText() {
        return super.getText() != null ? super.getText() : "";
    }

    /**
     * Sets the text and immediately commits it, so the field's value reflects
     * the new text; a parse failure is only logged.
     */
    @Override
    public void setText(String aValue) {
        try {
            super.setText(aValue != null ? aValue : "");
            super.commitEdit();
        } catch (ParseException ex) {
            Logger.getLogger(VFormattedField.class.getName()).log(Level.WARNING, ex.getMessage());
        }
    }

    @Override
    public Object getValue() {
        // Honor an explicitly assigned null regardless of what the superclass holds.
        return valueIsNull ? null : super.getValue();
    }

    /**
     * Sets the value, normalizing any Number to Double first. The null-tracking
     * listener is detached while assigning so that this programmatic change does
     * not clear the valueIsNull flag we just computed.
     */
    @Override
    public void setValue(Object aValue) {
        if (aValue instanceof Number) {
            aValue = ((Number) aValue).doubleValue();
        }
        removeValueChangeListener(valueIsNullClearer);
        try {
            valueIsNull = aValue == null;
            super.setValue(aValue);
        } finally {
            addValueChangeListener(valueIsNullClearer);
        }
    }

    @Override
    protected void firePropertyChange(String propertyName, Object oldValue, Object newValue) {
        // Crazy Swing JFormattedField issues a value change while focus gaining;
        // suppress value-property events fired during a focus-gain (see
        // processFocusEvent, which records the event being processed).
        if (!VALUE_PROP_NAME.equals(propertyName) || processedFocusEvent == null || processedFocusEvent.getID() == FocusEvent.FOCUS_LOST) {
            super.firePropertyChange(propertyName, oldValue, newValue);
        }
    }

    // The focus event currently being processed, consulted by firePropertyChange.
    private FocusEvent processedFocusEvent;

    @Override
    protected void processFocusEvent(FocusEvent e) {
        // Crazy Swing JFormattedField issues a value change while focus gaining
        processedFocusEvent = e;
        try {
            super.processFocusEvent(e);
        } finally {
            processedFocusEvent = null;
        }
    }

    @Override
    public void addValueChangeListener(PropertyChangeListener listener) {
        super.addPropertyChangeListener(VALUE_PROP_NAME, listener);
    }

    @Override
    public void removeValueChangeListener(PropertyChangeListener listener) {
        super.removePropertyChangeListener(VALUE_PROP_NAME, listener);
    }

    // Placeholder text shown for an empty field, per the HasEmptyText contract.
    protected String emptyText;

    @Override
    public String getEmptyText() {
        return emptyText;
    }

    @Override
    public void setEmptyText(String aValue) {
        emptyText = aValue;
    }

    @Override
    public boolean getEditable() {
        return super.isEditable();
    }
}
| |
/*
* Copyright 2019 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.snomed.api.rest.mergereview;
import static com.b2international.snowowl.snomed.api.rest.SnomedBranchingRestRequests.createBranch;
import static com.b2international.snowowl.snomed.api.rest.SnomedComponentRestRequests.createComponent;
import static com.b2international.snowowl.snomed.api.rest.SnomedComponentRestRequests.getComponent;
import static com.b2international.snowowl.snomed.api.rest.SnomedComponentRestRequests.updateComponent;
import static com.b2international.snowowl.snomed.api.rest.SnomedMergeReviewingRestRequests.createMergeReviewAndGetDetails;
import static com.b2international.snowowl.snomed.api.rest.SnomedRefSetRestRequests.updateRefSetComponent;
import static com.b2international.snowowl.snomed.api.rest.SnomedRestFixtures.createConceptRequestBody;
import static com.b2international.snowowl.snomed.api.rest.SnomedRestFixtures.createNewConcept;
import static com.b2international.snowowl.snomed.api.rest.SnomedRestFixtures.createNewDescription;
import static com.b2international.snowowl.snomed.api.rest.SnomedRestFixtures.createNewRelationship;
import static com.b2international.snowowl.snomed.api.rest.SnomedRestFixtures.createRefSetMemberRequestBody;
import static com.b2international.snowowl.test.commons.rest.RestExtensions.lastPathSegment;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.Map;
import java.util.Set;
import org.junit.Test;
import com.b2international.snowowl.core.api.IBranchPath;
import com.b2international.snowowl.datastore.BranchPathUtils;
import com.b2international.snowowl.snomed.SnomedConstants.Concepts;
import com.b2international.snowowl.snomed.api.mergereview.ISnomedBrowserMergeReviewDetail;
import com.b2international.snowowl.snomed.api.rest.AbstractSnomedApiTest;
import com.b2international.snowowl.snomed.api.rest.SnomedApiTestConstants;
import com.b2international.snowowl.snomed.api.rest.SnomedComponentType;
import com.b2international.snowowl.snomed.api.rest.domain.SnomedRefSetMemberRestInput;
import com.b2international.snowowl.snomed.common.SnomedRf2Headers;
import com.b2international.snowowl.snomed.core.domain.CharacteristicType;
import com.b2international.snowowl.snomed.core.domain.SnomedConcept;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
/**
* @since 6.15.1
*/
public class SnomedMergeReviewApprovedChangesTest extends AbstractSnomedApiTest {
@Test
public void mergeReviewAbsentForNewDescriptionVsChangedConceptProperty() {
    // Parent branch changes the concept's module; task branch adds a description.
    final String conceptId = createNewConcept(branchPath);
    final IBranchPath taskBranch = BranchPathUtils.createPath(branchPath, "a");
    createBranch(taskBranch);
    createNewDescription(taskBranch, conceptId);

    final Map<?, ?> moduleUpdate = ImmutableMap.<String, Object>builder()
            .put("moduleId", Concepts.MODULE_ROOT)
            .put("commitComment", "Changed module of concept")
            .build();
    updateComponent(branchPath, SnomedComponentType.CONCEPT, conceptId, moduleUpdate).statusCode(204);

    // Unrelated changes on the two branches must not require manual merge review.
    final Set<ISnomedBrowserMergeReviewDetail> details = createMergeReviewAndGetDetails(taskBranch, branchPath);
    assertTrue(details.isEmpty());
}
@Test
public void mergeReviewAbsentForNewDescriptionVsChangedStatedRelationship() {
    // Parent branch retypes a stated relationship; task branch adds a description.
    final String conceptId = createNewConcept(branchPath);
    final String statedRelationshipId = createNewRelationship(branchPath, conceptId, Concepts.PART_OF, Concepts.NAMESPACE_ROOT, CharacteristicType.STATED_RELATIONSHIP);
    final IBranchPath taskBranch = BranchPathUtils.createPath(branchPath, "a");
    createBranch(taskBranch);
    createNewDescription(taskBranch, conceptId);

    final Map<?, ?> typeIdUpdate = ImmutableMap.builder()
            .put(SnomedRf2Headers.FIELD_TYPE_ID, Concepts.DEFINING_RELATIONSHIP)
            .put("commitComment", "Updated relationship typeId")
            .build();
    updateComponent(branchPath, SnomedComponentType.RELATIONSHIP, statedRelationshipId, typeIdUpdate).statusCode(204);

    final Set<ISnomedBrowserMergeReviewDetail> details = createMergeReviewAndGetDetails(taskBranch, branchPath);
    assertTrue(details.isEmpty());
}
@Test
public void mergeReviewAbsentForNewDescriptionVsChangedOwlAxiom() {
    // Create a concept carrying an OWL axiom reference set member.
    final String axiom = String.format("SubClassOf(:%s :%s)", Concepts.FULLY_SPECIFIED_NAME, Concepts.AMBIGUOUS);
    final Map<?, ?> axiomMember = ImmutableMap.<String, Object>builder()
            .put("moduleId", Concepts.MODULE_SCT_CORE)
            .put("referenceSetId", Concepts.REFSET_OWL_AXIOM)
            .put(SnomedRefSetMemberRestInput.ADDITIONAL_FIELDS, ImmutableMap.<String, Object>builder()
                    .put(SnomedRf2Headers.FIELD_OWL_EXPRESSION, axiom)
                    .build())
            .build();
    final Map<?, ?> conceptBody = createConceptRequestBody(Concepts.ROOT_CONCEPT)
            .put("members", ImmutableList.of(axiomMember))
            .put("commitComment", "Created concept with owl axiom reference set member")
            .build();
    final String conceptId = lastPathSegment(createComponent(branchPath, SnomedComponentType.CONCEPT, conceptBody)
            .statusCode(201)
            .extract().header("Location"));

    final SnomedConcept concept = getComponent(branchPath, SnomedComponentType.CONCEPT, conceptId, "members()").extract().as(SnomedConcept.class);
    final String memberId = concept.getMembers().stream()
            .filter(member -> member.getRefsetId().equals(Concepts.REFSET_OWL_AXIOM))
            .findFirst().get().getId();

    // Task branch adds a description while the parent changes the axiom expression.
    final IBranchPath taskBranch = BranchPathUtils.createPath(branchPath, "a");
    createBranch(taskBranch);
    createNewDescription(taskBranch, conceptId);

    final String updatedAxiom = String.format("SubClassOf(:%s :%s)", Concepts.FULLY_SPECIFIED_NAME, Concepts.NAMESPACE_ROOT);
    final Map<?, ?> memberUpdate = ImmutableMap.builder()
            .put(SnomedRefSetMemberRestInput.ADDITIONAL_FIELDS, ImmutableMap.<String, Object>builder()
                    .put(SnomedRf2Headers.FIELD_OWL_EXPRESSION, updatedAxiom)
                    .build())
            .put("commitComment", "Updated reference set member")
            .build();
    updateRefSetComponent(branchPath, SnomedComponentType.MEMBER, memberId, memberUpdate, false).statusCode(204);

    final Set<ISnomedBrowserMergeReviewDetail> details = createMergeReviewAndGetDetails(taskBranch, branchPath);
    assertTrue(details.isEmpty());
}
@Test
public void mergeReviewAbsentForNewDescriptionVsChangedFsn() {
    // Parent branch edits the FSN term; task branch adds another description.
    final String conceptId = createNewConcept(branchPath);
    final SnomedConcept concept = getComponent(branchPath, SnomedComponentType.CONCEPT, conceptId, "fsn()").extract().as(SnomedConcept.class);
    final String fsnDescriptionId = concept.getFsn().getId();

    final IBranchPath taskBranch = BranchPathUtils.createPath(branchPath, "a");
    createBranch(taskBranch);
    createNewDescription(taskBranch, conceptId);

    final Map<?, ?> termUpdate = ImmutableMap.builder()
            .put(SnomedRf2Headers.FIELD_TERM, "Updated FSN term")
            .put("commitComment", "Update description term")
            .build();
    updateComponent(branchPath, SnomedComponentType.DESCRIPTION, fsnDescriptionId, termUpdate).statusCode(204);

    final Set<ISnomedBrowserMergeReviewDetail> details = createMergeReviewAndGetDetails(taskBranch, branchPath);
    assertTrue(details.isEmpty());
}
@Test
public void mergeReviewAbsentForNewDescriptionVsChangedPt() {
    // Parent branch edits the PT term; task branch adds another description.
    final String conceptId = createNewConcept(branchPath);
    final SnomedConcept concept = getComponent(branchPath, SnomedComponentType.CONCEPT, conceptId, "pt()").extract().as(SnomedConcept.class);
    final String ptDescriptionId = concept.getPt().getId();

    final IBranchPath taskBranch = BranchPathUtils.createPath(branchPath, "a");
    createBranch(taskBranch);
    createNewDescription(taskBranch, conceptId);

    final Map<?, ?> termUpdate = ImmutableMap.builder()
            .put(SnomedRf2Headers.FIELD_TERM, "Updated PT term")
            .put("commitComment", "Update description term")
            .build();
    updateComponent(branchPath, SnomedComponentType.DESCRIPTION, ptDescriptionId, termUpdate).statusCode(204);

    final Set<ISnomedBrowserMergeReviewDetail> details = createMergeReviewAndGetDetails(taskBranch, branchPath);
    assertTrue(details.isEmpty());
}
@Test
public void mergeReviewAbsentForNewInferredRelationshipVsChangedConceptProperty() {
    // Parent branch changes the concept's module; task branch adds an inferred relationship.
    final String conceptId = createNewConcept(branchPath);
    final IBranchPath taskBranch = BranchPathUtils.createPath(branchPath, "a");
    createBranch(taskBranch);
    createNewRelationship(taskBranch, conceptId, Concepts.IS_A, Concepts.NAMESPACE_ROOT, CharacteristicType.INFERRED_RELATIONSHIP);

    final Map<?, ?> moduleUpdate = ImmutableMap.<String, Object>builder()
            .put("moduleId", Concepts.MODULE_ROOT)
            .put("commitComment", "Changed module of concept")
            .build();
    updateComponent(branchPath, SnomedComponentType.CONCEPT, conceptId, moduleUpdate).statusCode(204);

    final Set<ISnomedBrowserMergeReviewDetail> details = createMergeReviewAndGetDetails(taskBranch, branchPath);
    assertTrue(details.isEmpty());
}
@Test
public void mergeReviewAbsentForNewInferredRelationshipVsChangedStatedRelationship() {
    // Parent branch retypes a stated relationship; task branch adds an inferred one.
    final String conceptId = createNewConcept(branchPath);
    final String statedRelationshipId = createNewRelationship(branchPath, conceptId, Concepts.PART_OF, Concepts.NAMESPACE_ROOT, CharacteristicType.STATED_RELATIONSHIP);
    final IBranchPath taskBranch = BranchPathUtils.createPath(branchPath, "a");
    createBranch(taskBranch);
    createNewRelationship(taskBranch, conceptId, Concepts.IS_A, Concepts.NAMESPACE_ROOT, CharacteristicType.INFERRED_RELATIONSHIP);

    final Map<?, ?> typeIdUpdate = ImmutableMap.builder()
            .put(SnomedRf2Headers.FIELD_TYPE_ID, Concepts.DEFINING_RELATIONSHIP)
            .put("commitComment", "Updated relationship typeId")
            .build();
    updateComponent(branchPath, SnomedComponentType.RELATIONSHIP, statedRelationshipId, typeIdUpdate).statusCode(204);

    final Set<ISnomedBrowserMergeReviewDetail> details = createMergeReviewAndGetDetails(taskBranch, branchPath);
    assertTrue(details.isEmpty());
}
@Test
public void mergeReviewAbsentForNewInferredRelationshipVsChangedOwlAxiom() {
    // Create a concept carrying an OWL axiom reference set member.
    final String axiom = String.format("SubClassOf(:%s :%s)", Concepts.FULLY_SPECIFIED_NAME, Concepts.AMBIGUOUS);
    final Map<?, ?> axiomMember = ImmutableMap.<String, Object>builder()
            .put("moduleId", Concepts.MODULE_SCT_CORE)
            .put("referenceSetId", Concepts.REFSET_OWL_AXIOM)
            .put(SnomedRefSetMemberRestInput.ADDITIONAL_FIELDS, ImmutableMap.<String, Object>builder()
                    .put(SnomedRf2Headers.FIELD_OWL_EXPRESSION, axiom)
                    .build())
            .build();
    final Map<?, ?> conceptBody = createConceptRequestBody(Concepts.ROOT_CONCEPT)
            .put("members", ImmutableList.of(axiomMember))
            .put("commitComment", "Created concept with owl axiom reference set member")
            .build();
    final String conceptId = lastPathSegment(createComponent(branchPath, SnomedComponentType.CONCEPT, conceptBody)
            .statusCode(201)
            .extract().header("Location"));

    final SnomedConcept concept = getComponent(branchPath, SnomedComponentType.CONCEPT, conceptId, "members()").extract().as(SnomedConcept.class);
    final String memberId = concept.getMembers().stream()
            .filter(member -> member.getRefsetId().equals(Concepts.REFSET_OWL_AXIOM))
            .findFirst().get().getId();

    // Task branch adds an inferred relationship while the parent changes the axiom.
    final IBranchPath taskBranch = BranchPathUtils.createPath(branchPath, "a");
    createBranch(taskBranch);
    createNewRelationship(taskBranch, conceptId, Concepts.IS_A, Concepts.NAMESPACE_ROOT, CharacteristicType.INFERRED_RELATIONSHIP);

    final String updatedAxiom = String.format("SubClassOf(:%s :%s)", Concepts.FULLY_SPECIFIED_NAME, Concepts.NAMESPACE_ROOT);
    final Map<?, ?> memberUpdate = ImmutableMap.builder()
            .put(SnomedRefSetMemberRestInput.ADDITIONAL_FIELDS, ImmutableMap.<String, Object>builder()
                    .put(SnomedRf2Headers.FIELD_OWL_EXPRESSION, updatedAxiom)
                    .build())
            .put("commitComment", "Updated reference set member")
            .build();
    updateRefSetComponent(branchPath, SnomedComponentType.MEMBER, memberId, memberUpdate, false).statusCode(204);

    final Set<ISnomedBrowserMergeReviewDetail> details = createMergeReviewAndGetDetails(taskBranch, branchPath);
    assertTrue(details.isEmpty());
}
@Test
public void mergeReviewAbsentForNewInferredRelationshipVsChangedFsn() {
    // Parent branch edits the FSN term; task branch adds an inferred relationship.
    final String conceptId = createNewConcept(branchPath);
    final SnomedConcept concept = getComponent(branchPath, SnomedComponentType.CONCEPT, conceptId, "fsn()").extract().as(SnomedConcept.class);
    final String fsnDescriptionId = concept.getFsn().getId();

    final IBranchPath taskBranch = BranchPathUtils.createPath(branchPath, "a");
    createBranch(taskBranch);
    createNewRelationship(taskBranch, conceptId, Concepts.IS_A, Concepts.NAMESPACE_ROOT, CharacteristicType.INFERRED_RELATIONSHIP);

    final Map<?, ?> termUpdate = ImmutableMap.builder()
            .put(SnomedRf2Headers.FIELD_TERM, "Updated FSN term")
            .put("commitComment", "Update description term")
            .build();
    updateComponent(branchPath, SnomedComponentType.DESCRIPTION, fsnDescriptionId, termUpdate).statusCode(204);

    final Set<ISnomedBrowserMergeReviewDetail> details = createMergeReviewAndGetDetails(taskBranch, branchPath);
    assertTrue(details.isEmpty());
}
@Test
public void mergeReviewAbsentForNewInferredRelationshipVsChangedPt() {
    // Parent branch edits the PT term; task branch adds an inferred relationship.
    final String conceptId = createNewConcept(branchPath);
    final SnomedConcept concept = getComponent(branchPath, SnomedComponentType.CONCEPT, conceptId, "pt()").extract().as(SnomedConcept.class);
    final String ptDescriptionId = concept.getPt().getId();

    final IBranchPath taskBranch = BranchPathUtils.createPath(branchPath, "a");
    createBranch(taskBranch);
    createNewRelationship(taskBranch, conceptId, Concepts.IS_A, Concepts.NAMESPACE_ROOT, CharacteristicType.INFERRED_RELATIONSHIP);

    final Map<?, ?> termUpdate = ImmutableMap.builder()
            .put(SnomedRf2Headers.FIELD_TERM, "Updated PT term")
            .put("commitComment", "Update description term")
            .build();
    updateComponent(branchPath, SnomedComponentType.DESCRIPTION, ptDescriptionId, termUpdate).statusCode(204);

    final Set<ISnomedBrowserMergeReviewDetail> details = createMergeReviewAndGetDetails(taskBranch, branchPath);
    assertTrue(details.isEmpty());
}
/**
 * A new inferred relationship on the task branch and a different new inferred
 * relationship on the parent branch should not require a merge review.
 */
@Test
public void mergeReviewAbsentForNewInferredRelationshipVsNewInferredRelationship() {
    final String conceptId = createNewConcept(branchPath);
    final IBranchPath taskBranch = BranchPathUtils.createPath(branchPath, "a");
    createBranch(taskBranch);
    // Task branch: inferred IS A pointing at the namespace root
    createNewRelationship(taskBranch, conceptId, Concepts.IS_A, Concepts.NAMESPACE_ROOT, CharacteristicType.INFERRED_RELATIONSHIP);
    // Parent branch: inferred IS A pointing at the module root
    createNewRelationship(branchPath, conceptId, Concepts.IS_A, Concepts.MODULE_ROOT, CharacteristicType.INFERRED_RELATIONSHIP);
    assertTrue(createMergeReviewAndGetDetails(taskBranch, branchPath).isEmpty());
}
/**
 * A new inferred relationship on the task branch and a new stated relationship
 * on the parent branch should not require a merge review.
 */
@Test
public void mergeReviewAbsentForNewInferredRelationshipVsNewStatedRelationship() {
    final String conceptId = createNewConcept(branchPath);
    final IBranchPath taskBranch = BranchPathUtils.createPath(branchPath, "a");
    createBranch(taskBranch);
    // Task branch: inferred IS A pointing at the namespace root
    createNewRelationship(taskBranch, conceptId, Concepts.IS_A, Concepts.NAMESPACE_ROOT, CharacteristicType.INFERRED_RELATIONSHIP);
    // Parent branch: stated IS A pointing at the module root
    createNewRelationship(branchPath, conceptId, Concepts.IS_A, Concepts.MODULE_ROOT, CharacteristicType.STATED_RELATIONSHIP);
    assertTrue(createMergeReviewAndGetDetails(taskBranch, branchPath).isEmpty());
}
/**
 * A new inferred relationship on the task branch and a new OWL axiom reference
 * set member on the parent branch should not require a merge review.
 */
@Test
public void mergeReviewAbsentForNewInferredRelationshipVsNewOwlAxiom() {
    final String conceptId = createNewConcept(branchPath);
    final IBranchPath taskBranch = BranchPathUtils.createPath(branchPath, "a");
    createBranch(taskBranch);
    // Task branch: new inferred IS A relationship
    createNewRelationship(taskBranch, conceptId, Concepts.IS_A, Concepts.NAMESPACE_ROOT, CharacteristicType.INFERRED_RELATIONSHIP);
    // Parent branch: new OWL axiom member referencing the concept
    final String axiomExpression = String.format("SubClassOf(:%s :%s)", Concepts.FULLY_SPECIFIED_NAME, Concepts.AMBIGUOUS);
    final Map<?, ?> memberRequest = createRefSetMemberRequestBody(Concepts.REFSET_OWL_AXIOM, conceptId)
            .put(SnomedRefSetMemberRestInput.ADDITIONAL_FIELDS, ImmutableMap.<String, Object>of(
                    SnomedRf2Headers.FIELD_OWL_EXPRESSION, axiomExpression))
            .put("commitComment", "Created new OWL Axiom reference set member")
            .build();
    // Creation must succeed; the member id itself is not needed afterwards
    lastPathSegment(createComponent(branchPath, SnomedComponentType.MEMBER, memberRequest)
            .statusCode(201)
            .extract().header("Location"));
    assertTrue(createMergeReviewAndGetDetails(taskBranch, branchPath).isEmpty());
}
/**
 * A new inferred relationship on the task branch and a replaced FSN on the
 * parent branch (old FSN demoted to acceptable, new description promoted to
 * preferred) should not require a merge review.
 */
@Test
public void mergeReviewAbsentForNewInferredRelationshipVsNewFsn() {
    final String conceptId = createNewConcept(branchPath);
    final String originalFsnId = getComponent(branchPath, SnomedComponentType.CONCEPT, conceptId, "fsn()")
            .extract().as(SnomedConcept.class)
            .getFsn().getId();
    // Candidate replacement FSN, initially only acceptable
    final String replacementFsnId = createNewDescription(branchPath, conceptId, Concepts.FULLY_SPECIFIED_NAME, SnomedApiTestConstants.UK_ACCEPTABLE_MAP);
    final IBranchPath taskBranch = BranchPathUtils.createPath(branchPath, "a");
    createBranch(taskBranch);
    // Task branch: new inferred IS A relationship
    createNewRelationship(taskBranch, conceptId, Concepts.IS_A, Concepts.NAMESPACE_ROOT, CharacteristicType.INFERRED_RELATIONSHIP);
    // Parent branch: demote the original FSN to acceptable...
    final Map<?, ?> demoteOriginal = ImmutableMap.of(
            "acceptability", SnomedApiTestConstants.UK_ACCEPTABLE_MAP,
            "commitComment", "Updated description acceptability");
    updateComponent(branchPath, SnomedComponentType.DESCRIPTION, originalFsnId, demoteOriginal).statusCode(204);
    // ...then promote the replacement to preferred
    final Map<?, ?> promoteReplacement = ImmutableMap.of(
            "acceptability", SnomedApiTestConstants.UK_PREFERRED_MAP,
            "commitComment", "Updated description acceptability");
    updateComponent(branchPath, SnomedComponentType.DESCRIPTION, replacementFsnId, promoteReplacement).statusCode(204);
    // Sanity check: the replacement is now the concept's FSN on the parent
    assertEquals(replacementFsnId, getComponent(branchPath, SnomedComponentType.CONCEPT, conceptId, "fsn()").extract().as(SnomedConcept.class).getFsn().getId());
    assertTrue(createMergeReviewAndGetDetails(taskBranch, branchPath).isEmpty());
}
/**
 * A new inferred relationship on the task branch and a replaced preferred term
 * on the parent branch (old PT demoted to acceptable, new synonym promoted to
 * preferred) should not require a merge review.
 */
@Test
public void mergeReviewAbsentForNewInferredRelationshipVsNewPt() {
    final String conceptId = createNewConcept(branchPath);
    final String originalPtId = getComponent(branchPath, SnomedComponentType.CONCEPT, conceptId, "pt()")
            .extract().as(SnomedConcept.class)
            .getPt().getId();
    // Candidate replacement synonym, initially only acceptable
    final String replacementPtId = createNewDescription(branchPath, conceptId, Concepts.SYNONYM, SnomedApiTestConstants.UK_ACCEPTABLE_MAP);
    final IBranchPath taskBranch = BranchPathUtils.createPath(branchPath, "a");
    createBranch(taskBranch);
    // Task branch: new inferred IS A relationship
    createNewRelationship(taskBranch, conceptId, Concepts.IS_A, Concepts.NAMESPACE_ROOT, CharacteristicType.INFERRED_RELATIONSHIP);
    // Parent branch: demote the original PT to acceptable...
    final Map<?, ?> demoteOriginal = ImmutableMap.of(
            "acceptability", SnomedApiTestConstants.UK_ACCEPTABLE_MAP,
            "commitComment", "Updated description acceptability");
    updateComponent(branchPath, SnomedComponentType.DESCRIPTION, originalPtId, demoteOriginal).statusCode(204);
    // ...then promote the replacement to preferred
    final Map<?, ?> promoteReplacement = ImmutableMap.of(
            "acceptability", SnomedApiTestConstants.UK_PREFERRED_MAP,
            "commitComment", "Updated description acceptability");
    updateComponent(branchPath, SnomedComponentType.DESCRIPTION, replacementPtId, promoteReplacement).statusCode(204);
    // Sanity check: the replacement is now the concept's PT on the parent
    assertEquals(replacementPtId, getComponent(branchPath, SnomedComponentType.CONCEPT, conceptId, "pt()").extract().as(SnomedConcept.class).getPt().getId());
    assertTrue(createMergeReviewAndGetDetails(taskBranch, branchPath).isEmpty());
}
}
| |
import java.util.*;
import com.sun.xml.internal.messaging.saaj.packaging.mime.util.OutputUtil;
// Collects lists of SourcenResults, counts how often each distinct list occurs,
// and exposes the result sorted by descending frequency through a cursor-style
// API (next/previous/goto_i). Several parts are unfinished: the blocks that
// throw-and-swallow "finish this part!" are placeholders, and the loop that
// would actually fill intList/freqList/stringList is commented out, so those
// arrays stay zero/null-filled after generateLists().
public class PicoIsAwesomewSource {
// Cursor into intList/freqList/stringList; -1 means "before the first entry".
int i;
int[][] intList;//items
int[] freqList;//frequency of occurrence
String[] stringList;
// NOTE(review): set to 100 in the constructor but never read in this class.
int values;
// private int rows=0;
// private int cols=0;
// Dimensions of intList, computed in generateLists().
int rows=0;
int cols=0;
// int valueToFill;
int stop=0; //value to indicate if last change of index was successful or not
//if stop=1 then something you went to an out of bounds index
// String/frequency of the entry the cursor currently points at.
String string;
int frequency;
ArrayList<ArrayList<SourcenResults>> words;
Map<ArrayList<SourcenResults>, Integer> map; // mapping of arrays to frequencies
Set<ArrayList<SourcenResults>> sortedSet; // set of keys in map sorted by value
SourcenResults current; // new ArrayList<Integer>();
//current=node to be sorted by frequency
// Alias for current.output while an array is being built; null until
// startArray() has been called at least once.
ArrayList<Integer> currentOut=null;
PicoIsAwesomewSource(){
// initalise fields
i = -1;
// valueToFill=-1;
values = 100;
words = new ArrayList<ArrayList<SourcenResults>>();
map = new HashMap<ArrayList<SourcenResults>, Integer>();
// sortedSet orders keys by their frequency in `map` (see ValueComparator);
// the comparator reads `map` live, so entries must not change after insertion.
sortedSet = new TreeSet<ArrayList<SourcenResults>>(new ValueComparator(map));
}
// Appends a value to the array currently being built.
// NOTE(review): throws NullPointerException if called before startArray(),
// because currentOut starts out null.
void add(int val){
currentOut.add(val);
}
// deletes everything
// Begins a new array: creates a fresh SourcenResults for the given source
// code and points currentOut at its output list.
void startArray(String sourceCode){
current = new SourcenResults();
current.sources.add(sourceCode);
currentOut=current.output;
}
// Placeholder: the throw is immediately swallowed, so this is currently a
// no-op marker for unfinished work (see the commented-out intended body).
void endArray(){
try {throw new Exception("finish this part!");} catch (Exception e) {}
// if(!currentOut.isEmpty()) words.add(new ArrayList<SourcenResults>(current));
}
// Tallies the frequency of each list in `words` into `map`, then feeds the
// keys into sortedSet.
// NOTE(review): calling this more than once re-walks `words` and inflates the
// counts; show() and generateLists() each call it, so counts grow per call.
// The leading throw-and-swallow is another unfinished-work placeholder.
void countFreq(){
try {throw new Exception("finish this part!");} catch (Exception e) {}
for (ArrayList<SourcenResults> a : words) {
if (!map.containsKey(a)) {
map.put(a, 1);
} else {
map.put(a, map.get(a) + 1);
}
}
sortedSet.addAll(map.keySet());
}
// prints the keys and values in map in order sorted by value
//need to first call countFreq() for this to work the first time!
void printAll(){
for (ArrayList<SourcenResults> a : sortedSet) {
System.out.print(a + " : " +map.get(a)+"\n");
}
}
// Convenience: recount and print everything.
void show(){
countFreq();
printAll();
}
// Returns the frequency of the entry at the current cursor position.
int getFeq(){
return (frequency);
}
// Returns the rendered string of the entry at the current cursor position.
String getString(){
return (string);
}
// Sizes and allocates the parallel arrays (one column per distinct list,
// rows = longest list). NOTE(review): the loop that would populate them is
// commented out below, so the arrays remain zero/null-filled.
void generateLists(){
int i = 0;
int ii = 0;
countFreq();
int max=0;
int m=0;
// int valueToFill=-1;
// sortedSet.addAll(map.keySet());//please don't add this it just doubles the size
for (ArrayList<SourcenResults> a : sortedSet) {
m=a.size();
if(m>max) max=m;
}
cols=sortedSet.size();
rows=max;
// System.out.println("IM IN GEN LIST! ["+cols+", "+rows+"]");
intList = new int[cols][rows];
freqList=new int[cols];
stringList=new String[cols];
try {throw new Exception("finish this part!");} catch (Exception e) {}
// for (ArrayList<Integer> a : sortedSet) {
//// System.out.println(a);
// i=0;
// string = "[";
//// list[ii]=new int[]=[NAN];
//// Arrays.fill(intList[ii],valueToFill);
// freqList[ii]=map.get(a);
// for(Integer b : a){
// intList[ii][i] = b;
// string+= " "+b;
// i += 1;
// }
// string+="]";
// stringList[ii]=string;
// ii += 1;
// }
}
//changes String and frequency
// Advances the cursor; returns 1 on success, 0 (and stop=1) at the end.
int next(){
if ((i+1)<intList.length){
i++;
frequency=freqList[i];
string=stringList[i];
stop=0;
return 1;
}
stop=1;
return 0;
}
// Moves the cursor back; returns 1 on success, 0 (and stop=1) at the start.
int previous(){
if (i>0){
i--;
frequency=freqList[i];
string=stringList[i];
stop=0;
return 1;
}
stop=1;
return 0;
}
// Jumps the cursor to an absolute index; returns 1 if in bounds, else 0.
int goto_i(int val){
if ((val>=0) && (val<intList.length)){
i=val;
frequency=freqList[i];
string=stringList[i];
stop=0;
return 1;
}
stop=1;
return 0;
}
// string = "[";
// updateFreq();
//// System.out.println("[ ");
// if ((i+1)<intList.length){
// i += 1;
// for(int o = 0; o < intList[i].length; o++){
// int t = intList[i][o];
// if(t==valueToFill){string+="]";i+=1;return;}//return s;
// string+= " "+t;
//
//// System.out.println(" "+t);
// }
// }
//// else stop=1;
// string+="]";
//// System.out.println(s);
// i+=1;
// return;//return s;
// }
}
/**
 * Orders the keys of the backing frequency map by descending frequency.
 *
 * <p>Note: this comparator never returns 0, so it imposes an ordering that is
 * inconsistent with equals. That is deliberate — returning 0 would merge
 * distinct keys inside a TreeSet/TreeMap.
 */
class ValueComparator implements Comparator<ArrayList<SourcenResults>> {
    /** Map whose values drive the ordering; read live, not copied. */
    Map<ArrayList<SourcenResults>, Integer> base;

    public ValueComparator(Map<ArrayList<SourcenResults>, Integer> base) {
        this.base = base;
    }

    public int compare(ArrayList<SourcenResults> a, ArrayList<SourcenResults> b) {
        // Higher (or equal) frequency sorts first; equal frequencies still
        // keep both keys by never reporting a tie.
        return (base.get(a) >= base.get(b)) ? -1 : 1;
    }
}
| |
///Copyright 2003-2005 Arthur van Hoff, Rick Blair
//Licensed under Apache License version 2.0
//Original license LGPL
package javax.jmdns.impl;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Parse an incoming DNS message into its components.
*
* @version %I%, %G%
* @author Arthur van Hoff, Werner Randelshofer, Pierre Frisch, Daniel Bobbert
*/
public final class DNSIncoming {
private static Logger logger = Logger.getLogger(DNSIncoming.class.getName());
// This is a hack to handle a bug in the BonjourConformanceTest
// It is sending out target strings that don't follow the "domain name"
// format.
public static boolean USE_DOMAIN_NAME_FORMAT_FOR_SRV_TARGET = true;
// Implementation note: This vector should be immutable.
// If a client of DNSIncoming changes the contents of this vector,
// we get undesired results. To fix this, we have to migrate to
// the Collections API of Java 1.2. i.e we replace Vector by List.
// final static Vector EMPTY = new Vector();
// Raw packet plus a cursor (off) into its data buffer; len is the packet length.
private DatagramPacket packet;
private int off;
private int len;
private byte data[];
// DNS header fields, in wire order.
int id;
private int flags;
private int numQuestions;
int numAnswers;
private int numAuthorities;
private int numAdditionals;
// Wall-clock time at parse; used by elapseSinceArrival().
private long receivedTime;
// Parsed question section, and answers+authorities+additionals flattened into
// a single list in that order (section boundaries are tracked by the num* counts).
private List questions;
List answers;
/**
 * Parse a message from a datagram packet.
 */
DNSIncoming(DatagramPacket packet) throws IOException {
this.packet = packet;
InetAddress source = packet.getAddress();
this.data = packet.getData();
this.len = packet.getLength();
this.off = packet.getOffset();
this.questions = Collections.EMPTY_LIST;
this.answers = Collections.EMPTY_LIST;
this.receivedTime = System.currentTimeMillis();
try {
// 12-byte DNS header
id = readUnsignedShort();
flags = readUnsignedShort();
numQuestions = readUnsignedShort();
numAnswers = readUnsignedShort();
numAuthorities = readUnsignedShort();
numAdditionals = readUnsignedShort();
// parse questions
if (numQuestions > 0) {
questions = Collections.synchronizedList(new ArrayList(numQuestions));
for (int i = 0; i < numQuestions; i++) {
DNSQuestion question = new DNSQuestion(readName(), readUnsignedShort(),
readUnsignedShort());
questions.add(question);
}
}
// parse answers
int n = numAnswers + numAuthorities + numAdditionals;
if (n > 0) {
answers = Collections.synchronizedList(new ArrayList(n));
for (int i = 0; i < n; i++) {
String domain = readName();
int type = readUnsignedShort();
int clazz = readUnsignedShort();
int ttl = readInt();
int len = readUnsignedShort();
// Record boundary: the cursor is forced to `end` after the switch, no
// matter how much of the rdata the type-specific parsing consumed.
int end = off + len;
DNSRecord rec = null;
switch (type) {
case DNSConstants.TYPE_A: // IPv4
case DNSConstants.TYPE_AAAA: // IPv6 FIXME [PJYF Oct 14 2004] This has not been tested
rec = new DNSRecord.Address(domain, type, clazz, ttl, readBytes(off, len));
break;
case DNSConstants.TYPE_CNAME:
case DNSConstants.TYPE_PTR:
String service = "";
try {
service = readName();
} catch (IOException e) {
// there was a problem reading the service name
e.printStackTrace();
}
rec = new DNSRecord.Pointer(domain, type, clazz, ttl, service);
break;
case DNSConstants.TYPE_TXT:
rec = new DNSRecord.Text(domain, type, clazz, ttl, readBytes(off, len));
break;
case DNSConstants.TYPE_SRV:
int priority = readUnsignedShort();
int weight = readUnsignedShort();
int port = readUnsignedShort();
String target = "";
try {
// This is a hack to handle a bug in the BonjourConformanceTest
// It is sending out target strings that don't follow the "domain name"
// format.
if (USE_DOMAIN_NAME_FORMAT_FOR_SRV_TARGET) {
target = readName();
} else {
target = readNonNameString();
}
} catch (IOException e) {
// this can happen if the type of the label
// cannot be handled.
// down below the offset gets advanced to the end
// of the record
e.printStackTrace();
}
rec = new DNSRecord.Service(domain, type, clazz, ttl,
priority, weight, port, target);
break;
case DNSConstants.TYPE_HINFO:
// Maybe we should do something with those
break;
default:
logger.finer("DNSIncoming() unknown type:" + type);
break;
}
if (rec != null) {
rec.setRecordSource(source);
// Add a record, if we were able to create one.
answers.add(rec);
} else {
// Adjust the section counts for the skipped record: decrement the
// section the skipped record would have fallen into.
if (answers.size() < numAnswers) {
numAnswers--;
} else {
if (answers.size() < numAnswers + numAuthorities) {
numAuthorities--;
} else {
if (answers.size() < numAnswers + numAuthorities + numAdditionals) {
numAdditionals--;
}
}
}
}
// Skip to the end of the record regardless of what parsing consumed.
off = end;
}
}
} catch (IOException e) {
logger.log(Level.WARNING, "DNSIncoming() dump " + print(true) + "\n exception ", e);
throw e;
}
}
/**
 * Check if the message is a query.
 */
boolean isQuery() {
return (flags & DNSConstants.FLAGS_QR_MASK) == DNSConstants.FLAGS_QR_QUERY;
}
/**
 * Check if the message is truncated.
 */
public boolean isTruncated() {
return (flags & DNSConstants.FLAGS_TC) != 0;
}
/**
 * Check if the message is a response.
 */
boolean isResponse() {
return (flags & DNSConstants.FLAGS_QR_MASK) == DNSConstants.FLAGS_QR_RESPONSE;
}
/**
 * Returns the unsigned byte at the given offset, bounds-checked against the
 * packet length.
 */
private int get(int off) throws IOException {
if ((off < 0) || (off >= len)) {
throw new IOException("parser error: offset=" + off);
}
return data[off] & 0xFF;
}
/**
 * Reads a big-endian 16-bit value at the cursor and advances it by two bytes.
 */
private int readUnsignedShort() throws IOException {
return (get(off++) << 8) + get(off++);
}
/**
 * Reads a big-endian 32-bit value at the cursor and advances it by four bytes.
 */
private int readInt() throws IOException {
return (readUnsignedShort() << 16) + readUnsignedShort();
}
/**
 * Copies len bytes starting at the given explicit offset; the instance cursor
 * is not moved.
 */
private byte[] readBytes(int off, int len) throws IOException {
byte bytes[] = new byte[len];
System.arraycopy(data, off, bytes, 0, len);
return bytes;
}
/**
 * Decodes len bytes starting at the given offset as (modified) UTF-8 into buf.
 * Works on a local copy of the offset; the instance cursor is not moved.
 */
private void readUTF(StringBuffer buf, int off, int len) throws IOException {
for (int end = off + len; off < end; ) {
int ch = get(off++);
switch (ch >> 4) {
case 0:
case 1:
case 2:
case 3:
case 4:
case 5:
case 6:
case 7:
// 0xxxxxxx
break;
case 12:
case 13:
// 110x xxxx 10xx xxxx
ch = ((ch & 0x1F) << 6) | (get(off++) & 0x3F);
break;
case 14:
// 1110 xxxx 10xx xxxx 10xx xxxx
ch = ((ch & 0x0f) << 12) | ((get(off++) & 0x3F) << 6) | (get(off++) & 0x3F);
break;
default:
// 10xx xxxx, 1111 xxxx
ch = ((ch & 0x3F) << 4) | (get(off++) & 0x0f);
break;
}
buf.append((char) ch);
}
}
/**
 * Reads a single length-prefixed string at the cursor, without treating it as
 * a DNS name. NOTE(review): this does not advance this.off; its only caller
 * (SRV parsing in the constructor) resets the cursor to the end of the record
 * afterwards, so that is benign there.
 */
private String readNonNameString() throws IOException {
StringBuffer buf = new StringBuffer();
int off = this.off;
int len = get(off++);
readUTF(buf, off, len);
return buf.toString();
}
/**
 * Reads a DNS domain name at the cursor, following RFC 1035 compression
 * pointers (label type 0xC0). Pointers must point strictly backwards from the
 * most recent jump target, otherwise a circular-name IOException is thrown.
 * On return the cursor is positioned just past the name (past the first
 * pointer if one was followed).
 */
private String readName() throws IOException {
StringBuffer buf = new StringBuffer();
int off = this.off;
int next = -1;
int first = off;
while (true) {
int len = get(off++);
if (len == 0) {
break;
}
switch (len & 0xC0) {
case 0x00:
//buf.append("[" + off + "]");
readUTF(buf, off, len);
off += len;
buf.append('.');
break;
case 0xC0:
//buf.append("<" + (off - 1) + ">");
// Remember the resume position after the first pointer only.
if (next < 0) {
next = off + 1;
}
off = ((len & 0x3F) << 8) | get(off++);
if (off >= first) {
throw new IOException("bad domain name: possible circular name detected." +
" name start: " + first +
" bad offset: 0x" + Integer.toHexString(off));
}
first = off;
break;
default:
throw new IOException(
"unsupported dns label type: '" + Integer.toHexString(len & 0xC0) + "' at " + (off
- 1));
}
}
this.off = (next >= 0) ? next : off;
return buf.toString();
}
/**
 * Debugging: renders the parsed sections and, when dump is true, a hex+ASCII
 * dump of the packet (capped at 256 bytes).
 */
String print(boolean dump) {
StringBuffer buf = new StringBuffer();
buf.append(toString() + "\n");
for (Iterator iterator = questions.iterator(); iterator.hasNext(); ) {
buf.append(" ques:" + iterator.next() + "\n");
}
int count = 0;
// The flattened answers list is labelled by section using the counts.
for (Iterator iterator = answers.iterator(); iterator.hasNext(); count++) {
if (count < numAnswers) {
buf.append(" answ:");
} else {
if (count < numAnswers + numAuthorities) {
buf.append(" auth:");
} else {
buf.append(" addi:");
}
}
buf.append(iterator.next() + "\n");
}
if (dump) {
// 32 bytes per line: hex nibbles, then a printable-ASCII rendering.
for (int off = 0, len = packet.getLength(); off < len; off += 32) {
int n = Math.min(32, len - off);
if (off < 10) {
buf.append(' ');
}
if (off < 100) {
buf.append(' ');
}
buf.append(off);
buf.append(':');
for (int i = 0; i < n; i++) {
if ((i % 8) == 0) {
buf.append(' ');
}
buf.append(Integer.toHexString((data[off + i] & 0xF0) >> 4));
buf.append(Integer.toHexString((data[off + i] & 0x0F) >> 0));
}
buf.append("\n");
buf.append(" ");
for (int i = 0; i < n; i++) {
if ((i % 8) == 0) {
buf.append(' ');
}
buf.append(' ');
int ch = data[off + i] & 0xFF;
buf.append(((ch > ' ') && (ch < 127)) ? (char) ch : '.');
}
buf.append("\n");
// limit message size
if (off + 32 >= 256) {
buf.append("....\n");
break;
}
}
}
return buf.toString();
}
public String toString() {
StringBuffer buf = new StringBuffer();
buf.append(isQuery() ? "dns[query," : "dns[response,");
if (packet.getAddress() != null) {
buf.append(packet.getAddress().getHostAddress());
}
buf.append(':');
buf.append(packet.getPort());
buf.append(",len=");
buf.append(packet.getLength());
buf.append(",id=0x");
buf.append(Integer.toHexString(id));
if (flags != 0) {
buf.append(",flags=0x");
buf.append(Integer.toHexString(flags));
if ((flags & DNSConstants.FLAGS_QR_RESPONSE) != 0) {
buf.append(":r");
}
if ((flags & DNSConstants.FLAGS_AA) != 0) {
buf.append(":aa");
}
if ((flags & DNSConstants.FLAGS_TC) != 0) {
buf.append(":tc");
}
}
if (numQuestions > 0) {
buf.append(",questions=");
buf.append(numQuestions);
}
if (numAnswers > 0) {
buf.append(",answers=");
buf.append(numAnswers);
}
if (numAuthorities > 0) {
buf.append(",authorities=");
buf.append(numAuthorities);
}
if (numAdditionals > 0) {
buf.append(",additionals=");
buf.append(numAdditionals);
}
buf.append("]");
return buf.toString();
}
/**
 * Appends the questions and answers of another message to this one, used to
 * reassemble multi-packet queries. Only permitted when this message is a
 * truncated query and {@code that} is a query.
 *
 * @throws IllegalArgumentException if this message is not a truncated query,
 *                                  or {@code that} is not a query.
 */
void append(DNSIncoming that) {
if (this.isQuery() && this.isTruncated() && that.isQuery()) {
if (that.numQuestions > 0) {
if (Collections.EMPTY_LIST.equals(this.questions)) {
this.questions = Collections.synchronizedList(new ArrayList(that.numQuestions));
}
this.questions.addAll(that.questions);
this.numQuestions += that.numQuestions;
}
if (Collections.EMPTY_LIST.equals(answers)) {
answers = Collections.synchronizedList(new ArrayList());
}
// Splice each of that's sections into the matching section of the
// flattened answers list, keeping the answer/authority/additional order.
if (that.numAnswers > 0) {
this.answers.addAll(this.numAnswers, that.answers.subList(0, that.numAnswers));
this.numAnswers += that.numAnswers;
}
if (that.numAuthorities > 0) {
this.answers.addAll(this.numAnswers + this.numAuthorities,
that.answers.subList(that.numAnswers, that.numAnswers + that.numAuthorities));
this.numAuthorities += that.numAuthorities;
}
if (that.numAdditionals > 0) {
this.answers.addAll(that.answers.subList(that.numAnswers + that.numAuthorities,
that.numAnswers + that.numAuthorities + that.numAdditionals));
this.numAdditionals += that.numAdditionals;
}
} else {
throw new IllegalArgumentException();
}
}
/**
 * Milliseconds elapsed since this message was parsed.
 */
public int elapseSinceArrival() {
return (int) (System.currentTimeMillis() - receivedTime);
}
public List getQuestions() {
return questions;
}
public List getAnswers() {
return answers;
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* OperatingSystemTargeting.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202111;
/**
* Represents operating systems that are being targeted or excluded
* by the
* {@link LineItem}.
*/
// NOTE(review): Axis-generated bean — the field names, element descriptors and
// (de)serializer hooks below must stay in sync with the WSDL; edit with care.
public class OperatingSystemTargeting implements java.io.Serializable {
    /* Indicates whether operating systems should be targeted or excluded.
     * This
     * attribute is optional and defaults to {@code true}. */
    private java.lang.Boolean isTargeted;
    /* Operating systems that are being targeted or excluded by the
     * {@link LineItem}. */
    private com.google.api.ads.admanager.axis.v202111.Technology[] operatingSystems;
    public OperatingSystemTargeting() {
    }
    public OperatingSystemTargeting(
           java.lang.Boolean isTargeted,
           com.google.api.ads.admanager.axis.v202111.Technology[] operatingSystems) {
           this.isTargeted = isTargeted;
           this.operatingSystems = operatingSystems;
    }
    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            .add("isTargeted", getIsTargeted())
            .add("operatingSystems", getOperatingSystems())
            .toString();
    }
    /**
     * Gets the isTargeted value for this OperatingSystemTargeting.
     *
     * @return isTargeted * Indicates whether operating systems should be targeted or excluded.
     * This
     * attribute is optional and defaults to {@code true}.
     */
    public java.lang.Boolean getIsTargeted() {
        return isTargeted;
    }
    /**
     * Sets the isTargeted value for this OperatingSystemTargeting.
     *
     * @param isTargeted * Indicates whether operating systems should be targeted or excluded.
     * This
     * attribute is optional and defaults to {@code true}.
     */
    public void setIsTargeted(java.lang.Boolean isTargeted) {
        this.isTargeted = isTargeted;
    }
    /**
     * Gets the operatingSystems value for this OperatingSystemTargeting.
     *
     * @return operatingSystems * Operating systems that are being targeted or excluded by the
     * {@link LineItem}.
     */
    public com.google.api.ads.admanager.axis.v202111.Technology[] getOperatingSystems() {
        return operatingSystems;
    }
    /**
     * Sets the operatingSystems value for this OperatingSystemTargeting.
     *
     * @param operatingSystems * Operating systems that are being targeted or excluded by the
     * {@link LineItem}.
     */
    public void setOperatingSystems(com.google.api.ads.admanager.axis.v202111.Technology[] operatingSystems) {
        this.operatingSystems = operatingSystems;
    }
    /** Indexed accessor required by the Axis bean (de)serializer. */
    public com.google.api.ads.admanager.axis.v202111.Technology getOperatingSystems(int i) {
        return this.operatingSystems[i];
    }
    /** Indexed mutator required by the Axis bean (de)serializer. */
    public void setOperatingSystems(int i, com.google.api.ads.admanager.axis.v202111.Technology _value) {
        this.operatingSystems[i] = _value;
    }
    // Re-entrancy guard: holds the object currently being compared so that
    // self-referential object graphs do not recurse forever.
    private java.lang.Object __equalsCalc = null;
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof OperatingSystemTargeting)) return false;
        OperatingSystemTargeting other = (OperatingSystemTargeting) obj;
        // NOTE(review): dead code — a null obj already fails the instanceof
        // check above. Left as generated.
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true &&
            ((this.isTargeted==null && other.getIsTargeted()==null) ||
             (this.isTargeted!=null &&
              this.isTargeted.equals(other.getIsTargeted()))) &&
            ((this.operatingSystems==null && other.getOperatingSystems()==null) ||
             (this.operatingSystems!=null &&
              java.util.Arrays.equals(this.operatingSystems, other.getOperatingSystems())));
        __equalsCalc = null;
        return _equals;
    }
    // Re-entrancy guard for hashCode on self-referential object graphs.
    private boolean __hashCodeCalc = false;
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getIsTargeted() != null) {
            _hashCode += getIsTargeted().hashCode();
        }
        if (getOperatingSystems() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getOperatingSystems());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getOperatingSystems(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        __hashCodeCalc = false;
        return _hashCode;
    }
    // Type metadata
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(OperatingSystemTargeting.class, true);
    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "OperatingSystemTargeting"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("isTargeted");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "isTargeted"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "boolean"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("operatingSystems");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "operatingSystems"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "Technology"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
    }
    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }
    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }
    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }
}
| |
/*
* Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.db.tool;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.zip.GZIPOutputStream;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.orient.core.OConstants;
import com.orientechnologies.orient.core.command.OCommandOutputListener;
import com.orientechnologies.orient.core.config.OStorageConfiguration;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.iterator.ORecordIteratorCluster;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OProperty;
import com.orientechnologies.orient.core.metadata.schema.OSchemaProxy;
import com.orientechnologies.orient.core.metadata.schema.OSchemaShared;
import com.orientechnologies.orient.core.record.ORecordInternal;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.serialization.serializer.OJSONWriter;
import com.orientechnologies.orient.core.storage.impl.local.OClusterLogical;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocal;
import com.orientechnologies.orient.core.type.tree.provider.OMVRBTreeMapProvider;
/**
* Export data from a database to a file.
*
* @author Luca Garulli (l.garulli--at--orientechnologies.com)
*
*/
public class ODatabaseExport extends ODatabaseImpExpAbstract {
// JSON writer the export is streamed through (gzip file or caller-supplied stream).
private OJSONWriter writer;
// Running count of exported records. NOTE(review): never read or written in
// the methods visible here — presumably maintained elsewhere; confirm.
private long recordExported;
// Version tag of the export file format.
public static final int VERSION = 2;
/**
 * Creates an exporter that writes a gzip-compressed JSON export of the given
 * database to a file. A ".gz" suffix is appended to the file name if missing,
 * any pre-existing file at that path is deleted, and both record cache levels
 * are disabled so the export reads straight from storage.
 *
 * @param iDatabase database to export
 * @param iFileName target file path (".gz" appended if absent)
 * @param iListener receives progress messages
 * @throws IOException if the target file cannot be created or written
 */
public ODatabaseExport(final ODatabaseRecord iDatabase, final String iFileName, final OCommandOutputListener iListener)
		throws IOException {
	super(iDatabase, iFileName, iListener);

	// Always produce a gzip-compressed file
	if (!fileName.endsWith(".gz")) {
		fileName += ".gz";
	}

	final File f = new File(fileName);
	// BUGFIX: previously f.mkdirs() created a directory AT the target file path
	// itself and relied on the delete() below to remove it; if the path already
	// existed as a non-empty directory the delete silently failed and the
	// FileOutputStream below threw. Create only the parent directories instead.
	final File parent = f.getParentFile();
	if (parent != null) {
		parent.mkdirs();
	}
	// Overwrite any previous export file
	if (f.exists()) {
		f.delete();
	}

	writer = new OJSONWriter(new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(fileName))));
	writer.beginObject();

	// Bypass both cache levels so records come straight from storage
	iDatabase.getLevel1Cache().setEnable(false);
	iDatabase.getLevel2Cache().setEnable(false);
}
public ODatabaseExport(final ODatabaseRecord iDatabase, final OutputStream iOutputStream, final OCommandOutputListener iListener)
throws IOException {
super(iDatabase, "streaming", iListener);
writer = new OJSONWriter(new OutputStreamWriter(iOutputStream));
writer.beginObject();
iDatabase.getLevel1Cache().setEnable(false);
iDatabase.getLevel2Cache().setEnable(false);
}
/**
 * Exports the whole database (info, clusters, schema and records, subject to
 * the include* flags) while holding the database lock. The exporter is always
 * closed afterwards, even on failure.
 *
 * @return this exporter, for chaining
 * @throws ODatabaseExportException wrapping any error raised during the export
 */
public ODatabaseExport exportDatabase() {
	database.callInLock(new Callable<Object>() {
		public Object call() {
			try {
				listener.onMessage("\nStarted export of database '" + database.getName() + "' to " + fileName + "...");

				// Bypass both cache levels so records come straight from storage
				database.getLevel1Cache().setEnable(false);
				database.getLevel2Cache().setEnable(false);

				long time = System.currentTimeMillis();

				if (includeInfo)
					exportInfo();
				exportClusters();
				if (includeSchema)
					exportSchema();
				if (includeRecords)
					exportRecords();

				listener.onMessage("\n\nDatabase export completed in " + (System.currentTimeMillis() - time) + "ms");

				writer.flush();
			} catch (Exception e) {
				// FIX: removed the e.printStackTrace() call that duplicated the
				// stack trace on stderr — the cause is preserved in the wrapper.
				throw new ODatabaseExportException("Error on exporting database '" + database.getName() + "' to: " + fileName, e);
			} finally {
				close();
			}
			return null;
		}
	}, false);
	return this;
}
public long exportRecords() throws IOException {
long totalRecords = 0;
int level = 1;
listener.onMessage("\nExporting records...");
writer.beginCollection(level, true, "records");
int exportedClusters = 0;
int maxClusterId = getMaxClusterId();
for (int i = 0; exportedClusters <= maxClusterId; ++i) {
String clusterName = database.getClusterNameById(i);
exportedClusters++;
final long recordTot;
if (clusterName != null) {
// CHECK IF THE CLUSTER IS INCLUDED
if (includeClusters != null) {
if (!includeClusters.contains(clusterName))
continue;
} else if (excludeClusters != null) {
if (excludeClusters.contains(clusterName))
continue;
}
if (excludeClusters != null && excludeClusters.contains(clusterName))
continue;
recordTot = database.countClusterElements(clusterName);
} else
recordTot = 0;
listener.onMessage("\n- Cluster " + (clusterName != null ? "'" + clusterName + "'" : "NULL") + " (id=" + i + ")...");
long recordNum = 0;
if (clusterName != null)
for (ORecordIteratorCluster<ORecordInternal<?>> it = database.browseCluster(clusterName); it.hasNext();) {
ORecordInternal<?> rec = null;
try {
rec = it.next();
if (rec instanceof ODocument) {
// CHECK IF THE CLASS OF THE DOCUMENT IS INCLUDED
ODocument doc = (ODocument) rec;
if (includeClasses != null) {
if (!includeClasses.contains(doc.getClassName()))
continue;
} else if (excludeClasses != null) {
if (excludeClasses.contains(doc.getClassName()))
continue;
}
}
exportRecord(recordTot, recordNum++, rec);
} catch (Throwable t) {
if (rec != null) {
final byte[] buffer = rec.toStream();
OLogManager
.instance()
.error(
this,
"Error on exporting record #%s. It seems corrupted; size: %d bytes, raw content (as string):\n==========\n%s\n==========",
t, rec.getIdentity(), buffer.length, new String(buffer));
}
}
}
listener.onMessage("OK (records=" + recordTot + ")");
totalRecords += recordTot;
}
writer.endCollection(level, true);
listener.onMessage("\n\nDone. Exported " + totalRecords + " records\n");
return totalRecords;
}
public void close() {
database.declareIntent(null);
if (writer == null)
return;
try {
writer.endObject();
writer.close();
writer = null;
} catch (IOException e) {
}
}
private void exportClusters() throws IOException {
listener.onMessage("\nExporting clusters...");
writer.beginCollection(1, true, "clusters");
int exportedClusters = 0;
int maxClusterId = getMaxClusterId();
for (int clusterId = 0; clusterId <= maxClusterId; ++clusterId) {
final String clusterName = database.getClusterNameById(clusterId);
if (clusterName != null)
// CHECK IF THE CLUSTER IS INCLUDED
if (includeClusters != null) {
if (!includeClusters.contains(clusterName))
continue;
} else if (excludeClusters != null) {
if (excludeClusters.contains(clusterName))
continue;
}
writer.beginObject(2, true, null);
writer.writeAttribute(0, false, "name", clusterName != null ? clusterName : "");
writer.writeAttribute(0, false, "id", clusterId);
writer.writeAttribute(0, false, "type", clusterName != null ? database.getClusterType(clusterName) : "");
if (clusterName != null && database.getStorage() instanceof OStorageLocal)
if (database.getClusterType(clusterName).equals("LOGICAL")) {
OClusterLogical cluster = (OClusterLogical) database.getStorage().getClusterById(clusterId);
writer.writeAttribute(0, false, "rid", cluster.getRID());
}
exportedClusters++;
writer.endObject(2, false);
}
listener.onMessage("OK (" + exportedClusters + " clusters)");
writer.endCollection(1, true);
}
protected int getMaxClusterId() {
int totalCluster = -1;
for (String clusterName : database.getClusterNames()) {
if (database.getClusterIdByName(clusterName) > totalCluster)
totalCluster = database.getClusterIdByName(clusterName);
}
return totalCluster;
}
private void exportInfo() throws IOException {
listener.onMessage("\nExporting database info...");
writer.beginObject(1, true, "info");
writer.writeAttribute(2, true, "name", database.getName().replace('\\', '/'));
writer.writeAttribute(2, true, "default-cluster-id", database.getDefaultClusterId());
writer.writeAttribute(2, true, "exporter-version", VERSION);
writer.writeAttribute(2, true, "engine-version", OConstants.ORIENT_VERSION);
final String engineBuild = OConstants.getBuildNumber();
if (engineBuild != null)
writer.writeAttribute(2, true, "engine-build", engineBuild);
writer.writeAttribute(2, true, "storage-config-version", OStorageConfiguration.CURRENT_VERSION);
writer.writeAttribute(2, true, "schema-version", OSchemaShared.CURRENT_VERSION_NUMBER);
writer.writeAttribute(2, true, "mvrbtree-version", OMVRBTreeMapProvider.CURRENT_PROTOCOL_VERSION);
writer.endObject(1, true);
listener.onMessage("OK");
}
private void exportSchema() throws IOException {
listener.onMessage("\nExporting schema...");
writer.beginObject(1, true, "schema");
OSchemaProxy s = (OSchemaProxy) database.getMetadata().getSchema();
writer.writeAttribute(2, true, "version", s.getVersion());
if (!s.getClasses().isEmpty()) {
writer.beginCollection(2, true, "classes");
final List<OClass> classes = new ArrayList<OClass>(s.getClasses());
Collections.sort(classes);
for (OClass cls : classes) {
writer.beginObject(3, true, null);
writer.writeAttribute(0, false, "name", cls.getName());
writer.writeAttribute(0, false, "default-cluster-id", cls.getDefaultClusterId());
writer.writeAttribute(0, false, "cluster-ids", cls.getClusterIds());
if (cls.getSuperClass() != null)
writer.writeAttribute(0, false, "super-class", cls.getSuperClass().getName());
if (cls.getShortName() != null)
writer.writeAttribute(0, false, "short-name", cls.getShortName());
if (!cls.properties().isEmpty()) {
writer.beginCollection(4, true, "properties");
final List<OProperty> properties = new ArrayList<OProperty>(cls.declaredProperties());
Collections.sort(properties);
for (OProperty p : properties) {
writer.beginObject(5, true, null);
writer.writeAttribute(0, false, "name", p.getName());
writer.writeAttribute(0, false, "type", p.getType().toString());
writer.writeAttribute(0, false, "mandatory", p.isMandatory());
writer.writeAttribute(0, false, "not-null", p.isNotNull());
if (p.getLinkedClass() != null)
writer.writeAttribute(0, false, "linked-class", p.getLinkedClass().getName());
if (p.getLinkedType() != null)
writer.writeAttribute(0, false, "linked-type", p.getLinkedType().toString());
if (p.getMin() != null)
writer.writeAttribute(0, false, "min", p.getMin());
if (p.getMax() != null)
writer.writeAttribute(0, false, "max", p.getMax());
writer.endObject(0, false);
}
writer.endCollection(4, true);
}
writer.endObject(3, true);
}
writer.endCollection(2, true);
}
writer.endObject(1, true);
listener.onMessage("OK (" + s.getClasses().size() + " classes)");
}
private void exportRecord(long recordTot, long recordNum, ORecordInternal<?> rec) throws IOException {
if (rec == null)
return;
if (rec.getIdentity().isValid())
rec.reload();
if (recordExported > 0)
writer.append(",");
writer.append(rec.toJSON("rid,type,version,class,attribSameRow,indent:4,keepTypes"));
recordExported++;
recordNum++;
if (recordTot > 10 && (recordNum + 1) % (recordTot / 10) == 0)
listener.onMessage(".");
}
}
| |
/*
* Copyright (C) 2013 Priboi Tiberiu
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cwp.chart;
import android.content.Context;
import android.util.AttributeSet;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewConfiguration;
/**
* The folding layout where the number of folds, the anchor point and the
* orientation of the fold can be specified. Each of these parameters can be
* modified individually and updates and resets the fold to a default (unfolded)
* state. The fold factor varies between 0 (completely unfolded flat image) to
* 1.0 (completely folded, non-visible image).
*
* This layout throws an exception if there is more than one child added to the
* view. For more complicated view hierarchy's inside the folding layout, the
* views should all be nested inside 1 parent layout.
*
* This layout folds the contents of its child in real time. By applying matrix
* transformations when drawing to canvas, the contents of the child may change
* as the fold takes place. It is important to note that there are jagged edges
* about the perimeter of the layout as a result of applying transformations to
* a rectangle. This can be avoided by having the child of this layout wrap its
* content inside a 1 pixel transparent border. This will cause an anti-aliasing
* like effect and smoothen out the edges.
*
*/
public class FoldingLayout extends BaseFoldingLayout {
    // FIX: the original message read "can only 1 child" — missing the verb.
    private final String FOLDING_VIEW_EXCEPTION_MESSAGE = "Folding Layout can only have 1 child at "
            + "most";

    // Gesture detector translating touch scrolls into fold factor changes.
    private GestureDetector mScrollGestureDetector;
    // Reference to this layout for use inside the gesture listener.
    FoldingLayout that = null;
    // Accumulated scroll translation, in pixels; always <= 0.
    private int mTranslation = 0;
    // NOTE(review): never assigned in this class (stays -1), yet used as an
    // offset in onScroll's vertical branch — confirm it is meant to be set
    // by an owner of this view before scrolling starts.
    private int mParentPositionY = -1;
    // System touch slop, used to compensate the dead zone of a new gesture.
    private int mTouchSlop = -1;
    // True until the first onScroll of the current gesture has been handled.
    private boolean mDidNotStartScroll = true;

    public FoldingLayout(Context context) {
        super(context);
        init(context, null);
        that = this;
    }

    public FoldingLayout(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context, attrs);
        that = this;
    }

    public FoldingLayout(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init(context, attrs);
        that = this;
    }

    /**
     * Shared constructor logic: installs the scroll gesture detector, reads
     * the system touch slop and anchors the fold at 0 before delegating to
     * the base class.
     */
    public void init(Context context, AttributeSet attrs) {
        mScrollGestureDetector = new GestureDetector(context,
                new ScrollGestureDetector());
        mTouchSlop = ViewConfiguration.get(context).getScaledTouchSlop();
        setAnchorFactor(0);
        super.init(context, attrs);
    }

    @Override
    protected boolean addViewInLayout(View child, int index,
            LayoutParams params, boolean preventRequestLayout) {
        // Reject a second child before delegating to the base implementation.
        throwCustomException(getChildCount());
        boolean returnValue = super.addViewInLayout(child, index, params,
                preventRequestLayout);
        return returnValue;
    }

    /**
     * The custom exception to be thrown so as to limit the number of views in
     * this layout to at most one.
     */
    private class NumberOfFoldingLayoutChildrenException extends
            RuntimeException {
        private static final long serialVersionUID = 1L;

        public NumberOfFoldingLayoutChildrenException(String message) {
            super(message);
        }
    }

    /**
     * Throws an exception if the number of views added to this layout exceeds
     * one.
     */
    private void throwCustomException(int numOfChildViews) {
        if (numOfChildViews == 1) {
            throw new NumberOfFoldingLayoutChildrenException(
                    FOLDING_VIEW_EXCEPTION_MESSAGE);
        }
    }

    /** This class uses user touch events to fold and unfold the folding view. */
    private class ScrollGestureDetector extends
            GestureDetector.SimpleOnGestureListener {
        @Override
        public boolean onDown(MotionEvent e) {
            // A new gesture begins: the next onScroll must compensate touch slop.
            mDidNotStartScroll = true;
            return true;
        }

        /**
         * All the logic here is used to determine by what factor the paper view
         * should be folded in response to the user's touch events. The logic
         * here uses vertical scrolling to fold a vertically oriented view and
         * horizontal scrolling to fold a horizontally oriented fold. Depending
         * on where the anchor point of the fold is, movements towards or away
         * from the anchor point will either fold or unfold the paper
         * respectively.
         *
         * The translation logic here also accounts for the touch slop when a
         * new user touch begins, but before a scroll event is first invoked.
         */
        @Override
        public boolean onScroll(MotionEvent e1, MotionEvent e2,
                float distanceX, float distanceY) {
            int touchSlop = 0;
            float factor;
            if (mOrientation == Orientation.VERTICAL) {
                // Fold factor derived from the translation BEFORE this event's
                // delta is applied (matches the original ordering).
                factor = Math.abs((float) (mTranslation)
                        / (float) (that.getHeight()));

                if (e2.getY() - mParentPositionY <= that.getHeight()
                        && e2.getY() - mParentPositionY >= 0) {
                    if ((e2.getY() - mParentPositionY) > that.getHeight()
                            * getAnchorFactor()) {
                        // Below the anchor: scrolling down unfolds, up folds.
                        mTranslation -= (int) distanceY;
                        touchSlop = distanceY < 0 ? -mTouchSlop : mTouchSlop;
                    } else {
                        // Above the anchor: direction is mirrored.
                        mTranslation += (int) distanceY;
                        touchSlop = distanceY < 0 ? mTouchSlop : -mTouchSlop;
                    }
                    // Compensate the dead zone only on the first scroll event.
                    mTranslation = mDidNotStartScroll ? mTranslation
                            + touchSlop : mTranslation;

                    if (mTranslation < -that.getHeight()) {
                        mTranslation = -that.getHeight();
                    }
                }
            } else {
                factor = Math.abs(((float) mTranslation)
                        / ((float) that.getWidth()));

                if (e2.getRawX() > that.getWidth() * getAnchorFactor()) {
                    mTranslation -= (int) distanceX;
                    touchSlop = distanceX < 0 ? -mTouchSlop : mTouchSlop;
                } else {
                    mTranslation += (int) distanceX;
                    touchSlop = distanceX < 0 ? mTouchSlop : -mTouchSlop;
                }
                mTranslation = mDidNotStartScroll ? mTranslation + touchSlop
                        : mTranslation;

                if (mTranslation < -that.getWidth()) {
                    mTranslation = -that.getWidth();
                }
            }

            mDidNotStartScroll = false;

            // Translation is clamped to [-size, 0].
            if (mTranslation > 0) {
                mTranslation = 0;
            }

            that.setFoldFactor(factor);

            return true;
        }
    }

    @Override
    public boolean onTouchEvent(MotionEvent me) {
        return mScrollGestureDetector.onTouchEvent(me);
    }
}
| |
package ca.edchipman.silverstripepdt.views;
import java.lang.reflect.Array;
import java.util.ArrayList;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.dltk.core.DLTKCore;
import org.eclipse.dltk.core.IField;
import org.eclipse.dltk.core.IModelElement;
import org.eclipse.dltk.core.IScriptProject;
import org.eclipse.dltk.core.IType;
import org.eclipse.dltk.core.ModelException;
import org.eclipse.dltk.core.index2.search.ISearchEngine.MatchRule;
import org.eclipse.dltk.core.search.IDLTKSearchScope;
import org.eclipse.dltk.core.search.SearchEngine;
import org.eclipse.dltk.internal.ui.typehierarchy.SubTypeHierarchyViewer.SubTypeHierarchyContentProvider;
import org.eclipse.dltk.internal.ui.typehierarchy.TypeHierarchyLifeCycle;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IToolBarManager;
import org.eclipse.jface.resource.FontDescriptor;
import org.eclipse.jface.text.TextSelection;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.php.internal.core.documentModel.dom.ElementImplForPHP;
import org.eclipse.php.internal.core.model.PHPModelAccess;
import org.eclipse.php.internal.core.preferences.CorePreferencesSupport;
import org.eclipse.php.internal.core.typeinference.PHPModelUtils;
import org.eclipse.swt.SWT;
import org.eclipse.swt.SWTError;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IFileEditorInput;
import org.eclipse.ui.IPartListener2;
import org.eclipse.ui.ISelectionListener;
import org.eclipse.ui.ISharedImages;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.IWorkbenchPart;
import org.eclipse.ui.IWorkbenchPartReference;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.part.ViewPart;
import org.eclipse.swt.browser.Browser;
import org.eclipse.swt.browser.ProgressEvent;
import org.eclipse.swt.browser.ProgressListener;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.custom.StackLayout;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.jface.fieldassist.ControlDecoration;
import org.eclipse.swt.widgets.ProgressBar;
import org.eclipse.swt.custom.ScrolledComposite;
import ca.edchipman.silverstripepdt.SilverStripeNature;
import ca.edchipman.silverstripepdt.SilverStripePluginImages;
@SuppressWarnings("restriction")
public class TasksViewer extends ViewPart {
public static final String ID = "ca.edchipman.silverstripepdt.views.TasksViewer"; //$NON-NLS-1$
private RefreshAction refreshAction;
private Composite fViewStack;
private StackLayout fViewStackLayout;
private Composite fTasksView;
private Label fErrorLabel;
private Browser fTasksBrowser;
private Composite fErrorView;
private Composite fTasksList;
private ProgressBar progressBar;
private ArrayList<SilverStripeTask> projectTasks;
private TaskProgressListener progressListener;
private IProject fLastProject;
private boolean projectTasksLoading;
// Default constructor required by the Eclipse view extension mechanism;
// all initialization happens in createPartControl().
public TasksViewer() {
}
/**
 * Create contents of the view part: a StackLayout switching between the
 * tasks UI (scrollable task list + embedded Browser + progress bar) and a
 * single-label error view. Also resolves the initially selected project
 * and starts listening to workbench selection changes.
 *
 * @param parent parent composite supplied by the workbench
 */
@Override
public void createPartControl(Composite parent) {
// Stack container: only one of fTasksView / fErrorView is visible at a time.
fViewStack = new Composite(parent, SWT.NONE);
fViewStackLayout=new StackLayout();
fViewStack.setLayout(fViewStackLayout);
fTasksView = new Composite(fViewStack, SWT.NONE);
fTasksView.setLayout(new GridLayout(2, false));
// Left column: fixed-width scrollable list of tasks.
ScrolledComposite scrolledComposite = new ScrolledComposite(fTasksView, SWT.BORDER | SWT.V_SCROLL);
scrolledComposite.setBackground(parent.getBackground());
GridData gd_scrolledComposite = new GridData(SWT.FILL, SWT.FILL, false, true, 1, 1);
gd_scrolledComposite.widthHint = 260;
scrolledComposite.setLayoutData(gd_scrolledComposite);
fTasksList = new Composite(scrolledComposite, SWT.NONE);
GridLayout gl_fTasksList = new GridLayout(1, false);
gl_fTasksList.marginHeight = 10;
gl_fTasksList.marginRight = 5;
fTasksList.setLayout(gl_fTasksList);
fTasksList.setLayoutData(new GridData(GridData.FILL_BOTH));
fTasksList.setBackground(parent.getBackground());
scrolledComposite.setContent(fTasksList);
// Right column: progress bar above the task-output browser.
Composite rightComp = new Composite(fTasksView, SWT.NONE);
GridLayout gl_rightComp = new GridLayout(1, false);
gl_rightComp.marginWidth = 0;
gl_rightComp.marginHeight = 0;
rightComp.setLayout(gl_rightComp);
rightComp.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1));
progressBar = new ProgressBar(rightComp, SWT.SMOOTH | SWT.HORIZONTAL);
progressBar.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, false, false, 1, 1));
progressBar.setVisible(false);
progressBar.setMinimum(0);
// Browser creation can fail (SWTError) when no native engine is available;
// in that case the error view explains how to fix it.
boolean browserInitError=false;
try {
fTasksBrowser = new Browser(rightComp, SWT.NONE);
fTasksBrowser.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1));
progressListener=new TaskProgressListener(fTasksBrowser, progressBar);
fTasksBrowser.addProgressListener(progressListener);
} catch (SWTError error) {
if(progressListener!=null) {
fTasksBrowser.removeProgressListener(progressListener);
}
fTasksBrowser = null;
browserInitError=true;
}
fErrorView = new Composite(fViewStack, SWT.NONE);
fErrorView.setLayout(new GridLayout(1, false));
fErrorLabel = new Label(fErrorView, SWT.CENTER);
fErrorLabel.setLayoutData(new GridData(SWT.CENTER, SWT.CENTER, true, true, 1, 1));
fErrorLabel.setAlignment(SWT.CENTER);
if(browserInitError) {
fErrorLabel.setText("SWT Browser control is not available. Please refer to: http://www.eclipse.org/swt/faq.php#whatisbrowser for more information.");
}else {
fErrorLabel.setText("No project is selected");
}
ControlDecoration controlDecoration = new ControlDecoration(fErrorLabel, SWT.LEFT | SWT.TOP);
controlDecoration.setImage(PlatformUI.getWorkbench().getSharedImages().getImage(ISharedImages.IMG_OBJS_ERROR_TSK));
createActions();
initializeToolBar();
//Find the current project
IResource currentSelection=extractSelection(getSite().getWorkbenchWindow().getSelectionService().getSelection());
if(currentSelection!=null) {
IProject project=currentSelection.getProject();
try {
if(project.hasNature(SilverStripeNature.ID)) {
// The view only works when the project's SilverStripe site base URL is set.
String siteBase=CorePreferencesSupport.getInstance().getProjectSpecificPreferencesValue("silverstripe_site_base", null, project);
if(siteBase!=null && siteBase.isEmpty()==false) {
refreshAction.setEnabled(true);
fLastProject=project;
// Defer the first refresh until the view actually becomes visible.
this.getSite().getPage().addPartListener(viewPartListener);
}else {
refreshAction.setEnabled(false);
fErrorLabel.setText("You have not set the site base for this project, you can set this on the project's preferences for SilverStripe");
if(fViewStackLayout.topControl!=fErrorView) {
fViewStackLayout.topControl=fErrorView;
fViewStack.layout();
}
}
}else {
refreshAction.setEnabled(false);
fErrorLabel.setText("You must add SilverStripe Support to your project to use this view");
if(fViewStackLayout.topControl!=fErrorView) {
fViewStackLayout.topControl=fErrorView;
fViewStack.layout();
}
}
} catch (CoreException e) {
e.printStackTrace();
}
}else {
fViewStackLayout.topControl=fErrorView;
fViewStack.layout();
}
//Listen for the selection of files to change, if the browser is working
if(browserInitError==false) {
getSite().getWorkbenchWindow().getSelectionService().addSelectionListener(listener);
}
}
/**
 * Tears the view down: stops listening to workbench selection changes and
 * disposes every task widget before releasing the list.
 */
public void dispose() {
    getSite().getWorkbenchWindow().getSelectionService().removeSelectionListener(listener);
    if (projectTasks != null && projectTasks.isEmpty() == false) {
        for (SilverStripeTask task : projectTasks) {
            task.dispose();
        }
        // clear() replaces the previous removeAll(projectTasks) self-removal,
        // which is both slower (O(n^2)) and non-idiomatic.
        projectTasks.clear();
        projectTasks = null;
    }
    super.dispose();
}
/**
 * Create the actions. Intentionally empty: the only action (refresh) is
 * built in initializeToolBar().
 */
private void createActions() {
// Create the actions
}
/**
 * Builds the view's toolbar, contributing the (initially disabled)
 * refresh action to it.
 */
private void initializeToolBar() {
    refreshAction = new RefreshAction(this);
    final IToolBarManager toolBarManager = getViewSite().getActionBars().getToolBarManager();
    toolBarManager.add(refreshAction);
}
@Override
public void setFocus() {
// Set the focus -- intentionally a no-op: no single control owns focus here.
}
/**
 * Refreshes the list of tasks based on the current project (fLastProject):
 * disposes any existing task widgets, then searches the project's model for
 * subclasses of BuildTask and MigrationTask and creates one SilverStripeTask
 * entry per class (two entries -- up/down -- for migration tasks). Re-entrant
 * calls are ignored via the projectTasksLoading flag.
 */
public void refreshTasks() {
// Guard against re-entry while a refresh is already running.
if(projectTasksLoading) {
return;
}
projectTasksLoading=true;
if(projectTasks!=null && projectTasks.isEmpty()==false) {
for(SilverStripeTask task : projectTasks) {
task.dispose();
}
projectTasks.removeAll(projectTasks);
// NOTE(review): SWT.FILL is passed as the height hint here, while the
// rebuild below uses SWT.DEFAULT -- confirm this asymmetry is intended.
fTasksList.setSize(fTasksList.computeSize(fTasksList.getParent().getClientArea().width, SWT.FILL));
fTasksList.layout(true);
}else {
projectTasks=new ArrayList<SilverStripeTask>();
}
// Base URL of the site, normalized to end with "/" then pointed at dev/tasks.
String siteBase=CorePreferencesSupport.getInstance().getProjectSpecificPreferencesValue("silverstripe_site_base", null, fLastProject);
if(siteBase!=null && siteBase.isEmpty()==false) {
String finalURL=siteBase;
if(finalURL.substring(finalURL.length()-1).equals("/")==false) {
finalURL=finalURL.concat("/");
}
finalURL=finalURL.concat("dev/tasks");
IScriptProject project=DLTKCore.create(fLastProject);
IModelElement[] elements=new IModelElement[] { project };
IDLTKSearchScope scope=SearchEngine.createSearchScope(elements, IDLTKSearchScope.SYSTEM_LIBRARIES, project.getLanguageToolkit());
IType[] buildTypes=PHPModelAccess.getDefault().findTypes("BuildTask", MatchRule.EXACT, 0, 0, scope, new NullProgressMonitor());
buildTypes=this.concatTypeArray(buildTypes, PHPModelAccess.getDefault().findTypes("MigrationTask", MatchRule.EXACT, 0, 0, scope, new NullProgressMonitor()));
// NOTE(review): length>1 requires BOTH base types to be found; if only
// BuildTask resolves, no tasks are listed -- confirm that is intended.
if(buildTypes.length>1) {
IType buildTaskType=buildTypes[0];
IType migrationTaskType=buildTypes[1];
try {
// Walk the subtype hierarchy of both base classes to find concrete tasks.
TypeHierarchyLifeCycle lifecycle=new TypeHierarchyLifeCycle();
lifecycle.doHierarchyRefresh(buildTaskType, null);
SubTypeHierarchyContentProvider provider=new SubTypeHierarchyContentProvider(lifecycle);
Object[] decendents=provider.getChildren(buildTaskType);
decendents=this.concatObjectArray(decendents, provider.getChildren(migrationTaskType));
if(decendents.length>0) {
for(Object taskObj : decendents) {
if(taskObj instanceof IType) {
IType task=(IType) taskObj;
//Skip the MigrationTask class
if(task.getFullyQualifiedName().equals("MigrationTask")) {
continue;
}
String taskTitle=PHPModelUtils.extractElementName(task.getFullyQualifiedName());
String taskURL=finalURL.concat("/"+taskTitle);
String taskDesc="No description";
// Pull title/description out of the class's $title/$description
// fields; the regex strips everything up to the quoted value.
IField taskTitleField=task.getField("$title");
if(taskTitleField!=null && taskTitleField.exists()) {
taskTitle=taskTitleField.getSource().replaceFirst("(?s)^(.*)(\\s*)=(\\s*)(\"|')(.*)(\"|')$", "$5").trim().replaceAll("(\n|\r)", " ").replaceAll("(\\s+|\t+)", " ");
}
IField taskDescField=task.getField("$description");
if(taskDescField!=null && taskDescField.exists()) {
taskDesc=taskDescField.getSource().replaceFirst("(?s)^(.*)(\\s*)=(\\s*)(\"|')(.*)(\"|')$", "$5").trim().replaceAll("(\n|\r)", " ").replaceAll("(\\s+|\t+)", " ");
}
// Migration tasks get two entries: run up and run down.
if(task.getSuperClasses()[0].equals("MigrationTask")) {
projectTasks.add(new SilverStripeTask(fTasksList, taskTitle.concat(" (up)"), taskURL, taskDesc));
projectTasks.add(new SilverStripeTask(fTasksList, taskTitle.concat(" (down)"), taskURL.concat("?Direction=down"), taskDesc));
}else {
projectTasks.add(new SilverStripeTask(fTasksList, taskTitle, taskURL, taskDesc));
}
}
}
//Clean up
lifecycle.freeHierarchy();
provider.dispose();
if(fViewStackLayout.topControl!=fTasksView) {
fViewStackLayout.topControl=fTasksView;
fViewStack.layout();
}
fTasksList.setSize(fTasksList.computeSize(fTasksList.getParent().getClientArea().width, SWT.DEFAULT));
fTasksList.layout(true, true);
}else {
fErrorLabel.setText("Could not find any build tasks");
if(fViewStackLayout.topControl!=fErrorView) {
fViewStackLayout.topControl=fErrorView;
fViewStack.layout();
}
}
} catch (ModelException e) {
fErrorLabel.setText("Error loading build tasks");
if(fViewStackLayout.topControl!=fErrorView) {
fViewStackLayout.topControl=fErrorView;
fViewStack.layout();
}
e.printStackTrace();
}
}
}
projectTasksLoading=false;
}
/**
 * Concatenates two IType arrays into one, preserving the runtime component
 * type of the first array.
 *
 * @param leftArray First array to concatenate
 * @param rightArray Second array to concatenate
 * @return Resulting array containing leftArray's elements followed by rightArray's
 */
private IType[] concatTypeArray(IType[] leftArray, IType[] rightArray) {
    final int total = leftArray.length + rightArray.length;
    final IType[] merged = (IType[]) Array.newInstance(leftArray.getClass().getComponentType(), total);
    System.arraycopy(leftArray, 0, merged, 0, leftArray.length);
    System.arraycopy(rightArray, 0, merged, leftArray.length, rightArray.length);
    return merged;
}
/**
 * Concatenates two Object arrays into one.
 *
 * @param leftArray First array to concatenate
 * @param rightArray Second array to concatenate
 * @return Resulting array containing leftArray's elements followed by rightArray's
 */
private Object[] concatObjectArray(Object[] leftArray, Object[] rightArray) {
    // Arrays.copyOf preserves the runtime component type of leftArray,
    // matching the previous reflective Array.newInstance implementation
    // with less code.
    Object[] result = java.util.Arrays.copyOf(leftArray, leftArray.length + rightArray.length);
    System.arraycopy(rightArray, 0, result, leftArray.length, rightArray.length);
    return result;
}
/**
 * Runs the task on the webserver by navigating the embedded browser to the
 * task's URL. The progress bar is reset and made visible; its real maximum
 * is supplied later by TaskProgressListener.changed().
 * @param taskURL absolute URL of the task to run
 */
protected void runTask(String taskURL) {
progressBar.setVisible(true);
progressBar.setMaximum(1);
progressBar.setSelection(0);
fTasksBrowser.setUrl(taskURL);
}
/**
 * Handles when the selection changes in the platform: when the newly
 * selected project is a SilverStripe project with a configured site base,
 * the task view is (re)loaded for it; otherwise the error view explains
 * what is missing.
 * @param _project Project the selection belongs to
 */
protected void handleSelectionChange(IProject _project) {
try {
if(_project.hasNature(SilverStripeNature.ID)) {
String siteBase=CorePreferencesSupport.getInstance().getProjectSpecificPreferencesValue("silverstripe_site_base", null, _project);
if(siteBase!=null && siteBase.isEmpty()==false) {
refreshAction.setEnabled(true);
// Only reload when the project actually changed.
if(fLastProject==null || _project.getName().equals(fLastProject.getName())==false) {
// Reset the browser so stale task output is not shown.
fTasksBrowser.setUrl("about:blank");
fLastProject=_project;
if(fViewStackLayout.topControl!=fTasksView) {
fViewStackLayout.topControl=fTasksView;
fViewStack.layout();
}
this.refreshTasks();
}
}else {
refreshAction.setEnabled(false);
fErrorLabel.setText("You have not set the site base for this project, you can set this on the project's preferences for SilverStripe");
if(fViewStackLayout.topControl!=fErrorView) {
fViewStackLayout.topControl=fErrorView;
fViewStack.layout();
}
}
}else {
refreshAction.setEnabled(false);
fErrorLabel.setText("You must add SilverStripe Support to your project to use this view");
if(fViewStackLayout.topControl!=fErrorView) {
fViewStackLayout.topControl=fErrorView;
fViewStack.layout();
}
}
} catch (CoreException e) {
e.printStackTrace();
}
}
/**
 * Gets the resource from the selection. Falls back to the active editor's
 * resource for text selections, non-structured selections, and PHP DOM
 * elements (which are not usefully adaptable to IResource).
 * @param sel Selection to find resource from
 * @return Returns the selected resource, or null when none can be resolved
 */
protected final IResource extractSelection(ISelection sel) {
    if (!(sel instanceof IStructuredSelection) || (sel instanceof TextSelection)) {
        return activeEditorResource();
    }
    IStructuredSelection ss = (IStructuredSelection) sel;
    Object element = ss.getFirstElement();
    if (element instanceof IResource) {
        return (IResource) element;
    }
    //If not IAdabptable get resource from the active editor
    if (!(element instanceof IAdaptable) || (element instanceof ElementImplForPHP)) {
        return activeEditorResource();
    }
    IAdaptable adaptable = (IAdaptable) element;
    Object adapter = adaptable.getAdapter(IResource.class);
    return (IResource) adapter;
}

/**
 * Resolves the resource backing the active editor, walking workbench ->
 * window -> page -> editor and returning null as soon as any link is
 * missing. (Extracted: this chain previously appeared twice verbatim in
 * extractSelection.)
 * @return the active editor's resource, or null when unavailable
 */
private IResource activeEditorResource() {
    IWorkbench iworkbench = PlatformUI.getWorkbench();
    if (iworkbench == null) {
        return null;
    }
    IWorkbenchWindow iworkbenchwindow = iworkbench.getActiveWorkbenchWindow();
    if (iworkbenchwindow == null) {
        return null;
    }
    IWorkbenchPage iworkbenchpage = iworkbenchwindow.getActivePage();
    if (iworkbenchpage == null) {
        return null;
    }
    IEditorPart ieditorpart = iworkbenchpage.getActiveEditor();
    if (ieditorpart == null) {
        return null;
    }
    return extractResource(ieditorpart);
}
/**
 * Extracts the resource from the active editor part.
 * @param editor Editor part
 * @return the file behind the editor's input, or null when the input is
 *         not file-based
 */
private IResource extractResource(IEditorPart editor) {
    final IEditorInput editorInput = editor.getEditorInput();
    if (editorInput instanceof IFileEditorInput) {
        return ((IFileEditorInput) editorInput).getFile();
    }
    return null;
}
/**
 * Selection listener: reacts to workbench selection changes by switching
 * the view to the selected resource's project, when that project is an open
 * SilverStripe project. Selections originating from this view itself are
 * ignored to avoid feedback loops.
 */
private ISelectionListener listener=new ISelectionListener() {
public void selectionChanged(IWorkbenchPart sourcepart, ISelection selection) {
// Ignore selection events fired by this very view.
if(sourcepart.getSite().getId().equals(TasksViewer.ID)) {
return;
}
IResource resource=extractSelection(selection);
if(resource!=null) {
IProject _project=resource.getProject();
if(_project!=null) {
try {
if(_project.isOpen() && _project.hasNature(SilverStripeNature.ID)) {
handleSelectionChange(_project);
}
}catch (CoreException e) {
e.printStackTrace();
}
}
}
}
};
/**
 * View part listener: one-shot hook that triggers the first refreshTasks()
 * when this view first becomes visible, then unregisters itself.
 */
private IPartListener2 viewPartListener=new IPartListener2() {
@Override
public void partVisible(IWorkbenchPartReference partRef) {
if(partRef.getId().equals(TasksViewer.ID)) {
// NOTE(review): Display.getCurrent() is null off the UI thread; part
// lifecycle callbacks should always arrive on it -- confirm.
Display.getCurrent().asyncExec(new Runnable() {
@Override
public void run() {
TasksViewer.this.refreshTasks();
}
});
// One-shot: detach so later visibility changes don't re-trigger a refresh.
TasksViewer.this.getSite().getPage().removePartListener(viewPartListener);
viewPartListener=null;
}
}
@Override
public void partOpened(IWorkbenchPartReference partRef) {}
@Override
public void partInputChanged(IWorkbenchPartReference partRef) {}
@Override
public void partHidden(IWorkbenchPartReference partRef) {}
@Override
public void partDeactivated(IWorkbenchPartReference partRef) {}
@Override
public void partClosed(IWorkbenchPartReference partRef) {}
@Override
public void partBroughtToTop(IWorkbenchPartReference partRef) {}
@Override
public void partActivated(IWorkbenchPartReference partRef) {}
};
/**
 * Toolbar action that re-runs the task discovery for the current project.
 * Starts disabled; enabled once a valid SilverStripe project is selected.
 */
private class RefreshAction extends Action {
    private TasksViewer fViewer;

    public RefreshAction(TasksViewer _tasksViewer) {
        fViewer = _tasksViewer;
        this.setText("Refresh");
        this.setDescription("Refresh Tasks List");
        this.setImageDescriptor(SilverStripePluginImages.IMG_REFRESH);
        this.setEnabled(false);
    }

    @Override
    public void run() {
        fViewer.refreshTasks();
    }
}
/**
 * Browser progress listener that mirrors page-load progress into the view's
 * progress bar and, once a task page finishes loading, neutralizes all its
 * links so clicks inside the output cannot navigate away.
 */
private class TaskProgressListener implements ProgressListener {
    // Progress bar updated during page loads.
    private ProgressBar progress;
    // Browser whose loading progress is being tracked.
    private Browser browser;

    /**
     * @param browser Browser to track
     * @param bar Progress bar to update
     */
    public TaskProgressListener(Browser browser, ProgressBar bar) {
        this.browser = browser;
        progress = bar;
    }

    /**
     * Handles when the progress changes.
     * FIX: uses the injected {@code progress} field consistently; the
     * previous code mixed it with the outer progressBar field (the same
     * widget today, but fragile if a listener is ever built with another bar).
     * @param event Progress event data
     */
    public void changed(ProgressEvent event) {
        if (progress.isDisposed() == false && event.total > 0) {
            if (event.total > progress.getMaximum()) {
                progress.setMaximum(event.total);
            }
            if (event.current < progress.getMaximum()) {
                progress.setVisible(true);
                progress.setSelection(event.current);
            } else {
                progress.setSelection(event.current);
            }
        }
    }

    /**
     * Handles when the progress completes: hides/resets the bar and rewrites
     * every link in the loaded page to '#'.
     * @param event Progress event data
     */
    public void completed(ProgressEvent event) {
        progress.setVisible(false);
        progress.setMaximum(1);
        progress.setSelection(0);
        browser.evaluate("var links=document.getElementsByTagName('a');" +
                "for(var i=0;i<links.length;i++) { "+
                "links[i].href='#'; "+
                "}");
    }
}
/**
 * Clickable task entry: a bold title plus a description label. Clicking any
 * part of the entry runs the associated task URL; hovering highlights the
 * entry with the platform list-selection color.
 */
private class SilverStripeTask extends Composite {
    private Label fTaskTitleLbl;
    private Label fTaskDescLbl;
    private String taskURL;
    private Color defaultBackground;
    private Color hoverBackgroundColor;
    // Bold derivative of the title label's font. Created via
    // FontDescriptor.createFont, so this widget owns it and must dispose it
    // (the original leaked it).
    private Font boldFont;
    private Listener mouseClickListener;
    private Listener mouseExitListener;
    private Listener mouseEnterListener;

    /**
     * @param parent Parent composite
     * @param title Task title, rendered in bold
     * @param url URL executed when the entry is clicked
     * @param description Short task description
     */
    public SilverStripeTask(Composite parent, String title, String url, String description) {
        super(parent, SWT.NONE);
        defaultBackground=parent.getBackground();
        hoverBackgroundColor=Display.getCurrent().getSystemColor(SWT.COLOR_LIST_SELECTION);
        this.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, false));
        this.setLayout(new GridLayout(1, false));
        this.setBackground(defaultBackground);
        taskURL=url;
        fTaskTitleLbl=new Label(this, SWT.WRAP | SWT.NO_BACKGROUND);
        fTaskTitleLbl.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, false));
        fTaskTitleLbl.setText(title);
        FontDescriptor boldDescriptor=FontDescriptor.createFrom(fTaskTitleLbl.getFont()).setStyle(SWT.BOLD);
        boldFont=boldDescriptor.createFont(fTaskTitleLbl.getDisplay());
        fTaskTitleLbl.setFont(boldFont);
        fTaskTitleLbl.setBackground(defaultBackground);
        fTaskDescLbl=new Label(this, SWT.WRAP);
        fTaskDescLbl.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, false));
        fTaskDescLbl.setText(description);
        fTaskDescLbl.setBackground(defaultBackground);
        //Create listeners
        mouseClickListener=new Listener() {
            @Override
            public void handleEvent(Event e) {
                TasksViewer.this.runTask(taskURL);
            }
        };
        mouseEnterListener=new Listener() {
            @Override
            public void handleEvent(Event e) {
                SilverStripeTask.this.setBackground(hoverBackgroundColor);
                fTaskTitleLbl.setBackground(hoverBackgroundColor);
                fTaskDescLbl.setBackground(hoverBackgroundColor);
            }
        };
        mouseExitListener=new Listener() {
            @Override
            public void handleEvent(Event e) {
                SilverStripeTask.this.setBackground(defaultBackground);
                fTaskTitleLbl.setBackground(defaultBackground);
                fTaskDescLbl.setBackground(defaultBackground);
            }
        };
        //Bind listeners to the composite and both labels so the whole entry
        //reacts to clicks and hovering.
        this.addListener(SWT.MouseUp, mouseClickListener);
        this.addListener(SWT.MouseEnter, mouseEnterListener);
        this.addListener(SWT.MouseExit, mouseExitListener);
        fTaskTitleLbl.addListener(SWT.MouseUp, mouseClickListener);
        fTaskTitleLbl.addListener(SWT.MouseEnter, mouseEnterListener);
        fTaskTitleLbl.addListener(SWT.MouseExit, mouseExitListener);
        fTaskDescLbl.addListener(SWT.MouseUp, mouseClickListener);
        fTaskDescLbl.addListener(SWT.MouseEnter, mouseEnterListener);
        fTaskDescLbl.addListener(SWT.MouseExit, mouseExitListener);
    }

    @Override
    public void dispose() {
        mouseClickListener=null;
        mouseEnterListener=null;
        mouseExitListener=null;
        // Dispose only resources this widget created. FIX: the original
        // disposed defaultBackground (owned by the parent) and
        // hoverBackgroundColor (an SWT *system* color) -- the SWT contract
        // forbids clients from disposing either. The bold font, however, was
        // created here and was previously leaked.
        if(boldFont!=null && !boldFont.isDisposed()) {
            boldFont.dispose();
        }
        boldFont=null;
        super.dispose();
    }
}
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java.intellij;
import static org.junit.Assert.assertThat;
import com.facebook.buck.jvm.core.JavaPackageFinder;
import com.facebook.buck.jvm.java.DefaultJavaPackageFinder;
import com.facebook.buck.model.BuildTarget;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.nio.file.Path;
import java.nio.file.Paths;
/**
 * Tests for {@code IjSourceRootSimplifier}: folders of the same type and
 * package hierarchy merge up to the simplification limit, while folders
 * that differ in type, package hierarchy, or package-prefix handling must
 * stay separate, and exclude folders are left untouched.
 */
public class IjSourceRootSimplifierTest {

  /** Builds a package-prefixed folder of the given type rooted at {@code path}. */
  private static IjFolder buildFolder(String path, AbstractIjFolder.Type type) {
    return IjFolder.builder()
        .setPath(Paths.get(path))
        .setType(type)
        .setWantsPackagePrefix(true)
        .setInputs(ImmutableSortedSet.<Path>of())
        .build();
  }

  /** Builds an exclude folder (never package-prefixed). */
  private static IjFolder buildExcludeFolder(String path) {
    return IjFolder.builder()
        .setPath(Paths.get(path))
        .setType(AbstractIjFolder.Type.EXCLUDE_FOLDER)
        .setWantsPackagePrefix(false)
        .setInputs(ImmutableSortedSet.<Path>of())
        .build();
  }

  /** Builds a package-prefixed source folder. */
  private static IjFolder buildSourceFolder(String path) {
    return buildFolder(path, AbstractIjFolder.Type.SOURCE_FOLDER);
  }

  /** Builds a source folder that does NOT want a package prefix. */
  private static IjFolder buildNoPrefixSourceFolder(String path) {
    return IjFolder.builder()
        .setPath(Paths.get(path))
        .setType(AbstractIjFolder.Type.SOURCE_FOLDER)
        .setWantsPackagePrefix(false)
        .setInputs(ImmutableSortedSet.<Path>of())
        .build();
  }

  /** Builds a package-prefixed test folder. */
  private static IjFolder buildTestFolder(String path) {
    return buildFolder(path, AbstractIjFolder.Type.TEST_FOLDER);
  }

  /** Package finder whose package always equals the folder path. */
  private static JavaPackageFinder fakePackageFinder() {
    return fakePackageFinder(ImmutableMap.<Path, Path>of());
  }

  /**
   * Package finder that maps a folder to its entry in {@code packageMap},
   * falling back to the folder path itself when no mapping exists.
   */
  private static JavaPackageFinder fakePackageFinder(final ImmutableMap<Path, Path> packageMap) {
    return new JavaPackageFinder() {
      @Override
      public Path findJavaPackageFolder(Path pathRelativeToProjectRoot) {
        // The Path given here is a path to a file, not a folder.
        pathRelativeToProjectRoot =
            Preconditions.checkNotNull(pathRelativeToProjectRoot.getParent());
        if (packageMap.containsKey(pathRelativeToProjectRoot)) {
          return packageMap.get(pathRelativeToProjectRoot);
        }
        return pathRelativeToProjectRoot;
      }

      @Override
      public String findJavaPackage(Path pathRelativeToProjectRoot) {
        return DefaultJavaPackageFinder.findJavaPackageWithPackageFolder(
            findJavaPackageFolder(pathRelativeToProjectRoot));
      }

      @Override
      public String findJavaPackage(BuildTarget buildTarget) {
        return findJavaPackage(buildTarget.getBasePath().resolve("removed"));
      }
    };
  }

  @Test
  public void testSameTypeAndPackageAreMerged() {
    IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
    IjFolder left = buildSourceFolder("src/left");
    IjFolder right = buildSourceFolder("src/right");
    assertThat(
        simplifier.simplify(SimplificationLimit.of(0), ImmutableSet.of(left, right)),
        Matchers.contains(buildSourceFolder("src")));
  }

  @Test
  public void testSinglePathElement() {
    IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
    IjFolder src = buildSourceFolder("src");
    assertThat(
        simplifier.simplify(SimplificationLimit.of(0), ImmutableSet.of(src)),
        Matchers.contains(src));
  }

  @Test
  public void testSimplificationLimit() {
    // The limit caps how far up the tree a folder may be merged.
    IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
    IjFolder folder = buildSourceFolder("a/b/c/d/e/f/g");
    assertThat(
        simplifier.simplify(SimplificationLimit.of(2), ImmutableSet.of(folder)),
        Matchers.contains(buildSourceFolder("a/b")));
    assertThat(
        simplifier.simplify(SimplificationLimit.of(4), ImmutableSet.of(folder)),
        Matchers.contains(buildSourceFolder("a/b/c/d")));
    assertThat(
        simplifier.simplify(SimplificationLimit.of(10), ImmutableSet.of(folder)),
        Matchers.contains(buildSourceFolder("a/b/c/d/e/f/g")));
  }

  @Test
  public void testComplexPathElement() {
    IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
    IjFolder src = buildSourceFolder("src/a/b/c/d");
    assertThat(
        simplifier.simplify(SimplificationLimit.of(0), ImmutableSet.of(src)),
        Matchers.contains(buildSourceFolder("src")));
  }

  @Test
  public void testDifferentTypeAreNotMerged() {
    IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
    IjFolder leftSource = buildSourceFolder("src/left");
    IjFolder rightTest = buildTestFolder("src/right");
    assertThat(
        simplifier.simplify(SimplificationLimit.of(0), ImmutableSet.of(leftSource, rightTest)),
        Matchers.containsInAnyOrder(leftSource, rightTest));
  }

  @Test
  public void testDifferentTypeAreNotMergedWhileSameOnesAre() {
    IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
    IjFolder aaaSource = buildSourceFolder("a/a/a");
    IjFolder aaaaSource = buildSourceFolder("a/a/a/a");
    IjFolder aabSource = buildSourceFolder("a/a/b");
    IjFolder abSource = buildSourceFolder("a/b");
    IjFolder acTest = buildTestFolder("a/c");
    IjFolder adaTest = buildTestFolder("a/d/a");
    ImmutableSet<IjFolder> mergedFolders = simplifier.simplify(
        SimplificationLimit.of(0),
        ImmutableSet.of(aaaSource, aaaaSource, aabSource, abSource, acTest, adaTest));
    // Sources under a/a collapse into a/a, the lone test under a/d lifts to a/d;
    // sources and tests never merge with each other.
    IjFolder aaSource = buildSourceFolder("a/a");
    IjFolder adTest = buildTestFolder("a/d");
    assertThat(
        mergedFolders,
        Matchers.containsInAnyOrder(aaSource, abSource, acTest, adTest));
  }

  @Test
  public void testDifferentPackageHierarchiesAreNotMerged() {
    IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(
        fakePackageFinder(ImmutableMap.of(
            Paths.get("src/left"), Paths.get("onething"),
            Paths.get("src/right"), Paths.get("another"))));
    IjFolder leftSource = buildSourceFolder("src/left");
    // FIX: was buildTestFolder, which made the folders differ by *type* as
    // well as package -- the test then no longer exercised the
    // package-hierarchy check its name promises.
    IjFolder rightSource = buildSourceFolder("src/right");
    assertThat(
        simplifier.simplify(SimplificationLimit.of(0), ImmutableSet.of(leftSource, rightSource)),
        Matchers.containsInAnyOrder(leftSource, rightSource));
  }

  @Test
  public void testShortPackagesAreNotMerged() {
    IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(
        fakePackageFinder(ImmutableMap.of(
            Paths.get("r/x/a/a"), Paths.get("a/a"),
            Paths.get("r/x/a/b"), Paths.get("a/b"))));
    IjFolder aSource = buildSourceFolder("r/x/a/a");
    IjFolder bSource = buildSourceFolder("r/x/a/b");
    assertThat(
        simplifier.simplify(SimplificationLimit.of(0), ImmutableSet.of(aSource, bSource)),
        Matchers.contains(buildSourceFolder("r/x")));
  }

  @Test
  public void testExcludeFoldersAreIgnored() {
    // While flattening source folder hierarchies is fine within certain bounds given the
    // information available in the set of IjFolders and their package information, it is not
    // possible to do anything with exclude folders at this level of abstraction.
    // That's fine though as the IjTemplateDataPreparer generates excludes at the highest possible
    // location in the file tree, so they don't need to be merged.
    IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
    IjFolder leftSource = buildSourceFolder("src/left");
    IjFolder aExclude = buildExcludeFolder("src/a");
    IjFolder aaExclude = buildExcludeFolder("src/a/a");
    assertThat(
        simplifier.simplify(
            SimplificationLimit.of(0),
            ImmutableSet.of(leftSource, aExclude, aaExclude)),
        Matchers.containsInAnyOrder(buildSourceFolder("src"), aExclude, aaExclude));
  }

  @Test
  public void testPrefixlessSourcesAreMergedToHighestRoot() {
    // FIX: method name had a typo ("text" for "test"); JUnit 4 discovers tests
    // via @Test, so the rename is safe for all callers.
    IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
    IjFolder aFolder = buildNoPrefixSourceFolder("src/a/b");
    IjFolder aaFolder = buildNoPrefixSourceFolder("src/a/a");
    IjFolder bFolder = buildNoPrefixSourceFolder("src/b");
    assertThat(
        simplifier.simplify(
            SimplificationLimit.of(0),
            ImmutableSet.of(aFolder, aaFolder, bFolder)),
        Matchers.contains(buildNoPrefixSourceFolder("src")));
  }

  @Test
  public void testPrefixAndPrefixlessSourcesDontMerge() {
    // FIX: method name had a typo ("text" for "test"); see above.
    IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
    IjFolder aFolder = buildNoPrefixSourceFolder("src/a/b");
    IjFolder aaFolder = buildSourceFolder("src/a/a");
    IjFolder bFolder = buildNoPrefixSourceFolder("src/b");
    assertThat(
        simplifier.simplify(
            SimplificationLimit.of(0),
            ImmutableSet.of(aFolder, aaFolder, bFolder)),
        Matchers.containsInAnyOrder(aFolder, aaFolder, bFolder));
  }
}
| |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.onlinesectioning.model;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.cpsolver.coursett.Constants;
import org.cpsolver.coursett.model.Lecture;
import org.cpsolver.coursett.model.Placement;
import org.cpsolver.coursett.model.RoomLocation;
import org.cpsolver.coursett.model.TimeLocation;
import org.cpsolver.ifs.util.DistanceMetric;
import org.cpsolver.studentsct.model.Section;
import org.infinispan.commons.marshall.Externalizer;
import org.infinispan.commons.marshall.SerializeWith;
import org.unitime.timetable.model.Assignment;
import org.unitime.timetable.model.ClassInstructor;
import org.unitime.timetable.model.Class_;
import org.unitime.timetable.model.CourseOffering;
import org.unitime.timetable.model.Location;
import org.unitime.timetable.onlinesectioning.OnlineSectioningHelper;
/**
 * Immutable-after-construction snapshot of a class section used by online
 * student sectioning. Built either from a Hibernate {@link Class_} entity or
 * from a solver {@link Section}, and serialized via {@link Externalizable}
 * for Infinispan replication.
 *
 * @author Tomas Muller
 */
@SerializeWith(XSection.XSectionSerializer.class)
public class XSection implements Serializable, Comparable<XSection>, Externalizable {
    private static final long serialVersionUID = 1L;
    private Long iUniqueId = null;
    private String iName = null;
    private Map<Long, String> iNameByCourse = new HashMap<Long, String>();
    private Long iSubpartId = null;
    private Long iParentId = null;
    private int iLimit = 0;
    private String iNote = null;
    private XTime iTime = null;
    private List<XRoom> iRooms = new ArrayList<XRoom>();
    private List<XInstructor> iInstructors = new ArrayList<XInstructor>();
    private boolean iAllowOverlap = false;
    private String iInstructionalType = null;
    private String iSubpartName = null;
    private String iExternalId = null;
    private Map<Long, String> iExternalIdByCourse = new HashMap<Long, String>();
    private boolean iEnabledForScheduling = true;
    private boolean iCancelled = false;

    /** No-arg constructor required by the {@link Externalizable} contract. */
    public XSection() {
    }

    /** Deserializing constructor: reads all fields from the given stream. */
    public XSection(ObjectInput in) throws IOException, ClassNotFoundException {
        readExternal(in);
    }

    /** Builds a section snapshot from a Hibernate {@link Class_} entity. */
    public XSection(Class_ clazz, OnlineSectioningHelper helper) {
        iUniqueId = clazz.getUniqueId();
        iAllowOverlap = clazz.getSchedulingSubpart().isStudentAllowOverlap();
        // Prefer the explicit class suffix; otherwise derive name from section number + subpart suffix.
        iName = (clazz.getClassSuffix() == null ? clazz.getSectionNumber(helper.getHibSession()) + clazz.getSchedulingSubpart().getSchedulingSubpartSuffix(helper.getHibSession()) : clazz.getClassSuffix());
        iInstructionalType = clazz.getSchedulingSubpart().getItypeDesc();
        iSubpartName = clazz.getSchedulingSubpart().getItype().getAbbv().trim();
        Assignment assignment = clazz.getCommittedAssignment();
        iEnabledForScheduling = clazz.isEnabledForStudentScheduling();
        iCancelled = clazz.isCancelled();
        // Limit: 0 when scheduling is disabled, -1 for unlimited, otherwise
        // capped by room capacity / room ratio where a committed assignment exists.
        if (!clazz.isEnabledForStudentScheduling()) {
            iLimit = 0;
        } else if (clazz.getSchedulingSubpart().getInstrOfferingConfig().isUnlimitedEnrollment()) {
            iLimit = -1;
        } else {
            iLimit = clazz.getMaxExpectedCapacity();
            if (clazz.getExpectedCapacity() < clazz.getMaxExpectedCapacity() && assignment != null && !assignment.getRooms().isEmpty()) {
                // Smallest assigned room bounds the usable capacity.
                int roomSize = Integer.MAX_VALUE;
                for (Location room: assignment.getRooms())
                    roomSize = Math.min(roomSize, room.getCapacity() == null ? 0 : room.getCapacity());
                int roomLimit = (int) Math.floor(roomSize / (clazz.getRoomRatio() == null ? 1.0f : clazz.getRoomRatio()));
                iLimit = Math.min(Math.max(clazz.getExpectedCapacity(), roomLimit), clazz.getMaxExpectedCapacity());
            }
            // Treat very large limits as unlimited.
            if (iLimit >= 9999) iLimit = -1;
        }
        iParentId = (clazz.getParentClass() == null ? null : clazz.getParentClass().getUniqueId());
        iSubpartId = clazz.getSchedulingSubpart().getUniqueId();
        iNote = clazz.getSchedulePrintNote();
        iExternalId = clazz.getExternalUniqueId();
        if (iExternalId == null)
            iExternalId = clazz.getClassLabel();
        // Record per-course names and external ids for cross-listed offerings.
        for (CourseOffering course: clazz.getSchedulingSubpart().getInstrOfferingConfig().getInstructionalOffering().getCourseOfferings()) {
            iNameByCourse.put(course.getUniqueId(), clazz.getClassSuffix(course));
            String extId = clazz.getExternalId(course);
            if (extId == null)
                extId = clazz.getClassLabel(course);
            iExternalIdByCourse.put(course.getUniqueId(), extId);
        }
        // Key -1 holds the course-independent section number string.
        iNameByCourse.put(-1l, clazz.getSectionNumberString(helper.getHibSession()));
        if (assignment != null) {
            iTime = new XTime(assignment, helper.getExactTimeConversion(), helper.getDatePatternFormat());
            for (Location room: assignment.getRooms())
                iRooms.add(new XRoom(room));
        }
        // Instructors are only exposed when the class is flagged to display them.
        if (clazz.isDisplayInstructor())
            for (ClassInstructor ci: clazz.getClassInstructors()) {
                iInstructors.add(new XInstructor(ci.getInstructor(), helper));
            }
    }

    /** Builds a section snapshot from a solver {@link Section}. */
    public XSection(Section section) {
        iUniqueId = section.getId();
        iAllowOverlap = section.getSubpart().isAllowOverlap();
        iName = section.getName();
        iNameByCourse = section.getNameByCourse();
        iSubpartName = section.getSubpart().getName();
        iLimit = section.getLimit();
        iParentId = (section.getParent() == null ? null : section.getParent().getId());
        iSubpartId = section.getSubpart().getId();
        iInstructionalType = section.getSubpart().getInstructionalType();
        iNote = section.getNote();
        iTime = section.getTime() == null ? null : new XTime(section.getTime());
        iCancelled = section.isCancelled();
        if (section.getNrRooms() > 0)
            for (RoomLocation room: section.getRooms())
                iRooms.add(new XRoom(room));
        // Instructor ids and "name|email" pairs are colon-separated parallel lists.
        if (section.getChoice() != null && section.getChoice().getInstructorNames() != null) {
            String[] ids = section.getChoice().getInstructorIds().split(":");
            int i = 0;
            for (String instructor: section.getChoice().getInstructorNames().split(":")) {
                String[] nameEmail = instructor.split("\\|");
                iInstructors.add(new XInstructor(
                        Long.valueOf(ids[i]),
                        null,
                        nameEmail[0],
                        nameEmail.length > 1 ? nameEmail[1] : null));
                i++;
            }
        }
    }

    /** For testing only! */
    @Deprecated
    public XSection(String externalId) {
        iExternalId = externalId;
    }

    /** Section id */
    public Long getSectionId() { return iUniqueId; }

    /**
     * Section limit. This is defines the maximal number of students that can be
     * enrolled into this section at the same time. It is -1 in the case of an
     * unlimited section
     */
    public int getLimit() { return iLimit; }

    /** Section name */
    public String getName() { return iName; }

    /** Scheduling subpart to which this section belongs */
    public Long getSubpartId() { return iSubpartId; }

    /**
     * Parent section of this section (can be null). If there is a parent
     * section defined, a student that is enrolled in this section has to be
     * enrolled in the parent section as well. Also, the same relation needs to
     * be defined between subpart of this section and the subpart of the parent
     * section.
     */
    public Long getParentId() { return iParentId; }

    /** Time placement of the section. */
    public XTime getTime() {
        return iTime;
    }

    /** Number of rooms in which the section meet. */
    public int getNrRooms() {
        return (iRooms == null ? 0 : iRooms.size());
    }

    /**
     * Room placement -- list of
     * {@link org.cpsolver.coursett.model.RoomLocation}
     */
    public List<XRoom> getRooms() {
        return iRooms;
    }

    /** Joins all room names with the given delimiter. */
    public String toRoomString(String delim) {
        String ret = "";
        for (XRoom room: iRooms) {
            if (!ret.isEmpty()) ret += delim;
            ret += room.getName();
        }
        return ret;
    }

    @Override
    public String toString() {
        return getSubpartName() + " " + getName() + " " + (getTime() == null ? "Arrange Hours" : getTime().toString()) + (getNrRooms() == 0 ? "" : " " + toRoomString(", "));
    }

    /** As {@link #toString()}, but uses the course-dependent section name. */
    public String toString(Long coruseId) {
        return getSubpartName() + " " + getName(coruseId) + " " + (getTime() == null ? "Arrange Hours" : getTime().toString()) + (getNrRooms() == 0 ? "" : " " + toRoomString(", "));
    }

    /**
     * Return course-dependent section name
     */
    public String getName(long courseId) {
        if (iNameByCourse == null) return getName();
        String name = iNameByCourse.get(courseId);
        return (name == null ? getName() : name);
    }

    /**
     * Return course-dependent external id
     */
    public String getExternalId(long courseId) {
        if (iExternalIdByCourse == null) return iExternalId;
        String externalId = iExternalIdByCourse.get(courseId);
        return (externalId == null ? iExternalId : externalId);
    }

    /**
     * Return course-dependent section names
     */
    public Map<Long, String> getNameByCourse() { return iNameByCourse; }

    /** Equality is by section id only. */
    @Override
    public boolean equals(Object o) {
        if (o == null || !(o instanceof XSection)) return false;
        return getSectionId().equals(((XSection)o).getSectionId());
    }

    /** Hash consistent with {@link #equals(Object)}: folds the 64-bit id into 32 bits. */
    @Override
    public int hashCode() {
        return (int) (getSectionId() ^ (getSectionId() >>> 32));
    }

    /**
     * Section note
     */
    public String getNote() { return iNote; }

    /**
     * Instructors
     */
    public List<XInstructor> getInstructors() { return iInstructors; }

    /** Colon-separated instructor ids, or null when there are no instructors. */
    public String getInstructorIds() {
        if (iInstructors == null || iInstructors.isEmpty()) return null;
        StringBuffer ret = new StringBuffer();
        for (XInstructor instructor: iInstructors) {
            if (ret.length() > 0) ret.append(":");
            ret.append(instructor.getIntructorId());
        }
        return ret.toString();
    }

    /** Colon-separated "name|email" pairs, or null when there are no instructors. */
    public String getInstructorNames() {
        if (iInstructors == null || iInstructors.isEmpty()) return null;
        StringBuffer ret = new StringBuffer();
        for (XInstructor instructor: iInstructors) {
            if (ret.length() > 0) ret.append(":");
            ret.append(instructor.getName() + "|" + (instructor.getEmail() == null ? "" : instructor.getEmail()));
        }
        return ret.toString();
    }

    /**
     * Returns true if student conflicts between this section and the given one are to be ignored
     */
    public boolean isToIgnoreStudentConflictsWith(Collection<XDistribution> distributions, Long sectionId) {
        if (distributions == null) return false;
        for (XDistribution distribution: distributions)
            if (distribution.getDistributionType() == XDistributionType.IngoreConflicts &&
                distribution.getSectionIds().contains(getSectionId()) &&
                distribution.getSectionIds().contains(sectionId))
                return true;
        return false;
    }

    /** Natural ordering by section id; consistent with {@link #equals(Object)}. */
    @Override
    public int compareTo(XSection section) {
        return getSectionId().compareTo(section.getSectionId());
    }

    /** Worst-case (maximum) pairwise travel time between this section's rooms and the given rooms. */
    private int getDistanceInMinutes(DistanceMetric m, List<XRoom> other) {
        int dist = 0;
        for (XRoom r1 : getRooms())
            for (XRoom r2 : other) {
                dist = Math.max(dist, r1.getDistanceInMinutes(m, r2));
            }
        return dist;
    }

    /**
     * Travel time in minutes needed between this section and the other one,
     * or 0 when they share no rooms/days/weeks or are not (near) back-to-back.
     */
    public int getDistanceInMinutes(XSection other, DistanceMetric m) {
        if (getNrRooms() == 0 || other.getNrRooms() == 0) return 0;
        XTime t1 = getTime();
        XTime t2 = other.getTime();
        if (t1 == null || t2 == null || !t1.shareDays(t2) || !t1.shareWeeks(t2)) return 0;
        int a1 = t1.getSlot(), a2 = t2.getSlot();
        if (m.doComputeDistanceConflictsBetweenNonBTBClasses()) {
            // Non-back-to-back mode: report the distance only when it exceeds
            // the available break time plus the gap between the classes.
            if (a1 + t1.getLength() <= a2) {
                int dist = getDistanceInMinutes(m, other.getRooms());
                if (dist > t1.getBreakTime() + Constants.SLOT_LENGTH_MIN * (a2 - a1 - t1.getLength()))
                    return dist;
            }
        } else {
            // Strict back-to-back: only immediately consecutive classes matter.
            if (a1 + t1.getLength() == a2)
                return getDistanceInMinutes(m, other.getRooms());
        }
        return 0;
    }

    /** Return true if overlaps are allowed, but the number of overlapping slots should be minimized. */
    public boolean isAllowOverlap() {
        return iAllowOverlap;
    }

    public String getSubpartName() {
        return iSubpartName;
    }

    public String getInstructionalType() {
        return iInstructionalType;
    }

    public boolean isEnabledForScheduling() {
        return iEnabledForScheduling;
    }

    public boolean isCancelled() {
        return iCancelled;
    }

    /**
     * True, if this section overlaps with the given assignment in time and
     * space
     */
    public boolean isOverlapping(Collection<XDistribution> distributions, XSection other) {
        if (isAllowOverlap() || other.isAllowOverlap()) return false;
        if (getTime() == null || other.getTime() == null) return false;
        if (isToIgnoreStudentConflictsWith(distributions, other.getSectionId())) return false;
        return getTime().hasIntersection(other.getTime());
    }

    /**
     * True, if this section overlaps with one of the given set of assignments
     * in time and space
     */
    public boolean isOverlapping(Collection<XDistribution> distributions, Collection<XSection> assignments) {
        if (isAllowOverlap()) return false;
        if (getTime() == null || assignments == null)
            return false;
        for (XSection assignment : assignments) {
            if (assignment.isAllowOverlap())
                continue;
            if (assignment.getTime() == null)
                continue;
            if (isToIgnoreStudentConflictsWith(distributions, assignment.getSectionId()))
                continue;
            if (getTime().hasIntersection(assignment.getTime()))
                return true;
        }
        return false;
    }

    /** Converts this section into a solver {@link Placement}; null when the section has no time. */
    public Placement toPlacement() {
        if (getTime() == null) return null;
        List<RoomLocation> rooms = new ArrayList<RoomLocation>();
        for (XRoom r: getRooms())
            rooms.add(new RoomLocation(r.getUniqueId(), r.getName(), null, 0, 0, r.getX(), r.getY(), r.getIgnoreTooFar(), null));
        return new Placement(
                new Lecture(getSectionId(), null, getSubpartId(), getName(), new ArrayList<TimeLocation>(), new ArrayList<RoomLocation>(), getNrRooms(), null, getLimit(), getLimit(), 1.0),
                getTime().toTimeLocation(),
                rooms);
    }

    /**
     * Reads all fields from the stream. The field order MUST mirror
     * {@link #writeExternal(ObjectOutput)} exactly.
     */
    @Override
    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        iUniqueId = in.readLong();
        iName = (String)in.readObject();
        int nrNames = in.readInt();
        iNameByCourse.clear();
        for (int i = 0; i < nrNames; i++)
            iNameByCourse.put(in.readLong(), (String)in.readObject());
        iSubpartId = in.readLong();
        iParentId = in.readLong();
        // -1 is the sentinel for "no parent" (see writeExternal).
        if (iParentId < 0) iParentId = null;
        iLimit = in.readInt();
        iNote = (String)in.readObject();
        iTime = (in.readBoolean() ? new XTime(in) : null);
        int nrRooms = in.readInt();
        iRooms.clear();
        for (int i = 0; i < nrRooms; i++)
            iRooms.add(new XRoom(in));
        int nrInstructors = in.readInt();
        iInstructors.clear();
        for (int i = 0; i < nrInstructors; i++)
            iInstructors.add(new XInstructor(in));
        iAllowOverlap = in.readBoolean();
        iInstructionalType = (String)in.readObject();
        iSubpartName = (String)in.readObject();
        iExternalId = (String)in.readObject();
        int nrExtIds = in.readInt();
        iExternalIdByCourse.clear();
        for (int i = 0; i < nrExtIds; i++)
            iExternalIdByCourse.put(in.readLong(), (String)in.readObject());
        iEnabledForScheduling = in.readBoolean();
        iCancelled = in.readBoolean();
    }

    /**
     * Writes all fields to the stream. The field order MUST mirror
     * {@link #readExternal(ObjectInput)} exactly.
     */
    @Override
    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeLong(iUniqueId);
        out.writeObject(iName);
        out.writeInt(iNameByCourse.size());
        for (Map.Entry<Long, String> entry: iNameByCourse.entrySet()) {
            out.writeLong(entry.getKey());
            out.writeObject(entry.getValue());
        }
        out.writeLong(iSubpartId);
        // Encode a null parent as -1 (section ids are positive).
        out.writeLong(iParentId == null ? -1l : iParentId);
        out.writeInt(iLimit);
        out.writeObject(iNote);
        out.writeBoolean(iTime != null);
        if (iTime != null)
            iTime.writeExternal(out);
        out.writeInt(iRooms.size());
        for (XRoom room: iRooms)
            room.writeExternal(out);
        out.writeInt(iInstructors.size());
        for (XInstructor instructor: iInstructors)
            instructor.writeExternal(out);
        out.writeBoolean(iAllowOverlap);
        out.writeObject(iInstructionalType);
        out.writeObject(iSubpartName);
        out.writeObject(iExternalId);
        out.writeInt(iExternalIdByCourse.size());
        for (Map.Entry<Long, String> entry: iExternalIdByCourse.entrySet()) {
            out.writeLong(entry.getKey());
            out.writeObject(entry.getValue());
        }
        out.writeBoolean(iEnabledForScheduling);
        out.writeBoolean(iCancelled);
    }

    /** Infinispan externalizer that delegates to the Externalizable methods above. */
    public static class XSectionSerializer implements Externalizer<XSection> {
        private static final long serialVersionUID = 1L;

        @Override
        public void writeObject(ObjectOutput output, XSection object) throws IOException {
            object.writeExternal(output);
        }

        @Override
        public XSection readObject(ObjectInput input) throws IOException, ClassNotFoundException {
            return new XSection(input);
        }
    }
}
| |
/*
Copyright 2009 Wallace Wadge
This file is part of BoneCP.
BoneCP is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
BoneCP is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with BoneCP. If not, see <http://www.gnu.org/licenses/>.
*/
package org.itas.core.dbpool;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import org.itas.core.dbpool.hooks.CoverageHook;
import org.itas.core.dbpool.hooks.CustomHook;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import com.jolbox.bonecp.CommonTestUtils;
@SuppressWarnings("all")
public class TestSystemTests {
private static BoneCPConfig config;
/** One-time suite setup: registers the HSQLDB JDBC driver and clones the shared test config. */
@BeforeClass
public static void setup() throws ClassNotFoundException {
    final String driver = "org.hsqldb.jdbcDriver";
    Class.forName(driver);
    config = CommonTestUtils.getConfigClone();
}
/** Resets the shared pool config to known defaults before each test. */
@Before
public void beforeTest() {
    // Connection coordinates.
    config.setJdbcUrl(CommonTestUtils.url);
    config.setUsername(CommonTestUtils.username);
    config.setPassword(CommonTestUtils.password);
    // Idle handling: no max idle age, long test period.
    config.setIdleMaxAge(0);
    config.setIdleConnectionTestPeriod(10000);
    // Statement caching and helper threads.
    config.setStatementsCacheSize(0);
    config.setStatementsCachedPerConnection(30);
    config.setReleaseHelperThreads(0);
}
/** Mostly for code coverage: exercises every BoneCPDataSource setter/getter,
 * plus the unsupported-operation and bad-class-name error paths.
 * @throws IOException
 * @throws NoSuchMethodException
 * @throws SecurityException
 * @throws InvocationTargetException
 * @throws IllegalAccessException
 * @throws IllegalArgumentException
 * @throws ClassNotFoundException */
@Test
public void testDataSource() throws SQLException, IOException, SecurityException, NoSuchMethodException, IllegalArgumentException, IllegalAccessException, InvocationTargetException, ClassNotFoundException {
    config.setAcquireIncrement(5);
    config.setMinConnectionsPerPartition(30);
    config.setMaxConnectionsPerPartition(100);
    config.setPartitionCount(1);
    BoneCPDataSource dsb = new BoneCPDataSource(config);
    dsb.setPartitionCount(1);
    dsb.setAcquireRetryDelay(-1);
    dsb.setAcquireRetryAttempts(0);
    dsb.setMaxConnectionsPerPartition(100);
    dsb.setMinConnectionsPerPartition(30);
    dsb.setTransactionRecoveryEnabled(true);
    dsb.setConnectionHook(new CoverageHook());
    dsb.setLazyInit(false);
    dsb.setStatementsCachedPerConnection(30);
    dsb.setStatementsCacheSize(30);
    dsb.setReleaseHelperThreads(0);
    dsb.setDriverClass("org.hsqldb.jdbcDriver");
    dsb.isWrapperFor(String.class);
    dsb.setIdleMaxAge(0L);
    dsb.setAcquireIncrement(5);
    dsb.setIdleConnectionTestPeriod(0L);
    dsb.setConnectionTestStatement("test");
    dsb.setInitSQL(CommonTestUtils.TEST_QUERY);
    dsb.setCloseConnectionWatch(true);
    dsb.setLogStatementsEnabled(false);
    dsb.getConnection().close();
    assertNotNull(dsb.getConfig());
    assertNotNull(dsb.toString());
    // A bad hook class name is retained but yields a null hook instance.
    dsb.setConnectionHookClassName("bad class name");
    assertEquals("bad class name", dsb.getConnectionHookClassName());
    assertNull(dsb.getConnectionHook());
    dsb.setConnectionHookClassName("com.jolbox.bonecp.hooks.CustomHook");
    assertTrue(dsb.getConnectionHook() instanceof CustomHook);
    File tmp = File.createTempFile("bonecp", "");
    // FIX: clean up the temp file when the JVM exits; the original leaked
    // one file per test run.
    tmp.deleteOnExit();
    dsb.setLogWriter(new PrintWriter(tmp));
    assertNotNull(dsb.getLogWriter());
    // Login timeout operations are unsupported by design.
    try {
        dsb.setLoginTimeout(0);
        fail("Should throw exception");
    } catch (UnsupportedOperationException e) {
        // do nothing
    }
    try {
        dsb.getLoginTimeout();
        fail("Should throw exception");
    } catch (UnsupportedOperationException e) {
        // do nothing
    }
    try {
        dsb.getConnection("test", "test");
        fail("Should throw exception");
    } catch (UnsupportedOperationException e) {
        // do nothing
    }
    BoneCPDataSource dsb2 = new BoneCPDataSource(); // empty constructor test
    dsb2.setDriverClass("inexistent");
    try{
        dsb2.getConnection();
        fail("Should fail");
    } catch (SQLException e){
        // do nothing
    }
    assertNull(dsb.unwrap(String.class));
    assertEquals("org.hsqldb.jdbcDriver", dsb.getDriverClass());
    dsb.setClassLoader(getClass().getClassLoader());
    dsb.loadClass("java.lang.String");
    assertEquals(getClass().getClassLoader(), dsb.getClassLoader());
}
/** Verifies that pool construction fails fast with a SQLException when the
 * JDBC URL is malformed. The test passes via the expected-exception rule, so
 * the trailing logPass() only executes if construction unexpectedly succeeds
 * (in which case the test fails anyway for lack of the expected exception). */
@Test(expected=SQLException.class)
public void testDBConnectionInvalidJDBCurl() throws SQLException{
	CommonTestUtils.logTestInfo("Test trying to start up with an invalid URL.");
	config.setJdbcUrl("invalid JDBC URL");
	new BoneCP(config);
	CommonTestUtils.logPass();
}
/** Verifies that pool construction fails with a SQLException when given an
 * invalid username. As above, logPass() is unreachable in the passing case
 * because the expected exception aborts the method first. */
@Test(expected=SQLException.class)
public void testDBConnectionInvalidUsername() throws SQLException{
	CommonTestUtils.logTestInfo("Test trying to start up with an invalid username/pass combo.");
	config.setUsername("non existent");
	new BoneCP(config);
	CommonTestUtils.logPass();
}
/** Kills the database out from under a live connection and checks that the
 * connection is flagged as possibly broken once the next statement fails.
 * NOTE(review): the pool is never shut down here — presumably because
 * shutting down after the DB is gone could itself throw; confirm. */
@Test
public void testConnectionGivenButDBLost() throws SQLException{
	config.setAcquireIncrement(5);
	config.setMinConnectionsPerPartition(30);
	config.setMaxConnectionsPerPartition(100);
	config.setPartitionCount(1);
	BoneCP pool = new BoneCP(config);
	Connection connection = pool.getConnection();
	// kill off the db...
	String sql = "SHUTDOWN"; // hsqldb interprets this as a request to terminate
	Statement shutdownStatement = connection.createStatement();
	shutdownStatement.executeUpdate(sql);
	shutdownStatement.close();
	// any further statement on the same connection should now blow up
	Statement probe = connection.createStatement();
	try{
		probe.execute(CommonTestUtils.TEST_QUERY);
		fail("Connection should have been marked as broken");
	} catch (Exception e){
		assertTrue(((ConnectionHandle) connection).isPossiblyBroken());
	}
	connection.close();
}
/** Obtains and immediately releases connections from a single-partition pool,
 * then verifies that nothing is leased and only the configured minimum
 * remains free. */
@Test
public void testGetReleaseSingleThread() throws InterruptedException, SQLException{
	CommonTestUtils.logTestInfo("Test simple get/release connection from 1 partition");
	config.setMinConnectionsPerPartition(30);
	config.setMaxConnectionsPerPartition(100);
	config.setAcquireIncrement(5);
	config.setPartitionCount(1);
	BoneCP pool = new BoneCP(config);
	int remaining = 60;
	while (remaining-- > 0) {
		Connection connection = pool.getConnection();
		connection.close();
	}
	// every connection was handed straight back, so the pool sits at its floor
	assertEquals(0, pool.getTotalLeased());
	assertEquals(30, pool.getTotalFree());
	pool.shutdown();
	CommonTestUtils.logPass();
}
/** Test that requesting connections from a partition that is empty will fetch it from other partitions that still have connections. */
@Test
public void testPartitionDrain() throws InterruptedException, SQLException{
	CommonTestUtils.logTestInfo("Test connections obtained from alternate partition");
	config.setAcquireIncrement(1);
	config.setMinConnectionsPerPartition(10);
	config.setMaxConnectionsPerPartition(10);
	config.setPartitionCount(2);
	BoneCP pool = new BoneCP(config);
	// deliberately lease both partitions dry (2 x 10 connections, never released)
	int leased = 0;
	while (leased < 20) {
		pool.getConnection();
		leased++;
	}
	assertEquals(20, pool.getTotalLeased());
	assertEquals(0, pool.getTotalFree());
	pool.close();
	CommonTestUtils.logPass();
}
/** Hammers a single-partition pool from many threads at once and checks that
 * every connection is returned by the time the run completes. */
@Test
public void testMultithreadSinglePartition() throws InterruptedException, SQLException{
	CommonTestUtils.logTestInfo("Test multiple threads hitting a single partition concurrently");
	config.setAcquireIncrement(5);
	config.setMinConnectionsPerPartition(30);
	config.setMaxConnectionsPerPartition(100);
	config.setPartitionCount(1);
	BoneCPDataSource ds = new BoneCPDataSource(config);
	ds.setDriverClass("org.hsqldb.jdbcDriver");
	// 100 threads x 100 get/release cycles, no per-connection work delay
	CommonTestUtils.startThreadTest(100, 100, ds, 0, false);
	assertEquals(0, ds.getTotalLeased());
	ds.close();
	CommonTestUtils.logPass();
}
/** Hammers a five-partition pool from many threads concurrently and verifies
 * that all connections come back to the pool at the end of the run. */
@Test
public void testMultithreadMultiPartition() throws InterruptedException, SQLException{
	CommonTestUtils.logTestInfo("Test multiple threads hitting a multiple partitions concurrently");
	config.setAcquireIncrement(5);
	config.setMinConnectionsPerPartition(10);
	config.setMaxConnectionsPerPartition(25);
	config.setPartitionCount(5);
	config.setReleaseHelperThreads(0);
	BoneCPDataSource ds = new BoneCPDataSource(config);
	ds.setDriverClass("org.hsqldb.jdbcDriver");
	// 100 threads x 1000 get/release cycles, no artificial work delay
	CommonTestUtils.startThreadTest(100, 1000, ds, 0, false);
	assertEquals(0, ds.getTotalLeased());
	ds.close();
	CommonTestUtils.logPass();
}
/** Same as the plain multithread test, but each thread holds its connection
 * for a fixed simulated-work interval before releasing it. */
@Test
public void testMultithreadMultiPartitionWithConstantWorkDelay() throws InterruptedException, SQLException{
	CommonTestUtils.logTestInfo("Test multiple threads hitting a partition and doing some work on each connection");
	config.setAcquireIncrement(1);
	config.setMinConnectionsPerPartition(10);
	config.setMaxConnectionsPerPartition(10);
	config.setPartitionCount(1);
	BoneCPDataSource ds = new BoneCPDataSource(config);
	ds.setDriverClass("org.hsqldb.jdbcDriver");
	// 15 threads x 10 cycles, constant 50ms work delay per connection
	CommonTestUtils.startThreadTest(15, 10, ds, 50, false);
	assertEquals(0, ds.getTotalLeased());
	ds.close();
	CommonTestUtils.logPass();
}
/** Multithread run where each thread holds its connection for a randomized
 * interval (the negative delay value presumably signals "random up to 50ms"
 * to the helper — confirm against CommonTestUtils.startThreadTest). */
@Test
public void testMultithreadMultiPartitionWithRandomWorkDelay() throws InterruptedException, SQLException{
	CommonTestUtils.logTestInfo("Test multiple threads hitting a partition and doing some work of random duration on each connection");
	config.setAcquireIncrement(5);
	config.setMinConnectionsPerPartition(10);
	config.setMaxConnectionsPerPartition(25);
	config.setPartitionCount(5);
	BoneCPDataSource ds = new BoneCPDataSource(config);
	ds.setDriverClass("org.hsqldb.jdbcDriver");
	// 100 threads x 10 cycles, randomized work delay
	CommonTestUtils.startThreadTest(100, 10, ds, -50, false);
	assertEquals(0, ds.getTotalLeased());
	ds.close();
	CommonTestUtils.logPass();
}
/** Tests that new connections are created on the fly. */
@Test
public void testConnectionCreate() throws InterruptedException, SQLException{
	CommonTestUtils.logTestInfo("Tests that new connections are created on the fly");
	config.setMinConnectionsPerPartition(10);
	config.setMaxConnectionsPerPartition(20);
	config.setAcquireIncrement(5);
	config.setPartitionCount(1);
	config.setReleaseHelperThreads(0);
	BoneCP pool = new BoneCP(config);
	assertEquals(10, pool.getTotalCreatedConnections());
	assertEquals(0, pool.getTotalLeased());
	// lease the entire minimum pool without releasing anything
	int taken = 0;
	while (taken < 10) {
		pool.getConnection();
		taken++;
	}
	assertEquals(10, pool.getTotalLeased());
	// poll (up to 60 x 2s) for the pool to grow by one acquire increment
	int attempts = 0;
	boolean grown = false;
	while (attempts++ < 60 && !grown) {
		Thread.yield();
		Thread.sleep(2000); // give time for pool watch thread to fire up
		grown = (pool.getTotalCreatedConnections() == 15);
	}
	assertEquals(15, pool.getTotalCreatedConnections());
	assertEquals(10, pool.getTotalLeased());
	assertEquals(5, pool.getTotalFree());
	pool.shutdown();
	CommonTestUtils.logPass();
}
/** Tests that a closed connection triggers an exception if further use is
 * attempted, and that the pool is shut down afterwards. */
@Test
public void testClosedConnection() throws InterruptedException, SQLException{
	BoneCP dsb = null ;
	CommonTestUtils.logTestInfo("Tests that closed connections trigger exceptions if use is attempted.");
	config.setMinConnectionsPerPartition(10);
	config.setMaxConnectionsPerPartition(20);
	config.setAcquireIncrement(5);
	config.setPartitionCount(1);
	try{
		dsb = new BoneCP(config);
		Connection conn = dsb.getConnection();
		conn.prepareCall(CommonTestUtils.TEST_QUERY);
		conn.close();
		try{
			// using the handle after close() must fail
			conn.prepareCall(CommonTestUtils.TEST_QUERY);
			fail("Should have thrown an exception");
		} catch (SQLException e){
			CommonTestUtils.logPass();
		}
	} finally{
		// FIX: if pool construction threw, dsb is still null and the
		// unconditional shutdown() raised an NPE that masked the real failure.
		if (dsb != null) {
			dsb.shutdown();
		}
	}
}
}