gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.versioning;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.DocumentAlreadyExistsException;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;
import java.util.HashMap;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
import static org.hamcrest.Matchers.equalTo;
/**
 * Integration tests for Elasticsearch document-level versioning: client-supplied
 * external versions, internally generated versions, behavior across flush/refresh,
 * and versions reported by the bulk API.
 */
public class SimpleVersioningTests extends ElasticsearchIntegrationTest {

    @Test
    public void testExternalVersioningInitialDelete() throws Exception {
        createIndex("test");
        ensureGreen();

        // Note - external versioning doesn't throw version conflicts on deletes of
        // non-existent records. This is different from internal versioning.
        DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1")
                .setVersion(17).setVersionType(VersionType.EXTERNAL).execute().actionGet();
        assertThat(deleteResponse.isFound(), equalTo(false));

        // This should conflict with the delete above, which recorded that the
        // document was deleted at version 17.
        assertThrows(
                client().prepareIndex("test", "type", "1").setSource("field1", "value1_1")
                        .setVersion(13).setVersionType(VersionType.EXTERNAL).execute(),
                VersionConflictEngineException.class);

        // Indexing with a version above 17 succeeds and keeps the supplied version.
        IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1")
                .setVersion(18).setVersionType(VersionType.EXTERNAL).execute().actionGet();
        assertThat(indexResponse.getVersion(), equalTo(18L));
    }

    @Test
    public void testExternalVersioning() throws Exception {
        createIndex("test");
        ensureGreen();

        // With external versioning the supplied version number is stored verbatim.
        IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1")
                .setVersion(12).setVersionType(VersionType.EXTERNAL).execute().actionGet();
        assertThat(indexResponse.getVersion(), equalTo(12L));

        indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1")
                .setVersion(14).setVersionType(VersionType.EXTERNAL).execute().actionGet();
        assertThat(indexResponse.getVersion(), equalTo(14L));

        // Writing with a version lower than the current one must conflict.
        assertThrows(
                client().prepareIndex("test", "type", "1").setSource("field1", "value1_1")
                        .setVersion(13).setVersionType(VersionType.EXTERNAL).execute(),
                VersionConflictEngineException.class);

        client().admin().indices().prepareRefresh().execute().actionGet();
        for (int i = 0; i < 10; i++) {
            assertThat(client().prepareGet("test", "type", "1").execute().actionGet().getVersion(), equalTo(14L));
        }

        // Deleting with a lower version fails.
        assertThrows(
                client().prepareDelete("test", "type", "1").setVersion(2).setVersionType(VersionType.EXTERNAL).execute(),
                VersionConflictEngineException.class);

        // Delete with a higher version deletes all versions up to the given one.
        DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1")
                .setVersion(17).setVersionType(VersionType.EXTERNAL).execute().actionGet();
        assertThat(deleteResponse.isFound(), equalTo(true));
        assertThat(deleteResponse.getVersion(), equalTo(17L));

        // Deleting with a lower version keeps on failing after a delete.
        assertThrows(
                client().prepareDelete("test", "type", "1").setVersion(2).setVersionType(VersionType.EXTERNAL).execute(),
                VersionConflictEngineException.class);

        // But delete with a higher version is OK.
        deleteResponse = client().prepareDelete("test", "type", "1")
                .setVersion(18).setVersionType(VersionType.EXTERNAL).execute().actionGet();
        assertThat(deleteResponse.isFound(), equalTo(false));
        assertThat(deleteResponse.getVersion(), equalTo(18L));

        // TODO: This behavior breaks the REST api returning http status 201; good news
        // is that this is only the case until the deletes GC kicks in.
        indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1")
                .setVersion(19).setVersionType(VersionType.EXTERNAL).execute().actionGet();
        assertThat(indexResponse.getVersion(), equalTo(19L));

        deleteResponse = client().prepareDelete("test", "type", "1")
                .setVersion(20).setVersionType(VersionType.EXTERNAL).execute().actionGet();
        assertThat(deleteResponse.isFound(), equalTo(true));
        assertThat(deleteResponse.getVersion(), equalTo(20L));

        // Make sure that the next delete will be GC. Note we do it on the index settings
        // so it will be cleaned up.
        // NOTE(review): the intent above implies deletes should be garbage-collected,
        // yet gc_deletes is set to -1 — confirm this is the value the test relies on.
        HashMap<String, Object> newSettings = new HashMap<>();
        newSettings.put("index.gc_deletes", -1);
        client().admin().indices().prepareUpdateSettings("test").setSettings(newSettings).execute().actionGet();

        Thread.sleep(300); // gc works based on estimated sampled time. Give it a chance...

        // And now we have previous version return -1
        indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1")
                .setVersion(20).setVersionType(VersionType.EXTERNAL).execute().actionGet();
        assertThat(indexResponse.getVersion(), equalTo(20L));
    }

    @Test
    public void testInternalVersioningInitialDelete() throws Exception {
        createIndex("test");
        ensureGreen();

        // Internal versioning rejects a versioned delete of a document that never existed.
        assertThrows(client().prepareDelete("test", "type", "1").setVersion(17).execute(),
                VersionConflictEngineException.class);

        // A fresh create always starts at version 1.
        IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1")
                .setCreate(true).execute().actionGet();
        assertThat(indexResponse.getVersion(), equalTo(1L));
    }

    @Test
    public void testInternalVersioning() throws Exception {
        createIndex("test");
        ensureGreen();

        IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet();
        assertThat(indexResponse.getVersion(), equalTo(1L));

        indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setVersion(1).execute().actionGet();
        assertThat(indexResponse.getVersion(), equalTo(2L));

        // Writing against a stale version conflicts (asserted twice to show it is stable).
        assertThrows(
                client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(1).execute(),
                VersionConflictEngineException.class);
        assertThrows(
                client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(1).execute(),
                VersionConflictEngineException.class);

        assertThrows(
                client().prepareIndex("test", "type", "1").setCreate(true).setSource("field1", "value1_1").setVersion(1).execute(),
                VersionConflictEngineException.class);
        assertThrows(
                client().prepareIndex("test", "type", "1").setCreate(true).setSource("field1", "value1_1").setVersion(1).execute(),
                VersionConflictEngineException.class);

        // Create with the current version still fails: the document already exists.
        assertThrows(
                client().prepareIndex("test", "type", "1").setCreate(true).setSource("field1", "value1_1").setVersion(2).execute(),
                DocumentAlreadyExistsException.class);
        assertThrows(
                client().prepareIndex("test", "type", "1").setCreate(true).setSource("field1", "value1_1").setVersion(2).execute(),
                DocumentAlreadyExistsException.class);

        assertThrows(client().prepareDelete("test", "type", "1").setVersion(1).execute(), VersionConflictEngineException.class);
        assertThrows(client().prepareDelete("test", "type", "1").setVersion(1).execute(), VersionConflictEngineException.class);

        client().admin().indices().prepareRefresh().execute().actionGet();
        for (int i = 0; i < 10; i++) {
            assertThat(client().prepareGet("test", "type", "1").execute().actionGet().getVersion(), equalTo(2L));
        }

        // search with versioning
        for (int i = 0; i < 10; i++) {
            SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setVersion(true).execute().actionGet();
            assertThat(searchResponse.getHits().getAt(0).version(), equalTo(2L));
        }

        // search without versioning
        for (int i = 0; i < 10; i++) {
            SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).execute().actionGet();
            assertThat(searchResponse.getHits().getAt(0).version(), equalTo(Versions.NOT_FOUND));
        }

        DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setVersion(2).execute().actionGet();
        assertThat(deleteResponse.isFound(), equalTo(true));
        assertThat(deleteResponse.getVersion(), equalTo(3L));

        assertThrows(client().prepareDelete("test", "type", "1").setVersion(2).execute(), VersionConflictEngineException.class);

        // This is intricate - the object was deleted but a delete transaction was with
        // the right version. We add another one and thus the version is increased.
        deleteResponse = client().prepareDelete("test", "type", "1").setVersion(3).execute().actionGet();
        assertThat(deleteResponse.isFound(), equalTo(false));
        assertThat(deleteResponse.getVersion(), equalTo(4L));
    }

    @Test
    public void testSimpleVersioningWithFlush() throws Exception {
        createIndex("test");
        ensureGreen();

        IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet();
        assertThat(indexResponse.getVersion(), equalTo(1L));

        // Flushing between writes must not affect version bookkeeping.
        client().admin().indices().prepareFlush().execute().actionGet();

        indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setVersion(1).execute().actionGet();
        assertThat(indexResponse.getVersion(), equalTo(2L));

        client().admin().indices().prepareFlush().execute().actionGet();

        assertThrows(client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(1).execute(),
                VersionConflictEngineException.class);
        assertThrows(client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(1).execute(),
                VersionConflictEngineException.class);

        assertThrows(client().prepareIndex("test", "type", "1").setCreate(true).setSource("field1", "value1_1").setVersion(1).execute(),
                VersionConflictEngineException.class);
        assertThrows(client().prepareIndex("test", "type", "1").setCreate(true).setSource("field1", "value1_1").setVersion(1).execute(),
                VersionConflictEngineException.class);

        assertThrows(client().prepareDelete("test", "type", "1").setVersion(1).execute(), VersionConflictEngineException.class);
        assertThrows(client().prepareDelete("test", "type", "1").setVersion(1).execute(), VersionConflictEngineException.class);

        client().admin().indices().prepareRefresh().execute().actionGet();
        for (int i = 0; i < 10; i++) {
            assertThat(client().prepareGet("test", "type", "1").execute().actionGet().getVersion(), equalTo(2L));
        }

        for (int i = 0; i < 10; i++) {
            SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setVersion(true).execute().actionGet();
            assertThat(searchResponse.getHits().getAt(0).version(), equalTo(2L));
        }
    }

    @Test
    public void testVersioningWithBulk() {
        createIndex("test");
        ensureGreen();

        // Versions assigned through the bulk API match single-document indexing.
        BulkResponse bulkResponse = client().prepareBulk()
                .add(client().prepareIndex("test", "type", "1").setSource("field1", "value1_1")).execute().actionGet();
        assertThat(bulkResponse.hasFailures(), equalTo(false));
        assertThat(bulkResponse.getItems().length, equalTo(1));
        IndexResponse indexResponse = bulkResponse.getItems()[0].getResponse();
        assertThat(indexResponse.getVersion(), equalTo(1L));
    }
}
| |
/*
(C) Copyright 2013-2016 The RISCOSS Project Consortium
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* @author Alberto Siena
**/
package eu.riscoss.server;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import eu.riscoss.db.RiscossDB;
import eu.riscoss.db.RiscossElements;
import eu.riscoss.db.SearchParams;
import eu.riscoss.shared.JEntityNode;
import eu.riscoss.shared.JLayerContextualInfo;
import eu.riscoss.shared.JLayerNode;
import eu.riscoss.shared.RiscossUtil;
@Path("layers")
public class LayersManager {

    /** Shared JSON serializer; Gson instances are safe to reuse across requests. */
    Gson gson = new Gson();

    public LayersManager() {
    }

    /**
     * Lists the names of all layers in the given domain.
     *
     * @return a JSON array of {@code {"name": <layer>}} objects
     */
    @GET @Path("/{domain}/list")
    @Info("Returns a list of existing layers")
    public String list(
            @PathParam("domain") @Info("The work domain") String domain,
            @HeaderParam("token") @Info("The authentication token") String token
            ) throws Exception {
        JsonArray a = new JsonArray();
        RiscossDB db = null;
        try {
            db = DBConnector.openDB( domain, token );
            for( String layer : db.layerNames() ) {
                JsonObject o = new JsonObject();
                o.addProperty( "name", layer );
                a.add( o );
            }
        }
        finally {
            // Always release the DB handle, whether or not the read succeeded.
            DBConnector.closeDB( db );
        }
        return a.toString();
    }

    /**
     * Legacy search endpoint; delegates to {@link #searchNew} with no pagination.
     */
    @GET @Path("/{domain}/search")
    @Info("Returns a list of layers that match the specified parameters")
    public String search(
            @PathParam("domain") @Info("The selected domain") String domain,
            @HeaderParam("token") @Info("The authentication token") String token,
            @QueryParam("query") String query
            ) throws Exception {
        return searchNew( domain, token, query, "0", "0");
    }

    /**
     * Searches layers by name with optional pagination.
     *
     * @return the matching layers serialized as a JSON list of layer nodes
     */
    @GET @Path("/{domain}/search-layers")
    @Info("Returns a list of layers that match the specified parameters")
    public String searchNew(
            @PathParam("domain") @Info("The selected domain") String domain,
            @HeaderParam("token") @Info("The authentication token") String token,
            @DefaultValue("") @QueryParam("query") @Info("The actual query (on the layer name)") String query,
            @DefaultValue("0") @QueryParam("from") @Info("Index of the first layer (for pagination)") String strFrom,
            @DefaultValue("0") @QueryParam("max") @Info("Amount of layers to search") String strMax
            ) throws Exception {
        List<JLayerNode> result = new ArrayList<>();
        RiscossDB db = null;
        try {
            db = DBConnector.openDB( domain, token );
            SearchParams params = new SearchParams();
            params.setMax( strMax );
            params.setFrom( strFrom );
            Collection<String> list = db.findLayers( query, params );
            for( String name : list ) {
                JLayerNode jd = new JLayerNode();
                jd.name = name;
                result.add( jd );
            }
        }
        finally {
            DBConnector.closeDB( db );
        }
        return gson.toJson( result );
    }

    /**
     * Creates a new layer under the given parent, or as the topmost layer when
     * the parent name is empty. The layer name is sanitized first.
     */
    @POST @Path("{domain}/create")
    @Info("Creates a new layer")
    public void createNew(
            @PathParam("domain") @Info("The work domain") String domain,
            @HeaderParam("token") @Info("The authentication token") String token,
            @QueryParam("name") @Info("The name of the layer") String name,
            @QueryParam("parent") @Info("The name of the layer immediately above the new one, or an empty string if the new layer has to be the topmost one")
            String parentName
            ) throws Exception {
        // Attention: filename sanitation is not directly notified to the user.
        name = RiscossUtil.sanitize( name.trim() );
        parentName = parentName.trim();
        RiscossDB db = null;
        try {
            db = DBConnector.openDB( domain, token );
            db.addLayer( name, parentName );
        }
        finally {
            DBConnector.closeDB( db );
        }
    }

    /**
     * Re-parents an existing layer.
     */
    @POST @Path("{domain}/{layer}/edit-parent")
    @Info("Edits the parent of an existing layer")
    public void editParent(
            @PathParam("domain") @Info("The work domain") String domain,
            @HeaderParam("token") @Info("The authentication token") String token,
            @PathParam("layer") @Info("The name of the layer to edit") String name,
            @QueryParam("newParent") @Info("The name of the new parent") String parent
            ) throws Exception {
        RiscossDB db = null;
        try {
            db = DBConnector.openDB( domain, token );
            db.editParent( name, parent );
        }
        finally {
            DBConnector.closeDB( db );
        }
    }

    /**
     * Deletes an existing layer. Deletion is refused while the layer still
     * contains entities.
     *
     * @throws Exception if the layer is not empty
     */
    @DELETE @Path("{domain}/{layer}/delete")
    @Info("Deletes an existing layer")
    public void deleteLayer(
            @PathParam("domain") @Info("The work domain") String domain,
            @HeaderParam("token") @Info("The authentication token") String token,
            @PathParam("layer") @Info("The name of the layer to delete") String name
            ) throws Exception {
        RiscossDB db = null;
        try {
            db = DBConnector.openDB( domain, token );
            if( db.entities( name ).size() > 0 ) {
                throw new Exception( "You can not delete a layer that still contains entities. You must delete all the entities before being able to delete this layer." );
            }
            db.removeLayer( name );
        }
        finally {
            DBConnector.closeDB( db );
        }
    }

    /**
     * Returns the contextual information stored for a layer under the "ci" key,
     * or a serialized empty {@link JLayerContextualInfo} when none is stored.
     */
    @GET @Path( "{domain}/{layer}/ci" )
    @Info("Returns the contextual information associated to a layer")
    public String getContextualInfo(
            @PathParam("domain") @Info("The work domain") String domain,
            @HeaderParam("token") @Info("The authentication token") String token,
            @PathParam("layer") @Info("The name of the layer") String layer
            ) throws Exception {
        RiscossDB db = null;
        try {
            db = DBConnector.openDB( domain, token );
            String json = db.getLayerData( layer, "ci" );
            if( json == null ) {
                // No stored data: return a well-formed empty structure instead of null.
                JLayerContextualInfo info = new JLayerContextualInfo();
                json = gson.toJson( info );
            }
            return json;
        }
        finally {
            DBConnector.closeDB( db );
        }
    }

    /**
     * Stores the given JSON as the contextual information of a layer
     * (request body, saved verbatim under the "ci" key).
     */
    @POST @Path( "{domain}/{layer}/ci" )
    @Info("Associates contextual information to a layer")
    public void setContextualInfo(
            @PathParam("domain") @Info("The work domain") String domain,
            @HeaderParam("token") @Info("The authentication token") String token,
            @PathParam("layer") @Info("The name of the layer") String layer,
            String json
            ) throws Exception {
        RiscossDB db = null;
        try {
            db = DBConnector.openDB( domain, token );
            db.setLayerData( layer, "ci", json );
        }
        finally {
            DBConnector.closeDB( db );
        }
    }

    /**
     * Renames an existing layer.
     */
    @POST @Path("{domain}/{layer}/rename")
    @Info("Changes the name of a layer")
    public void editLayer(
            @PathParam("domain") @Info("The work domain") String domain,
            @HeaderParam("token") @Info("The authentication token") String token,
            @PathParam("layer") @Info("The name of an existing layer") String name,
            @QueryParam("newname") @Info("The new name of the layer") String newName
            ) throws Exception {
        RiscossDB db = null;
        try {
            db = DBConnector.openDB( domain, token );
            db.renameLayer( name, newName );
        }
        finally {
            DBConnector.closeDB( db );
        }
    }

    /**
     * Returns the scope of a layer as a JSON list; a scope is an ordered set
     * containing the target layer and its sub-layers.
     */
    @GET @Path("/{domain}/{layer}/scope")
    @Info("Returns the scope of a layer; a scope is an ordered set that contains the target layer and its sub-layers")
    public String getScope(
            @PathParam("domain") @Info("The work domain") String domain,
            @HeaderParam("token") @Info("The authentication token") String token,
            @PathParam("layer") @Info("The name of an existing layer") String layer
            ) throws Exception {
        RiscossDB db = null;
        try {
            db = DBConnector.openDB( domain, token );
            List<String> scope = db.getScope( layer );
            return gson.toJson( scope );
        }
        finally {
            DBConnector.closeDB( db );
        }
    }

    /**
     * Returns the description of a layer, or the empty string when none is set.
     */
    @GET @Path("/{domain}/{layer}/description")
    public String getDescription(
            @PathParam("domain") @Info("The selected domain") String domain,
            @PathParam("layer") @Info("The name of an existing layer") String entity,
            @HeaderParam("token") @Info("The authentication token") String token
            ) throws Exception {
        RiscossDB db = null;
        try {
            db = DBConnector.openDB( domain, token );
            return db.getProperty( RiscossElements.LAYER, entity, "description", "" );
        }
        finally {
            DBConnector.closeDB( db );
        }
    }

    /**
     * Sets the description of a layer (request body is the description text).
     */
    @POST @Path("/{domain}/{layer}/description")
    public void setDescription(
            @PathParam("domain") @Info("The selected domain") String domain,
            @PathParam("layer") @Info("The name of an existing layer") String layer,
            @HeaderParam("token") @Info("The authentication token") String token,
            @Info("The description string to be set") String description
            ) throws Exception {
        RiscossDB db = null;
        try {
            db = DBConnector.openDB( domain, token );
            db.setProperty( RiscossElements.LAYER, layer, "description", description );
        }
        finally {
            DBConnector.closeDB( db );
        }
    }
}
| |
package com.sun.corba.se.spi.activation;
/**
* com/sun/corba/se/spi/activation/_RepositoryImplBase.java .
* Generated by the IDL-to-Java compiler (portable), version "3.2"
* from /HUDSON3/workspace/8-2-build-linux-amd64/jdk8u121/8372/corba/src/share/classes/com/sun/corba/se/spi/activation/activation.idl
* Monday, December 12, 2016 4:37:46 PM PST
*/
// IDL-generated CORBA server-side skeleton for the Repository interface.
// It unmarshals incoming requests and dispatches them to the abstract
// operations that a concrete subclass implements.
public abstract class _RepositoryImplBase extends org.omg.CORBA.portable.ObjectImpl
                implements Repository, org.omg.CORBA.portable.InvokeHandler
{

  // Constructors
  public _RepositoryImplBase ()
  {
  }

  // Maps wire-level operation names to the case indices used in _invoke below.
  private static java.util.Hashtable _methods = new java.util.Hashtable ();
  static
  {
    _methods.put ("registerServer", new Integer (0));
    _methods.put ("unregisterServer", new Integer (1));
    _methods.put ("getServer", new Integer (2));
    _methods.put ("isInstalled", new Integer (3));
    _methods.put ("install", new Integer (4));
    _methods.put ("uninstall", new Integer (5));
    _methods.put ("listRegisteredServers", new Integer (6));
    _methods.put ("getApplicationNames", new Integer (7));
    _methods.put ("getServerID", new Integer (8));
  }

  // Decodes the arguments for $method from `in`, invokes the matching abstract
  // operation, and marshals the result — or a declared user exception — into
  // the reply stream obtained from $rh.
  public org.omg.CORBA.portable.OutputStream _invoke (String $method,
                                org.omg.CORBA.portable.InputStream in,
                                org.omg.CORBA.portable.ResponseHandler $rh)
  {
    org.omg.CORBA.portable.OutputStream out = null;
    Integer __method = (Integer)_methods.get ($method);
    if (__method == null)
      throw new org.omg.CORBA.BAD_OPERATION (0, org.omg.CORBA.CompletionStatus.COMPLETED_MAYBE);

    switch (__method.intValue ())
    {

       // register a server definition
       case 0:  // activation/Repository/registerServer
       {
         try {
           com.sun.corba.se.spi.activation.RepositoryPackage.ServerDef serverDef = com.sun.corba.se.spi.activation.RepositoryPackage.ServerDefHelper.read (in);
           int $result = (int)0;
           $result = this.registerServer (serverDef);
           out = $rh.createReply();
           out.write_long ($result);
         } catch (ServerAlreadyRegistered $ex) {
           out = $rh.createExceptionReply ();
           ServerAlreadyRegisteredHelper.write (out, $ex);
         } catch (BadServerDefinition $ex) {
           out = $rh.createExceptionReply ();
           BadServerDefinitionHelper.write (out, $ex);
         }
         break;
       }

       // unregister server definition
       case 1:  // activation/Repository/unregisterServer
       {
         try {
           int serverId = ServerIdHelper.read (in);
           this.unregisterServer (serverId);
           out = $rh.createReply();
         } catch (ServerNotRegistered $ex) {
           out = $rh.createExceptionReply ();
           ServerNotRegisteredHelper.write (out, $ex);
         }
         break;
       }

       // get server definition
       case 2:  // activation/Repository/getServer
       {
         try {
           int serverId = ServerIdHelper.read (in);
           com.sun.corba.se.spi.activation.RepositoryPackage.ServerDef $result = null;
           $result = this.getServer (serverId);
           out = $rh.createReply();
           com.sun.corba.se.spi.activation.RepositoryPackage.ServerDefHelper.write (out, $result);
         } catch (ServerNotRegistered $ex) {
           out = $rh.createExceptionReply ();
           ServerNotRegisteredHelper.write (out, $ex);
         }
         break;
       }

       // Return whether the server has been installed
       case 3:  // activation/Repository/isInstalled
       {
         try {
           int serverId = ServerIdHelper.read (in);
           boolean $result = false;
           $result = this.isInstalled (serverId);
           out = $rh.createReply();
           out.write_boolean ($result);
         } catch (ServerNotRegistered $ex) {
           out = $rh.createExceptionReply ();
           ServerNotRegisteredHelper.write (out, $ex);
         }
         break;
       }

       // mark the server as installed; fails with ServerAlreadyInstalled
       // if the server is currently marked as installed.
       case 4:  // activation/Repository/install
       {
         try {
           int serverId = ServerIdHelper.read (in);
           this.install (serverId);
           out = $rh.createReply();
         } catch (ServerNotRegistered $ex) {
           out = $rh.createExceptionReply ();
           ServerNotRegisteredHelper.write (out, $ex);
         } catch (ServerAlreadyInstalled $ex) {
           out = $rh.createExceptionReply ();
           ServerAlreadyInstalledHelper.write (out, $ex);
         }
         break;
       }

       // mark the server as uninstalled; fails with ServerAlreadyUninstalled
       // if the server is currently marked as uninstalled.
       case 5:  // activation/Repository/uninstall
       {
         try {
           int serverId = ServerIdHelper.read (in);
           this.uninstall (serverId);
           out = $rh.createReply();
         } catch (ServerNotRegistered $ex) {
           out = $rh.createExceptionReply ();
           ServerNotRegisteredHelper.write (out, $ex);
         } catch (ServerAlreadyUninstalled $ex) {
           out = $rh.createExceptionReply ();
           ServerAlreadyUninstalledHelper.write (out, $ex);
         }
         break;
       }

       // list registered servers
       case 6:  // activation/Repository/listRegisteredServers
       {
         int $result[] = null;
         $result = this.listRegisteredServers ();
         out = $rh.createReply();
         ServerIdsHelper.write (out, $result);
         break;
       }

       // list the application names of the registered servers.
       case 7:  // activation/Repository/getApplicationNames
       {
         String $result[] = null;
         $result = this.getApplicationNames ();
         out = $rh.createReply();
         com.sun.corba.se.spi.activation.RepositoryPackage.StringSeqHelper.write (out, $result);
         break;
       }

       // Find the ServerID associated with the given application name.
       case 8:  // activation/Repository/getServerID
       {
         try {
           String applicationName = in.read_string ();
           int $result = (int)0;
           $result = this.getServerID (applicationName);
           out = $rh.createReply();
           out.write_long ($result);
         } catch (ServerNotRegistered $ex) {
           out = $rh.createExceptionReply ();
           ServerNotRegisteredHelper.write (out, $ex);
         }
         break;
       }

       default:
         throw new org.omg.CORBA.BAD_OPERATION (0, org.omg.CORBA.CompletionStatus.COMPLETED_MAYBE);
    }

    return out;
  } // _invoke

  // Type-specific CORBA::Object operations
  private static String[] __ids = {
    "IDL:activation/Repository:1.0"};

  // Returns a defensive copy so callers cannot mutate the shared id table.
  public String[] _ids ()
  {
    return (String[])__ids.clone ();
  }

} // class _RepositoryImplBase
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceResult;
/**
*
*/
// Code-generated AWS SDK result model; edits here are normally overwritten by
// the generator.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeTransitGatewayAttachmentsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable,
        Cloneable {

    /**
     * <p>
     * Information about the attachments.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<TransitGatewayAttachment> transitGatewayAttachments;
    /**
     * <p>
     * The token to use to retrieve the next page of results. This value is <code>null</code> when there are no more
     * results to return.
     * </p>
     */
    private String nextToken;

    /**
     * <p>
     * Information about the attachments.
     * </p>
     *
     * @return Information about the attachments.
     */
    public java.util.List<TransitGatewayAttachment> getTransitGatewayAttachments() {
        // Lazily initializes the list so this getter never returns null.
        if (transitGatewayAttachments == null) {
            transitGatewayAttachments = new com.amazonaws.internal.SdkInternalList<TransitGatewayAttachment>();
        }
        return transitGatewayAttachments;
    }

    /**
     * <p>
     * Information about the attachments.
     * </p>
     *
     * @param transitGatewayAttachments
     *        Information about the attachments.
     */
    public void setTransitGatewayAttachments(java.util.Collection<TransitGatewayAttachment> transitGatewayAttachments) {
        if (transitGatewayAttachments == null) {
            this.transitGatewayAttachments = null;
            return;
        }
        // Defensive copy of the caller's collection.
        this.transitGatewayAttachments = new com.amazonaws.internal.SdkInternalList<TransitGatewayAttachment>(transitGatewayAttachments);
    }

    /**
     * <p>
     * Information about the attachments.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setTransitGatewayAttachments(java.util.Collection)} or
     * {@link #withTransitGatewayAttachments(java.util.Collection)} if you want to override the existing values.
     * </p>
     *
     * @param transitGatewayAttachments
     *        Information about the attachments.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeTransitGatewayAttachmentsResult withTransitGatewayAttachments(TransitGatewayAttachment... transitGatewayAttachments) {
        if (this.transitGatewayAttachments == null) {
            setTransitGatewayAttachments(new com.amazonaws.internal.SdkInternalList<TransitGatewayAttachment>(transitGatewayAttachments.length));
        }
        for (TransitGatewayAttachment ele : transitGatewayAttachments) {
            this.transitGatewayAttachments.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * Information about the attachments.
     * </p>
     *
     * @param transitGatewayAttachments
     *        Information about the attachments.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeTransitGatewayAttachmentsResult withTransitGatewayAttachments(java.util.Collection<TransitGatewayAttachment> transitGatewayAttachments) {
        setTransitGatewayAttachments(transitGatewayAttachments);
        return this;
    }

    /**
     * <p>
     * The token to use to retrieve the next page of results. This value is <code>null</code> when there are no more
     * results to return.
     * </p>
     *
     * @param nextToken
     *        The token to use to retrieve the next page of results. This value is <code>null</code> when there are no
     *        more results to return.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * The token to use to retrieve the next page of results. This value is <code>null</code> when there are no more
     * results to return.
     * </p>
     *
     * @return The token to use to retrieve the next page of results. This value is <code>null</code> when there are no
     *         more results to return.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * The token to use to retrieve the next page of results. This value is <code>null</code> when there are no more
     * results to return.
     * </p>
     *
     * @param nextToken
     *        The token to use to retrieve the next page of results. This value is <code>null</code> when there are no
     *        more results to return.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeTransitGatewayAttachmentsResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getTransitGatewayAttachments() != null)
            sb.append("TransitGatewayAttachments: ").append(getTransitGatewayAttachments()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof DescribeTransitGatewayAttachmentsResult == false)
            return false;
        DescribeTransitGatewayAttachmentsResult other = (DescribeTransitGatewayAttachmentsResult) obj;
        // XOR detects the "exactly one side is null" case for each field.
        if (other.getTransitGatewayAttachments() == null ^ this.getTransitGatewayAttachments() == null)
            return false;
        if (other.getTransitGatewayAttachments() != null && other.getTransitGatewayAttachments().equals(this.getTransitGatewayAttachments()) == false)
            return false;
        if (other.getNextToken() == null ^ this.getNextToken() == null)
            return false;
        if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        // Standard prime-multiplier hash over the same fields compared in equals.
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getTransitGatewayAttachments() == null) ? 0 : getTransitGatewayAttachments().hashCode());
        hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        return hashCode;
    }

    @Override
    public DescribeTransitGatewayAttachmentsResult clone() {
        try {
            return (DescribeTransitGatewayAttachmentsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            // Unreachable: the class declares Cloneable, so Object.clone() must succeed.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.sql.planner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.prestosql.sql.planner.assertions.BasePlanTest;
import io.prestosql.sql.planner.assertions.PlanMatchPattern;
import org.testng.annotations.Test;
import java.util.Optional;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.anyTree;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.equiJoinClause;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.filter;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.join;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.limit;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.output;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.project;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.semiJoin;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.unnest;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values;
import static io.prestosql.sql.planner.plan.JoinNode.Type.INNER;
/**
 * Plan tests verifying that dereference expressions on row-typed columns
 * (e.g. {@code msg.x}) are pushed down below joins, filters, windows,
 * semi-joins, limits and unnests, so only the referenced fields are projected
 * early in the plan instead of the whole row.
 */
public class TestDereferencePushDown
        extends BasePlanTest
{
    // a.msg is referenced as a whole, so its dereference (a.msg.x) is computed above
    // the join; b.msg.y is projected out before the join on the other side.
    @Test
    public void testDereferencePushdownMultiLevel()
    {
        assertPlan("WITH t(msg) AS (VALUES ROW(CAST(ROW(1, 2.0) AS ROW(x BIGINT, y DOUBLE)))) " +
                        "SELECT a.msg.x, a.msg, b.msg.y FROM t a CROSS JOIN t b",
                output(ImmutableList.of("a_msg_x", "a_msg", "b_msg_y"),
                        strictProject(
                                ImmutableMap.of(
                                        "a_msg_x", PlanMatchPattern.expression("a_msg.x"),
                                        "a_msg", PlanMatchPattern.expression("a_msg"),
                                        "b_msg_y", PlanMatchPattern.expression("b_msg_y")),
                                join(INNER, ImmutableList.of(),
                                        values("a_msg"),
                                        strictProject(
                                                ImmutableMap.of("b_msg_y", PlanMatchPattern.expression("b_msg.y")),
                                                values("b_msg"))))));
    }

    // Dereferences used in join criteria, in a filter below the join, and in a
    // join filter are all expected to be projected below the join node.
    @Test
    public void testDereferencePushdownJoin()
    {
        // dereference in join criteria: msg.y projected on both sides
        assertPlan("WITH t(msg) AS (VALUES ROW(CAST(ROW(1, 2.0) AS ROW(x BIGINT, y DOUBLE))))" +
                        "SELECT b.msg.x " +
                        "FROM t a, t b " +
                        "WHERE a.msg.y = b.msg.y",
                output(ImmutableList.of("b_x"),
                        join(INNER, ImmutableList.of(equiJoinClause("a_y", "b_y")),
                                anyTree(
                                        strictProject(ImmutableMap.of("a_y", expression("msg.y")),
                                                values("msg"))),
                                anyTree(
                                        strictProject(ImmutableMap.of("b_y", expression("msg.y"), "b_x", expression("msg.x")),
                                                values("msg"))))));
        // dereference in a pre-join filter: filter stays below the projected dereference
        assertPlan("WITH t(msg) AS (VALUES ROW(CAST(ROW(1, 2.0) AS ROW(x BIGINT, y DOUBLE))))" +
                        "SELECT a.msg.y " +
                        "FROM t a JOIN t b ON a.msg.y = b.msg.y " +
                        "WHERE a.msg.x > BIGINT '5'",
                output(ImmutableList.of("a_y"),
                        join(INNER, ImmutableList.of(equiJoinClause("a_y", "b_y")),
                                anyTree(
                                        strictProject(ImmutableMap.of("a_y", expression("msg.y")),
                                                filter("msg.x > BIGINT '5'",
                                                        values("msg")))),
                                anyTree(
                                        strictProject(ImmutableMap.of("b_y", expression("msg.y")),
                                                values("msg"))))));
        // dereference in a join filter: inputs to the filter projected on both sides
        assertPlan("WITH t(msg) AS (VALUES ROW(CAST(ROW(1, 2.0) AS ROW(x BIGINT, y DOUBLE))))" +
                        "SELECT b.msg.x " +
                        "FROM t a JOIN t b ON a.msg.y = b.msg.y " +
                        "WHERE a.msg.x + b.msg.x < BIGINT '10'",
                output(ImmutableList.of("b_x"),
                        join(INNER, ImmutableList.of(equiJoinClause("a_y", "b_y")), Optional.of("a_x + b_x < BIGINT '10'"),
                                anyTree(
                                        strictProject(ImmutableMap.of("a_y", expression("msg.y"), "a_x", expression("msg.x")),
                                                values("msg"))),
                                anyTree(
                                        strictProject(ImmutableMap.of("b_y", expression("msg.y"), "b_x", expression("msg.x")),
                                                values("msg"))))));
    }

    // Dereferences appearing only in the WHERE clause of a cross join are still
    // projected below the join on their respective sides.
    @Test
    public void testDereferencePushdownFilter()
    {
        assertPlan("WITH t(msg) AS (VALUES ROW(CAST(ROW(1, 2.0) AS ROW(x BIGINT, y DOUBLE))))" +
                        "SELECT a.msg.y, b.msg.x " +
                        "FROM t a CROSS JOIN t b " +
                        "WHERE a.msg.x = 7 OR IS_FINITE(b.msg.y)",
                anyTree(
                        join(INNER, ImmutableList.of(),
                                strictProject(ImmutableMap.of("a_x", expression("msg.x"), "a_y", expression("msg.y")),
                                        values("msg")),
                                strictProject(ImmutableMap.of("b_x", expression("msg.x"), "b_y", expression("msg.y")),
                                        values("msg")))));
    }

    // Dereferences are pushed below a window node only when the window does not
    // reference the whole row (see the inline comments on the expected projection).
    @Test
    public void testDereferencePushdownWindow()
    {
        assertPlan("WITH t(msg) AS (VALUES ROW(CAST(ROW(1, 2.0) AS ROW(x BIGINT, y DOUBLE))))" +
                        "SELECT msg.x AS x, ROW_NUMBER() OVER (PARTITION BY msg.y) AS rn " +
                        "FROM t ",
                anyTree(
                        strictProject(ImmutableMap.of("a_x", expression("msg.x"), "a_y", expression("msg.y")),
                                values("msg"))));
        assertPlan(
                "WITH t(msg1, msg2, msg3, msg4, msg5) AS (VALUES " +
                        // Use two rows to avoid any optimizations around short-circuting operations
                        "ROW(" +
                        " CAST(ROW(1, 0.0) AS ROW(x BIGINT, y DOUBLE))," +
                        " CAST(ROW(2, 0.0) AS ROW(x BIGINT, y DOUBLE))," +
                        " CAST(ROW(3, 0.0) AS ROW(x BIGINT, y DOUBLE))," +
                        " CAST(ROW(4, 0.0) AS ROW(x BIGINT, y DOUBLE))," +
                        " CAST(ROW(5, 0.0) AS ROW(x BIGINT, y DOUBLE)))," +
                        "ROW(" +
                        " CAST(ROW(1, 1.0) AS ROW(x BIGINT, y DOUBLE))," +
                        " CAST(ROW(2, 2.0) AS ROW(x BIGINT, y DOUBLE))," +
                        " CAST(ROW(3, 3.0) AS ROW(x BIGINT, y DOUBLE))," +
                        " CAST(ROW(4, 4.0) AS ROW(x BIGINT, y DOUBLE))," +
                        " CAST(ROW(5, 5.0) AS ROW(x BIGINT, y DOUBLE))))" +
                        "SELECT " +
                        " msg1.x AS x1, " +
                        " msg2.x AS x2, " +
                        " msg3.x AS x3, " +
                        " msg4.x AS x4, " +
                        " msg5.x AS x5, " +
                        " MIN(msg3) OVER (PARTITION BY msg1 ORDER BY msg2) AS msg6," +
                        " MIN(msg4.x) OVER (PARTITION BY msg1 ORDER BY msg2) AS bigint_msg4 " +
                        "FROM t",
                anyTree(
                        project(
                                ImmutableMap.of(
                                        "msg1", expression("msg1"), // not pushed down because used in partition by
                                        "msg2", expression("msg2"), // not pushed down because used in order by
                                        "msg3", expression("msg3"), // not pushed down because used in window function
                                        "msg4_x", expression("msg4.x"), // pushed down because msg4.x used in window function
                                        "msg5_x", expression("msg5.x")), // pushed down because window node does not refer it
                                values("msg1", "msg2", "msg3", "msg4", "msg5"))));
    }

    // Dereferences on both sides of a semi-join (IN predicate) are projected
    // below the semi-join node.
    @Test
    public void testDereferencePushdownSemiJoin()
    {
        assertPlan("WITH t(msg) AS (VALUES ROW(CAST(ROW(1, 2.0, 3) AS ROW(x BIGINT, y DOUBLE, z BIGINT)))) " +
                        "SELECT msg.y " +
                        "FROM t " +
                        "WHERE " +
                        "msg.x IN (SELECT msg.z FROM t)",
                anyTree(
                        semiJoin("a_x", "b_z", "semi_join_symbol",
                                anyTree(
                                        strictProject(ImmutableMap.of("a_x", expression("msg.x"), "a_y", expression("msg.y")),
                                                values("msg"))),
                                anyTree(
                                        strictProject(ImmutableMap.of("b_z", expression("msg.z")),
                                                values("msg"))))));
    }

    // Dereferences are projected below LIMIT, and below joins that sit under a LIMIT.
    @Test
    public void testDereferencePushdownLimit()
    {
        // msg.x is projected below the limit; the multiplication stays above it
        assertPlan("WITH t(msg) AS (VALUES ROW(CAST(ROW(1, 2.0) AS ROW(x BIGINT, y DOUBLE))), ROW(CAST(ROW(3, 4.0) AS ROW(x BIGINT, y DOUBLE))))" +
                        "SELECT msg.x * 3 FROM t limit 1",
                anyTree(
                        strictProject(ImmutableMap.of("x_into_3", expression("msg_x * BIGINT '3'")),
                                limit(1,
                                        strictProject(ImmutableMap.of("msg_x", expression("msg.x")),
                                                values("msg"))))));
        // same three join shapes as testDereferencePushdownJoin, but under LIMIT 100
        assertPlan("WITH t(msg) AS (VALUES ROW(CAST(ROW(1, 2.0) AS ROW(x BIGINT, y DOUBLE))))" +
                        "SELECT b.msg.x " +
                        "FROM t a, t b " +
                        "WHERE a.msg.y = b.msg.y " +
                        "LIMIT 100",
                anyTree(join(INNER, ImmutableList.of(equiJoinClause("a_y", "b_y")),
                        anyTree(
                                strictProject(ImmutableMap.of("a_y", expression("msg.y")),
                                        values("msg"))),
                        anyTree(
                                strictProject(ImmutableMap.of("b_y", expression("msg.y"), "b_x", expression("msg.x")),
                                        values("msg"))))));
        assertPlan("WITH t(msg) AS (VALUES ROW(CAST(ROW(1, 2.0) AS ROW(x BIGINT, y DOUBLE))))" +
                        "SELECT a.msg.y " +
                        "FROM t a JOIN t b ON a.msg.y = b.msg.y " +
                        "WHERE a.msg.x > BIGINT '5' " +
                        "LIMIT 100",
                anyTree(join(INNER, ImmutableList.of(equiJoinClause("a_y", "b_y")),
                        anyTree(
                                strictProject(ImmutableMap.of("a_y", expression("msg.y")),
                                        filter("msg.x > BIGINT '5'",
                                                values("msg")))),
                        anyTree(
                                strictProject(ImmutableMap.of("b_y", expression("msg.y")),
                                        values("msg"))))));
        assertPlan("WITH t(msg) AS (VALUES ROW(CAST(ROW(1, 2.0) AS ROW(x BIGINT, y DOUBLE))))" +
                        "SELECT b.msg.x " +
                        "FROM t a JOIN t b ON a.msg.y = b.msg.y " +
                        "WHERE a.msg.x + b.msg.x < BIGINT '10' " +
                        "LIMIT 100",
                anyTree(join(INNER, ImmutableList.of(equiJoinClause("a_y", "b_y")), Optional.of("a_x + b_x < BIGINT '10'"),
                        anyTree(
                                strictProject(ImmutableMap.of("a_y", expression("msg.y"), "a_x", expression("msg.x")),
                                        values("msg"))),
                        anyTree(
                                strictProject(ImmutableMap.of("b_y", expression("msg.y"), "b_x", expression("msg.x")),
                                        values("msg"))))));
    }

    // Dereferences remain pushed below the join even when an UNNEST sits above it.
    @Test
    public void testDereferencePushdownUnnest()
    {
        assertPlan("WITH t(msg, array) AS (VALUES ROW(CAST(ROW(1, 2.0) AS ROW(x BIGINT, y DOUBLE)), ARRAY[1, 2, 3])) " +
                        "SELECT a.msg.x " +
                        "FROM t a JOIN t b ON a.msg.y = b.msg.y " +
                        "CROSS JOIN UNNEST (a.array) " +
                        "WHERE a.msg.x + b.msg.x < BIGINT '10'",
                output(ImmutableList.of("expr"),
                        strictProject(ImmutableMap.of("expr", expression("a_x")),
                                unnest(
                                        join(INNER, ImmutableList.of(equiJoinClause("a_y", "b_y")),
                                                Optional.of("a_x + b_x < BIGINT '10'"),
                                                anyTree(
                                                        strictProject(ImmutableMap.of("a_y", expression("msg.y"), "a_x", expression("msg.x"), "a_z", expression("array")),
                                                                values("msg", "array"))),
                                                anyTree(
                                                        strictProject(ImmutableMap.of("b_y", expression("msg.y"), "b_x", expression("msg.x")),
                                                                values("msg"))))))));
    }
}
| |
package company.kr.sand.views;
/**
* Created by User on 2015-11-01.
*/
import android.content.Context;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.widget.ImageView;
import android.widget.LinearLayout.LayoutParams;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.ImageLoader;
import com.android.volley.toolbox.ImageLoader.ImageContainer;
import com.android.volley.toolbox.ImageLoader.ImageListener;
/**
 * An {@link ImageView} that loads its content over the network through Volley's
 * {@link ImageLoader}, handling request lifecycle (cancellation on URL change and
 * on detach), placeholder/error images, and an optional success/error callback.
 * Adapted from Volley's NetworkImageView.
 */
public class PicImageView extends ImageView {

    /** Callback notified when the image request succeeds or fails. */
    public interface ResponseObserver {
        public void onError();

        public void onSuccess();
    }

    private ResponseObserver mObserver;

    /** Registers the observer to be notified of load success/failure. */
    public void setResponseObserver(ResponseObserver observer) {
        mObserver = observer;
    }

    /**
     * The URL of the network image to load
     */
    private String mUrl;

    /**
     * Resource ID of the image to be used as a placeholder until the network
     * image is loaded.
     */
    private int mDefaultImageId;

    /**
     * Resource ID of the image to be used if the network response fails.
     */
    private int mErrorImageId;

    /**
     * Local copy of the ImageLoader.
     */
    private ImageLoader mImageLoader;

    /**
     * Current ImageContainer. (either in-flight or finished)
     */
    private ImageContainer mImageContainer;

    public PicImageView(Context context) {
        this(context, null);
    }

    public PicImageView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public PicImageView(Context context, AttributeSet attrs,
            int defStyle) {
        super(context, attrs, defStyle);
    }

    /**
     * Sets URL of the image that should be loaded into this view. Note that
     * calling this will immediately either set the cached image (if available)
     * or the default image specified by
     * {@link #setDefaultImageResId(int)} on the view.
     * <p/>
     * NOTE: If applicable, {@link #setDefaultImageResId(int)}
     * and {@link #setErrorImageResId(int)} should be called
     * prior to calling this function.
     *
     * @param url The URL that should be loaded into this ImageView.
     * @param imageLoader ImageLoader that will be used to make the request.
     */
    public void setImageUrl(String url, ImageLoader imageLoader) {
        mUrl = url;
        mImageLoader = imageLoader;
        // The URL has potentially changed. See if we need to load it.
        loadImageIfNecessary(false);
    }

    /**
     * Sets the default image resource ID to be used for this view until the
     * attempt to load it completes.
     */
    public void setDefaultImageResId(int defaultImage) {
        mDefaultImageId = defaultImage;
    }

    /**
     * Sets the error image resource ID to be used for this view in the event
     * that the image requested fails to load.
     */
    public void setErrorImageResId(int errorImage) {
        mErrorImageId = errorImage;
    }

    /**
     * Loads the image for the view if it isn't already loaded.
     *
     * @param isInLayoutPass True if this was invoked from a layout pass, false otherwise.
     */
    private void loadImageIfNecessary(final boolean isInLayoutPass) {
        final int width = getWidth();
        int height = getHeight();

        // wrap-content views have no fixed bounds, so they may load before measurement
        boolean isFullyWrapContent = getLayoutParams() != null
                && getLayoutParams().height == LayoutParams.WRAP_CONTENT
                && getLayoutParams().width == LayoutParams.WRAP_CONTENT;
        // if the view's bounds aren't known yet, and this is not a
        // wrap-content/wrap-content
        // view, hold off on loading the image.
        if (width == 0 && height == 0 && !isFullyWrapContent) {
            return;
        }
        // if the URL to be loaded in this view is empty, cancel any old
        // requests and clear the
        // currently loaded image.
        if (TextUtils.isEmpty(mUrl)) {
            if (mImageContainer != null) {
                mImageContainer.cancelRequest();
                mImageContainer = null;
            }
            setDefaultImageOrNull();
            return;
        }
        // if there was an old request in this view, check if it needs to be
        // canceled.
        if (mImageContainer != null && mImageContainer.getRequestUrl() != null) {
            if (mImageContainer.getRequestUrl().equals(mUrl)) {
                // if the request is from the same URL, return.
                return;
            } else {
                // if there is a pre-existing request, cancel it if it's
                // fetching a different URL.
                mImageContainer.cancelRequest();
                setDefaultImageOrNull();
            }
        }
        // The pre-existing content of this view didn't match the current URL.
        // Load the new image
        // from the network.
        ImageContainer newContainer = mImageLoader.get(mUrl,
                new ImageListener() {
                    @Override
                    public void onErrorResponse(VolleyError error) {
                        // Show the error image (if configured) and notify the observer.
                        if (mErrorImageId != 0) {
                            setImageResource(mErrorImageId);
                        }
                        if (mObserver != null) {
                            mObserver.onError();
                        }
                    }

                    @Override
                    public void onResponse(final ImageContainer response,
                            boolean isImmediate) {
                        // If this was an immediate response that was delivered
                        // inside of a layout
                        // pass do not set the image immediately as it will
                        // trigger a requestLayout
                        // inside of a layout. Instead, defer setting the image
                        // by posting back to
                        // the main thread.
                        if (isImmediate && isInLayoutPass) {
                            post(new Runnable() {
                                @Override
                                public void run() {
                                    onResponse(response, false);
                                }
                            });
                            return;
                        }

                        // NOTE(review): a null bitmap here falls back to the default
                        // image; the observer is still notified via onSuccess().
                        int bWidth = 0, bHeight = 0;
                        if (response.getBitmap() != null) {
                            setImageBitmap(response.getBitmap());
                            bWidth = response.getBitmap().getWidth();
                            bHeight = response.getBitmap().getHeight();
                            adjustImageAspect(bWidth, bHeight);
                        } else if (mDefaultImageId != 0) {
                            setImageResource(mDefaultImageId);
                        }
                        if (mObserver != null) {
                            mObserver.onSuccess();
                        }
                    }
                });
        // update the ImageContainer to be the new bitmap container.
        mImageContainer = newContainer;
    }

    /** Shows the configured placeholder, or clears the image when none is set. */
    private void setDefaultImageOrNull() {
        if (mDefaultImageId != 0) {
            setImageResource(mDefaultImageId);
        } else {
            setImageBitmap(null);
        }
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right,
            int bottom) {
        super.onLayout(changed, left, top, right, bottom);
        // Bounds are now known; retry a load that may have been deferred earlier.
        loadImageIfNecessary(true);
    }

    @Override
    protected void onDetachedFromWindow() {
        if (mImageContainer != null) {
            // If the view was bound to an image request, cancel it and clear
            // out the image from the view.
            mImageContainer.cancelRequest();
            setImageBitmap(null);
            // also clear out the container so we can reload the image if
            // necessary.
            mImageContainer = null;
        }
        super.onDetachedFromWindow();
    }

    @Override
    protected void drawableStateChanged() {
        super.drawableStateChanged();
        invalidate();
    }

    /*
     * Adjusting imageview height
     *
     * NOTE(review): bWidth/bHeight are only used for the zero check; the layout
     * params are set to the view's own current size, so the bitmap's aspect ratio
     * is not actually applied — confirm whether this is intended.
     */
    private void adjustImageAspect(int bWidth, int bHeight) {
        LayoutParams params = (LayoutParams) getLayoutParams();
        if (bWidth == 0 || bHeight == 0)
            return;
        int swidth = getWidth();
        int sheight = getHeight();
        params.width = swidth;
        params.height = sheight;
        setLayoutParams(params);
    }
}
| |
/*
* Copyright 2017-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.spring.data.spanner.core;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.google.cloud.ByteArray;
import com.google.cloud.spanner.DatabaseClient;
import com.google.cloud.spanner.Key;
import com.google.cloud.spanner.KeySet;
import com.google.cloud.spanner.Mutation;
import com.google.cloud.spanner.ReadContext;
import com.google.cloud.spanner.ReadOnlyTransaction;
import com.google.cloud.spanner.Statement;
import com.google.cloud.spanner.TransactionContext;
import com.google.cloud.spanner.TransactionManager;
import com.google.cloud.spring.data.spanner.core.admin.SpannerSchemaUtils;
import com.google.cloud.spring.data.spanner.core.convert.SpannerEntityProcessor;
import com.google.cloud.spring.data.spanner.core.mapping.Column;
import com.google.cloud.spring.data.spanner.core.mapping.PrimaryKey;
import com.google.cloud.spring.data.spanner.core.mapping.SpannerMappingContext;
import com.google.cloud.spring.data.spanner.core.mapping.Table;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.annotation.Transactional;
/** Tests for Spanner Template when using transactional annotation. */
@RunWith(SpringRunner.class)
@ContextConfiguration
public class SpannerTemplateTransactionManagerTests {

    // Canned mutations returned by the mocked SpannerMutationFactory (see Config).
    private static final List<Mutation> INSERT_MUTATION =
            Arrays.asList(Mutation.newInsertBuilder("custom_test_table").build());
    private static final List<Mutation> UPSERT_MUTATION =
            Arrays.asList(Mutation.newInsertOrUpdateBuilder("custom_test_table").build());
    private static final Mutation DELETE_MUTATION = Mutation.delete("custom_test_table", Key.of("1"));
    private static final Statement DML_STATEMENT = Statement.of("update statement here");

    // Tracks the simulated TransactionManager state across begin/commit/rollback.
    private final AtomicReference<TransactionManager.TransactionState> transactionState =
            new AtomicReference<>();

    /** Used to test for exception messages and types. */
    @Rule public ExpectedException expectedException = ExpectedException.none();

    @MockBean DatabaseClient databaseClient;

    @MockBean ReadContext readContext;

    @MockBean TransactionContext transactionContext;

    @Autowired TransactionalService transactionalService;

    TransactionManager transactionManager;

    @Mock ReadOnlyTransaction readOnlyTransaction;

    // Wires a spied TransactionManager whose begin/commit/rollback update
    // transactionState, so getState() reflects the last lifecycle call.
    @Before
    public void setUp() {
        when(this.databaseClient.singleUse()).thenReturn(this.readContext);
        this.transactionManager = Mockito.spy(TransactionManager.class);
        Mockito.doAnswer(
                        invocation -> {
                            this.transactionState.set(TransactionManager.TransactionState.STARTED);
                            return this.transactionContext;
                        })
                .when(this.transactionManager)
                .begin();
        Mockito.doAnswer(
                        invocation -> {
                            this.transactionState.set(TransactionManager.TransactionState.ROLLED_BACK);
                            return null;
                        })
                .when(this.transactionManager)
                .rollback();
        Mockito.doAnswer(
                        invocation -> {
                            this.transactionState.set(TransactionManager.TransactionState.COMMITTED);
                            return null;
                        })
                .when(this.transactionManager)
                .commit();
        Mockito.doAnswer(invocation -> this.transactionState.get())
                .when(this.transactionManager)
                .getState();
        when(this.databaseClient.transactionManager()).thenReturn(this.transactionManager);
        when(this.databaseClient.readOnlyTransaction()).thenReturn(this.readOnlyTransaction);
        when(this.transactionManager.begin()).thenReturn(this.transactionContext);
    }

    // A read-only @Transactional method must use a read-only transaction and
    // never touch the read-write transaction manager.
    @Test
    public void readOnlyTest() {
        // The transactionManager will NOT be started in readonly
        when(this.transactionManager.getState())
                .thenReturn(TransactionManager.TransactionState.COMMITTED);
        this.transactionalService.readOnlyOperation();

        // begin() is for read-write transactions
        verify(this.transactionManager, times(0)).begin();
        verify(this.databaseClient, times(1)).readOnlyTransaction();
        verify(this.readOnlyTransaction, times(1)).close();
        verify(this.transactionManager, times(0)).rollback();
    }

    // Happy path: all operations buffered in one transaction, then committed.
    @Test
    public void greenPathTransaction() {
        TestEntity entity1 = new TestEntity();
        TestEntity entity2 = new TestEntity();
        this.transactionalService.doInTransaction(entity1, entity2);

        verify(this.transactionManager, times(1)).begin();
        verify(this.transactionManager, times(1)).commit();
        verify(this.transactionManager, times(0)).rollback();
        verify(this.databaseClient, times(1)).transactionManager(); // only 1 transaction

        verify(this.transactionContext, times(2)).buffer(INSERT_MUTATION);
        verify(this.transactionContext, times(1))
                .read(
                        eq("custom_test_table"),
                        eq(KeySet.singleKey(Key.of("abc"))),
                        Mockito.any(Iterable.class),
                        Mockito.any());
        verify(this.transactionContext, times(1)).buffer(Arrays.asList(DELETE_MUTATION));
        verify(this.transactionContext, times(1)).buffer(UPSERT_MUTATION);
        verify(this.transactionContext, times(1)).executeUpdate(DML_STATEMENT);
    }

    // A RuntimeException inside the transactional method must trigger rollback
    // (and no commit), while all prior operations were still buffered.
    @Test
    public void rollBackTransaction() {
        TestEntity entity1 = new TestEntity();
        TestEntity entity2 = new TestEntity();
        Exception exception = null;
        try {
            this.transactionalService.doInTransactionWithException(entity1, entity2);
        } catch (Exception ex) {
            exception = ex;
        }
        assertThat(exception).isNotNull();
        verify(this.transactionManager, times(1)).begin();
        verify(this.transactionManager, times(0)).commit();
        verify(this.transactionManager, times(1)).rollback();
        verify(this.databaseClient, times(1)).transactionManager(); // only 1 transaction

        verify(this.transactionContext, times(2)).buffer(INSERT_MUTATION);
        verify(this.transactionContext, times(1))
                .read(
                        eq("custom_test_table"),
                        eq(KeySet.singleKey(Key.of("abc"))),
                        Mockito.any(Iterable.class),
                        Mockito.any());
        verify(this.transactionContext, times(1)).buffer(Arrays.asList(DELETE_MUTATION));
        verify(this.transactionContext, times(1)).buffer(UPSERT_MUTATION);
    }

    // Non-annotated methods must not interact with the transaction machinery at all.
    @Test
    public void doWithoutTransaction() {
        TestEntity entity1 = new TestEntity();
        TestEntity entity2 = new TestEntity();
        this.transactionalService.doWithoutTransaction(entity1, entity2);

        verify(this.transactionManager, Mockito.never()).begin();
        verify(this.transactionManager, Mockito.never()).commit();
        verify(this.transactionManager, Mockito.never()).rollback();
        verify(this.databaseClient, Mockito.never()).transactionManager(); // only 1 transaction

        verify(this.transactionContext, Mockito.never()).buffer(Mockito.any(List.class));
        verify(this.transactionContext, Mockito.never())
                .read(
                        Mockito.anyString(),
                        Mockito.any(KeySet.class),
                        Mockito.any(Iterable.class),
                        Mockito.any());
    }

    // Mutations are rejected inside a readOnly transaction.
    @Test
    public void readOnlySaveTest() {
        this.expectedException.expectMessage(
                "Spanner transaction cannot apply mutations because it is in readonly mode");
        this.transactionalService.writingInReadOnly(new TestEntity());
    }

    // Deletes are rejected inside a readOnly transaction.
    @Test
    public void readOnlyDeleteTest() {
        this.expectedException.expectMessage(
                "Spanner transaction cannot apply mutations because it is in readonly mode");
        this.transactionalService.deleteInReadOnly(new TestEntity());
    }

    // DML is rejected inside a readOnly transaction.
    @Test
    public void readOnlyDmlTest() {
        this.expectedException.expectMessage(
                "Spanner transaction cannot execute DML because it is in readonly mode");
        this.transactionalService.dmlInReadOnly();
    }

    // Partitioned DML is not allowed inside any transaction.
    @Test
    public void partitionedDmlInTransactionTest() {
        this.expectedException.expectMessage("Cannot execute partitioned DML in a transaction.");
        this.transactionalService.partitionedDmlInTransaction();
    }

    /** Spring config for the tests. */
    @Configuration
    @EnableTransactionManagement
    static class Config {

        // SpannerTemplate backed by a mocked mutation factory that returns the
        // canned INSERT/UPSERT/DELETE mutations declared above.
        @Bean
        public SpannerTemplate spannerTemplate(DatabaseClient databaseClient) {
            SpannerMappingContext mappingContext = new SpannerMappingContext();
            SpannerEntityProcessor objectMapper = Mockito.mock(SpannerEntityProcessor.class);
            SpannerMutationFactory mutationFactory = Mockito.mock(SpannerMutationFactory.class);

            when(mutationFactory.insert(Mockito.any(TestEntity.class))).thenReturn(INSERT_MUTATION);
            when(mutationFactory.upsert(Mockito.any(TestEntity.class), Mockito.any()))
                    .thenReturn(UPSERT_MUTATION);
            when(mutationFactory.delete(Mockito.any(TestEntity.class))).thenReturn(DELETE_MUTATION);
            SpannerSchemaUtils schemaUtils = new SpannerSchemaUtils(mappingContext, objectMapper, true);

            return new SpannerTemplate(
                    () -> databaseClient, mappingContext, objectMapper, mutationFactory, schemaUtils);
        }

        @Bean
        public SpannerTransactionManager spannerTransactionManager(DatabaseClient databaseClient) {
            return new SpannerTransactionManager(() -> databaseClient);
        }

        @Bean
        TransactionalService transactionalService() {
            return new TransactionalService();
        }
    }

    /** A mock transactional service to execute methods annotated as transactional. */
    public static class TransactionalService {
        @Autowired SpannerTemplate spannerTemplate;

        @Transactional
        public void doInTransaction(TestEntity entity1, TestEntity entity2) {
            this.spannerTemplate.read(TestEntity.class, Key.of("abc"));
            this.spannerTemplate.executeDmlStatement(DML_STATEMENT);
            this.spannerTemplate.insert(entity1);
            this.spannerTemplate.insert(entity2);
            this.spannerTemplate.delete(entity1);
            this.spannerTemplate.upsert(entity2);
        }

        // Same operations as doInTransaction (minus DML) but throws at the end
        // to exercise the rollback path.
        @Transactional
        public void doInTransactionWithException(TestEntity entity1, TestEntity entity2) {
            this.spannerTemplate.read(TestEntity.class, Key.of("abc"));
            this.spannerTemplate.insert(entity1);
            this.spannerTemplate.insert(entity2);
            this.spannerTemplate.delete(entity1);
            this.spannerTemplate.upsert(entity2);
            throw new RuntimeException("oops");
        }

        public void doWithoutTransaction(TestEntity entity1, TestEntity entity2) {
            this.spannerTemplate.read(TestEntity.class, Key.of("abc"));
            this.spannerTemplate.insert(entity1);
            this.spannerTemplate.insert(entity2);
            this.spannerTemplate.delete(entity1);
            this.spannerTemplate.upsert(entity2);
        }

        @Transactional(readOnly = true)
        public void readOnlyOperation() {
            this.spannerTemplate.read(TestEntity.class, Key.of("abc"));
        }

        @Transactional(readOnly = true)
        public void writingInReadOnly(TestEntity testEntity) {
            this.spannerTemplate.upsert(testEntity);
        }

        @Transactional(readOnly = true)
        public void deleteInReadOnly(TestEntity testEntity) {
            this.spannerTemplate.delete(testEntity);
        }

        @Transactional(readOnly = true)
        public void dmlInReadOnly() {
            this.spannerTemplate.executeDmlStatement(Statement.of("fake"));
        }

        @Transactional
        public void partitionedDmlInTransaction() {
            this.spannerTemplate.executePartitionedDmlStatement(
                    Statement.of("partitioned dml statement here"));
        }
    }

    /** Minimal mapped entity covering key, named, and unnamed columns. */
    @Table(name = "custom_test_table")
    private static class TestEntity {
        @PrimaryKey(keyOrder = 1)
        String id;

        @PrimaryKey(keyOrder = 2)
        long id2;

        @Column(name = "custom_col")
        String something;

        @Column(name = "")
        String other;

        ByteArray bytes;

        List<ByteArray> bytesList;

        List<Integer> integerList;

        double[] doubles;
    }
}
| |
package com.mobeta.android.dslv;
import android.graphics.Point;
import android.view.GestureDetector;
import android.view.HapticFeedbackConstants;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewConfiguration;
import android.widget.AdapterView;
/**
* Class that starts and stops item drags on a {@link DragSortListView}
* based on touch gestures. This class also inherits from
* {@link SimpleFloatViewManager}, which provides basic float View
* creation.
*
* An instance of this class is meant to be passed to the methods
* {@link DragSortListView#setTouchListener()} and
* {@link DragSortListView#setFloatViewManager()} of your
* {@link DragSortListView} instance.
*/
public class DragSortController extends SimpleFloatViewManager implements View.OnTouchListener, GestureDetector.OnGestureListener {
    /**
     * Drag init mode enum.
     */
    public static final int ON_DOWN = 0;
    public static final int ON_DRAG = 1;
    public static final int ON_LONG_PRESS = 2;

    // How a drag is initiated; one of the constants above.
    private int mDragInitMode = ON_DOWN;

    private boolean mSortEnabled = true;

    /**
     * Remove mode enum.
     */
    public static final int CLICK_REMOVE = 0;
    public static final int FLING_REMOVE = 1;

    /**
     * The current remove mode.
     */
    private int mRemoveMode;

    private boolean mRemoveEnabled = false;
    private boolean mIsRemoving = false;

    // Detector for drag gestures; second detector handles fling-remove only.
    private GestureDetector mDetector;

    private GestureDetector mFlingRemoveDetector;

    private int mTouchSlop;

    // Sentinel for "no list position hit".
    public static final int MISS = -1;

    private int mHitPos = MISS;
    private int mFlingHitPos = MISS;

    private int mClickRemoveHitPos = MISS;

    // Scratch buffer for View.getLocationOnScreen-style lookups.
    private int[] mTempLoc = new int[2];

    private int mItemX;
    private int mItemY;

    private int mCurrX;
    private int mCurrY;

    private boolean mDragging = false;

    private float mFlingSpeed = 500f;

    // Resource ids of the drag handle, click-remove, and fling handle child views.
    private int mDragHandleId;

    private int mClickRemoveId;

    private int mFlingHandleId;
    private boolean mCanDrag;

    private DragSortListView mDslv;
    private int mPositionX;
    /**
     * Calls {@link #DragSortController(DragSortListView, int)} with a
     * 0 drag handle id, {@link #FLING_REMOVE} remove mode,
     * and {@link #ON_DOWN} drag init. By default, sorting is enabled, and
     * removal is disabled.
     *
     * @param dslv The DSLV instance
     */
    public DragSortController(DragSortListView dslv) {
        this(dslv, 0, ON_DOWN, FLING_REMOVE);
    }
    /** Delegates to the full constructor with a 0 click-remove id. */
    public DragSortController(DragSortListView dslv, int dragHandleId, int dragInitMode, int removeMode) {
        this(dslv, dragHandleId, dragInitMode, removeMode, 0);
    }

    /** Delegates to the full constructor with a 0 fling handle id. */
    public DragSortController(DragSortListView dslv, int dragHandleId, int dragInitMode, int removeMode, int clickRemoveId) {
        this(dslv, dragHandleId, dragInitMode, removeMode, clickRemoveId, 0);
    }
    /**
     * By default, sorting is enabled, and removal is disabled.
     *
     * @param dslv The DSLV instance
     * @param dragHandleId The resource id of the View that represents
     * the drag handle in a list item.
     * @param dragInitMode One of {@link #ON_DOWN}, {@link #ON_DRAG},
     * or {@link #ON_LONG_PRESS}.
     * @param removeMode One of {@link #CLICK_REMOVE} or {@link #FLING_REMOVE}.
     * @param clickRemoveId Resource id of the click-remove View in a list item.
     * @param flingHandleId Resource id of the fling handle View in a list item.
     */
    public DragSortController(DragSortListView dslv, int dragHandleId, int dragInitMode,
            int removeMode, int clickRemoveId, int flingHandleId) {
        super(dslv);
        mDslv = dslv;
        // Main detector handles taps/drags; the fling detector only recognizes
        // fling-remove gestures and must not fire long-press.
        mDetector = new GestureDetector(dslv.getContext(), this);
        mFlingRemoveDetector = new GestureDetector(dslv.getContext(), mFlingRemoveListener);
        mFlingRemoveDetector.setIsLongpressEnabled(false);
        mTouchSlop = ViewConfiguration.get(dslv.getContext()).getScaledTouchSlop();
        mDragHandleId = dragHandleId;
        mClickRemoveId = clickRemoveId;
        mFlingHandleId = flingHandleId;
        setRemoveMode(removeMode);
        setDragInitMode(dragInitMode);
    }
public int getDragInitMode() {
return mDragInitMode;
}
/**
* Set how a drag is initiated. Needs to be one of
* {@link ON_DOWN}, {@link ON_DRAG}, or {@link ON_LONG_PRESS}.
*
* @param mode The drag init mode.
*/
public void setDragInitMode(int mode) {
mDragInitMode = mode;
}
/**
* Enable/Disable list item sorting. Disabling is useful if only item
* removal is desired. Prevents drags in the vertical direction.
*
* @param enabled Set <code>true</code> to enable list
* item sorting.
*/
public void setSortEnabled(boolean enabled) {
mSortEnabled = enabled;
}
public boolean isSortEnabled() {
return mSortEnabled;
}
/**
* One of {@link CLICK_REMOVE}, {@link FLING_RIGHT_REMOVE},
* {@link FLING_LEFT_REMOVE},
* {@link SLIDE_RIGHT_REMOVE}, or {@link SLIDE_LEFT_REMOVE}.
*/
public void setRemoveMode(int mode) {
mRemoveMode = mode;
}
public int getRemoveMode() {
return mRemoveMode;
}
/**
* Enable/Disable item removal without affecting remove mode.
*/
public void setRemoveEnabled(boolean enabled) {
mRemoveEnabled = enabled;
}
public boolean isRemoveEnabled() {
return mRemoveEnabled;
}
/**
* Set the resource id for the View that represents the drag
* handle in a list item.
*
* @param id An android resource id.
*/
public void setDragHandleId(int id) {
mDragHandleId = id;
}
/**
* Set the resource id for the View that represents the fling
* handle in a list item.
*
* @param id An android resource id.
*/
public void setFlingHandleId(int id) {
mFlingHandleId = id;
}
/**
* Set the resource id for the View that represents click
* removal button.
*
* @param id An android resource id.
*/
public void setClickRemoveId(int id) {
mClickRemoveId = id;
}
/**
* Sets flags to restrict certain motions of the floating View
* based on DragSortController settings (such as remove mode).
* Starts the drag on the DragSortListView.
*
* @param position The list item position (includes headers).
* @param deltaX Touch x-coord minus left edge of floating View.
* @param deltaY Touch y-coord minus top edge of floating View.
*
* @return True if drag started, false otherwise.
*/
public boolean startDrag(int position, int deltaX, int deltaY) {
int dragFlags = 0;
if (mSortEnabled && !mIsRemoving) {
dragFlags |= DragSortListView.DRAG_POS_Y | DragSortListView.DRAG_NEG_Y;
}
if (mRemoveEnabled && mIsRemoving) {
dragFlags |= DragSortListView.DRAG_POS_X;
dragFlags |= DragSortListView.DRAG_NEG_X;
}
mDragging = mDslv.startDrag(position - mDslv.getHeaderViewsCount(), dragFlags, deltaX,
deltaY);
return mDragging;
}
@Override
public boolean onTouch(View v, MotionEvent ev) {
if (!mDslv.isDragEnabled() || mDslv.listViewIntercepted()) {
return false;
}
mDetector.onTouchEvent(ev);
if (mRemoveEnabled && mDragging && mRemoveMode == FLING_REMOVE) {
mFlingRemoveDetector.onTouchEvent(ev);
}
int action = ev.getAction() & MotionEvent.ACTION_MASK;
switch (action) {
case MotionEvent.ACTION_DOWN:
mCurrX = (int) ev.getX();
mCurrY = (int) ev.getY();
break;
case MotionEvent.ACTION_UP:
if (mRemoveEnabled && mIsRemoving) {
int x = mPositionX >= 0 ? mPositionX : -mPositionX;
int removePoint = mDslv.getWidth() / 2;
if (x > removePoint) {
mDslv.stopDragWithVelocity(true, 0);
}
}
case MotionEvent.ACTION_CANCEL:
mIsRemoving = false;
mDragging = false;
break;
}
return false;
}
/**
* Overrides to provide fading when slide removal is enabled.
*/
@Override
public void onDragFloatView(View floatView, Point position, Point touch) {
if (mRemoveEnabled && mIsRemoving) {
mPositionX = position.x;
}
}
/**
* Get the position to start dragging based on the ACTION_DOWN
* MotionEvent. This function simply calls
* {@link #dragHandleHitPosition(MotionEvent)}. Override
* to change drag handle behavior;
* this function is called internally when an ACTION_DOWN
* event is detected.
*
* @param ev The ACTION_DOWN MotionEvent.
*
* @return The list position to drag if a drag-init gesture is
* detected; MISS if unsuccessful.
*/
public int startDragPosition(MotionEvent ev) {
return dragHandleHitPosition(ev);
}
public int startFlingPosition(MotionEvent ev) {
return mRemoveMode == FLING_REMOVE ? flingHandleHitPosition(ev) : MISS;
}
/**
* Checks for the touch of an item's drag handle (specified by
* {@link #setDragHandleId(int)}), and returns that item's position
* if a drag handle touch was detected.
*
* @param ev The ACTION_DOWN MotionEvent.
* @return The list position of the item whose drag handle was
* touched; MISS if unsuccessful.
*/
public int dragHandleHitPosition(MotionEvent ev) {
return viewIdHitPosition(ev, mDragHandleId);
}
public int flingHandleHitPosition(MotionEvent ev) {
return viewIdHitPosition(ev, mFlingHandleId);
}
public int viewIdHitPosition(MotionEvent ev, int id) {
final int x = (int) ev.getX();
final int y = (int) ev.getY();
int touchPos = mDslv.pointToPosition(x, y); // includes headers/footers
final int numHeaders = mDslv.getHeaderViewsCount();
final int numFooters = mDslv.getFooterViewsCount();
final int count = mDslv.getCount();
// Log.d("mobeta", "touch down on position " + itemnum);
// We're only interested if the touch was on an
// item that's not a header or footer.
if (touchPos != AdapterView.INVALID_POSITION && touchPos >= numHeaders
&& touchPos < (count - numFooters)) {
final View item = mDslv.getChildAt(touchPos - mDslv.getFirstVisiblePosition());
final int rawX = (int) ev.getRawX();
final int rawY = (int) ev.getRawY();
View dragBox = id == 0 ? item : (View) item.findViewById(id);
if (dragBox != null) {
dragBox.getLocationOnScreen(mTempLoc);
if (rawX > mTempLoc[0] && rawY > mTempLoc[1] &&
rawX < mTempLoc[0] + dragBox.getWidth() &&
rawY < mTempLoc[1] + dragBox.getHeight()) {
mItemX = item.getLeft();
mItemY = item.getTop();
return touchPos;
}
}
}
return MISS;
}
@Override
public boolean onDown(MotionEvent ev) {
if (mRemoveEnabled && mRemoveMode == CLICK_REMOVE) {
mClickRemoveHitPos = viewIdHitPosition(ev, mClickRemoveId);
}
mHitPos = startDragPosition(ev);
if (mHitPos != MISS && mDragInitMode == ON_DOWN) {
startDrag(mHitPos, (int) ev.getX() - mItemX, (int) ev.getY() - mItemY);
}
mIsRemoving = false;
mCanDrag = true;
mPositionX = 0;
mFlingHitPos = startFlingPosition(ev);
return true;
}
@Override
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
// it can happen where the motion events are null
if (e1 == null || e2 == null) {
// we can't really do anything
return false;
}
final int x1 = (int) e1.getX();
final int y1 = (int) e1.getY();
final int x2 = (int) e2.getX();
final int y2 = (int) e2.getY();
final int deltaX = x2 - mItemX;
final int deltaY = y2 - mItemY;
if (mCanDrag && !mDragging && (mHitPos != MISS || mFlingHitPos != MISS)) {
if (mHitPos != MISS) {
if (mDragInitMode == ON_DRAG && Math.abs(y2 - y1) > mTouchSlop && mSortEnabled) {
startDrag(mHitPos, deltaX, deltaY);
}
else if (mDragInitMode != ON_DOWN && Math.abs(x2 - x1) > mTouchSlop && mRemoveEnabled)
{
mIsRemoving = true;
startDrag(mFlingHitPos, deltaX, deltaY);
}
} else if (mFlingHitPos != MISS) {
if (Math.abs(x2 - x1) > mTouchSlop && mRemoveEnabled) {
mIsRemoving = true;
startDrag(mFlingHitPos, deltaX, deltaY);
} else if (Math.abs(y2 - y1) > mTouchSlop) {
mCanDrag = false; // if started to scroll the list then
// don't allow sorting nor fling-removing
}
}
}
// return whatever
return false;
}
@Override
public void onLongPress(MotionEvent e) {
// Log.d("mobeta", "lift listener long pressed");
if (mHitPos != MISS && mDragInitMode == ON_LONG_PRESS) {
mDslv.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS);
startDrag(mHitPos, mCurrX - mItemX, mCurrY - mItemY);
}
}
// complete the OnGestureListener interface
@Override
public final boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
return false;
}
// complete the OnGestureListener interface
@Override
public boolean onSingleTapUp(MotionEvent ev) {
if (mRemoveEnabled && mRemoveMode == CLICK_REMOVE) {
if (mClickRemoveHitPos != MISS) {
mDslv.removeItem(mClickRemoveHitPos - mDslv.getHeaderViewsCount());
}
}
return true;
}
// complete the OnGestureListener interface
@Override
public void onShowPress(MotionEvent ev) {
// do nothing
}
private GestureDetector.OnGestureListener mFlingRemoveListener =
new GestureDetector.SimpleOnGestureListener() {
@Override
public final boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX,
float velocityY) {
ViewConfiguration vc = ViewConfiguration.get(mDslv.getContext());
int minSwipeVelocity = vc.getScaledMinimumFlingVelocity();
int maxSwipeVelocity = vc.getScaledMaximumFlingVelocity();
if (mRemoveEnabled && mIsRemoving) {
int w = mDslv.getWidth();
if(mPositionX >= w/2) {
mDslv.stopDragWithVelocity(true, velocityX);
} else if(mPositionX >= w/5 && minSwipeVelocity <= velocityX && velocityX <= maxSwipeVelocity) {
mDslv.stopDragWithVelocity(true, velocityX);
}
mIsRemoving = false;
}
return false;
}
};
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python.refactoring;
import com.intellij.lang.LanguageRefactoringSupport;
import com.intellij.lang.refactoring.RefactoringSupportProvider;
import com.intellij.refactoring.RefactoringActionHandler;
import com.jetbrains.python.PythonLanguage;
import com.jetbrains.python.fixtures.LightMarkedTestCase;
import com.jetbrains.python.psi.LanguageLevel;
import com.jetbrains.python.refactoring.extractmethod.PyExtractMethodUtil;
/**
 * Tests for the Python "Extract Method" refactoring.
 * <p>
 * Each test loads {@code <TestName>.before.py}, runs the extract-method
 * handler with a preset target name, and compares the result against
 * {@code <TestName>.after.py}. Negative tests assert the exact error message
 * produced by the refactoring.
 */
public class PyExtractMethodTest extends LightMarkedTestCase {
  // Fixture directory, relative to the test data root.
  private static final String TEST_DATA_DIR = "refactoring/extractmethod/";

  /** Runs {@link #doTest(String)} under the given Python language level. */
  private void doTest(String newName, LanguageLevel level) {
    runWithLanguageLevel(level, () -> doTest(newName));
  }

  /**
   * Applies extract-method with {@code newName} to this test's
   * {@code .before.py} fixture and checks the result against
   * {@code .after.py}.
   */
  private void doTest(String newName) {
    final String baseName = getTestName(false);
    myFixture.configureByFile(TEST_DATA_DIR + baseName + ".before.py");
    final RefactoringSupportProvider provider =
      LanguageRefactoringSupport.INSTANCE.forLanguage(PythonLanguage.getInstance());
    assertNotNull(provider);
    final RefactoringActionHandler handler = provider.getExtractMethodHandler();
    assertNotNull(handler);
    // The extract-method dialog is bypassed in tests: the target name is
    // communicated through a system property read by PyExtractMethodUtil.
    System.setProperty(PyExtractMethodUtil.NAME, newName);
    try {
      refactorUsingHandler(handler);
    }
    finally {
      System.clearProperty(PyExtractMethodUtil.NAME);
    }
    myFixture.checkResultByFile(TEST_DATA_DIR + baseName + ".after.py");
  }

  /**
   * Expects {@link #doTest(String)} to throw an exception whose message is
   * exactly {@code message}; fails otherwise.
   */
  private void doFail(String newName, String message) {
    boolean exceptionSeen = false;
    try {
      doTest(newName);
    }
    catch (Exception e) {
      assertEquals(message, e.getMessage());
      exceptionSeen = true;
    }
    if (!exceptionSeen) {
      fail("No exception was thrown");
    }
  }
  public void testParameter() {
    doTest("bar");
  }
  public void testBreakAst() {
    doTest("bar");
  }
  public void testExpression() {
    doTest("plus");
  }
  public void testStatement() {
    doTest("foo");
  }
  public void testStatements() {
    doTest("foo");
  }
  public void testStatementReturn() {
    doTest("foo");
  }
  public void testBinaryExpression() {
    doTest("foo");
  }
  public void testWhileOutput() {
    doTest("bar");
  }
  public void testNameCollisionFile() {
    doFail("hello", "Method name clashes with already existing name");
  }
  public void testNameCollisionSuperClass() {
    doFail("hello", "Method name clashes with already existing name");
  }
  public void testOutNotEmptyStatements() {
    doTest("sum_squares");
  }
  public void testOutNotEmptyStatements2() {
    doTest("sum_squares");
  }
  public void testFile() {
    doTest("bar");
  }
  public void testMethodContext() {
    doTest("bar");
  }
  public void testMethodIndent() {
    doTest("bar");
  }
  public void testMethodReturn() {
    doTest("bar");
  }
  public void testWrongSelectionIfPart() {
    doFail("bar", "Cannot perform extract method using selected element(s)");
  }
  public void testWrongSelectionFromImportStar() {
    doFail("bar", "Cannot perform refactoring with star import statement inside code block");
  }
  public void testPy479() {
    doTest("bar");
  }
  public void testConditionalReturn() {
    doFail("bar", "Cannot perform refactoring when execution flow is interrupted");
  }
  public void testReturnTuple() {
    doTest("bar");
  }
  public void testCommentIncluded() {
    doTest("baz");
  }
  public void testElseBody() {
    doTest("baz");
  }
  public void testClassMethod() {
    doTest("baz");
  }
  public void testStaticMethod() {
    doTest("baz");
  }
  // PY-5123
  public void testMethodInIf() {
    doTest("baz");
  }
  // PY-6081
  public void testLocalVarDefinedBeforeModifiedInside() {
    doTest("bar");
  }
  // PY-6391
  public void testDefinedBeforeAccessedAfter() {
    doTest("bar");
  }
  // PY-5865
  public void testSingleRaise() {
    doTest("bar");
  }
  // PY-4156
  public void testLocalFunction() {
    doTest("bar");
  }
  // PY-6413
  public void testTryFinally() {
    doTest("bar");
  }
  // PY-6414
  public void testTryContext() {
    doTest("bar");
  }
  // PY-6416
  public void testCommentAfterSelection() {
    doTest("bar");
  }
  // PY-6417
  public void testGlobalVarAssignment() {
    doTest("bar");
  }
  // PY-6619
  public void testGlobalToplevelAssignment() {
    doTest("bar");
  }
  // PY-6623
  public void testForLoopContinue() {
    doFail("bar", "Cannot perform refactoring when execution flow is interrupted");
  }
  // PY-6622
  public void testClassWithoutInit() {
    doTest("bar");
  }
  // PY-6625
  public void testNonlocal() {
    doTest("baz", LanguageLevel.PYTHON34);
  }
  // PY-7381
  public void testYield() {
    doFail("bar", "Cannot perform refactoring with 'yield' statement inside code block");
  }
  // PY-7382
  public void testYield33() {
    doTest("bar", LanguageLevel.PYTHON34);
  }
  // PY-7399
  public void testYieldFrom33() {
    doTest("bar", LanguageLevel.PYTHON34);
  }
  public void testDuplicateSingleLine() {
    doTest("foo");
  }
  public void testDuplicateMultiLine() {
    doTest("foo");
  }
  public void testDuplicateInClass() {
    doTest("foo");
  }
  public void testDuplicateWithRename() {
    doTest("foo");
  }
  public void testDuplicateCheckParam() {
    doTest("foo");
  }
  // PY-7753
  public void testRedundantGlobalInTopLevelFunction() {
    doTest("foo");
  }
  // PY-6620
  public void testProhibitedAtClassLevel() {
    doFail("foo", "Cannot perform refactoring at class level");
  }
  public void testAsyncDef() {
    doTest("bar", LanguageLevel.PYTHON35);
  }
  public void testAwaitExpression() {
    doTest("bar", LanguageLevel.PYTHON35);
  }
  public void testCommentsPrecedingSourceStatement() {
    doTest("func");
  }
  // PY-28972
  public void testInterruptedOuterLoop() {
    doFail("foo", "Cannot perform refactoring when execution flow is interrupted");
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.geo;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LineString;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.MultiLineString;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions;
import org.locationtech.spatial4j.exception.InvalidShapeException;
import org.locationtech.spatial4j.shape.Circle;
import org.locationtech.spatial4j.shape.Rectangle;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.ShapeCollection;
import org.locationtech.spatial4j.shape.jts.JtsGeometry;
import org.locationtech.spatial4j.shape.jts.JtsPoint;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.elasticsearch.common.geo.builders.ShapeBuilder.SPATIAL_CONTEXT;
/**
* Tests for {@code GeoJSONShapeParser}
*/
public class GeoJSONShapeParserTests extends ESTestCase {
    // JTS factory from the shared spatial context; builds the expected geometries.
    private static final GeometryFactory GEOMETRY_FACTORY = SPATIAL_CONTEXT.getGeometryFactory();
public void testParseSimplePoint() throws IOException {
XContentBuilder pointGeoJson = XContentFactory.jsonBuilder()
.startObject()
.field("type", "Point")
.startArray("coordinates").value(100.0).value(0.0).endArray()
.endObject();
Point expected = GEOMETRY_FACTORY.createPoint(new Coordinate(100.0, 0.0));
assertGeometryEquals(new JtsPoint(expected, SPATIAL_CONTEXT), pointGeoJson);
}
public void testParseLineString() throws IOException {
XContentBuilder lineGeoJson = XContentFactory.jsonBuilder()
.startObject()
.field("type", "LineString")
.startArray("coordinates")
.startArray().value(100.0).value(0.0).endArray()
.startArray().value(101.0).value(1.0).endArray()
.endArray()
.endObject();
List<Coordinate> lineCoordinates = new ArrayList<>();
lineCoordinates.add(new Coordinate(100, 0));
lineCoordinates.add(new Coordinate(101, 1));
LineString expected = GEOMETRY_FACTORY.createLineString(
lineCoordinates.toArray(new Coordinate[lineCoordinates.size()]));
assertGeometryEquals(jtsGeom(expected), lineGeoJson);
}
public void testParseMultiLineString() throws IOException {
XContentBuilder multilinesGeoJson = XContentFactory.jsonBuilder()
.startObject()
.field("type", "MultiLineString")
.startArray("coordinates")
.startArray()
.startArray().value(100.0).value(0.0).endArray()
.startArray().value(101.0).value(1.0).endArray()
.endArray()
.startArray()
.startArray().value(102.0).value(2.0).endArray()
.startArray().value(103.0).value(3.0).endArray()
.endArray()
.endArray()
.endObject();
MultiLineString expected = GEOMETRY_FACTORY.createMultiLineString(new LineString[]{
GEOMETRY_FACTORY.createLineString(new Coordinate[]{
new Coordinate(100, 0),
new Coordinate(101, 1),
}),
GEOMETRY_FACTORY.createLineString(new Coordinate[]{
new Coordinate(102, 2),
new Coordinate(103, 3),
}),
});
assertGeometryEquals(jtsGeom(expected), multilinesGeoJson);
}
public void testParseCircle() throws IOException {
XContentBuilder multilinesGeoJson = XContentFactory.jsonBuilder()
.startObject()
.field("type", "circle")
.startArray("coordinates").value(100.0).value(0.0).endArray()
.field("radius", "100m")
.endObject();
Circle expected = SPATIAL_CONTEXT.makeCircle(100.0, 0.0, 360 * 100 / GeoUtils.EARTH_EQUATOR);
assertGeometryEquals(expected, multilinesGeoJson);
}
public void testParseMultiDimensionShapes() throws IOException {
// multi dimension point
XContentBuilder pointGeoJson = XContentFactory.jsonBuilder()
.startObject()
.field("type", "Point")
.startArray("coordinates").value(100.0).value(0.0).value(15.0).value(18.0).endArray()
.endObject();
Point expectedPt = GEOMETRY_FACTORY.createPoint(new Coordinate(100.0, 0.0));
assertGeometryEquals(new JtsPoint(expectedPt, SPATIAL_CONTEXT), pointGeoJson);
// multi dimension linestring
XContentBuilder lineGeoJson = XContentFactory.jsonBuilder()
.startObject()
.field("type", "LineString")
.startArray("coordinates")
.startArray().value(100.0).value(0.0).value(15.0).endArray()
.startArray().value(101.0).value(1.0).value(18.0).value(19.0).endArray()
.endArray()
.endObject();
List<Coordinate> lineCoordinates = new ArrayList<>();
lineCoordinates.add(new Coordinate(100, 0));
lineCoordinates.add(new Coordinate(101, 1));
LineString expectedLS = GEOMETRY_FACTORY.createLineString(
lineCoordinates.toArray(new Coordinate[lineCoordinates.size()]));
assertGeometryEquals(jtsGeom(expectedLS), lineGeoJson);
}
public void testParseEnvelope() throws IOException {
// test #1: envelope with expected coordinate order (TopLeft, BottomRight)
XContentBuilder multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope")
.startArray("coordinates")
.startArray().value(-50).value(30).endArray()
.startArray().value(50).value(-30).endArray()
.endArray()
.endObject();
Rectangle expected = SPATIAL_CONTEXT.makeRectangle(-50, 50, -30, 30);
assertGeometryEquals(expected, multilinesGeoJson);
// test #2: envelope with agnostic coordinate order (TopRight, BottomLeft)
multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope")
.startArray("coordinates")
.startArray().value(50).value(30).endArray()
.startArray().value(-50).value(-30).endArray()
.endArray()
.endObject();
expected = SPATIAL_CONTEXT.makeRectangle(-50, 50, -30, 30);
assertGeometryEquals(expected, multilinesGeoJson);
// test #3: "envelope" (actually a triangle) with invalid number of coordinates (TopRight, BottomLeft, BottomRight)
multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope")
.startArray("coordinates")
.startArray().value(50).value(30).endArray()
.startArray().value(-50).value(-30).endArray()
.startArray().value(50).value(-39).endArray()
.endArray()
.endObject();
XContentParser parser = createParser(multilinesGeoJson);
parser.nextToken();
ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);
// test #4: "envelope" with empty coordinates
multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope")
.startArray("coordinates")
.endArray()
.endObject();
parser = createParser(multilinesGeoJson);
parser.nextToken();
ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);
}
public void testParsePolygonNoHoles() throws IOException {
XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder()
.startObject()
.field("type", "Polygon")
.startArray("coordinates")
.startArray()
.startArray().value(100.0).value(1.0).endArray()
.startArray().value(101.0).value(1.0).endArray()
.startArray().value(101.0).value(0.0).endArray()
.startArray().value(100.0).value(0.0).endArray()
.startArray().value(100.0).value(1.0).endArray()
.endArray()
.endArray()
.endObject();
List<Coordinate> shellCoordinates = new ArrayList<>();
shellCoordinates.add(new Coordinate(100, 0));
shellCoordinates.add(new Coordinate(101, 0));
shellCoordinates.add(new Coordinate(101, 1));
shellCoordinates.add(new Coordinate(100, 1));
shellCoordinates.add(new Coordinate(100, 0));
LinearRing shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null);
assertGeometryEquals(jtsGeom(expected), polygonGeoJson);
}
public void testParseInvalidPoint() throws IOException {
// test case 1: create an invalid point object with multipoint data format
XContentBuilder invalidPoint1 = XContentFactory.jsonBuilder()
.startObject()
.field("type", "point")
.startArray("coordinates")
.startArray().value(-74.011).value(40.753).endArray()
.endArray()
.endObject();
XContentParser parser = createParser(invalidPoint1);
parser.nextToken();
ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);
// test case 2: create an invalid point object with an empty number of coordinates
XContentBuilder invalidPoint2 = XContentFactory.jsonBuilder()
.startObject()
.field("type", "point")
.startArray("coordinates")
.endArray()
.endObject();
parser = createParser(invalidPoint2);
parser.nextToken();
ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);
}
public void testParseInvalidMultipoint() throws IOException {
// test case 1: create an invalid multipoint object with single coordinate
XContentBuilder invalidMultipoint1 = XContentFactory.jsonBuilder()
.startObject()
.field("type", "multipoint")
.startArray("coordinates").value(-74.011).value(40.753).endArray()
.endObject();
XContentParser parser = createParser(invalidMultipoint1);
parser.nextToken();
ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);
// test case 2: create an invalid multipoint object with null coordinate
XContentBuilder invalidMultipoint2 = XContentFactory.jsonBuilder()
.startObject()
.field("type", "multipoint")
.startArray("coordinates")
.endArray()
.endObject();
parser = createParser(invalidMultipoint2);
parser.nextToken();
ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);
// test case 3: create a valid formatted multipoint object with invalid number (0) of coordinates
XContentBuilder invalidMultipoint3 = XContentFactory.jsonBuilder()
.startObject()
.field("type", "multipoint")
.startArray("coordinates")
.startArray().endArray()
.endArray()
.endObject();
parser = createParser(invalidMultipoint3);
parser.nextToken();
ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);
}
    /**
     * A MultiPolygon whose single polygon has "holes" lying outside its outer
     * ring is geometrically invalid and must fail with an
     * {@link InvalidShapeException} from the shape builder.
     */
    public void testParseInvalidMultiPolygon() throws IOException {
        // test invalid multipolygon (an "accidental" polygon with inner rings outside outer ring)
        String multiPolygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "MultiPolygon")
                .startArray("coordinates")
                .startArray()//one poly (with two holes)
                .startArray()
                .startArray().value(102.0).value(2.0).endArray()
                .startArray().value(103.0).value(2.0).endArray()
                .startArray().value(103.0).value(3.0).endArray()
                .startArray().value(102.0).value(3.0).endArray()
                .startArray().value(102.0).value(2.0).endArray()
                .endArray()
                .startArray()// first hole
                .startArray().value(100.0).value(0.0).endArray()
                .startArray().value(101.0).value(0.0).endArray()
                .startArray().value(101.0).value(1.0).endArray()
                .startArray().value(100.0).value(1.0).endArray()
                .startArray().value(100.0).value(0.0).endArray()
                .endArray()
                .startArray()//second hole
                .startArray().value(100.2).value(0.8).endArray()
                .startArray().value(100.2).value(0.2).endArray()
                .startArray().value(100.8).value(0.2).endArray()
                .startArray().value(100.8).value(0.8).endArray()
                .startArray().value(100.2).value(0.8).endArray()
                .endArray()
                .endArray()
                .endArray()
                .endObject().string();
        XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson);
        parser.nextToken();
        ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class);
    }
    /**
     * Hole-free polygons in both ring orientations (ccw per OGC, and cw):
     * rings that stay on one side of the dateline parse to a single polygon,
     * while rings interpreted as crossing the dateline are split into a
     * multi-polygon.
     */
    public void testParseOGCPolygonWithoutHoles() throws IOException {
        // test 1: ccw poly not crossing dateline
        String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(-177.0).value(10.0).endArray()
                .startArray().value(-177.0).value(-10.0).endArray()
                .startArray().value(176.0).value(-15.0).endArray()
                .startArray().value(172.0).value(0.0).endArray()
                .startArray().value(176.0).value(15.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();
        XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
        Shape shape = ShapeBuilder.parse(parser).build();
        ElasticsearchGeoAssertions.assertPolygon(shape);
        // test 2: ccw poly crossing dateline
        polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(-177.0).value(10.0).endArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(172.0).value(0.0).endArray()
                .startArray().value(176.0).value(-15.0).endArray()
                .startArray().value(-177.0).value(-10.0).endArray()
                .startArray().value(-177.0).value(10.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();
        parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
        shape = ShapeBuilder.parse(parser).build();
        ElasticsearchGeoAssertions.assertMultiPolygon(shape);
        // test 3: cw poly not crossing dateline
        polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(180.0).value(10.0).endArray()
                .startArray().value(180.0).value(-10.0).endArray()
                .startArray().value(176.0).value(-15.0).endArray()
                .startArray().value(172.0).value(0.0).endArray()
                .startArray().value(176.0).value(15.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();
        parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
        shape = ShapeBuilder.parse(parser).build();
        ElasticsearchGeoAssertions.assertPolygon(shape);
        // test 4: cw poly crossing dateline
        polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(184.0).value(15.0).endArray()
                .startArray().value(184.0).value(0.0).endArray()
                .startArray().value(176.0).value(-15.0).endArray()
                .startArray().value(174.0).value(-10.0).endArray()
                .startArray().value(176.0).value(15.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();
        parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
        shape = ShapeBuilder.parse(parser).build();
        ElasticsearchGeoAssertions.assertMultiPolygon(shape);
    }
    /**
     * Same orientation/dateline matrix as {@code testParseOGCPolygonWithoutHoles},
     * but each polygon carries one inner ring (hole): non-crossing rings parse
     * to a single polygon, dateline-crossing rings to a multi-polygon.
     */
    public void testParseOGCPolygonWithHoles() throws IOException {
        // test 1: ccw poly not crossing dateline
        String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(-177.0).value(10.0).endArray()
                .startArray().value(-177.0).value(-10.0).endArray()
                .startArray().value(176.0).value(-15.0).endArray()
                .startArray().value(172.0).value(0.0).endArray()
                .startArray().value(176.0).value(15.0).endArray()
                .endArray()
                .startArray()
                .startArray().value(-172.0).value(8.0).endArray()
                .startArray().value(174.0).value(10.0).endArray()
                .startArray().value(-172.0).value(-8.0).endArray()
                .startArray().value(-172.0).value(8.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();
        XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
        Shape shape = ShapeBuilder.parse(parser).build();
        ElasticsearchGeoAssertions.assertPolygon(shape);
        // test 2: ccw poly crossing dateline
        polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(-177.0).value(10.0).endArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(172.0).value(0.0).endArray()
                .startArray().value(176.0).value(-15.0).endArray()
                .startArray().value(-177.0).value(-10.0).endArray()
                .startArray().value(-177.0).value(10.0).endArray()
                .endArray()
                .startArray()
                .startArray().value(178.0).value(8.0).endArray()
                .startArray().value(-178.0).value(8.0).endArray()
                .startArray().value(-180.0).value(-8.0).endArray()
                .startArray().value(178.0).value(8.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();
        parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
        shape = ShapeBuilder.parse(parser).build();
        ElasticsearchGeoAssertions.assertMultiPolygon(shape);
        // test 3: cw poly not crossing dateline
        polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(180.0).value(10.0).endArray()
                .startArray().value(179.0).value(-10.0).endArray()
                .startArray().value(176.0).value(-15.0).endArray()
                .startArray().value(172.0).value(0.0).endArray()
                .startArray().value(176.0).value(15.0).endArray()
                .endArray()
                .startArray()
                .startArray().value(177.0).value(8.0).endArray()
                .startArray().value(179.0).value(10.0).endArray()
                .startArray().value(179.0).value(-8.0).endArray()
                .startArray().value(177.0).value(8.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();
        parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
        shape = ShapeBuilder.parse(parser).build();
        ElasticsearchGeoAssertions.assertPolygon(shape);
        // test 4: cw poly crossing dateline
        polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(183.0).value(10.0).endArray()
                .startArray().value(183.0).value(-10.0).endArray()
                .startArray().value(176.0).value(-15.0).endArray()
                .startArray().value(172.0).value(0.0).endArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(183.0).value(10.0).endArray()
                .endArray()
                .startArray()
                .startArray().value(178.0).value(8.0).endArray()
                .startArray().value(182.0).value(8.0).endArray()
                .startArray().value(180.0).value(-8.0).endArray()
                .startArray().value(178.0).value(8.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();
        parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
        shape = ShapeBuilder.parse(parser).build();
        ElasticsearchGeoAssertions.assertMultiPolygon(shape);
    }
/**
 * Ensures malformed polygon GeoJSON is rejected with the appropriate exception type:
 * structurally invalid rings raise {@code ElasticsearchParseException}, while null or
 * non-numeric coordinate values raise {@code IllegalArgumentException}.
 */
public void testParseInvalidPolygon() throws IOException {
    /**
     * The following test cases ensure proper error handling of invalid polygons
     * per the GeoJSON specification
     */
    // test case 1: create an invalid polygon with only 2 points
    String invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates")
            .startArray()
            .startArray().value(-74.011).value(40.753).endArray()
            .startArray().value(-75.022).value(41.783).endArray()
            .endArray()
            .endArray()
            .endObject().string();
    XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly);
    parser.nextToken();
    ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);

    // test case 2: create an invalid polygon with only 1 point
    invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates")
            .startArray()
            .startArray().value(-74.011).value(40.753).endArray()
            .endArray()
            .endArray()
            .endObject().string();
    parser = createParser(JsonXContent.jsonXContent, invalidPoly);
    parser.nextToken();
    ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);

    // test case 3: create an invalid polygon with 0 points
    invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates")
            .startArray()
            .startArray().endArray()
            .endArray()
            .endArray()
            .endObject().string();
    parser = createParser(JsonXContent.jsonXContent, invalidPoly);
    parser.nextToken();
    ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);

    // test case 4: create an invalid polygon with null value points
    invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates")
            .startArray()
            .startArray().nullValue().nullValue().endArray()
            .endArray()
            .endArray()
            .endObject().string();
    parser = createParser(JsonXContent.jsonXContent, invalidPoly);
    parser.nextToken();
    ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class);

    // test case 5: create an invalid polygon with 1 invalid LinearRing (nulls in place of rings)
    invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates")
            .nullValue().nullValue()
            .endArray()
            .endObject().string();
    parser = createParser(JsonXContent.jsonXContent, invalidPoly);
    parser.nextToken();
    ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class);

    // test case 6: create an invalid polygon with 0 LinearRings (empty coordinates array)
    invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates").endArray()
            .endObject().string();
    parser = createParser(JsonXContent.jsonXContent, invalidPoly);
    parser.nextToken();
    ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);

    // test case 7: create an invalid polygon whose coordinates hold a bare point instead of a ring
    invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates")
            .startArray().value(-74.011).value(40.753).endArray()
            .endArray()
            .endObject().string();
    parser = createParser(JsonXContent.jsonXContent, invalidPoly);
    parser.nextToken();
    ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);
}
/**
 * Parses a polygon with one hole and compares it against an equivalent JTS polygon.
 * Some expected-shell coordinates carry a z value to verify that 3D points are
 * accepted and the elevation is ignored for equality (ISSUE #10501).
 */
public void testParsePolygonWithHole() throws IOException {
    XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "Polygon")
            .startArray("coordinates")
            .startArray() // shell
            .startArray().value(100.0).value(1.0).endArray()
            .startArray().value(101.0).value(1.0).endArray()
            .startArray().value(101.0).value(0.0).endArray()
            .startArray().value(100.0).value(0.0).endArray()
            .startArray().value(100.0).value(1.0).endArray()
            .endArray()
            .startArray() // hole
            .startArray().value(100.2).value(0.8).endArray()
            .startArray().value(100.2).value(0.2).endArray()
            .startArray().value(100.8).value(0.2).endArray()
            .startArray().value(100.8).value(0.8).endArray()
            .startArray().value(100.2).value(0.8).endArray()
            .endArray()
            .endArray()
            .endObject();

    // add 3d point to test ISSUE #10501
    List<Coordinate> shellCoordinates = new ArrayList<>();
    shellCoordinates.add(new Coordinate(100, 0, 15.0));
    shellCoordinates.add(new Coordinate(101, 0));
    shellCoordinates.add(new Coordinate(101, 1));
    shellCoordinates.add(new Coordinate(100, 1, 10.0));
    shellCoordinates.add(new Coordinate(100, 0));

    List<Coordinate> holeCoordinates = new ArrayList<>();
    holeCoordinates.add(new Coordinate(100.2, 0.2));
    holeCoordinates.add(new Coordinate(100.8, 0.2));
    holeCoordinates.add(new Coordinate(100.8, 0.8));
    holeCoordinates.add(new Coordinate(100.2, 0.8));
    holeCoordinates.add(new Coordinate(100.2, 0.2));

    LinearRing shell = GEOMETRY_FACTORY.createLinearRing(
            shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
    LinearRing[] holes = new LinearRing[1];
    holes[0] = GEOMETRY_FACTORY.createLinearRing(
            holeCoordinates.toArray(new Coordinate[holeCoordinates.size()]));
    Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, holes);
    assertGeometryEquals(jtsGeom(expected), polygonGeoJson);
}
/**
 * A self-intersecting ring must be rejected with {@code InvalidShapeException}
 * rather than silently producing an invalid shape.
 */
public void testParseSelfCrossingPolygon() throws IOException {
    // test self crossing ccw poly not crossing dateline
    String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            // this vertex makes the ring cross its own earlier edges
            .startArray().value(-177.0).value(15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .endArray()
            .endObject().string();
    XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
    parser.nextToken();
    ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class);
}
/**
 * A GeoJSON MultiPoint parses to a collection containing one point per
 * coordinate pair, in document order.
 */
public void testParseMultiPoint() throws IOException {
    // Build the expectation up front: two points in the order they appear in the JSON.
    ShapeCollection<Shape> expected = shapeCollection(
            SPATIAL_CONTEXT.makePoint(100, 0),
            SPATIAL_CONTEXT.makePoint(101, 1.0));

    XContentBuilder multiPointJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "MultiPoint")
            .startArray("coordinates")
            .startArray().value(100.0).value(0.0).endArray()
            .startArray().value(101.0).value(1.0).endArray()
            .endArray()
            .endObject();

    assertGeometryEquals(expected, multiPointJson);
}
/**
 * Verifies MultiPolygon parsing in two shapes of output: a two-polygon document
 * compared against a {@code ShapeCollection}, and a single-polygon MultiPolygon
 * that should collapse to a plain JTS polygon.
 */
public void testParseMultiPolygon() throws IOException {
    // test #1: two polygons; one without hole, one with hole
    XContentBuilder multiPolygonGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "MultiPolygon")
            .startArray("coordinates")
            .startArray()//first poly (without holes)
            .startArray()
            .startArray().value(102.0).value(2.0).endArray()
            .startArray().value(103.0).value(2.0).endArray()
            .startArray().value(103.0).value(3.0).endArray()
            .startArray().value(102.0).value(3.0).endArray()
            .startArray().value(102.0).value(2.0).endArray()
            .endArray()
            .endArray()
            .startArray()//second poly (with hole)
            .startArray()
            .startArray().value(100.0).value(0.0).endArray()
            .startArray().value(101.0).value(0.0).endArray()
            .startArray().value(101.0).value(1.0).endArray()
            .startArray().value(100.0).value(1.0).endArray()
            .startArray().value(100.0).value(0.0).endArray()
            .endArray()
            .startArray()//hole
            .startArray().value(100.2).value(0.8).endArray()
            .startArray().value(100.2).value(0.2).endArray()
            .startArray().value(100.8).value(0.2).endArray()
            .startArray().value(100.8).value(0.8).endArray()
            .startArray().value(100.2).value(0.8).endArray()
            .endArray()
            .endArray()
            .endArray()
            .endObject();

    // expected second polygon (shell + hole)
    List<Coordinate> shellCoordinates = new ArrayList<>();
    shellCoordinates.add(new Coordinate(100, 0));
    shellCoordinates.add(new Coordinate(101, 0));
    shellCoordinates.add(new Coordinate(101, 1));
    shellCoordinates.add(new Coordinate(100, 1));
    shellCoordinates.add(new Coordinate(100, 0));

    List<Coordinate> holeCoordinates = new ArrayList<>();
    holeCoordinates.add(new Coordinate(100.2, 0.2));
    holeCoordinates.add(new Coordinate(100.8, 0.2));
    holeCoordinates.add(new Coordinate(100.8, 0.8));
    holeCoordinates.add(new Coordinate(100.2, 0.8));
    holeCoordinates.add(new Coordinate(100.2, 0.2));

    LinearRing shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
    LinearRing[] holes = new LinearRing[1];
    holes[0] = GEOMETRY_FACTORY.createLinearRing(holeCoordinates.toArray(new Coordinate[holeCoordinates.size()]));
    Polygon withHoles = GEOMETRY_FACTORY.createPolygon(shell, holes);

    // expected first polygon (shell only)
    shellCoordinates = new ArrayList<>();
    shellCoordinates.add(new Coordinate(102, 3));
    shellCoordinates.add(new Coordinate(103, 3));
    shellCoordinates.add(new Coordinate(103, 2));
    shellCoordinates.add(new Coordinate(102, 2));
    shellCoordinates.add(new Coordinate(102, 3));

    shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
    Polygon withoutHoles = GEOMETRY_FACTORY.createPolygon(shell, null);

    Shape expected = shapeCollection(withoutHoles, withHoles);
    assertGeometryEquals(expected, multiPolygonGeoJson);

    // test #2: multipolygon; one polygon with one hole
    // this test converting the multipolygon from a ShapeCollection type
    // to a simple polygon (jtsGeom)
    multiPolygonGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "MultiPolygon")
            .startArray("coordinates")
            .startArray()
            .startArray()
            .startArray().value(100.0).value(1.0).endArray()
            .startArray().value(101.0).value(1.0).endArray()
            .startArray().value(101.0).value(0.0).endArray()
            .startArray().value(100.0).value(0.0).endArray()
            .startArray().value(100.0).value(1.0).endArray()
            .endArray()
            .startArray() // hole
            .startArray().value(100.2).value(0.8).endArray()
            .startArray().value(100.2).value(0.2).endArray()
            .startArray().value(100.8).value(0.2).endArray()
            .startArray().value(100.8).value(0.8).endArray()
            .startArray().value(100.2).value(0.8).endArray()
            .endArray()
            .endArray()
            .endArray()
            .endObject();

    shellCoordinates = new ArrayList<>();
    shellCoordinates.add(new Coordinate(100, 1));
    shellCoordinates.add(new Coordinate(101, 1));
    shellCoordinates.add(new Coordinate(101, 0));
    shellCoordinates.add(new Coordinate(100, 0));
    shellCoordinates.add(new Coordinate(100, 1));

    holeCoordinates = new ArrayList<>();
    holeCoordinates.add(new Coordinate(100.2, 0.8));
    holeCoordinates.add(new Coordinate(100.2, 0.2));
    holeCoordinates.add(new Coordinate(100.8, 0.2));
    holeCoordinates.add(new Coordinate(100.8, 0.8));
    holeCoordinates.add(new Coordinate(100.2, 0.8));

    shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
    holes = new LinearRing[1];
    holes[0] = GEOMETRY_FACTORY.createLinearRing(holeCoordinates.toArray(new Coordinate[holeCoordinates.size()]));
    withHoles = GEOMETRY_FACTORY.createPolygon(shell, holes);
    assertGeometryEquals(jtsGeom(withHoles), multiPolygonGeoJson);
}
/**
 * A GeometryCollection containing a LineString and a Point must parse into a
 * shape collection preserving the document order of its members.
 */
public void testParseGeometryCollection() throws IOException {
    XContentBuilder collectionJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "GeometryCollection")
            .startArray("geometries")
            .startObject()
            .field("type", "LineString")
            .startArray("coordinates")
            .startArray().value(100.0).value(0.0).endArray()
            .startArray().value(101.0).value(1.0).endArray()
            .endArray()
            .endObject()
            .startObject()
            .field("type", "Point")
            .startArray("coordinates").value(102.0).value(2.0).endArray()
            .endObject()
            .endArray()
            .endObject();

    // First member: the line string, wrapped as a JTS-backed shape.
    LineString line = GEOMETRY_FACTORY.createLineString(new Coordinate[]{
            new Coordinate(100, 0),
            new Coordinate(101, 1),
    });
    Shape lineShape = jtsGeom(line);

    // Second member: the point.
    Point point = GEOMETRY_FACTORY.createPoint(new Coordinate(102.0, 2.0));
    Shape pointShape = new JtsPoint(point, SPATIAL_CONTEXT);

    // equals returns true only if geometries are in the same order
    assertGeometryEquals(shapeCollection(lineShape, pointShape), collectionJson);
}
/**
 * The parser must pick out the top-level "type" and "coordinates" fields while
 * ignoring unrelated siblings (crs, bbox, unknown fields) and any look-alike
 * fields nested inside other objects.
 */
public void testThatParserExtractsCorrectTypeAndCoordinatesFromArbitraryJson() throws IOException {
    // Only the top-level point at (100, 0) should survive parsing.
    Point point = GEOMETRY_FACTORY.createPoint(new Coordinate(100.0, 0.0));
    JtsPoint expectedPoint = new JtsPoint(point, SPATIAL_CONTEXT);

    XContentBuilder json = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("crs")
            .field("type", "name")
            .startObject("properties")
            .field("name", "urn:ogc:def:crs:OGC:1.3:CRS84")
            .endObject()
            .endObject()
            .field("bbox", "foobar")
            .field("type", "point")
            .field("bubu", "foobar")
            .startArray("coordinates").value(100.0).value(0.0).endArray()
            .startObject("nested").startArray("coordinates").value(200.0).value(0.0).endArray().endObject()
            .startObject("lala").field("type", "NotAPoint").endObject()
            .endObject();

    assertGeometryEquals(expectedPoint, json);
}
/**
 * Verifies the non-standard "orientation" field: the aliases right/ccw/counterclockwise
 * select a right-handed (ccw) shell interpretation yielding a single polygon here, while
 * left/cw/clockwise select the left-handed interpretation, which makes the same shell
 * wrap the long way around and split across the dateline into a multipolygon.
 */
public void testParseOrientationOption() throws IOException {
    // test 1: valid ccw (right handed system) poly not crossing dateline (with 'right' field)
    XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "Polygon")
            .field("orientation", "right")
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .startArray()
            .startArray().value(-172.0).value(8.0).endArray()
            .startArray().value(174.0).value(10.0).endArray()
            .startArray().value(-172.0).value(-8.0).endArray()
            .startArray().value(-172.0).value(8.0).endArray()
            .endArray()
            .endArray()
            .endObject();
    XContentParser parser = createParser(polygonGeoJson);
    parser.nextToken();
    Shape shape = ShapeBuilder.parse(parser).build();
    ElasticsearchGeoAssertions.assertPolygon(shape);

    // test 2: valid ccw (right handed system) poly not crossing dateline (with 'ccw' field)
    polygonGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "Polygon")
            .field("orientation", "ccw")
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .startArray()
            .startArray().value(-172.0).value(8.0).endArray()
            .startArray().value(174.0).value(10.0).endArray()
            .startArray().value(-172.0).value(-8.0).endArray()
            .startArray().value(-172.0).value(8.0).endArray()
            .endArray()
            .endArray()
            .endObject();
    parser = createParser(polygonGeoJson);
    parser.nextToken();
    shape = ShapeBuilder.parse(parser).build();
    ElasticsearchGeoAssertions.assertPolygon(shape);

    // test 3: valid ccw (right handed system) poly not crossing dateline (with 'counterclockwise' field)
    polygonGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "Polygon")
            .field("orientation", "counterclockwise")
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .startArray()
            .startArray().value(-172.0).value(8.0).endArray()
            .startArray().value(174.0).value(10.0).endArray()
            .startArray().value(-172.0).value(-8.0).endArray()
            .startArray().value(-172.0).value(8.0).endArray()
            .endArray()
            .endArray()
            .endObject();
    parser = createParser(polygonGeoJson);
    parser.nextToken();
    shape = ShapeBuilder.parse(parser).build();
    ElasticsearchGeoAssertions.assertPolygon(shape);

    // test 4: valid cw (left handed system) poly crossing dateline (with 'left' field)
    polygonGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "Polygon")
            .field("orientation", "left")
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .startArray()
            .startArray().value(-178.0).value(8.0).endArray()
            .startArray().value(178.0).value(8.0).endArray()
            .startArray().value(180.0).value(-8.0).endArray()
            .startArray().value(-178.0).value(8.0).endArray()
            .endArray()
            .endArray()
            .endObject();
    parser = createParser(polygonGeoJson);
    parser.nextToken();
    shape = ShapeBuilder.parse(parser).build();
    ElasticsearchGeoAssertions.assertMultiPolygon(shape);

    // test 5: valid cw multipoly (left handed system) poly crossing dateline (with 'cw' field)
    polygonGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "Polygon")
            .field("orientation", "cw")
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .startArray()
            .startArray().value(-178.0).value(8.0).endArray()
            .startArray().value(178.0).value(8.0).endArray()
            .startArray().value(180.0).value(-8.0).endArray()
            .startArray().value(-178.0).value(8.0).endArray()
            .endArray()
            .endArray()
            .endObject();
    parser = createParser(polygonGeoJson);
    parser.nextToken();
    shape = ShapeBuilder.parse(parser).build();
    ElasticsearchGeoAssertions.assertMultiPolygon(shape);

    // test 6: valid cw multipoly (left handed system) poly crossing dateline (with 'clockwise' field)
    polygonGeoJson = XContentFactory.jsonBuilder()
            .startObject()
            .field("type", "Polygon")
            .field("orientation", "clockwise")
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .startArray()
            .startArray().value(-178.0).value(8.0).endArray()
            .startArray().value(178.0).value(8.0).endArray()
            .startArray().value(180.0).value(-8.0).endArray()
            .startArray().value(-178.0).value(8.0).endArray()
            .endArray()
            .endArray()
            .endObject();
    parser = createParser(polygonGeoJson);
    parser.nextToken();
    shape = ShapeBuilder.parse(parser).build();
    ElasticsearchGeoAssertions.assertMultiPolygon(shape);
}
/**
 * Parses {@code geoJson} into a shape and asserts it equals {@code expected}.
 */
private void assertGeometryEquals(Shape expected, XContentBuilder geoJson) throws IOException {
    XContentParser p = createParser(geoJson);
    p.nextToken();
    Shape actual = ShapeBuilder.parse(p).build();
    ElasticsearchGeoAssertions.assertEquals(expected, actual);
}
/**
 * Wraps the given shapes, in order, into a single {@link ShapeCollection}.
 */
private ShapeCollection<Shape> shapeCollection(Shape... shapes) {
    List<Shape> ordered = Arrays.asList(shapes);
    return new ShapeCollection<>(ordered, SPATIAL_CONTEXT);
}
/**
 * Converts each JTS geometry to a shape (preserving order) and wraps them
 * into a single {@link ShapeCollection}.
 */
private ShapeCollection<Shape> shapeCollection(Geometry... geoms) {
    List<Shape> wrapped = new ArrayList<>(geoms.length);
    for (int i = 0; i < geoms.length; i++) {
        wrapped.add(jtsGeom(geoms[i]));
    }
    return new ShapeCollection<>(wrapped, SPATIAL_CONTEXT);
}
/**
 * Wraps a raw JTS geometry as a Spatial4j shape.
 * NOTE(review): the two {@code false} flags are assumed to disable dateline-180
 * wrapping and index-time validation — confirm against the
 * {@code JtsGeometry(Geometry, JtsSpatialContext, boolean, boolean)} constructor docs.
 */
private JtsGeometry jtsGeom(Geometry geom) {
    return new JtsGeometry(geom, SPATIAL_CONTEXT, false, false);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.commands;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.UnaryOperator;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.configuration.CacheConfig;
import org.apache.geode.cache.configuration.JndiBindingsType;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.distributed.internal.InternalConfigurationPersistenceService;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.management.internal.cli.functions.DestroyJndiBindingFunction;
import org.apache.geode.management.internal.configuration.domain.Configuration;
import org.apache.geode.management.internal.functions.CliFunctionResult;
import org.apache.geode.test.junit.rules.GfshParserRule;
/**
 * Unit tests for {@code DestroyJndiBindingCommand} ("destroy jndi-binding" gfsh
 * command), run against a spied command with a mocked cache, a mocked cluster
 * configuration service, and stubbed function execution.
 */
public class DestroyJndiBindingCommandTest {

  @ClassRule
  public static GfshParserRule gfsh = new GfshParserRule();

  private DestroyJndiBindingCommand command;
  // Mocked cluster configuration returned by ccService; individual tests stub its bindings.
  private CacheConfig cacheConfig;
  private InternalConfigurationPersistenceService ccService;

  private static final String COMMAND = "destroy jndi-binding ";

  @Before
  public void setUp() throws Exception {
    InternalCache cache = mock(InternalCache.class);
    // spy the command so collaborator lookups (cache, members, cc service) can be stubbed per test
    command = spy(DestroyJndiBindingCommand.class);
    doReturn(cache).when(command).getCache();
    cacheConfig = mock(CacheConfig.class);
    ccService = mock(InternalConfigurationPersistenceService.class);
    // default: no members in the cluster; tests override where needed
    doReturn(Collections.emptySet()).when(command).findMembers(any(), any());
    doReturn(ccService).when(command).getConfigurationPersistenceService();
    when(ccService.getCacheConfig(any())).thenReturn(cacheConfig);
    // updateCacheConfig normally persists; here just apply the mutator to the mocked config
    doAnswer(invocation -> {
      UnaryOperator<CacheConfig> mutator = invocation.getArgument(1);
      mutator.apply(cacheConfig);
      return null;
    }).when(ccService).updateCacheConfig(any(), any());
    @SuppressWarnings("unchecked")
    final Region<String, Configuration> configurationRegion = mock(Region.class);
    when(ccService.getConfigurationRegion()).thenReturn(configurationRegion);
    when(ccService.getConfiguration(any())).thenReturn(mock(Configuration.class));
  }

  // --name is mandatory: the bare command must be rejected by the parser.
  @Test
  public void missingMandatory() {
    gfsh.executeAndAssertThat(command, COMMAND).statusIsError().containsOutput("Invalid command");
  }

  @Test
  public void returnsErrorIfBindingDoesNotExistAndIfExistsUnspecified() {
    gfsh.executeAndAssertThat(command, COMMAND + " --name=name").statusIsError()
        .containsOutput("does not exist.");
  }

  // --if-exists (implicitly true) downgrades the missing-binding error to a skip.
  @Test
  public void skipsIfBindingDoesNotExistAndIfExistsSpecified() {
    gfsh.executeAndAssertThat(command, COMMAND + " --name=name --if-exists").statusIsSuccess()
        .containsOutput("does not exist.");
  }

  @Test
  public void skipsIfBindingDoesNotExistAndIfExistsSpecifiedTrue() {
    gfsh.executeAndAssertThat(command, COMMAND + " --name=name --if-exists=true").statusIsSuccess()
        .containsOutput("does not exist.");
  }

  @Test
  public void returnsErrorIfBindingDoesNotExistAndIfExistsSpecifiedFalse() {
    gfsh.executeAndAssertThat(command, COMMAND + " --name=name --if-exists=false").statusIsError()
        .containsOutput("does not exist.");
  }

  // NOTE(review): despite the "ThenError" name, the command reports success with
  // warning output when nothing could be done — confirm this is the intended contract.
  @Test
  public void whenNoMembersFoundAndNoClusterConfigServiceRunningThenError() {
    doReturn(Collections.emptySet()).when(command).findMembers(any(), any());
    doReturn(null).when(command).getConfigurationPersistenceService();
    gfsh.executeAndAssertThat(command, COMMAND + " --name=name").statusIsSuccess()
        .containsOutput("No members found").containsOutput(
            "Cluster configuration service is not running. Configuration change is not persisted.");
  }

  // With no members but a running cc service, only the cluster configuration is updated.
  @Test
  public void whenNoMembersFoundAndClusterConfigRunningThenUpdateClusterConfig() {
    List<JndiBindingsType.JndiBinding> bindings = new ArrayList<>();
    JndiBindingsType.JndiBinding jndiBinding = new JndiBindingsType.JndiBinding();
    jndiBinding.setJndiName("name");
    bindings.add(jndiBinding);
    doReturn(bindings).when(cacheConfig).getJndiBindings();

    gfsh.executeAndAssertThat(command, COMMAND + " --name=name").statusIsSuccess()
        .containsOutput("No members found.")
        .containsOutput("Cluster configuration for group 'cluster' is updated");

    verify(ccService).updateCacheConfig(any(), any());
    verify(command).updateConfigForGroup(eq("cluster"), eq(cacheConfig), any());
  }

  // With members but no cc service, the destroy function runs on members and no config is written.
  @Test
  @SuppressWarnings("deprecation")
  public void whenMembersFoundAndNoClusterConfigRunningThenOnlyInvokeFunction() {
    Set<DistributedMember> members = new HashSet<>();
    members.add(mock(DistributedMember.class));
    CliFunctionResult result =
        new CliFunctionResult("server1", true, "Jndi binding \"name\" destroyed on \"server1\"");
    List<CliFunctionResult> results = new ArrayList<>();
    results.add(result);
    doReturn(members).when(command).findMembers(any(), any());
    doReturn(null).when(command).getConfigurationPersistenceService();
    doReturn(results).when(command).executeAndGetFunctionResult(any(), any(), any());

    gfsh.executeAndAssertThat(command, COMMAND + " --name=name").statusIsSuccess()
        .tableHasColumnOnlyWithValues("Member", "server1")
        .tableHasColumnOnlyWithValues("Status", "OK")
        .tableHasColumnOnlyWithValues("Message", "Jndi binding \"name\" destroyed on \"server1\"");

    verify(ccService, times(0)).updateCacheConfig(any(), any());

    // capture and verify the function invocation: (jndiName, destroyingDataSource) on the members
    ArgumentCaptor<DestroyJndiBindingFunction> function =
        ArgumentCaptor.forClass(DestroyJndiBindingFunction.class);
    ArgumentCaptor<Object[]> arguments = ArgumentCaptor.forClass(Object[].class);
    @SuppressWarnings("unchecked")
    ArgumentCaptor<Set<DistributedMember>> targetMembers = ArgumentCaptor.forClass(Set.class);
    verify(command, times(1)).executeAndGetFunctionResult(function.capture(), arguments.capture(),
        targetMembers.capture());

    String jndiName = (String) arguments.getValue()[0];
    boolean destroyingDataSource = (boolean) arguments.getValue()[1];
    assertThat(function.getValue()).isInstanceOf(DestroyJndiBindingFunction.class);
    assertThat(jndiName).isEqualTo("name");
    assertThat(destroyingDataSource).isEqualTo(false);
    assertThat(targetMembers.getValue()).isEqualTo(members);
  }

  // With members and a running cc service, both the config update and the function invocation happen.
  @Test
  @SuppressWarnings("deprecation")
  public void whenMembersFoundAndClusterConfigRunningThenUpdateClusterConfigAndInvokeFunction() {
    List<JndiBindingsType.JndiBinding> bindings = new ArrayList<>();
    JndiBindingsType.JndiBinding jndiBinding = new JndiBindingsType.JndiBinding();
    jndiBinding.setJndiName("name");
    bindings.add(jndiBinding);
    doReturn(bindings).when(cacheConfig).getJndiBindings();

    Set<DistributedMember> members = new HashSet<>();
    members.add(mock(DistributedMember.class));
    CliFunctionResult result =
        new CliFunctionResult("server1", true, "Jndi binding \"name\" destroyed on \"server1\"");
    List<CliFunctionResult> results = new ArrayList<>();
    results.add(result);
    doReturn(members).when(command).findMembers(any(), any());
    doReturn(results).when(command).executeAndGetFunctionResult(any(), any(), any());

    gfsh.executeAndAssertThat(command, COMMAND + " --name=name").statusIsSuccess()
        .tableHasColumnOnlyWithValues("Member", "server1")
        .tableHasColumnOnlyWithValues("Status", "OK")
        .tableHasColumnOnlyWithValues("Message", "Jndi binding \"name\" destroyed on \"server1\"");

    // the mutator stubbed in setUp removed the binding from the mocked config
    assertThat(cacheConfig.getJndiBindings().isEmpty()).isTrue();
    verify(command).updateConfigForGroup(eq("cluster"), eq(cacheConfig), any());

    ArgumentCaptor<DestroyJndiBindingFunction> function =
        ArgumentCaptor.forClass(DestroyJndiBindingFunction.class);
    ArgumentCaptor<Object[]> arguments = ArgumentCaptor.forClass(Object[].class);
    @SuppressWarnings("unchecked")
    ArgumentCaptor<Set<DistributedMember>> targetMembers = ArgumentCaptor.forClass(Set.class);
    verify(command, times(1)).executeAndGetFunctionResult(function.capture(), arguments.capture(),
        targetMembers.capture());

    String jndiName = (String) arguments.getValue()[0];
    boolean destroyingDataSource = (boolean) arguments.getValue()[1];
    assertThat(function.getValue()).isInstanceOf(DestroyJndiBindingFunction.class);
    assertThat(jndiName).isEqualTo("name");
    assertThat(destroyingDataSource).isEqualTo(false);
    assertThat(targetMembers.getValue()).isEqualTo(members);
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.hawtdb.internal.page;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.List;
import org.fusesource.hawtdb.api.IOPagingException;
import org.fusesource.hawtdb.api.Paged;
import org.fusesource.hawtdb.api.Paged.SliceType;
import org.fusesource.hawtbuf.Buffer;
/**
* An extent is a sequence of adjacent pages which can be linked
* to subsequent extents.
*
* Extents allow you to write large streams of data to a Paged object
* contiguously to avoid fragmentation.
*
* The first page of the extent contains a header which specifies
* the size of the extent and the page id of the next extent that
* it is linked to.
*
* @author <a href="http://hiramchirino.com">Hiram Chirino</a>
*/
public class Extent {

    /** Magic marker written at the start of every extent header. */
    public final static Buffer DEFAULT_MAGIC = new Buffer(new byte[]{'x'});

    private final Paged paged;
    private final int page;
    private final Buffer magic;

    /** Slice over the extent's pages; only valid between an open and a close call. */
    private ByteBuffer buffer;
    /** End offset of the payload within the extent, header included (see writeCloseLinked). */
    private int length;
    /** Page id of the next linked extent, or -1 when this extent is the last one. */
    private int next;

    public Extent(Paged paged, int page) {
        this(paged, page, DEFAULT_MAGIC);
    }

    public Extent(Paged paged, int page, Buffer magic) {
        this.paged = paged;
        this.page = page;
        this.magic = magic;
    }

    @Override
    public String toString() {
        Integer position = null;
        Integer limit = null;
        if( buffer!=null ) {
            position = buffer.position();
            limit = buffer.limit();
        }
        return "{ page: "+page+", position: "+position+", limit: "+limit+", length: "+length+", next: "+next+" }";
    }

    /**
     * Slices the first page of the extent and reads its header: the magic
     * marker, the extent length, and the next-extent link.
     *
     * @throws IOPagingException if the page does not start with the expected magic
     */
    public void readHeader() {
        buffer = paged.slice(SliceType.READ, page, 1);
        Buffer m = new Buffer(magic.length);
        buffer.get(m.data);
        if( !magic.equals(m) ) {
            throw new IOPagingException("Invalid extent read request. The requested page was not an extent: "+page);
        }
        IntBuffer ib = buffer.asIntBuffer();
        length = ib.get();
        next = ib.get();
    }

    /**
     * Opens the extent for reading: reads the header, re-slices to cover all
     * pages of the extent if it spans more than one, and positions the buffer
     * just past the header with the limit set to the extent length.
     */
    public void readOpen() {
        readHeader();
        int pages = paged.pages(length);
        if( pages > 1 ) {
            paged.unslice(buffer);
            buffer = paged.slice(SliceType.READ, page, pages);
        }
        buffer.position(magic.length+8);
        buffer.limit(length);
    }

    /**
     * Opens the extent for writing, reserving {@code size} pages and
     * positioning the buffer just past the header area.
     *
     * @param size the number of pages to slice for writing
     */
    public void writeOpen(short size) {
        buffer = paged.slice(SliceType.WRITE, page, size);
        buffer.position(magic.length+8);
    }

    /**
     * Closes a written extent, linking it to the given next extent: records
     * the written length, writes the header (magic, length, next), and
     * releases the slice.
     *
     * @param next the page id of the next extent, or -1 for the end of the chain
     * @return the length of this extent (header included)
     */
    public int writeCloseLinked(int next) {
        this.next = next;
        length = buffer.position();
        buffer.position(0);
        buffer.put(magic.data, magic.offset, magic.length);
        IntBuffer ib = buffer.asIntBuffer();
        ib.put(length);
        ib.put(next);
        paged.unslice(buffer);
        return length;
    }

    /**
     * Closes a written extent as the end of its chain (next = -1) and frees
     * any pages that were reserved by {@link #writeOpen(short)} but not used.
     */
    public void writeCloseEOF() {
        int length = writeCloseLinked(-1);
        // NOTE(review): writeCloseLinked() already unslices the buffer, yet
        // buffer.limit() is read and unslice() is called again below — confirm
        // against Paged.unslice() semantics whether this double release is safe.
        int originalPages = paged.pages(buffer.limit());
        int usedPages = paged.pages(length);
        int remainingPages = originalPages-usedPages;
        // Release un-used pages.
        if (remainingPages>0) {
            paged.allocator().free(page+usedPages, remainingPages);
        }
        paged.unslice(buffer);
    }

    /** Releases the slice acquired by a read open. */
    public void readClose() {
        paged.unslice(buffer);
    }

    /** @return true when the buffer has no bytes remaining */
    boolean atEnd() {
        return buffer.remaining() == 0;
    }

    /**
     * @return true if the write fit into the extent.
     */
    public boolean write(byte b) {
        if (atEnd()) {
            return false;
        }
        buffer.put(b);
        return true;
    }

    /**
     * Writes as much of the source buffer as fits, consuming the written bytes
     * from {@code source}.
     *
     * @return true if the entire source was written into the extent
     */
    public boolean write(Buffer source) {
        while (source.length > 0) {
            if (atEnd()) {
                return false;
            }
            int count = Math.min(buffer.remaining(), source.length);
            buffer.put(source.data, source.offset, count);
            source.offset += count;
            source.length -= count;
        }
        return true;
    }

    /** @return the next byte of the extent as an unsigned value (0-255) */
    public int read() {
        return buffer.get() & 0xFF;
    }

    /**
     * Reads into the target buffer until it is full or the extent is
     * exhausted, advancing {@code target} past the bytes read.
     */
    public void read(Buffer target) {
        while (target.length > 0 && !atEnd()) {
            int count = Math.min(buffer.remaining(), target.length);
            buffer.get(target.data, target.offset, count);
            target.offset += count;
            target.length -= count;
        }
    }

    /** @return the page id of the next linked extent, or -1 if none */
    public int getNext() {
        return next;
    }

    /**
     * Gets a listing of all the pages used by the extents linked from the
     * extent at the specified page (the first extent's own pages are not
     * included).
     *
     * @param paged
     * @param page
     */
    public static List<Integer> pagesLinked(Paged paged, int page) {
        // Fixed: previously delegated to freeLinked(), which freed the linked
        // extents as a side effect instead of merely listing their pages.
        return pagesLinked(paged, page, DEFAULT_MAGIC);
    }

    public static List<Integer> pagesLinked(Paged paged, int page, Buffer magic) {
        Extent extent = new Extent(paged, page, magic);
        extent.readHeader();
        // NOTE(review): readClose() is never called on this extent, so the
        // header slice from readHeader() is not released — confirm whether
        // this leaks a slice.
        return pages(paged, extent.getNext());
    }

    /**
     * Gets a listing of all the pages used by the extent chain starting at the
     * specified page.
     */
    public static List<Integer> pages(Paged paged, int page) {
        return pages(paged, page, DEFAULT_MAGIC);
    }

    public static List<Integer> pages(Paged paged, int page, Buffer magic) {
        ArrayList<Integer> rc = new ArrayList<Integer>();
        while( page>=0 ) {
            Extent extent = new Extent(paged, page, magic);
            extent.readHeader();
            try {
                int pagesInExtent = paged.pages(extent.getLength());
                for( int i=0; i < pagesInExtent; i++) {
                    rc.add(page+i);
                }
                page=extent.getNext();
            } finally {
                extent.readClose();
            }
        }
        return rc;
    }

    /**
     * Frees the linked extents at the provided page id (the first extent
     * itself is not freed).
     *
     * @param paged
     * @param page
     */
    public static List<Integer> freeLinked(Paged paged, int page) {
        return freeLinked(paged, page, DEFAULT_MAGIC);
    }

    public static List<Integer> freeLinked(Paged paged, int page, Buffer magic) {
        Extent extent = new Extent(paged, page, magic);
        extent.readHeader();
        // NOTE(review): as in pagesLinked(), the header slice is never
        // released via readClose() — confirm whether this leaks a slice.
        return free(paged, extent.getNext());
    }

    /**
     * Frees the extent chain starting at the provided page id.
     *
     * @param paged
     * @param page
     * @return the pages that were freed
     */
    public static List<Integer> free(Paged paged, int page) {
        return free(paged, page, DEFAULT_MAGIC);
    }

    public static List<Integer> free(Paged paged, int page, Buffer magic) {
        ArrayList<Integer> rc = new ArrayList<Integer>();
        while( page>=0 ) {
            Extent extent = new Extent(paged, page, magic);
            extent.readHeader();
            try {
                int pagesInExtent = paged.pages(extent.getLength());
                paged.allocator().free(page, pagesInExtent);
                for( int i=0; i < pagesInExtent; i++) {
                    rc.add(page+i);
                }
                page=extent.getNext();
            } finally {
                extent.readClose();
            }
        }
        return rc;
    }

    /**
     * Un-frees the extent at the provided page id. Basically undoes
     * a previous {@link #free(Paged, int)} operation.
     *
     * @param paged
     * @param page
     */
    public static void unfree(Paged paged, int page) {
        unfree(paged, page, DEFAULT_MAGIC);
    }

    public static void unfree(Paged paged, int page, Buffer magic) {
        while( page>=0 ) {
            Extent extent = new Extent(paged, page, magic);
            extent.readHeader();
            try {
                paged.allocator().unfree(page, paged.pages(extent.length));
                page=extent.next;
            } finally {
                extent.readClose();
            }
        }
    }

    /** @return the first page id of this extent */
    public int getPage() {
        return page;
    }

    /** @return the extent length (header included), valid after a header read or write close */
    public int getLength() {
        return length;
    }
}
| |
/* JAI-Ext - OpenSource Java Advanced Image Extensions Library
* http://www.geo-solutions.it/
* Copyright 2014 GeoSolutions
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.geosolutions.jaiext.lookup;
import it.geosolutions.jaiext.range.Range;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.image.RenderedImage;
import java.awt.image.renderable.ParameterBlock;
import javax.media.jai.JAI;
import javax.media.jai.OperationDescriptorImpl;
import javax.media.jai.ParameterBlockJAI;
import javax.media.jai.PropertyGenerator;
import javax.media.jai.ROI;
import javax.media.jai.ROIShape;
import javax.media.jai.RenderedOp;
import javax.media.jai.registry.RenderedRegistryMode;
import com.sun.media.jai.util.PropertyGeneratorImpl;
/**
* This class is used for retrieving an eventual ROI object passed to the source image by calling the getProperty() method.
* This method checks if the ROI is present and if so, its bounds are intersected with the source and destination images bounds,
* and then passed as a result. If no property was found an Undefined Property object is returned.
* */
class LookupPropertyGenerator extends PropertyGeneratorImpl {

    /** Registers "ROI" as the single property handled by this generator. */
    public LookupPropertyGenerator() {
        super(new String[] { "ROI" }, new Class[] { ROI.class }, new Class[] { RenderedOp.class });
    }

    /**
     * Returns the ROI of the source image, clipped to both the source and the
     * destination bounds, or {@code java.awt.Image.UndefinedProperty} when the
     * source carries no usable ROI.
     */
    public Object getProperty(String name, Object opNode) {
        validate(name, opNode);
        // Only the "ROI" property of a RenderedOp node is handled here.
        if (!(opNode instanceof RenderedOp) || !name.equalsIgnoreCase("roi")) {
            return java.awt.Image.UndefinedProperty;
        }
        RenderedOp operation = (RenderedOp) opNode;
        ParameterBlock paramBlock = operation.getParameterBlock();
        // Fetch the ROI attached to the rendered source, if any.
        RenderedImage source = paramBlock.getRenderedSource(0);
        Object roiProperty = source.getProperty("ROI");
        // The instanceof check also rejects null and UndefinedProperty values.
        if (!(roiProperty instanceof ROI)) {
            return java.awt.Image.UndefinedProperty;
        }
        ROI roi = (ROI) roiProperty;
        // Clip the ROI to the source image bounds when it spills outside them.
        Rectangle sourceBounds =
                new Rectangle(source.getMinX(), source.getMinY(), source.getWidth(), source.getHeight());
        if (!sourceBounds.contains(roi.getBounds())) {
            roi = roi.intersect(new ROIShape(sourceBounds));
        }
        // Clip the resulting ROI to the destination bounds as well.
        Rectangle destinationBounds = operation.getBounds();
        if (!destinationBounds.contains(roi.getBounds())) {
            roi = roi.intersect(new ROIShape(destinationBounds));
        }
        return roi;
    }
}
/**
* An <code>OperationDescriptor</code> describing the "Lookup" operation.
*
* <p>
* The Lookup operation takes a rendered image and a lookup table, and performs general table lookup by passing the source image through the table. If
* ROI or No Data values are set then the lookupTable takes in account this 2 parameters. The out-of-ROI values or No Data values are set to
* destination no data.
*
* <p>
* The source may be a single- or multi-banded image of data types <code>byte</code>, <code>ushort</code>, <code>short</code>, or <code>int</code>.
* The lookup table may be single- or multi-banded and of any DataBuffer supported data types. The destination image must have the same data type as
* the lookup table, and its number of bands is determined based on the number of bands of the source and the table. If the source is single-banded,
* the destination has the same number of bands as the lookup table; otherwise, the destination has the same number of bands as the source.
*
* <p>
* If either the source or the table is single-banded and the other one is multi-banded, then the single band is applied to every band of the
* multi-banded object. If both are multi-banded, then their corresponding bands are matched up.
*
* <p>
* The table may have a set of offset values, one for each band. This value is subtracted from the source pixel values before indexing into the table
* data array.
*
* <p>
* It is the user's responsibility to make certain the lookup table supplied is suitable for the source image. Specifically, the table data covers the
* entire range of the source data. Otherwise, the result of this operation is undefined.
*
 * <p>
* By the nature of this operation, the destination may have a different number of bands and/or data type from the source. The
* <code>SampleModel</code> of the destination is created in accordance with the actual lookup table used in a specific case.
*
* <p>
* The destination pixel values are defined by the pseudocode:
* <ul>
* <li>If the source image is single-banded and the lookup table is single- or multi-banded, then the destination image has the same number of bands
* as the lookup table:
*
* <pre>
* dst[x][y][b] = table[b][src[x][y][0] - offsets[b]]
* </pre>
*
* </li>
*
* <li>If the source image is multi-banded and the lookup table is single-banded, then the destination image has the same number of bands as the
* source image:
*
* <pre>
* dst[x][y][b] = table[0][src[x][y][b] - offsets[0]]
* </pre>
*
* </li>
*
* <li>If the source image is multi-banded and the lookup table is multi-banded, with the same number of bands as the source image, then the
* destination image will have the same number of bands as the source image:
*
* <pre>
* dst[x][y][b] = table[b][src[x][y][b] - offsets[b]]
* </pre>
*
* </li>
* </ul>
*
* <p>
* <table border=1>
* <caption>Resource List</caption>
* <tr>
* <th>Name</th>
* <th>Value</th>
* </tr>
* <tr>
* <td>GlobalName</td>
* <td>Lookup</td>
* </tr>
* <tr>
* <td>LocalName</td>
* <td>Lookup</td>
* </tr>
* <tr>
* <td>Vendor</td>
* <td>it.geosolutions.jaiext</td>
* </tr>
* <tr>
* <td>Description</td>
* <td>Lookup operation supporting ROI and No Data.</td>
* </tr>
* <tr>
* <td>DocURL</td>
* <td>Not defined</td>
* </tr>
* <tr>
* <td>Version</td>
* <td>1.0</td>
* </tr>
* <tr>
* <td>arg0Desc</td>
* <td>The lookup table to use.</td>
* </tr>
* <tr>
* <td>arg1Desc</td>
* <td>Destination No Data used for ROI or No Data.</td>
* </tr>
* <tr>
* <td>arg2Desc</td>
* <td>ROI object used.</td>
* </tr>
* <tr>
* <td>arg3Desc</td>
* <td>No Data Range used.</td>
* </tr>
* <tr>
* <td>arg4Desc</td>
* <td>Boolean checking if ROI RasterAccessor is used.</td>
* </tr>
* </table>
* </p>
*
* <p>
* <table border=1>
* <caption>Parameter List</caption>
* <tr>
* <th>Name</th>
* <th>Class Type</th>
* <th>Default Value</th>
* </tr>
* <tr>
* <td>table</td>
* <td>it.geosolutions.jaiext.lookup.LookupTable</td>
* <td>NO_PARAMETER_DEFAULT</td>
* <tr>
* <td>destinationNoData</td>
* <td>Double</td>
* <td>0</td>
* <tr>
* <td>ROI</td>
* <td>javax.media.jai.ROI</td>
* <td>NO_PARAMETER_DEFAULT</td>
* <tr>
* <td>NoData</td>
* <td>it.geosolutions.jaiext.range.Range</td>
* <td>NO_PARAMETER_DEFAULT</td>
* <tr>
* <td>useRoiAccessor</td>
* <td>Boolean</td>
* <td>false</td>
* </table>
* </p>
*
*/
public class LookupDescriptor extends OperationDescriptorImpl {

    /**
     * The resource strings that provide the general documentation and specify
     * the parameter list for this operation.
     */
    private static final String[][] resources = {
            { "GlobalName", "Lookup" },
            { "LocalName", "Lookup" },
            { "Vendor", "it.geosolutions.jaiext" },
            { "Description", "Lookup operation supporting ROI and No Data" },
            { "DocURL", "Not defined" },
            { "Version", "1.0" },
            { "arg0Desc", "The lookup table to use" },
            { "arg1Desc", "Destination No Data used for ROI or No Data" },
            { "arg2Desc", "ROI object used" },
            { "arg3Desc", "No Data Range used" },
            { "arg4Desc", "Boolean checking if ROI RasterAccessor is used" } };

    /** The parameter class list for this operation. */
    private static final Class[] paramClasses = {
            it.geosolutions.jaiext.lookup.LookupTable.class,
            java.lang.Double.class,
            javax.media.jai.ROI.class,
            it.geosolutions.jaiext.range.Range.class,
            java.lang.Boolean.class };

    /** The parameter name list for this operation. */
    private static final String[] paramNames = {
            "table", "destinationNoData", "ROI", "NoData", "useRoiAccessor" };

    /** The parameter default value list for this operation. */
    private static final Object[] paramDefaults = { null, 0.0d, null, null, false };

    /** Registers the "Lookup" operation with one source and the parameters above. */
    public LookupDescriptor() {
        super(resources, 1, paramClasses, paramNames, paramDefaults);
    }

    /**
     * Returns an array of <code>PropertyGenerator</code>s implementing property
     * inheritance for the "Lookup" operation.
     *
     * @return an array holding the ROI property generator
     */
    public PropertyGenerator[] getPropertyGenerators() {
        return new PropertyGenerator[] { new LookupPropertyGenerator() };
    }

    /**
     * Performs a lookup operation on an integral image.
     *
     * <p>
     * Creates a <code>ParameterBlockJAI</code> from all supplied arguments except <code>hints</code> and invokes
     * {@link JAI#create(String,ParameterBlock,RenderingHints)}.
     *
     * @see JAI
     * @see ParameterBlockJAI
     * @see RenderedOp
     *
     * @param source0 <code>RenderedImage</code> source 0.
     * @param table The lookup table used.
     * @param destinationNoData Destination no data used for ROI or No Data.
     * @param roi ROI object on which the calculations are performed.
     * @param noData No Data range used for calculation.
     * @param useRoiAccessor Boolean indicating if ROI RasterAccessor must be used.
     * @param hints The <code>RenderingHints</code> to use.
     * @return The <code>RenderedOp</code> destination.
     * @throws IllegalArgumentException if <code>source0</code> is <code>null</code>.
     */
    public static RenderedOp create(RenderedImage source0, LookupTable table,
            double destinationNoData, ROI roi, Range noData, boolean useRoiAccessor,
            RenderingHints hints) {
        // Collect every operation argument into a parameter block.
        ParameterBlockJAI parameters = new ParameterBlockJAI("Lookup", RenderedRegistryMode.MODE_NAME);
        parameters.setSource("source0", source0);
        parameters.setParameter("table", table);
        parameters.setParameter("destinationNoData", destinationNoData);
        parameters.setParameter("ROI", roi);
        parameters.setParameter("NoData", noData);
        parameters.setParameter("useRoiAccessor", useRoiAccessor);
        // Delegate the rendered image creation to JAI.
        return JAI.create("Lookup", parameters, hints);
    }
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.trace.database.symbol;
import java.io.IOException;
import java.util.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import com.google.common.collect.Collections2;
import com.google.common.collect.Range;
import db.DBRecord;
import generic.CatenatedCollection;
import ghidra.program.model.address.*;
import ghidra.program.model.listing.CircularDependencyException;
import ghidra.program.model.listing.Program;
import ghidra.program.model.symbol.*;
import ghidra.program.util.ProgramLocation;
import ghidra.trace.database.DBTrace;
import ghidra.trace.database.DBTraceUtils;
import ghidra.trace.database.address.DBTraceOverlaySpaceAdapter;
import ghidra.trace.database.address.DBTraceOverlaySpaceAdapter.DecodesAddresses;
import ghidra.trace.database.program.DBTraceProgramView;
import ghidra.trace.database.symbol.DBTraceSymbolManager.DBTraceSymbolIDEntry;
import ghidra.trace.database.symbol.DBTraceSymbolManager.MySymbolTypes;
import ghidra.trace.model.Trace.TraceSymbolChangeType;
import ghidra.trace.model.TraceAddressSnapRange;
import ghidra.trace.model.symbol.TraceSymbol;
import ghidra.trace.model.thread.TraceThread;
import ghidra.trace.util.TraceAddressSpace;
import ghidra.trace.util.TraceChangeRecord;
import ghidra.util.LockHold;
import ghidra.util.database.*;
import ghidra.util.database.annot.DBAnnotatedColumn;
import ghidra.util.database.annot.DBAnnotatedField;
import ghidra.util.exception.DuplicateNameException;
import ghidra.util.exception.InvalidInputException;
import ghidra.util.task.TaskMonitor;
/**
 * Abstract database-backed implementation of {@link TraceSymbol}.
 *
 * <p>
 * Each symbol record stores its name, its parent namespace's id, and a flags
 * byte whose low nibble carries the {@link SourceType} ordinal.
 */
public abstract class AbstractDBTraceSymbol extends DBAnnotatedObject
		implements TraceSymbol, DecodesAddresses {
	// Low nibble of the flags byte holds the SourceType ordinal
	private static final byte SOURCE_MASK = 0x0F;
	private static final int SOURCE_SHIFT = 0;
	private static final byte SOURCE_CLEAR = ~(SOURCE_MASK << SOURCE_SHIFT);
	// Presumably the "primary symbol" bit; not manipulated in this class — TODO confirm in subclasses
	private static final byte PRIMARY_MASK = 0x10;
	@SuppressWarnings("unused")
	private static final int PRIMARY_CLEAR = ~PRIMARY_MASK;

	static final String NAME_COLUMN_NAME = "Name";
	static final String PARENT_COLUMN_NAME = "Parent";
	static final String FLAGS_COLUMN_NAME = "Flags";

	@DBAnnotatedColumn(NAME_COLUMN_NAME)
	static DBObjectColumn NAME_COLUMN;
	@DBAnnotatedColumn(PARENT_COLUMN_NAME)
	static DBObjectColumn PARENT_COLUMN;
	@DBAnnotatedColumn(FLAGS_COLUMN_NAME)
	static DBObjectColumn FLAGS_COLUMN;

	@DBAnnotatedField(column = NAME_COLUMN_NAME, indexed = true)
	String name;
	@DBAnnotatedField(column = PARENT_COLUMN_NAME, indexed = true)
	long parentID;
	@DBAnnotatedField(column = FLAGS_COLUMN_NAME)
	byte flags;

	// Resolved from parentID in fresh(); null when parentID == -1 (global)
	protected DBTraceNamespaceSymbol parent;
	protected final DBTraceSymbolManager manager;

	public AbstractDBTraceSymbol(DBTraceSymbolManager manager, DBCachedObjectStore<?> store,
			DBRecord record) {
		super(store, record);
		this.manager = manager;
	}

	/**
	 * {@inheritDoc}
	 *
	 * NOTE: If the IDs match, then the symbols are considered equal, regardless of their other
	 * attributes. This mechanic seems required to support the whole "placeholder" idea. See
	 * {@link SymbolTable#createSymbolPlaceholder(Address, long)}.
	 */
	@Override
	public boolean equals(Object obj) {
		if (!(obj instanceof AbstractDBTraceSymbol)) {
			return false;
		}
		if (obj == this) {
			return true;
		}
		AbstractDBTraceSymbol that = (AbstractDBTraceSymbol) obj;
		if (this.getID() == that.getID()) {
			return true;
		}
		// Fall back to attribute-wise comparison when the IDs differ
		if (this.getSymbolType() != that.getSymbolType()) {
			return false;
		}
		if (!this.getName().equals(that.getName())) {
			return false;
		}
		if (!this.getAddress().equals(that.getAddress())) {
			return false;
		}
		if (!Objects.equals(this.getParentSymbol(), that.getParentSymbol())) {
			return false;
		}
		return true;
	}

	@Override
	public int hashCode() {
		// Consistent with equals' ID short-circuit; attribute-equal symbols with
		// different IDs will still land in different buckets, as in the original.
		return Long.hashCode(getID());
	}

	@Override
	public String toString() {
		return name;
	}

	/**
	 * Throws if this symbol is the global namespace, which must not be modified.
	 */
	protected void assertNotGlobal() {
		if (isGlobal()) {
			throw new UnsupportedOperationException("Cannot modify the global namespace");
		}
	}

	/**
	 * Casts the given symbol to a namespace symbol, treating any other type as
	 * database corruption.
	 */
	protected DBTraceNamespaceSymbol assertIsNamespace(AbstractDBTraceSymbol symbol) {
		assert symbol != null;
		if (!(symbol instanceof DBTraceNamespaceSymbol)) {
			throw new AssertionError(
				"Trace database corrupted. Symbol has a non-namespace parent.");
		}
		return (DBTraceNamespaceSymbol) symbol;
	}

	@Override
	protected void fresh(boolean created) throws IOException {
		if (created) {
			return;
		}
		// Resolve the stored parent id to its namespace symbol; -1 means global
		parent = parentID == -1 ? null : assertIsNamespace(manager.getSymbolByID(parentID));
	}

	@Override
	public DBTraceOverlaySpaceAdapter getOverlaySpaceAdapter() {
		return manager.overlayAdapter;
	}

	@Override
	public DBTrace getTrace() {
		return manager.trace;
	}

	@Override
	public TraceThread getThread() {
		return null;
	}

	/**
	 * @return the address space this symbol belongs to, or null; subclasses override
	 */
	protected TraceAddressSpace getSpace() {
		return null;
	}

	@Override
	public long getID() {
		if (isGlobal()) {
			return GlobalNamespace.GLOBAL_NAMESPACE_ID;
		}
		// Pack the symbol type with the record key into one id
		return DBTraceSymbolManager.packID(getSymbolType().getID(), getKey());
	}

	@Override
	public String getName() {
		return name;
	}

	@Override
	public Address getAddress() {
		return SpecialAddress.NO_ADDRESS;
	}

	/**
	 * Collects every (address range, snap range) shape recorded for this symbol's
	 * id across all active spaces.
	 */
	protected Collection<? extends TraceAddressSnapRange> getRanges() {
		return new CatenatedCollection<>(Collections2.transform(manager.idMap.getActiveSpaces(),
			space -> Collections2.transform(
				space.getUserIndex(long.class, DBTraceSymbolIDEntry.ID_COLUMN).get(getID()),
				ent -> ent.getShape())));
	}

	// Internal
	/**
	 * Computes the union of the lifespans over all ranges where this symbol is
	 * applied, or null when the symbol is applied nowhere.
	 */
	public Range<Long> getLifespan() {
		// TODO: Cache this computation and/or keep it as transient fields?
		long min = Long.MAX_VALUE;
		long max = Long.MIN_VALUE;
		for (TraceAddressSnapRange range : getRanges()) {
			min = Math.min(min, DBTraceUtils.lowerEndpoint(range.getLifespan()));
			// Fixed: was Math.min, which computed the smallest upper endpoint,
			// truncating the lifespan or wrongly yielding null.
			max = Math.max(max, DBTraceUtils.upperEndpoint(range.getLifespan()));
		}
		if (min > max) {
			return null; // no ranges at all
		}
		return DBTraceUtils.toRange(min, max);
	}

	protected void doCollectAddressSet(AddressSet set) {
		for (TraceAddressSnapRange range : getRanges()) {
			set.add(range.getRange());
		}
	}

	// Internal
	public AddressSet getAddressSet() {
		AddressSet result = new AddressSet();
		doCollectAddressSet(result);
		return result;
	}

	@Override
	public String[] getPath() {
		try (LockHold hold = LockHold.lock(manager.lock.readLock())) {
			checkIsValid();
			if (isGlobal()) {
				return new String[] { getName() };
			}
			ArrayList<String> list = new ArrayList<>();
			// The global namespace is not included in the path
			if (parent != manager.globalNamespace) {
				parent.doGetPath(list);
			}
			list.add(getName());
			return list.toArray(new String[list.size()]);
		}
	}

	@Override
	public String getName(boolean includeNamespace) {
		if (!includeNamespace) {
			return getName();
		}
		return StringUtils.join(getPath(), "::");
	}

	@Override
	public DBTraceNamespaceSymbol getParentNamespace() {
		return parent;
	}

	@Override
	public DBTraceNamespaceSymbol getParentSymbol() {
		return parent;
	}

	@Override
	public boolean isDescendant(Namespace namespace) {
		// Walk up the parent chain looking for the given namespace (identity)
		for (AbstractDBTraceSymbol s = this; s != null; s = s.parent) {
			if (s == namespace) {
				return true;
			}
		}
		return false;
	}

	@Override
	public Collection<? extends DBTraceReference> getReferenceCollection() {
		return manager.trace.getReferenceManager().getReferencesBySymbolId(getID());
	}

	@Override
	public int getReferenceCount() {
		return getReferenceCollection().size();
	}

	@Override
	public boolean hasMultipleReferences() {
		// TODO: Could be slightly more efficient by just iterating twice?
		return getReferenceCount() > 1;
	}

	@Override
	public boolean hasReferences() {
		return !getReferenceCollection().isEmpty();
	}

	@Override
	public DBTraceReference[] getReferences(TaskMonitor monitor) {
		Collection<? extends DBTraceReference> refs = getReferenceCollection();
		// NOTE: Size computation is just iteration over address spaces. Should be snappy.
		DBTraceReference[] result = new DBTraceReference[refs.size()];
		int i = 0;
		for (DBTraceReference r : refs) {
			result[i++] = r;
			if (monitor.isCancelled()) {
				break;
			}
		}
		return result;
	}

	@Override
	public DBTraceReference[] getReferences() {
		return getReferences(TaskMonitor.DUMMY);
	}

	/**
	 * Directly sets the name and parent columns without validation or events.
	 */
	@SuppressWarnings("hiding")
	void rawSet(String name, long parentID) {
		this.name = name;
		this.parentID = parentID;
		update(NAME_COLUMN, PARENT_COLUMN);
	}

	/**
	 * Sets the name, parent, and source, updating all three columns.
	 */
	protected void set(String name, DBTraceNamespaceSymbol parent, SourceType source) {
		this.name = name;
		this.parentID = parent.getID();
		doSetSource(source);
		update(NAME_COLUMN, PARENT_COLUMN, FLAGS_COLUMN);
		this.parent = parent;
	}

	/**
	 * Renames the symbol in memory, returning the corresponding change record.
	 *
	 * The caller must still call {@link #update(DBObjectColumn...)} for {@link #NAME_COLUMN}.
	 *
	 * @param newName the new name
	 * @return the change record, or null if the name is unchanged
	 */
	protected TraceChangeRecord<?, ?> doSetNameWithEvent(String newName)
			throws InvalidInputException {
		String oldName = name;
		if (oldName.equals(newName)) {
			return null;
		}
		this.name = newName;
		return new TraceChangeRecord<>(TraceSymbolChangeType.RENAMED, getSpace(), this, oldName,
			newName);
	}

	/**
	 * Checks and sets the parent
	 *
	 * The caller must still call {@link #update(DBObjectColumn...)} for {@link #PARENT_COLUMN}.
	 *
	 * @param newParent the parent namespace
	 * @return the change record, or null if the parent is unchanged
	 * @throws CircularDependencyException
	 */
	protected TraceChangeRecord<?, ?> doSetParent(DBTraceNamespaceSymbol newParent)
			throws CircularDependencyException {
		DBTraceNamespaceSymbol oldParent = parent;
		if (oldParent == newParent) {
			return null;
		}
		if (!isValidParent(newParent)) {
			throw new IllegalArgumentException(
				"This symbol type cannot be a child of the given namespace type");
		}
		DBTraceNamespaceSymbol checkedParent = checkCircular(newParent);
		this.parent = checkedParent;
		this.parentID = parent.getID();
		return new TraceChangeRecord<>(TraceSymbolChangeType.PARENT_CHANGED, getSpace(), this,
			oldParent, checkedParent);
	}

	/**
	 * Encodes the source type's ordinal into the low nibble of the flags byte.
	 */
	protected void doSetSource(SourceType newSource) {
		flags =
			(byte) ((flags & SOURCE_CLEAR) | (newSource.ordinal() & SOURCE_MASK) << SOURCE_SHIFT);
	}

	/**
	 * Sets the flags for the given source.
	 *
	 * The caller must still call {@link #update(DBObjectColumn...)} for {@link #FLAGS_COLUMN}. The
	 * update should be called before the returned event, if applicable, is fired.
	 *
	 * @param newSource the source type
	 * @return the appropriate change event, if a change was actually made
	 */
	protected TraceChangeRecord<?, ?> doSetSourceWithEvent(SourceType newSource) {
		SourceType oldSource = getSource();
		if (oldSource == newSource) {
			return null;
		}
		doSetSource(newSource);
		return new TraceChangeRecord<>(TraceSymbolChangeType.SOURCE_CHANGED, getSpace(), this,
			oldSource, newSource);
	}

	@Override
	public boolean isValidParent(Namespace ns) {
		DBTraceNamespaceSymbol dbns = manager.checkIsMine(ns);
		if (dbns == null) {
			return false;
		}
		return MySymbolTypes.values()[this.getSymbolType().getID()].isValidParent(dbns);
	}

	/**
	 * Verifies the new parent does not create a cycle; namespace subclasses
	 * override with a real check. Base implementation passes it through.
	 */
	protected DBTraceNamespaceSymbol checkCircular(DBTraceNamespaceSymbol newParent)
			throws CircularDependencyException {
		return newParent;
	}

	/**
	 * Validates a proposed (name, source) pair. DEFAULT-source symbols can
	 * neither be created nor converted away from via this path.
	 */
	protected Pair<String, SourceType> validateNameAndSource(String newName, SourceType newSource)
			throws InvalidInputException {
		if ((newSource == SourceType.DEFAULT) ^ (getSource() == SourceType.DEFAULT)) {
			throw new IllegalArgumentException("Cannot create or remove DEFAULT symbols");
		}
		DBTraceSymbolManager.assertValidName(newName);
		return new ImmutablePair<>(newName, newSource);
	}

	@Override
	public void setName(String newName, SourceType newSource)
			throws DuplicateNameException, InvalidInputException {
		assertNotGlobal();
		Pair<String, SourceType> validated = validateNameAndSource(newName, newSource);
		newName = validated.getLeft();
		newSource = validated.getRight();
		try (LockHold hold = LockHold.lock(manager.lock.writeLock())) {
			TraceChangeRecord<?, ?> nameEvent = doSetNameWithEvent(newName);
			TraceChangeRecord<?, ?> sourceEvent = doSetSourceWithEvent(newSource);
			// Persist first, then fire events
			if (nameEvent != null || sourceEvent != null) {
				update(NAME_COLUMN, FLAGS_COLUMN);
			}
			if (nameEvent != null) {
				manager.trace.setChanged(nameEvent);
			}
			if (sourceEvent != null) {
				manager.trace.setChanged(sourceEvent);
			}
		}
	}

	protected void validateNameAndParent(String newName, DBTraceNamespaceSymbol newParent)
			throws DuplicateNameException {
		manager.assertNotDuplicate(this, newName, newParent);
	}

	@Override
	public void setNamespace(Namespace newNamespace)
			throws DuplicateNameException, InvalidInputException, CircularDependencyException {
		// TODO: Why InvalidInputException?
		assertNotGlobal();
		try (LockHold hold = LockHold.lock(manager.lock.writeLock())) {
			DBTraceNamespaceSymbol dbnsParent = manager.assertIsMine(newNamespace);
			validateNameAndParent(getName(), dbnsParent);
			TraceChangeRecord<?, ?> parentEvent = doSetParent(dbnsParent);
			if (parentEvent != null) {
				update(PARENT_COLUMN);
				manager.trace.setChanged(parentEvent);
			}
		}
	}

	@Override
	public void setNameAndNamespace(String newName, Namespace newNamespace, SourceType newSource)
			throws DuplicateNameException, InvalidInputException, CircularDependencyException {
		assertNotGlobal();
		Pair<String, SourceType> validated = validateNameAndSource(newName, newSource);
		newName = validated.getLeft();
		newSource = validated.getRight();
		try (LockHold hold = LockHold.lock(manager.lock.writeLock())) {
			TraceChangeRecord<?, ?> parentEvent = doSetParent(manager.assertIsMine(newNamespace));
			TraceChangeRecord<?, ?> nameEvent = doSetNameWithEvent(newName);
			TraceChangeRecord<?, ?> sourceEvent = doSetSourceWithEvent(newSource);
			// Persist first, then fire events
			if (parentEvent != null || nameEvent != null || sourceEvent != null) {
				update(NAME_COLUMN, PARENT_COLUMN, FLAGS_COLUMN);
			}
			if (parentEvent != null) {
				manager.trace.setChanged(parentEvent);
			}
			if (nameEvent != null) {
				manager.trace.setChanged(nameEvent);
			}
			if (sourceEvent != null) {
				manager.trace.setChanged(sourceEvent);
			}
		}
	}

	@Override
	public void setSource(SourceType newSource) {
		assertNotGlobal();
		try {
			Pair<String, SourceType> validated = validateNameAndSource(getName(), newSource);
			newSource = validated.getRight();
		}
		catch (InvalidInputException e) {
			// The current name is already valid, so this cannot happen
			throw new AssertionError(e);
		}
		try (LockHold hold = LockHold.lock(manager.lock.writeLock())) {
			TraceChangeRecord<?, ?> sourceEvent = doSetSourceWithEvent(newSource);
			if (sourceEvent != null) {
				update(FLAGS_COLUMN);
				manager.trace.setChanged(sourceEvent);
			}
		}
	}

	@Override
	public SourceType getSource() {
		assertNotGlobal();
		// Decode the SourceType ordinal from the low nibble of the flags byte
		return SourceType.values()[(flags >> SOURCE_SHIFT) & SOURCE_MASK];
	}

	@Override
	public boolean delete() {
		assertNotGlobal();
		try (LockHold hold = LockHold.lock(manager.lock.writeLock())) {
			return doDelete();
		}
	}

	protected boolean doDelete() {
		return manager.doDeleteSymbol(this);
	}

	@Override
	public boolean isDynamic() {
		return false;
	}

	@Override
	public boolean isGlobal() {
		return parentID == -1;
	}

	@Override
	public DBTraceProgramView getProgram() {
		return manager.trace.getProgramView();
	}

	@Override
	public ProgramLocation getProgramLocation() {
		return new ProgramLocation(getProgram(), getAddress());
	}

	/**
	 * {@inheritDoc}
	 *
	 * Since blocks cannot be relocated as they can in a {@link Program}, it's tempting to say all
	 * symbols are pinned; however, this presents in the UI and is a bit confusing and/or
	 * distracting.
	 */
	@Override
	public boolean isPinned() {
		return false;
	}

	@Override
	public void setPinned(boolean pinned) {
		// Nothing
	}

	@Override
	public boolean isExternal() {
		return false;
	}

	@Override
	public boolean isExternalEntryPoint() {
		return false;
	}
}
| |
/*******************************************************************************
* Caleydo - Visualization for Molecular Biology - http://caleydo.org
* Copyright (c) The Caleydo Team. All rights reserved.
* Licensed under the new BSD license, available at http://caleydo.org/license
*******************************************************************************/
package org.caleydo.view.domino.internal.ui;
import gleem.linalg.Vec2f;
import java.util.List;
import org.caleydo.core.data.collection.EDimension;
import org.caleydo.core.data.selection.SelectionManager;
import org.caleydo.core.id.IDCategory;
import org.caleydo.core.id.IDMappingManagerRegistry;
import org.caleydo.core.id.IDType;
import org.caleydo.core.view.opengl.canvas.IGLMouseListener.IMouseEvent;
import org.caleydo.core.view.opengl.layout2.GLElement;
import org.caleydo.core.view.opengl.layout2.GLElementContainer;
import org.caleydo.core.view.opengl.layout2.GLGraphics;
import org.caleydo.core.view.opengl.layout2.dnd.IDnDItem;
import org.caleydo.core.view.opengl.layout2.dnd.IDragGLSource;
import org.caleydo.core.view.opengl.layout2.dnd.IDragInfo;
import org.caleydo.core.view.opengl.layout2.layout.IGLLayout2;
import org.caleydo.core.view.opengl.layout2.layout.IGLLayoutElement;
import org.caleydo.core.view.opengl.picking.IPickingListener;
import org.caleydo.core.view.opengl.picking.Pick;
import org.caleydo.view.domino.internal.Domino;
import org.caleydo.view.domino.internal.ScaleLogic;
import org.caleydo.view.domino.internal.UndoStack;
import org.caleydo.view.domino.internal.dnd.ADragInfo;
import org.caleydo.view.domino.internal.dnd.RulerDragInfo;
import org.caleydo.view.domino.internal.toolbar.RulerTools;
import org.caleydo.view.domino.internal.undo.ZoomRulerCmd;
/**
* @author Samuel Gratzl
*
*/
public class Ruler extends GLElementContainer implements IDragGLSource, IPickingListener, IGLLayout2 {
private final SelectionManager manager;
private int maxElements;
private float scaleFactor = 1.f;
private EDimension dim = EDimension.RECORD;
private boolean hovered = false;
public Ruler(SelectionManager manager, UndoStack undo) {
setLayout(this);
this.manager = manager;
maxElements = Math.min(100, getTotalMax(manager.getIDType().getIDCategory()));
setVisibility(EVisibility.PICKABLE);
onPick(this);
this.add(createToolBar(undo));
updateSize();
}
/**
* @return the dim, see {@link #dim}
*/
public EDimension getDim() {
return dim;
}
/**
* @return
*/
private GLElement createToolBar(UndoStack undo) {
RulerTools tools = new RulerTools(undo, this);
tools.setSize(tools.getWidth(24), 24);
return tools;
}
@Override
public boolean doLayout(List<? extends IGLLayoutElement> children, float w, float h, IGLLayoutElement parent,
int deltaTimeMs) {
final IGLLayoutElement toolbar = children.get(0);
if (hovered) {
float wi = toolbar.getSetWidth();
if (wi > w)
toolbar.setBounds((w - wi) * 0.5f, -24, wi, 24);
else
toolbar.setBounds(w - wi, -24, wi, 24);
} else
toolbar.hide();
return false;
}
public IDType getIDType() {
return manager.getIDType();
}
public IDCategory getIDCategory() {
return manager.getIDType().getIDCategory();
}
/**
* @param idCategory
* @return
*/
public static int getTotalMax(IDCategory category) {
return IDMappingManagerRegistry.get().getIDMappingManager(category)
.getAllMappedIDs(category.getPrimaryMappingType()).size();
}
/**
* @return the manager, see {@link #manager}
*/
public SelectionManager getManager() {
return manager;
}
public void transpose() {
this.dim = this.dim.opposite();
updateSize();
}
@Override
protected void renderImpl(GLGraphics g, float w, float h) {
float max = dim.select(w, h);
// float f = max / maxElements;
// renderSelection(g, w, h, f);
renderBaseAxis(g, w, h);
// renderMarkers(g, w, h, f);
super.renderImpl(g, w, h);
}
/**
* @param maxElements
* setter, see {@link maxElements}
*/
public void setMaxElements(int maxElements) {
if (this.maxElements == maxElements)
return;
if (maxElements == 0) {// no invalid values
maxElements = 100;
}
this.maxElements = maxElements;
updateSize();
}
/**
* @return the maxElements, see {@link #maxElements}
*/
public int getMaxElements() {
return maxElements;
}
private void updateSize() {
float new_ = scaleFactor * maxElements;
if (dim.isHorizontal())
setSize(new_, 20);
else
setSize(20, new_);
relayout();
}
// private void renderMarkers(GLGraphics g, float w, float h, float f) {
// int markerDelta = determineMarkerStep(maxElements, dim.select(w, h));
// for (int i = 0; i <= maxElements; i += markerDelta) {
// float v = f * i;
// if (dim.isHorizontal()) {
// g.drawLine(v, 0, v, h*0.5f);
// } else {
// g.drawLine(0, v, w*0.5f, v);
// }
// }
// if (dim.isHorizontal()) {
// g.drawLine(w, 0, w, h * 0.5f);
// } else {
// g.drawLine(0, h, w * 0.5f, h);
// }
// renderMarkerLabels(g, w, h, f, markerDelta);
// }
// private void renderMarkerLabels(GLGraphics g, float w, float h, float f, int markerDelta) {
// int drawLabelsDelta = markerDelta * Math.round((float) Math.ceil(dim.select(30, 10) / (f * markerDelta)));
// final float hi = dim.isHorizontal() ? Math.min(h * 0.5f, 10) : Math.min(f * drawLabelsDelta, 10);
// for (int i = 0; i < maxElements; i += drawLabelsDelta) {
// float v = f * i;
// if (dim.isHorizontal()) {
// g.drawText(i + "", v + 2, h * 0.5f, f * drawLabelsDelta, hi);
// } else {
// g.drawText(i + "", 3, v + 1, w - 3, hi);
// }
// }
// if (dim.isHorizontal()) {
// g.drawText(maxElements + "", w - f * drawLabelsDelta - 2, h * 0.5f, f * drawLabelsDelta, hi, VAlign.RIGHT);
// } else {
// g.drawText(maxElements + "", 3, h - hi - 2, w - 3, hi);
// }
// }
private void renderBaseAxis(GLGraphics g, float w, float h) {
final String label = getMaxElements() + " " + getLabel(manager);
if (hovered)
g.lineWidth(3);
// g.drawRect(0, 0, w, h);
final int stitch = 12;
if (dim.isHorizontal()) {
// g.drawText(label, -306, h - 14, 300, stitch, VAlign.RIGHT);
g.drawText(label, w + 6, h - 14, 300, stitch);
g.drawLine(0, h, w, h);
g.drawLine(0, h, 0, h - stitch);
g.drawLine(w, h, w, h - stitch);
} else {
g.drawText(label, 0, -16, 300, stitch);
g.drawLine(0, 0, 0, h);
g.drawLine(0, 0, stitch, 0);
g.drawLine(0, h, stitch, h);
}
g.lineWidth(1);
}
// private static int determineMarkerStep(int maxElements, float size) {
// if (maxElements <= 100)
// return 10; // 10x
// if (maxElements <= 250)
// return 25; // 10x
// if (maxElements <= 500)
// return 50; // 10x
// if (maxElements <= 1000)
// return 100;
// return 250;
// // return Math.max(maxElements / 10, 1);
// }
@Override
public void pick(Pick pick) {
switch (pick.getPickingMode()) {
case MOUSE_WHEEL:
final Vec2f bak = getSize();
Vec2f shift = ScaleLogic.shiftLogic(((IMouseEvent) pick), bak);
float change = dim.select(shift);
if (change == 0)
return;
float ori = scaleFactor * maxElements;
float new_ = Math.max(ori + change, 1);
float scale = new_ / maxElements;
UndoStack undo = findParent(Domino.class).getUndo();
undo.push(new ZoomRulerCmd(getIDCategory(), scale, this.scaleFactor));
break;
case MOUSE_OVER:
context.getMouseLayer().addDragSource(this);
hovered = true;
relayout();
break;
case MOUSE_OUT:
context.getMouseLayer().removeDragSource(this);
hovered = false;
relayout();
break;
case RIGHT_CLICKED:
context.getSWTLayer().showContextMenu(((RulerTools) get(0)).asContextMenu());
break;
default:
break;
}
}
@Override
protected void takeDown() {
context.getMouseLayer().removeDragSource(this);
super.takeDown();
}
@Override
public IDragInfo startSWTDrag(IDragEvent event) {
return new RulerDragInfo(event.getMousePos(), this);
}
private String getLabel(SelectionManager manager) {
return manager.getIDType().getIDCategory().getCategoryName();
}
@Override
public void onDropped(IDnDItem info) {
}
@Override
public GLElement createUI(IDragInfo info) {
if (info instanceof ADragInfo) {
return ((ADragInfo) info).createUI(findParent(Domino.class));
}
return null;
}
/**
* @param shift
*/
public void shiftLocation(Vec2f shift) {
Vec2f loc = getLocation();
setLocation(loc.x() + shift.x(), loc.y() + shift.y());
}
/**
* @param shift
*/
public void zoom(Vec2f shift) {
float change = dim.select(shift);
if (change == 0)
return;
float ori = scaleFactor * maxElements;
float new_ = Math.max(ori + change, 1);
float scale = new_ / maxElements;
this.scaleFactor = scale;
updateSize();
}
/**
* @param scale
*/
public void zoom(float scale) {
this.scaleFactor = scale;
updateSize();
}
/**
* @return the scaleFactor, see {@link #scaleFactor}
*/
public float getScaleFactor() {
return scaleFactor;
}
}
| |
package com.armadialogcreator.gui.main.controlPropertiesEditor;
import com.armadialogcreator.canvas.Region;
import com.armadialogcreator.core.sv.SVColor;
import com.armadialogcreator.core.sv.SVColorArray;
import com.armadialogcreator.gui.GenericResponseFooter;
import com.armadialogcreator.gui.fxcontrol.inputfield.DoubleChecker;
import com.armadialogcreator.gui.fxcontrol.inputfield.InputField;
import com.armadialogcreator.img.icons.ADCIcons;
import com.armadialogcreator.lang.Lang;
import com.armadialogcreator.util.ReadOnlyValueObserver;
import com.armadialogcreator.util.ValueListener;
import com.armadialogcreator.util.ValueObserver;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.geometry.Insets;
import javafx.geometry.Orientation;
import javafx.geometry.Pos;
import javafx.scene.Node;
import javafx.scene.canvas.Canvas;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.control.*;
import javafx.scene.image.ImageView;
import javafx.scene.layout.*;
import javafx.scene.paint.Color;
import javafx.scene.text.Font;
import javafx.stage.Popup;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ResourceBundle;
/**
A {@link SVColorArray} editor
@author Kayler
@since 07/13/2016 */
/**
 A {@link SVColorArray} editor: a color picker plus a popup that allows entering the r/g/b/a
 channels numerically (values in [0, 1]) or via sliders.

 @author Kayler
 @since 07/13/2016 */
public class ColorArrayValueEditor implements ValueEditor<SVColorArray> {
	private final ColorPicker colorPicker = new ColorPicker();
	private final HBox masterPane = new HBox(2, colorPicker);
	/** observer notified whenever the edited color changes; null means "no color" */
	private final ValueObserver<SVColorArray> valueObserver = new ValueObserver<>(null);

	public ColorArrayValueEditor() {
		Button btnBrackets = new Button("", new ImageView(ADCIcons.ICON_BRACKETS));
		btnBrackets.setOnAction(event -> {
			// open the numeric r/g/b/a editor popup just above the button
			ArrayEditorPopup editor = new ArrayEditorPopup(colorPicker.getValue());
			double x = btnBrackets.localToScreen(0, 0).getX();
			double y = masterPane.localToScreen(0, -masterPane.getHeight()).getY();
			editor.show(btnBrackets, x, y);
			editor.showingProperty().addListener((observable, oldValue, showing) -> {
				// when the popup closes and was not cancelled, adopt its color
				if (!showing && !editor.isCancelled()) {
					if (editor.getColor() == null) {
						valueObserver.updateValue(null);
						colorPicker.setValue(null);
					} else {
						valueObserver.updateValue(new SVColorArray(editor.getColor()));
						colorPicker.setValue(editor.getColor());
					}
				}
			});
		});
		masterPane.getChildren().add(btnBrackets);
		colorPicker.setValue(null);
		colorPicker.setOnAction(new EventHandler<ActionEvent>() {
			@Override
			public void handle(ActionEvent event) {
				// propagate picker changes into the observer (null clears the value)
				Color newValue = colorPicker.getValue();
				SVColorArray color;
				if (newValue == null) {
					color = null;
				} else {
					color = new SVColorArray(newValue);
				}
				valueObserver.updateValue(color);
			}
		});
	}

	@Override
	public void submitCurrentData() {
		// nothing to submit; values are propagated eagerly via the observer
	}

	@Override
	public SVColorArray getValue() {
		return valueObserver.getValue();
	}

	@Override
	public void setValue(SVColorArray val) {
		if (val == null) {
			colorPicker.setValue(null);
		} else {
			colorPicker.setValue(val.toJavaFXColor());
		}
	}

	@Override
	public @NotNull Node getRootNode() {
		return masterPane;
	}

	@Override
	public boolean displayFullWidth() {
		return false;
	}

	@Override
	public void focusToEditor() {
		colorPicker.requestFocus();
	}

	@NotNull
	@Override
	public ReadOnlyValueObserver<SVColorArray> getReadOnlyObserver() {
		return valueObserver.getReadOnlyValueObserver();
	}

	/**
	 * Popup that edits the four color channels individually, with a live preview canvas and a
	 * read-only text field showing the array representation.
	 */
	private class ArrayEditorPopup extends Popup {
		private final InputField<DoubleChecker, Double> r = new InputField<>(new DoubleChecker(), 0d);
		private final InputField<DoubleChecker, Double> g = new InputField<>(new DoubleChecker(), 0d);
		private final InputField<DoubleChecker, Double> b = new InputField<>(new DoubleChecker(), 0d);
		private final InputField<DoubleChecker, Double> a = new InputField<>(new DoubleChecker(), 0d);
		private final TextField tfAsArray = new TextField();
		// cancelled until the user explicitly confirms via the OK button
		private boolean cancelled = true;

		public ArrayEditorPopup(@Nullable Color initialColor) {
			VBox root = new VBox(5);
			root.setPadding(new Insets(10));
			getContent().add(root);
			root.setStyle("-fx-background-color:-fx-background");
			root.setBorder(new Border(
					new BorderStroke(Color.GRAY, BorderStrokeStyle.SOLID,
							CornerRadii.EMPTY, BorderStroke.THIN
					)
					)
			);
			Canvas canvas = new Canvas(128, 32);
			StackPane stackPaneCanvas = new StackPane(canvas);
			stackPaneCanvas.setBorder(root.getBorder());
			stackPaneCanvas.setMaxWidth(canvas.getWidth());
			stackPaneCanvas.setAlignment(Pos.CENTER_LEFT);
			HBox paneHeader = new HBox(5, stackPaneCanvas, tfAsArray);
			root.getChildren().add(paneHeader);
			HBox.setHgrow(tfAsArray, Priority.ALWAYS);
			tfAsArray.setEditable(false);

			GraphicsContext gc = canvas.getGraphicsContext2D();
			// repaint the preview and array text whenever any channel changes
			ValueListener<Double> valListener = (observer, oldValue, newValue) -> {
				Color c = getCurrentColor();
				if (c != null) {
					if (c.getOpacity() < 1) {
						//draw a grid to show theres transparency
						gc.setGlobalAlpha(1);
						gc.setFill(Color.WHITE);
						Region.paintCheckerboard(
								gc, 0, 0, (int) canvas.getWidth(), (int) canvas.getHeight(), Color.GRAY, Color.WHITE,
								5);
					}
					gc.setGlobalAlpha(c.getOpacity());
					gc.setFill(c);
					gc.fillRect(0, 0, canvas.getWidth(), canvas.getHeight());
					tfAsArray.setText(SVColor.toStringF(c.getRed(), c.getGreen(), c.getBlue(), c.getOpacity()));
				}
			};
			r.getValueObserver().addListener(valListener);
			g.getValueObserver().addListener(valListener);
			b.getValueObserver().addListener(valListener);
			a.getValueObserver().addListener(valListener);
			if (initialColor != null) {
				r.getValueObserver().updateValue(getRounded(initialColor.getRed()));
				g.getValueObserver().updateValue(getRounded(initialColor.getGreen()));
				b.getValueObserver().updateValue(getRounded(initialColor.getBlue()));
				a.getValueObserver().updateValue(getRounded(initialColor.getOpacity()));
			}
			//r
			root.getChildren().add(getColorEditor("r", r));
			//g
			root.getChildren().add(getColorEditor("g", g));
			//b
			root.getChildren().add(getColorEditor("b", b));
			//a
			root.getChildren().add(getColorEditor("a", a));
			//footer
			root.getChildren().add(new Separator(Orientation.HORIZONTAL));
			GenericResponseFooter footer = new GenericResponseFooter(true, true, false,
					null,
					cancelEvent -> {
						cancelled = true;
						ArrayEditorPopup.this.hide();
					},
					okEvent -> {
						if (!allValuesValid()) {
							return; // keep the popup open until the input is valid
						}
						cancelled = false;
						ArrayEditorPopup.this.hide();
					}
			);
			ResourceBundle bundle = Lang.ApplicationBundle();
			footer.getBtnOk().setText(bundle.getString("ValueEditors.ColorArrayEditor.use"));
			root.getChildren().add(footer);

			setAutoHide(true);
			setHideOnEscape(true); //when push esc key, hide it

			// trigger an initial preview paint
			valListener.valueUpdated(r.getValueObserver(), null, null);
		}

		/**
		 * Returns true only when all four channel fields hold a value within [0, 1].
		 * (Renamed from {@code hasInvalid}, whose name had the inverted meaning.)
		 */
		private boolean allValuesValid() {
			if (r.getValue() == null) {
				return false;
			}
			if (g.getValue() == null) {
				return false;
			}
			if (b.getValue() == null) {
				return false;
			}
			if (a.getValue() == null) {
				return false;
			}
			return boundCheck(r) && boundCheck(g) && boundCheck(b) && boundCheck(a);
		}

		/** @return true if the field's value lies within [0, 1] */
		private boolean boundCheck(InputField<DoubleChecker, Double> tf) {
			double v = tf.getValue();
			return v >= 0 && v <= 1.0;
		}

		/** builds one "label + input field + slider" row for a single channel */
		private Node getColorEditor(String colorLetter, InputField<DoubleChecker, Double> tf) {
			HBox hbox = new HBox(5);
			hbox.setAlignment(Pos.CENTER_LEFT);
			Label lblColor = new Label(colorLetter);
			lblColor.setFont(Font.font("monospace"));
			hbox.getChildren().add(lblColor);
			hbox.getChildren().add(tf);
			Slider slider = new Slider(0, 100, tf.getValue() == null ? 0 : (tf.getValue() * 100));
			slider.setShowTickLabels(true);
			slider.setShowTickMarks(true);
			slider.valueProperty().addListener((observable, oldValue, newValue) -> {
				// keep slider and text field in sync without feedback loops
				double rounded = getRounded(newValue.doubleValue() / 100);
				if (tf.getValue() != null && equalTo(tf.getValue(), rounded)) {
					return;
				}
				tf.getValueObserver().updateValue(rounded);
			});
			hbox.getChildren().add(slider);
			tf.getValueObserver().addListener((observer, oldValue, newValue) -> {
				slider.setValue(newValue == null ? 0 : newValue * 100);
			});
			return hbox;
		}

		/** @return the confirmed color, or null if the popup was cancelled or input is invalid */
		@Nullable
		public Color getColor() {
			return cancelled ? null :
					getCurrentColor();
		}

		/** @return the color built from the current field values, or null if any is invalid */
		@Nullable
		private Color getCurrentColor() {
			return !allValuesValid() ? null :
					Color.color(
							r.getValue().doubleValue(),
							g.getValue().doubleValue(),
							b.getValue().doubleValue(),
							a.getValue().doubleValue()
					);
		}

		private double getRounded(double d) {
			return SVColorArray.round(d);
		}

		private boolean equalTo(double d1, double d2) {
			return SVColorArray.equalTo(d1, d2);
		}

		public boolean isCancelled() {
			return cancelled;
		}
	}
}
| |
/*******************************************************************************
* Copyright (c) 2009 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
* Zend Technologies
*******************************************************************************/
package org.eclipse.php.internal.ui.text.template.contentassist;
import org.eclipse.core.runtime.Assert;
import org.eclipse.jface.text.*;
import org.eclipse.jface.text.contentassist.ICompletionProposal;
import org.eclipse.jface.text.contentassist.ICompletionProposalExtension;
import org.eclipse.jface.text.contentassist.ICompletionProposalExtension2;
import org.eclipse.jface.text.contentassist.IContextInformation;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Point;
/**
 * An enhanced implementation of the <code>ICompletionProposal</code> interface
 * implementing all the extension interfaces. It uses a position to track its
 * replacement offset and length. The position must be set up externally.
 */
public class PositionBasedCompletionProposal
		implements ICompletionProposal, ICompletionProposalExtension, ICompletionProposalExtension2 {

	/** The string to be displayed in the completion proposal popup */
	private String fDisplayString;
	/** The replacement string */
	private String fReplacementString;
	/** The replacement position. */
	private Position fReplacementPosition;
	/** The cursor position after this proposal has been applied */
	private int fCursorPosition;
	/** The image to be displayed in the completion proposal popup */
	private Image fImage;
	/** The context information of this proposal */
	private IContextInformation fContextInformation;
	/** The additional info of this proposal */
	private String fAdditionalProposalInfo;
	/** The trigger characters */
	private char[] fTriggerCharacters;

	/**
	 * Creates a new completion proposal based on the provided information. The
	 * replacement string is considered being the display string too. All
	 * remaining fields are set to <code>null</code>.
	 *
	 * @param replacementString
	 *            the actual string to be inserted into the document
	 * @param replacementPosition
	 *            the position of the text to be replaced
	 * @param cursorPosition
	 *            the position of the cursor following the insert relative to
	 *            replacementOffset
	 */
	public PositionBasedCompletionProposal(String replacementString, Position replacementPosition, int cursorPosition) {
		this(replacementString, replacementPosition, cursorPosition, null, null, null, null, null);
	}

	/**
	 * Creates a new completion proposal. All fields are initialized based on
	 * the provided information.
	 *
	 * @param replacementString
	 *            the actual string to be inserted into the document
	 * @param replacementPosition
	 *            the position of the text to be replaced
	 * @param cursorPosition
	 *            the position of the cursor following the insert relative to
	 *            replacementOffset
	 * @param image
	 *            the image to display for this proposal
	 * @param displayString
	 *            the string to be displayed for the proposal
	 * @param contextInformation
	 *            the context information associated with this proposal
	 * @param additionalProposalInfo
	 *            the additional information associated with this proposal
	 * @param triggers
	 *            the trigger characters
	 */
	public PositionBasedCompletionProposal(String replacementString, Position replacementPosition, int cursorPosition,
			Image image, String displayString, IContextInformation contextInformation, String additionalProposalInfo,
			char[] triggers) {
		Assert.isNotNull(replacementString);
		// use isNotNull (not isTrue) for a descriptive failure, consistent with the check above
		Assert.isNotNull(replacementPosition);

		fReplacementString = replacementString;
		fReplacementPosition = replacementPosition;
		fCursorPosition = cursorPosition;
		fImage = image;
		fDisplayString = displayString;
		fContextInformation = contextInformation;
		fAdditionalProposalInfo = additionalProposalInfo;
		fTriggerCharacters = triggers;
	}

	/*
	 * @see ICompletionProposal#apply(IDocument)
	 */
	@Override
	public void apply(IDocument document) {
		try {
			document.replace(fReplacementPosition.getOffset(), fReplacementPosition.getLength(), fReplacementString);
		} catch (BadLocationException x) {
			// ignore: the tracked position no longer matches the document
		}
	}

	/*
	 * @see ICompletionProposal#getSelection(IDocument)
	 */
	@Override
	public Point getSelection(IDocument document) {
		// place the caret fCursorPosition characters after the replacement start, empty selection
		return new Point(fReplacementPosition.getOffset() + fCursorPosition, 0);
	}

	/*
	 * @see ICompletionProposal#getContextInformation()
	 */
	@Override
	public IContextInformation getContextInformation() {
		return fContextInformation;
	}

	/*
	 * @see ICompletionProposal#getImage()
	 */
	@Override
	public Image getImage() {
		return fImage;
	}

	/*
	 * @see
	 * org.eclipse.jface.text.contentassist.ICompletionProposal#getDisplayString
	 * ()
	 */
	@Override
	public String getDisplayString() {
		// fall back to the replacement string when no explicit display string was given
		if (fDisplayString != null)
			return fDisplayString;
		return fReplacementString;
	}

	/*
	 * @see ICompletionProposal#getAdditionalProposalInfo()
	 */
	@Override
	public String getAdditionalProposalInfo() {
		return fAdditionalProposalInfo;
	}

	/*
	 * @see
	 * org.eclipse.jface.text.contentassist.ICompletionProposalExtension2#apply
	 * (org.eclipse.jface.text.ITextViewer, char, int, int)
	 */
	@Override
	public void apply(ITextViewer viewer, char trigger, int stateMask, int offset) {
		apply(viewer.getDocument());
	}

	/*
	 * @see org.eclipse.jface.text.contentassist.ICompletionProposalExtension2#
	 * selected (org.eclipse.jface.text.ITextViewer, boolean)
	 */
	@Override
	public void selected(ITextViewer viewer, boolean smartToggle) {
		// no selection feedback needed
	}

	/*
	 * @see org.eclipse.jface.text.contentassist.ICompletionProposalExtension2#
	 * unselected (org.eclipse.jface.text.ITextViewer)
	 */
	@Override
	public void unselected(ITextViewer viewer) {
		// no selection feedback needed
	}

	/*
	 * @see org.eclipse.jface.text.contentassist.ICompletionProposalExtension2#
	 * validate (org.eclipse.jface.text.IDocument, int,
	 * org.eclipse.jface.text.DocumentEvent)
	 */
	@Override
	public boolean validate(IDocument document, int offset, DocumentEvent event) {
		try {
			// the proposal stays valid as long as the text typed since the replacement
			// start is a prefix of the replacement string
			String content = document.get(fReplacementPosition.getOffset(), offset - fReplacementPosition.getOffset());
			if (fReplacementString.startsWith(content))
				return true;
		} catch (BadLocationException e) {
			// ignore concurrently modified document
		}
		return false;
	}

	/*
	 * @see
	 * org.eclipse.jface.text.contentassist.ICompletionProposalExtension#apply
	 * (org.eclipse.jface.text.IDocument, char, int)
	 */
	@Override
	public void apply(IDocument document, char trigger, int offset) {
		// not called any more
	}

	/*
	 * @see org.eclipse.jface.text.contentassist.ICompletionProposalExtension#
	 * isValidFor (org.eclipse.jface.text.IDocument, int)
	 */
	@Override
	public boolean isValidFor(IDocument document, int offset) {
		// not called any more
		return false;
	}

	/*
	 * @seeorg.eclipse.jface.text.contentassist.ICompletionProposalExtension#
	 * getTriggerCharacters()
	 */
	@Override
	public char[] getTriggerCharacters() {
		return fTriggerCharacters;
	}

	/*
	 * @seeorg.eclipse.jface.text.contentassist.ICompletionProposalExtension#
	 * getContextInformationPosition()
	 */
	@Override
	public int getContextInformationPosition() {
		return fReplacementPosition.getOffset();
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.datatorrent.api;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.codehaus.jackson.map.DeserializationConfig;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;
import org.codehaus.jackson.map.ObjectWriter;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.lang.StringUtils;
import com.google.common.base.Throwables;
/**
* This interface is essentially serializer/deserializer interface which works with String as
* the serialized type. When initializing the attributes from the properties file, attribute
* values represented as Strings are needed to be converted to POJO. This class facilitates the
* conversion from and to String for attribute values.
*
* @param <T> Type of the object which can be converted to/from String.
* @since 0.9.0
*/
public interface StringCodec<T>
{
/**
* Given a string representation (typically from properties file) for an object , create object from it.
*
* @param string Type of the POJO which is created from String representation.
* @return POJO obtained as a result of deserialization
*/
T fromString(String string);
/**
* Given a POJO, serialize it to a String object (typically to be stored in properties file).
*
* @param pojo The object which needs to be serialized.
* @return Serialized representation of pojo..
*/
String toString(T pojo);
class Factory
{
  /**
   * Returns a shared codec for one of the directly supported types (String, Integer, Long,
   * Boolean, or any enum), or {@code null} when the type is not handled here.
   *
   * @param cls type for which a codec is requested
   * @return matching codec, or null if none is registered for the type
   */
  public static StringCodec<?> getInstance(Class<?> cls)
  {
    if (cls == String.class) {
      return String2String.getInstance();
    }
    if (cls == Integer.class) {
      return Integer2String.getInstance();
    }
    if (cls == Long.class) {
      return Long2String.getInstance();
    }
    if (cls == Boolean.class) {
      return Boolean2String.getInstance();
    }
    if (Enum.class.isAssignableFrom(cls)) {
      return Enum2String.getInstance(cls);
    }
    return null;
  }
}
// Identity codec: a String's serialized form is the String itself.
class String2String implements StringCodec<String>, Serializable
{
  @SuppressWarnings("deprecation")
  private static final String2String instance = new String2String();

  // Returns the shared, stateless singleton instance.
  public static StringCodec<String> getInstance()
  {
    return instance;
  }

  /**
   * @deprecated As of release 3.5.0, replaced by {@link #getInstance()}
   */
  @Deprecated
  public String2String()
  {
  }

  @Override
  public String fromString(String string)
  {
    return string;
  }

  @Override
  public String toString(String pojo)
  {
    return pojo;
  }

  private static final long serialVersionUID = 201310141156L;
}
/** Codec converting between {@link Integer} and its decimal string form. */
class Integer2String implements StringCodec<Integer>, Serializable
{
  @SuppressWarnings("deprecation")
  private static final Integer2String instance = new Integer2String();

  /** @return the shared, stateless singleton instance */
  public static StringCodec<Integer> getInstance()
  {
    return instance;
  }

  /**
   * @deprecated As of release 3.5.0, replaced by {@link #getInstance()}
   */
  @Deprecated
  public Integer2String()
  {
  }

  @Override
  public Integer fromString(String string)
  {
    // autoboxing goes through Integer.valueOf(int), matching the original caching behavior
    return Integer.parseInt(string);
  }

  @Override
  public String toString(Integer pojo)
  {
    return pojo == null ? "null" : pojo.toString();
  }

  private static final long serialVersionUID = 201310141157L;
}
/** Codec converting between {@link Long} and its decimal string form. */
class Long2String implements StringCodec<Long>, Serializable
{
  @SuppressWarnings("deprecation")
  private static final Long2String instance = new Long2String();

  /** @return the shared, stateless singleton instance */
  public static StringCodec<Long> getInstance()
  {
    return instance;
  }

  /**
   * @deprecated As of release 3.5.0, replaced by {@link #getInstance()}
   */
  @Deprecated
  public Long2String()
  {
  }

  @Override
  public Long fromString(String string)
  {
    // autoboxing goes through Long.valueOf(long), matching the original caching behavior
    return Long.parseLong(string);
  }

  @Override
  public String toString(Long pojo)
  {
    return pojo == null ? "null" : pojo.toString();
  }

  private static final long serialVersionUID = 201310141158L;
}
/** Codec converting between {@link Boolean} and "true"/"false" strings. */
class Boolean2String implements StringCodec<Boolean>, Serializable
{
  @SuppressWarnings("deprecation")
  private static final Boolean2String instance = new Boolean2String();

  /** @return the shared, stateless singleton instance */
  public static StringCodec<Boolean> getInstance()
  {
    return instance;
  }

  /**
   * @deprecated As of release 3.5.0, replaced by {@link #getInstance()}
   */
  @Deprecated
  public Boolean2String()
  {
  }

  @Override
  public Boolean fromString(String string)
  {
    // autoboxing yields the canonical Boolean.TRUE/FALSE, same as Boolean.valueOf(String)
    return Boolean.parseBoolean(string);
  }

  @Override
  public String toString(Boolean pojo)
  {
    return pojo == null ? "null" : pojo.toString();
  }

  private static final long serialVersionUID = 201310141159L;
}
/**
 * The attributes which represent arbitrary objects for which the schema cannot be
 * standardized, we allow them to be represented as <ClassName>:<Constructor_String>:<Property_String> representation.
 * This allows us to instantiate the class by invoking its constructor which takes
 * <String> as argument. If only the <ClassName> is specified, then just the class is instantiated using default
 * constructor. If colon is specified then class is instantiated using constructor with
 * string as an argument.If properties are specified then properties will be set on the object. The properties
 * are defined in property=value format separated by colon(:)
 *
 * Note that the {@link #toString(Object) toString} method is by default NOT the proper reverse of the {@link
 * #fromString(String) fromString} method. In order for the {@link #toString(Object) toString} method to become a
 * proper reverse of the {@link #fromString(String) fromString} method, T's {@link T#toString() toString} method
 * must output null or <Constructor_String> or the <Constructor_String>:<Property_String> format as stated above.
 *
 * @param <T> Type of the object which is converted to/from String
 */
class Object2String<T> implements StringCodec<T>, Serializable
{
  // Shared raw-typed singleton for the default ":" / "=" separators; getInstance()
  // performs the (unchecked) cast to the caller's type parameter.
  @SuppressWarnings("deprecation")
  private static final Object2String instance = new Object2String();

  // Returns the shared instance using ":" as part separator and "=" as property separator.
  public static <T> StringCodec<T> getInstance()
  {
    return instance;
  }

  // Returns a codec with a custom part separator and the default "=" property separator.
  public static <T> StringCodec<T> getInstance(String separator)
  {
    return getInstance(separator, "=");
  }

  // Returns a codec with custom part and property separators.
  @SuppressWarnings("deprecation")
  public static <T> StringCodec<T> getInstance(String separator, String propertySeparator)
  {
    return new Object2String<>(separator, propertySeparator);
  }

  // Separates class name, constructor string, and property assignments.
  public final String separator;
  // Separates property names from values within each assignment.
  public final String propertySeparator;

  /**
   * @deprecated As of release 3.5.0, replaced by {@link #getInstance()}
   */
  @SuppressWarnings("deprecation")
  @Deprecated
  public Object2String()
  {
    this(":", "=");
  }

  /**
   * @deprecated As of release 3.5.0, replaced by {@link #getInstance(String)}
   */
  @SuppressWarnings("deprecation")
  @Deprecated
  public Object2String(String separator)
  {
    this(separator, "=");
  }

  /**
   * @deprecated As of release 3.5.0, replaced by {@link #getInstance(String, String)}
   */
  @Deprecated
  public Object2String(String separator, String propertySeparator)
  {
    this.separator = separator;
    this.propertySeparator = propertySeparator;
  }

  @Override
  @SuppressWarnings({"UseSpecificCatch", "BroadCatchBlock", "TooBroadCatch"})
  public T fromString(String string)
  {
    // NOTE(review): String.split treats the separator as a REGEX; separators containing
    // metacharacters (e.g. "|", ".") would need Pattern.quote — verify callers only pass
    // literal-safe separators.
    String[] parts = string.split(separator);
    try {
      // Resolve the class via the context class loader so application classes are found.
      @SuppressWarnings("unchecked")
      Class<? extends T> clazz = (Class<? extends T>)Thread.currentThread().getContextClassLoader().loadClass(parts[0]);
      if (parts.length == 1) {
        // <ClassName> only: use the no-arg constructor.
        return clazz.newInstance();
      }
      //String[] properties = parts[1].split(separator, 2);
      if (parts.length == 2) {
        // <ClassName>:<Constructor_String>: use the String constructor.
        return clazz.getConstructor(String.class).newInstance(parts[1]);
      } else {
        // <ClassName>:<Constructor_String>:<prop=value>...: construct, then set bean properties.
        T object = clazz.getConstructor(String.class).newInstance(parts[1]);
        HashMap<String, String> hashMap = new HashMap<String, String>();
        for (int i = 2; i < parts.length; i++) {
          String[] keyValPair = parts[i].split(propertySeparator, 2);
          hashMap.put(keyValPair[0], keyValPair[1]);
        }
        BeanUtils.populate(object, hashMap);
        return object;
      }
    } catch (Throwable cause) {
      // Rethrow reflection/population failures as unchecked.
      throw Throwables.propagate(cause);
    }
  }

  @Override
  public String toString(T pojo)
  {
    if (pojo == null) {
      return null;
    }
    String arg = pojo.toString();
    if (arg == null) {
      // toString() returning null means "no constructor string": emit the class name only.
      return pojo.getClass().getCanonicalName();
    }
    return pojo.getClass().getCanonicalName() + separator + arg;
  }

  private static final long serialVersionUID = 201311141853L;
}
/**
 * Codec for maps: entries are rendered as {@code key<equal>value} joined by {@code separator},
 * with keys and values delegated to their own codecs.
 */
class Map2String<K, V> implements StringCodec<Map<K, V>>, Serializable
{
  @SuppressWarnings("deprecation")
  public static <K, V> StringCodec<Map<K, V>> getInstance(String separator, String equal, StringCodec<K> keyCodec, StringCodec<V> valueCodec)
  {
    return new Map2String<>(separator, equal, keyCodec, valueCodec);
  }

  private final StringCodec<K> keyCodec;
  private final StringCodec<V> valueCodec;
  private final String separator;
  private final String equal;

  /**
   * @deprecated As of release 3.5.0, replaced by {@link #getInstance(String, String, StringCodec, StringCodec)}
   */
  @Deprecated
  public Map2String(String separator, String equal, StringCodec<K> keyCodec, StringCodec<V> valueCodec)
  {
    this.equal = equal;
    this.separator = separator;
    this.keyCodec = keyCodec;
    this.valueCodec = valueCodec;
  }

  @Override
  public Map<K, V> fromString(String string)
  {
    if (string == null) {
      return null;
    }
    HashMap<K, V> result = new HashMap<>();
    if (string.isEmpty()) {
      // empty string round-trips to an empty map
      return result;
    }
    for (String entry : string.split(separator)) {
      String[] kv = entry.split(equal, 2);
      result.put(keyCodec.fromString(kv[0]), valueCodec.fromString(kv[1]));
    }
    return result;
  }

  @Override
  public String toString(Map<K, V> map)
  {
    if (map == null) {
      return null;
    }
    if (map.isEmpty()) {
      return "";
    }
    String[] encoded = new String[map.size()];
    int index = 0;
    for (Map.Entry<K, V> entry : map.entrySet()) {
      encoded[index++] = keyCodec.toString(entry.getKey()) + equal + valueCodec.toString(entry.getValue());
    }
    return StringUtils.join(encoded, separator);
  }

  private static final long serialVersionUID = 201402272053L;
}
/**
 * Codec that encodes a Collection as its elements, each rendered by the
 * element codec and joined by {@code separator}. Null collections round-trip
 * as null; empty ones as the empty string.
 */
class Collection2String<T> implements StringCodec<Collection<T>>, Serializable
{
    @SuppressWarnings("deprecation")
    public static <T> StringCodec<Collection<T>> getInstance(String separator, StringCodec<T> codec)
    {
        return new Collection2String<>(separator, codec);
    }
    private final String separator;
    private final StringCodec<T> codec;
    /**
     * @deprecated As of release 3.5.0, replaced by {@link #getInstance(String, StringCodec)}
     */
    @Deprecated
    public Collection2String(String separator, StringCodec<T> codec)
    {
        this.separator = separator;
        this.codec = codec;
    }
    /**
     * Decodes null to null and "" to an immutable empty list; otherwise splits
     * on {@code separator} and decodes each element.
     */
    @Override
    public Collection<T> fromString(String string)
    {
        if (string == null) {
            return null;
        }
        if (string.isEmpty()) {
            // typed factory instead of the raw EMPTY_LIST constant: the same
            // shared immutable instance, without the unchecked conversion
            return Collections.emptyList();
        }
        String[] parts = string.split(separator);
        ArrayList<T> arrayList = new ArrayList<T>(parts.length);
        for (String part : parts) {
            arrayList.add(codec.fromString(part));
        }
        return arrayList;
    }
    /** Encodes null as null, an empty collection as "", else separator-joined elements. */
    @Override
    public String toString(Collection<T> pojo)
    {
        if (pojo == null) {
            return null;
        }
        if (pojo.isEmpty()) {
            return "";
        }
        String[] parts = new String[pojo.size()];
        int i = 0;
        for (T o : pojo) {
            parts[i++] = codec.toString(o);
        }
        return StringUtils.join(parts, separator);
    }
    private static final long serialVersionUID = 201401091806L;
}
/**
 * Codec mapping enum constants of a given enum class to and from their
 * declared names.
 */
class Enum2String<T extends Enum<T>> implements StringCodec<T>, Serializable
{
    private final Class<T> clazz;
    /**
     * Returns a codec for the given enum class.
     * Generified (previously raw-typed); existing raw-typed callers still compile.
     */
    @SuppressWarnings("deprecation")
    public static <T extends Enum<T>> Enum2String<T> getInstance(Class<T> clazz)
    {
        return new Enum2String<T>(clazz);
    }
    /**
     * @deprecated As of release 3.5.0, replaced by {@link #getInstance(Class)}
     */
    @Deprecated
    public Enum2String(Class<T> clazz)
    {
        this.clazz = clazz;
    }
    /**
     * Decodes via {@link Enum#valueOf}; throws IllegalArgumentException for an
     * unknown constant name.
     */
    @Override
    public T fromString(String string)
    {
        return Enum.valueOf(clazz, string);
    }
    /** Encodes an enum constant as its declared name. */
    @Override
    public String toString(T pojo)
    {
        return pojo.name();
    }
    private static final long serialVersionUID = 201310181757L;
}
/**
 * Codec mapping Class objects to and from their canonical names, loading via
 * the thread context class loader.
 */
class Class2String<T> implements StringCodec<Class<? extends T>>, Serializable
{
    @SuppressWarnings("deprecation")
    private static final StringCodec instance = new Class2String<>();
    /**
     * Returns the shared stateless instance. The cast is safe because the
     * codec holds no T-typed state.
     */
    @SuppressWarnings("unchecked")
    public static <T> StringCodec<Class<? extends T>> getInstance()
    {
        return (StringCodec<Class<? extends T>>)instance;
    }
    /**
     * @deprecated As of release 3.5.0, replaced by {@link #getInstance()}
     */
    @Deprecated // annotation was missing; javadoc already marked this deprecated like the sibling codecs
    public Class2String()
    {
    }
    /**
     * Loads the named class via the thread context class loader; any failure
     * (including ClassNotFoundException) is rethrown unchecked.
     */
    @Override
    @SuppressWarnings({"BroadCatchBlock", "TooBroadCatch"})
    public Class<? extends T> fromString(String string)
    {
        try {
            @SuppressWarnings({"rawtypes", "unchecked"})
            Class<? extends T> clazz = (Class)Thread.currentThread().getContextClassLoader().loadClass(string);
            return clazz;
        } catch (Throwable cause) {
            throw Throwables.propagate(cause);
        }
    }
    /** Encodes a class as its canonical name; throws NullPointerException on null. */
    @Override
    public String toString(Class<? extends T> clazz)
    {
        return clazz.getCanonicalName();
    }
    private static final long serialVersionUID = 201312082053L;
}
/**
 * Codec that (de)serializes objects of a given class as JSON, ignoring
 * unknown properties on deserialization.
 */
class JsonStringCodec<T> implements StringCodec<T>, Serializable
{
    private static final long serialVersionUID = 2513932518264776006L;
    // ObjectMapper is thread-safe once configured, so share one instance
    // instead of rebuilding it on every fromString/toString call.
    private static final ObjectMapper MAPPER = newConfiguredMapper();
    private static ObjectMapper newConfiguredMapper()
    {
        ObjectMapper mapper = new ObjectMapper();
        mapper.configure(DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        return mapper;
    }
    @SuppressWarnings("deprecation")
    public static <T> StringCodec<T> getInstance(Class<T> clazz)
    {
        return new JsonStringCodec<>(clazz);
    }
    // target type for deserialization
    Class<?> clazz;
    /**
     * @deprecated As of release 3.5.0, replaced by {@link #getInstance(Class)}
     */
    @Deprecated // annotation was missing; javadoc already marked this deprecated like the sibling codecs
    public JsonStringCodec(Class<T> clazz)
    {
        this.clazz = clazz;
    }
    /** Deserializes JSON into an instance of the configured class; I/O failures rethrown unchecked. */
    @Override
    public T fromString(String string)
    {
        try {
            ObjectReader reader = MAPPER.reader(clazz);
            return reader.readValue(string);
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }
    /** Serializes a pojo to its JSON representation; I/O failures rethrown unchecked. */
    @Override
    public String toString(T pojo)
    {
        try {
            ObjectWriter writer = MAPPER.writer();
            return writer.writeValueAsString(pojo);
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }
}
}
| |
package org.opencds.cqf.cql.engine.execution;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import java.math.BigDecimal;
import java.time.format.DateTimeParseException;
import org.opencds.cqf.cql.engine.elm.execution.EquivalentEvaluator;
import org.opencds.cqf.cql.engine.exception.InvalidCast;
import org.opencds.cqf.cql.engine.runtime.Code;
import org.opencds.cqf.cql.engine.runtime.Concept;
import org.opencds.cqf.cql.engine.runtime.DateTime;
import org.opencds.cqf.cql.engine.runtime.Quantity;
import org.opencds.cqf.cql.engine.runtime.Ratio;
import org.opencds.cqf.cql.engine.runtime.Time;
import org.opencds.cqf.cql.engine.runtime.Tuple;
import org.testng.Assert;
import org.testng.annotations.Test;
public class CqlTypeOperatorsTest extends CqlExecutionTestBase {
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.AsEvaluator#evaluate(Context)}
*/
@Test
public void testAs() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("AsQuantity").getExpression().evaluate(context);
Assert.assertTrue(((Quantity) result).equal(new Quantity().withValue(new BigDecimal("45.5")).withUnit("g")));
result = context.resolveExpressionRef("CastAsQuantity").getExpression().evaluate(context);
Assert.assertTrue(((Quantity) result).equal(new Quantity().withValue(new BigDecimal("45.5")).withUnit("g")));
result = context.resolveExpressionRef("AsDateTime").getExpression().evaluate(context);
Assert.assertTrue(EquivalentEvaluator.equivalent(result, new DateTime(null, 2014, 1, 1)));
try {
result = context.as(1, Tuple.class, true);
Assert.fail();
}
catch (InvalidCast e) {
// pass
}
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ConvertEvaluator#evaluate(Context)}
*/
@Test
public void testConvert() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("IntegerToDecimal").getExpression().evaluate(context);
assertThat(result, is(new BigDecimal(5)));
result = context.resolveExpressionRef("IntegerToString").getExpression().evaluate(context);
assertThat(result, is("5"));
try {
context.resolveExpressionRef("StringToIntegerError").getExpression().evaluate(context);
} catch (NumberFormatException nfe) {
assertThat(nfe.getMessage(), is("Unable to convert given string to Integer"));
}
result = context.resolveExpressionRef("StringToDateTime").getExpression().evaluate(context);
Assert.assertTrue(EquivalentEvaluator.equivalent(result, new DateTime(null, 2014, 1, 1)));
result = context.resolveExpressionRef("StringToTime").getExpression().evaluate(context);
Assert.assertTrue(EquivalentEvaluator.equivalent(result, new Time(14, 30, 0, 0)));
try {
context.resolveExpressionRef("StringToDateTimeMalformed").getExpression().evaluate(context);
} catch (DateTimeParseException iae) {
}
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ConvertQuantityEvaluator#evaluate(Context)}
*/
@Test
public void testConvertQuantity() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("ConvertQuantity").getExpression().evaluate(context);
Assert.assertTrue(EquivalentEvaluator.equivalent(result, new Quantity().withValue(new BigDecimal("0.005")).withUnit("g")));
result = context.resolveExpressionRef("ConvertSyntax").getExpression().evaluate(context);
Assert.assertTrue(EquivalentEvaluator.equivalent(result, new Quantity().withValue(new BigDecimal("0.005")).withUnit("g")));
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ConvertsToBooleanEvaluator#evaluate(Context)}
*/
@Test
public void testConvertsToBoolean() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("ConvertsToBooleanTrue").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToBooleanFalse").getExpression().evaluate(context);
Assert.assertFalse((Boolean) result);
result = context.resolveExpressionRef("ConvertsToBooleanNull").getExpression().evaluate(context);
Assert.assertNull(result);
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ConvertsToDateEvaluator#evaluate(Context)}
*/
@Test
public void testConvertsToDate() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("ConvertsToDateTrue").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToDateFalse").getExpression().evaluate(context);
Assert.assertFalse((Boolean) result);
result = context.resolveExpressionRef("ConvertsToDateNull").getExpression().evaluate(context);
Assert.assertNull(result);
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ConvertsToDateTimeEvaluator#evaluate(Context)}
*/
@Test
public void testConvertsToDateTime() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("ConvertsToDateTimeStringTrue").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToDateTimeDateTrue").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToDateTimeFalse").getExpression().evaluate(context);
Assert.assertFalse((Boolean) result);
result = context.resolveExpressionRef("ConvertsToDateTimeNull").getExpression().evaluate(context);
Assert.assertNull(result);
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ConvertsToDecimalEvaluator#evaluate(Context)}
*/
@Test
public void testConvertsToDecimal() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("ConvertsToDecimalTrue").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToDecimalFalse").getExpression().evaluate(context);
Assert.assertFalse((Boolean) result);
result = context.resolveExpressionRef("ConvertsToDecimalNull").getExpression().evaluate(context);
Assert.assertNull(result);
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ConvertsToIntegerEvaluator#evaluate(Context)}
*/
@Test
public void testConvertsToInteger() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("ConvertsToIntegerTrue").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToIntegerLong").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToIntegerFalse").getExpression().evaluate(context);
Assert.assertFalse((Boolean) result);
result = context.resolveExpressionRef("ConvertsToIntegerNull").getExpression().evaluate(context);
Assert.assertNull(result);
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ConvertsToLongEvaluator#evaluate(Context)}
*/
@Test
public void testConvertsToLong() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("ConvertsToLongTrue").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToLongFalse").getExpression().evaluate(context);
Assert.assertFalse((Boolean) result);
result = context.resolveExpressionRef("ConvertsToLongNull").getExpression().evaluate(context);
Assert.assertNull(result);
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ConvertsToQuantityEvaluator#evaluate(Context)}
*/
@Test
public void testConvertsToQuantity() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("ConvertsToQuantityStringTrue").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToQuantityStringFalse").getExpression().evaluate(context);
Assert.assertFalse((Boolean) result);
result = context.resolveExpressionRef("ConvertsToQuantityIntegerTrue").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToQuantityDecimalTrue").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToQuantityRatioTrue").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToQuantityNull").getExpression().evaluate(context);
Assert.assertNull(result);
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ConvertsToStringEvaluator#evaluate(Context)}
*/
@Test
public void testConvertsToString() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("ConvertsToStringBoolean").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToStringInteger").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToStringLong").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToStringDecimal").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToStringQuantity").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToStringRatio").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToStringDate").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToStringDateTime").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToStringTime").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToStringNull").getExpression().evaluate(context);
Assert.assertNull(result);
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ConvertsToTimeEvaluator#evaluate(Context)}
*/
@Test
public void testConvertsToTime() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("ConvertsToTimeTrue").getExpression().evaluate(context);
Assert.assertTrue((Boolean) result);
result = context.resolveExpressionRef("ConvertsToTimeFalse").getExpression().evaluate(context);
Assert.assertFalse((Boolean) result);
result = context.resolveExpressionRef("ConvertsToTimeNull").getExpression().evaluate(context);
Assert.assertNull(result);
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.IsEvaluator#evaluate(Context)}
*/
@Test
public void testIs() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("IntegerIsInteger").getExpression().evaluate(context);
assertThat(result, is(true));
result = context.resolveExpressionRef("StringIsInteger").getExpression().evaluate(context);
assertThat(result, is(false));
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ToBooleanEvaluator#evaluate(Context)}
*/
@Test
public void testToBoolean() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("StringNoToBoolean").getExpression().evaluate(context);
assertThat(result, is(false));
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ToConceptEvaluator#evaluate(Context)}
*/
@Test
public void testToConcept() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("CodeToConcept1").getExpression().evaluate(context);
Assert.assertTrue(((Concept) result).equivalent(new Concept().withCode(new Code().withCode("8480-6"))));
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ToDateTimeEvaluator#evaluate(Context)}
*/
@Test
public void testToDateTime() {
// TODO: Fix timezone tests
Context context = new Context(library);
Object result = context.resolveExpressionRef("ToDateTime0").getExpression().evaluate(context);
Assert.assertTrue(EquivalentEvaluator.equivalent(result, new DateTime(null, 2014, 1)));
result = context.resolveExpressionRef("ToDateTime1").getExpression().evaluate(context);
Assert.assertTrue(EquivalentEvaluator.equivalent(result, new DateTime(null, 2014, 1, 1)));
// assertThat(((DateTime)result).getTimezoneOffset(), is(new BigDecimal("-7")));
result = context.resolveExpressionRef("ToDateTime2").getExpression().evaluate(context);
Assert.assertTrue(EquivalentEvaluator.equivalent(result, new DateTime(null, 2014, 1, 1, 12, 5)));
// assertThat(((DateTime)result).getTimezoneOffset(), is(new BigDecimal("-7")));
result = context.resolveExpressionRef("ToDateTime3").getExpression().evaluate(context);
Assert.assertTrue(EquivalentEvaluator.equivalent(result, new DateTime(null, 2014, 1, 1, 12, 5, 5, 955)));
// assertThat(((DateTime)result).getTimezoneOffset(), is(new BigDecimal("-7")));
result = context.resolveExpressionRef("ToDateTime4").getExpression().evaluate(context);
Assert.assertTrue(EquivalentEvaluator.equivalent(result, new DateTime(new BigDecimal("1.5"), 2014, 1, 1, 12, 5, 5, 955)));
// assertThat(((DateTime)result).getTimezoneOffset(), is(new BigDecimal("1.5")));
result = context.resolveExpressionRef("ToDateTime5").getExpression().evaluate(context);
Assert.assertTrue(EquivalentEvaluator.equivalent(result, new DateTime(new BigDecimal("-1.25"), 2014, 1, 1, 12, 5, 5, 955)));
// assertThat(((DateTime)result).getTimezoneOffset(), is(new BigDecimal("-1.25")));
result = context.resolveExpressionRef("ToDateTime6").getExpression().evaluate(context);
Assert.assertTrue(EquivalentEvaluator.equivalent(result, new DateTime(new BigDecimal(0), 2014, 1, 1, 12, 5, 5, 955)));
// assertThat(((DateTime)result).getTimezoneOffset(), is(new BigDecimal("-7")));
result = context.resolveExpressionRef("ToDateTimeMalformed").getExpression().evaluate(context);
Assert.assertNull(result);
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ToDecimalEvaluator#evaluate(Context)}
*/
@Test
public void testToDecimal() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("String25D5ToDecimal").getExpression().evaluate(context);
assertThat(result, is(new BigDecimal("25.5")));
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ToIntegerEvaluator#evaluate(Context)}
*/
@Test
public void testToInteger() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("StringNeg25ToInteger").getExpression().evaluate(context);
assertThat(result, is(-25));
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ToLongEvaluator#evaluate(Context)}
*/
@Test
public void testToLong() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("String123ToLong").getExpression().evaluate(context);
assertThat(result, is(new Long(123)));
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ToQuantityEvaluator#evaluate(Context)}
*/
@Test
public void testToQuantity() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("String5D5CMToQuantity").getExpression().evaluate(context);
Assert.assertTrue(((Quantity) result).equal(new Quantity().withValue(new BigDecimal("5.5")).withUnit("cm")));
result = context.resolveExpressionRef("StringInvalidToQuantityNull").getExpression().evaluate(context);
Assert.assertNull(result);
result = context.resolveExpressionRef("String100PerMinPerSqMeterToQuantity").getExpression().evaluate(context);
Assert.assertTrue(((Quantity) result).equal(new Quantity().withValue(new BigDecimal("100")).withUnit("daL/min/m2")));
result = context.resolveExpressionRef("String100UnitPer10BillionToQuantity").getExpression().evaluate(context);
Assert.assertTrue(((Quantity) result).equal(new Quantity().withValue(new BigDecimal("100")).withUnit("U/10*10{cells}")));
result = context.resolveExpressionRef("String60DayPer7DayToQuantity").getExpression().evaluate(context);
Assert.assertTrue(((Quantity) result).equal(new Quantity().withValue(new BigDecimal("60")).withUnit("d/(7.d)")));
result = context.resolveExpressionRef("String60EhrlichPer100gmToQuantity").getExpression().evaluate(context);
Assert.assertTrue(((Quantity) result).equal(new Quantity().withValue(new BigDecimal("60")).withUnit("{EhrlichU}/100.g")));
result = context.resolveExpressionRef("StringPercentToQuantity").getExpression().evaluate(context);
Assert.assertTrue(((Quantity) result).equal(new Quantity().withValue(new BigDecimal("60")).withUnit("%")));
result = context.resolveExpressionRef("StringPercentWithoutQuoteToQuantity").getExpression().evaluate(context);
Assert.assertTrue(((Quantity) result).equal(new Quantity().withValue(new BigDecimal("70")).withUnit("%")));
result = context.resolveExpressionRef("StringPercentWithTabToQuantity").getExpression().evaluate(context);
Assert.assertTrue(((Quantity) result).equal(new Quantity().withValue(new BigDecimal("80")).withUnit("%")));
result = context.resolveExpressionRef("StringPercentWithMultiSpacesToQuantity").getExpression().evaluate(context);
Assert.assertTrue(((Quantity) result).equal(new Quantity().withValue(new BigDecimal("90")).withUnit("%")));
result = context.resolveExpressionRef("StringPercentWithSpacesUnitToQuantity").getExpression().evaluate(context);
Assert.assertTrue(((Quantity) result).equal(new Quantity().withValue(new BigDecimal("10")).withUnit("ml")));
result = context.resolveExpressionRef("StringPercentWithQuoteUnitToQuantity").getExpression().evaluate(context);
Assert.assertTrue(((Quantity) result).equal(new Quantity().withValue(new BigDecimal("20")).withUnit("ml")));
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ToRatioEvaluator#evaluate(Context)}
*/
@Test
public void testToRatio() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("ToRatioIsValid").getExpression().evaluate(context);
Assert.assertTrue(((Ratio) result).getNumerator().equal(new Quantity().withValue(new BigDecimal("1.0")).withUnit("mg")));
Assert.assertTrue(((Ratio) result).getDenominator().equal(new Quantity().withValue(new BigDecimal("2.0")).withUnit("mg")));
result = context.resolveExpressionRef("ToRatioIsNull").getExpression().evaluate(context);
Assert.assertNull(result);
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ToStringEvaluator#evaluate(Context)}
*/
@Test
public void testToString() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("IntegerNeg5ToString").getExpression().evaluate(context);
assertThat(result, is("-5"));
result = context.resolveExpressionRef("LongNeg5ToString").getExpression().evaluate(context);
assertThat(result, is("-5"));
result = context.resolveExpressionRef("Decimal18D55ToString").getExpression().evaluate(context);
assertThat(result, is("18.55"));
result = context.resolveExpressionRef("Quantity5D5CMToString").getExpression().evaluate(context);
assertThat(result, is("5.5 'cm'"));
result = context.resolveExpressionRef("BooleanTrueToString").getExpression().evaluate(context);
assertThat(result, is("true"));
}
/**
* {@link org.opencds.cqf.cql.engine.elm.execution.ToTimeEvaluator#evaluate(Context)}
*/
@Test
public void testToTime() {
Context context = new Context(library);
Object result = context.resolveExpressionRef("ToTime1").getExpression().evaluate(context);
Assert.assertTrue(EquivalentEvaluator.equivalent(result, new Time(14, 30, 0, 0)));
result = context.resolveExpressionRef("ToTimeMalformed").getExpression().evaluate(context);
Assert.assertNull(result);
}
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson;
import com.gargoylesoftware.htmlunit.html.HtmlForm;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
import hudson.PluginManager.UberClassLoader;
import hudson.model.Hudson;
import hudson.model.UpdateCenter;
import hudson.model.UpdateCenter.UpdateCenterJob;
import hudson.model.UpdateSite;
import hudson.scm.SubversionSCM;
import hudson.util.FormValidation;
import hudson.util.PersistedList;
import java.io.File;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Future;
import jenkins.RestartRequiredException;
import org.apache.commons.io.FileUtils;
import org.apache.tools.ant.filters.StringInputStream;
import static org.junit.Assert.*;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.jvnet.hudson.test.Bug;
import org.jvnet.hudson.test.JenkinsRule;
import org.jvnet.hudson.test.Url;
import org.jvnet.hudson.test.recipes.WithPlugin;
import org.jvnet.hudson.test.recipes.WithPluginManager;
/**
* @author Kohsuke Kawaguchi
*/
public class PluginManagerTest {
// Test harness; resets the plugin manager before each test (null presumably
// lets JenkinsRule build its default LocalPluginManager — see
// JenkinsRule.setPluginManager) so recipes like @WithPluginManager take effect.
@Rule public JenkinsRule r = new JenkinsRule() {
@Override public void before() throws Throwable {
setPluginManager(null);
super.before();
}
};
// Scratch directory for staging plugin files before manual upload tests.
@Rule public TemporaryFolder tmp = new TemporaryFolder();
/**
 * Uploads a .jpi through the manual "advanced" upload form and verifies the
 * file lands under JENKINS_HOME/plugins.
 */
@Test public void uploadJpi() throws Exception {
    HtmlPage page = r.createWebClient().goTo("pluginManager/advanced");
    HtmlForm uploadForm = page.getFormByName("uploadPlugin");
    // stage the bundled test plugin in a scratch folder
    File stagingDir = tmp.newFolder();
    File jpiFile = new File(stagingDir, "tasks.jpi");
    FileUtils.copyURLToFile(getClass().getClassLoader().getResource("plugins/tasks.jpi"), jpiFile);
    uploadForm.getInputByName("name").setValueAttribute(jpiFile.getAbsolutePath());
    r.submit(uploadForm);
    assertTrue(new File(r.jenkins.getRootDir(), "plugins/tasks.jpi").exists());
}
/**
 * Uploads a legacy .hpi through the manual upload form and verifies it is
 * stored renamed to the modern .jpi extension.
 */
@Test public void uploadHpi() throws Exception {
    HtmlPage page = r.createWebClient().goTo("pluginManager/advanced");
    HtmlForm uploadForm = page.getFormByName("uploadPlugin");
    // stage the bundled legacy plugin in a scratch folder
    File stagingDir = tmp.newFolder();
    File hpiFile = new File(stagingDir, "legacy.hpi");
    FileUtils.copyURLToFile(getClass().getClassLoader().getResource("plugins/legacy.hpi"), hpiFile);
    uploadForm.getInputByName("name").setValueAttribute(hpiFile.getAbsolutePath());
    r.submit(uploadForm);
    // uploaded legacy plugins get renamed to *.jpi
    assertTrue(new File(r.jenkins.getRootDir(), "plugins/legacy.jpi").exists());
}
/**
 * Tests the effect of {@link WithPlugin}.
 */
@WithPlugin("tasks.jpi")
@Test public void withRecipeJpi() throws Exception {
// the recipe should have installed the plugin before the test body runs
assertNotNull(r.jenkins.getPlugin("tasks"));
}
/**
 * Tests the effect of {@link WithPlugin} with a legacy .hpi file.
 */
@WithPlugin("legacy.hpi")
@Test public void withRecipeHpi() throws Exception {
// the recipe should have installed the plugin before the test body runs
assertNotNull(r.jenkins.getPlugin("legacy"));
}
/**
 * Makes sure that plugins can see Maven2 plugin that's refactored out in 1.296.
 */
@WithPlugin("tasks.jpi")
@Test public void optionalMavenDependency() throws Exception {
PluginWrapper.Dependency m2=null;
PluginWrapper tasks = r.jenkins.getPluginManager().getPlugin("tasks");
// Scan optional dependencies for "maven-plugin"; the assertNull guards
// against the dependency being declared more than once.
for( PluginWrapper.Dependency d : tasks.getOptionalDependencies() ) {
if(d.shortName.equals("maven-plugin")) {
assertNull(m2);
m2 = d;
}
}
assertNotNull(m2);
// this actually doesn't really test what we need, though, because
// I thought test harness is loading the maven classes by itself.
// TODO: write a separate test that tests the optional dependency loading
tasks.classLoader.loadClass(hudson.maven.agent.AbortException.class.getName());
}
/**
 * Verifies that by the time {@link Plugin#start()} is called, uber classloader is fully functioning.
 * This is necessary as plugin start method can engage in XStream loading activities, and they should
 * resolve all the classes in the system (for example, a plugin X can define an extension point
 * other plugins implement, so when X loads its config it better sees all the implementations defined elsewhere)
 */
@WithPlugin("tasks.jpi")
@WithPluginManager(PluginManagerImpl_for_testUberClassLoaderIsAvailableDuringStart.class)
@Test public void uberClassLoaderIsAvailableDuringStart() {
// the custom plugin manager records whether its startPlugin assertions ran
assertTrue(((PluginManagerImpl_for_testUberClassLoaderIsAvailableDuringStart) r.jenkins.pluginManager).tested);
}
// Plugin manager whose strategy asserts, at plugin-start time, that the uber
// classloader can already resolve both core (SubversionSCM) and plugin
// ("hudson.plugins.tasks.Messages") classes.
public static class PluginManagerImpl_for_testUberClassLoaderIsAvailableDuringStart extends LocalPluginManager {
// flipped to true once startPlugin() has run its assertions
boolean tested;
public PluginManagerImpl_for_testUberClassLoaderIsAvailableDuringStart(File rootDir) {
super(rootDir);
}
@Override
protected PluginStrategy createPluginStrategy() {
return new ClassicPluginStrategy(this) {
@Override
public void startPlugin(PluginWrapper plugin) throws Exception {
tested = true;
// plugins should be already visible in the UberClassLoader
assertTrue(!activePlugins.isEmpty());
uberClassLoader.loadClass(SubversionSCM.class.getName());
uberClassLoader.loadClass("hudson.plugins.tasks.Messages");
super.startPlugin(plugin);
}
};
}
}
/**
 * Makes sure that thread context classloader isn't used by {@link UberClassLoader}, or else
 * infinite cycle ensues.
 */
@Url("http://jenkins.361315.n4.nabble.com/channel-example-and-plugin-classes-gives-ClassNotFoundException-td3756092.html")
@Test public void uberClassLoaderDoesntUseContextClassLoader() throws Exception {
Thread t = Thread.currentThread();
// Child loader parented to the uber classloader; it is also installed as the
// thread context classloader below, so if UberClassLoader consulted the
// context classloader, lookups would recurse between the two.
// NOTE(review): ucl is never closed (URLClassLoader leak) — acceptable in a
// test, but consider try-with-resources if the target Java level allows it.
URLClassLoader ucl = new URLClassLoader(new URL[0], r.jenkins.pluginManager.uberClassLoader);
ClassLoader old = t.getContextClassLoader();
t.setContextClassLoader(ucl);
try {
try {
ucl.loadClass("No such class");
fail();
} catch (ClassNotFoundException e) {
// as expected
}
ucl.loadClass(Hudson.class.getName());
} finally {
// always restore the original context classloader for later tests
t.setContextClassLoader(old);
}
}
// Dynamically loads a plugin into the running Jenkins (no restart) and checks
// that its descriptor class becomes resolvable via the uber classloader.
@Test public void installWithoutRestart() throws Exception {
URL res = getClass().getClassLoader().getResource("plugins/htmlpublisher.jpi");
File f = new File(r.jenkins.getRootDir(), "plugins/htmlpublisher.jpi");
FileUtils.copyURLToFile(res, f);
r.jenkins.pluginManager.dynamicLoad(f);
// raw Class keeps the unchecked getDescriptorByType call compiling as-is
Class c = r.jenkins.getPluginManager().uberClassLoader.loadClass("htmlpublisher.HtmlPublisher$DescriptorImpl");
assertNotNull(r.jenkins.getDescriptorByType(c));
}
/**
 * Exercises PluginManager.prevalidateConfig against a local update site: a
 * config that only references optional/installed plugins schedules nothing,
 * while a config referencing the missing "tasks" plugin schedules exactly one
 * installation job, after which the plugin is present.
 */
@Test public void prevalidateConfig() throws Exception {
    // Point the update center at a test-resource JSON instead of the real site.
    PersistedList<UpdateSite> sites = r.jenkins.getUpdateCenter().getSites();
    sites.clear();
    URL url = PluginManagerTest.class.getResource("/plugins/tasks-update-center.json");
    UpdateSite site = new UpdateSite(UpdateCenter.ID_DEFAULT, url.toString());
    sites.add(site);
    assertEquals(FormValidation.ok(), site.updateDirectly(false).get());
    assertNotNull(site.getData());

    // Config not requiring an uninstalled plugin: no jobs scheduled.
    assertEquals(Collections.emptyList(), r.jenkins.getPluginManager().prevalidateConfig(new StringInputStream("<whatever><runant plugin=\"ant@1.1\"/></whatever>")));
    assertNull(r.jenkins.getPluginManager().getPlugin("tasks"));

    // Config requiring the missing "tasks" plugin: one installation job.
    List<Future<UpdateCenterJob>> jobs = r.jenkins.getPluginManager().prevalidateConfig(new StringInputStream("<whatever><tasks plugin=\"tasks@2.23\"/></whatever>"));
    assertEquals(1, jobs.size());
    UpdateCenterJob job = jobs.get(0).get(); // blocks for completion
    assertEquals("InstallationJob", job.getType());
    UpdateCenter.InstallationJob ijob = (UpdateCenter.InstallationJob) job;
    assertEquals("tasks", ijob.plugin.name);
    assertNotNull(r.jenkins.getPluginManager().getPlugin("tasks"));
    // TODO restart scheduled (SuccessButRequiresRestart) after upgrade or Support-Dynamic-Loading: false
    // TODO dependencies installed or upgraded too
    // TODO required plugin installed but inactive
}
// plugin "depender" optionally depends on plugin "dependee".
// they are written like this:
// org.jenkinsci.plugins.dependencytest.dependee:
// public class Dependee {
// public static String getValue() {
// return "dependee";
// }
// }
//
// public abstract class DependeeExtensionPoint implements ExtensionPoint {
// }
//
// org.jenkinsci.plugins.dependencytest.depender:
// public class Depender {
// public static String getValue() {
// if (Jenkins.getInstance().getPlugin("dependee") != null) {
// return Dependee.getValue();
// }
// return "depender";
// }
// }
//
// @Extension(optional=true)
// public class DependerExtension extends DependeeExtensionPoint {
// }
/**
 * Invokes {@code org.jenkinsci.plugins.dependencytest.depender.Depender.getValue()}
 * reflectively through the UberClassLoader.
 *
 * @return the value reported by the depender plugin ("dependee" when the
 *         dependee plugin is reachable, "depender" otherwise — see the
 *         commented plugin sources above)
 * @throws Exception if the class cannot be loaded (e.g. depender is not
 *         installed) or the reflective call fails
 */
private String callDependerValue() throws Exception {
    Class<?> c = r.jenkins.getPluginManager().uberClassLoader.loadClass("org.jenkinsci.plugins.dependencytest.depender.Depender");
    Method m = c.getMethod("getValue");
    return (String)m.invoke(null);
}
/**
* Load "dependee" and then load "depender".
* Asserts that "depender" can access to "dependee".
*
* @throws Exception
*/
@Test public void installDependingPluginWithoutRestart() throws Exception {
// Load dependee.
{
String target = "dependee.hpi";
URL src = getClass().getClassLoader().getResource(String.format("plugins/%s", target));
File dest = new File(r.jenkins.getRootDir(), String.format("plugins/%s", target));
FileUtils.copyURLToFile(src, dest);
r.jenkins.pluginManager.dynamicLoad(dest);
}
// before load depender, of course failed to call Depender.getValue()
try {
callDependerValue();
fail();
} catch (ClassNotFoundException _) {
}
// No extensions exist.
assertTrue(r.jenkins.getExtensionList("org.jenkinsci.plugins.dependencytest.dependee.DependeeExtensionPoint").isEmpty());
// Load depender.
{
String target = "depender.hpi";
URL src = getClass().getClassLoader().getResource(String.format("plugins/%s", target));
File dest = new File(r.jenkins.getRootDir(), String.format("plugins/%s", target));
FileUtils.copyURLToFile(src, dest);
r.jenkins.pluginManager.dynamicLoad(dest);
}
// depender successfully accesses to dependee.
assertEquals("dependee", callDependerValue());
// Extension in depender is loaded.
assertFalse(r.jenkins.getExtensionList("org.jenkinsci.plugins.dependencytest.dependee.DependeeExtensionPoint").isEmpty());
}
/**
* Load "depender" and then load "dependee".
* Asserts that "depender" can access to "dependee".
*
* @throws Exception
*/
@Bug(19976)
@Test public void installDependedPluginWithoutRestart() throws Exception {
// Load depender.
{
String target = "depender.hpi";
URL src = getClass().getClassLoader().getResource(String.format("plugins/%s", target));
File dest = new File(r.jenkins.getRootDir(), String.format("plugins/%s", target));
FileUtils.copyURLToFile(src, dest);
r.jenkins.pluginManager.dynamicLoad(dest);
}
// before load dependee, depender does not access to dependee.
assertEquals("depender", callDependerValue());
// before load dependee, of course failed to list extensions for dependee.
try {
r.jenkins.getExtensionList("org.jenkinsci.plugins.dependencytest.dependee.DependeeExtensionPoint");
fail();
} catch( ClassNotFoundException _ ){
}
// Load dependee.
{
String target = "dependee.hpi";
URL src = getClass().getClassLoader().getResource(String.format("plugins/%s", target));
File dest = new File(r.jenkins.getRootDir(), String.format("plugins/%s", target));
FileUtils.copyURLToFile(src, dest);
r.jenkins.pluginManager.dynamicLoad(dest);
}
// (MUST) Not throws an exception
// (SHOULD) depender successfully accesses to dependee.
assertEquals("dependee", callDependerValue());
// No extensions exist.
// extensions in depender is not loaded.
assertTrue(r.jenkins.getExtensionList("org.jenkinsci.plugins.dependencytest.dependee.DependeeExtensionPoint").isEmpty());
}
/**
 * Reloading an already-installed plugin dynamically must raise
 * RestartRequiredException and leave the unpacked plugin directory untouched.
 */
@Bug(12753)
@WithPlugin("tasks.jpi")
@Test public void dynamicLoadRestartRequiredException() throws Exception {
    File pluginArchive = new File(r.jenkins.getRootDir(), "plugins/tasks.jpi");
    assertTrue(pluginArchive.isFile());
    FileUtils.touch(pluginArchive);

    File unpackStamp = new File(r.jenkins.getRootDir(), "plugins/tasks/.timestamp2");
    assertTrue(unpackStamp.isFile());
    long stampBefore = unpackStamp.lastModified();

    try {
        r.jenkins.getPluginManager().dynamicLoad(pluginArchive);
        fail("should not have worked");
    } catch (RestartRequiredException x) {
        // good
    }
    assertEquals("should not have tried to delete & unpack", stampBefore, unpackStamp.lastModified());
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server;
import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Collection;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.ha.HAServiceProtocol;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.service.CompositeService;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.conf.HAUtil;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.server.api.ResourceTracker;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerResponse;
import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer;
import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStore;
import org.apache.hadoop.yarn.server.applicationhistoryservice.MemoryApplicationHistoryStore;
import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService;
import org.apache.hadoop.yarn.server.nodemanager.NodeManager;
import org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdater;
import org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdaterImpl;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceTrackerService;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptRegistrationEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptUnregistrationEvent;
import org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore;
import org.apache.hadoop.yarn.server.timeline.TimelineStore;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
import com.google.common.annotations.VisibleForTesting;
/**
 * Embedded Yarn minicluster for testcases that need to interact with a cluster.
 * <p/>
 * In a real cluster, resource request matching is done using the hostname, and
 * by default Yarn minicluster works in the exact same way as a real cluster.
 * <p/>
 * If a testcase needs to use multiple nodes and exercise resource request
 * matching to a specific node, then the property
 * {@link YarnConfiguration#RM_SCHEDULER_INCLUDE_PORT_IN_NODE_NAME} should be set
 * <code>true</code> in the configuration used to initialize the minicluster.
 * <p/>
 * With this property set to <code>true</code>, the matching will be done using
 * the <code>hostname:port</code> of the node managers. In such case, the AM must
 * do resource request using <code>hostname:port</code> as the location.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class MiniYARNCluster extends CompositeService {
private static final Log LOG = LogFactory.getLog(MiniYARNCluster.class);

// temp fix until metrics system can auto-detect itself running in unit test:
static {
    DefaultMetricsSystem.setMiniClusterMode(true);
}

// One slot per simulated daemon; entries may be null after stopResourceManager().
private NodeManager[] nodeManagers;
private ResourceManager[] resourceManagers;
// HA ids ("rm0", "rm1", ...); only populated when more than one RM is configured.
private String[] rmIds;

private ApplicationHistoryServer appHistoryServer;

private boolean useFixedPorts;
private boolean useRpc = false;
// Milliseconds getActiveRMIndex() will poll (in 100 ms steps) for an active RM.
private int failoverTimeout;

// Registered application masters (attempt id -> registration timestamp);
// maintained by the dispatcher handler in initResourceManager() and consulted
// before stopping an RM.
private ConcurrentMap<ApplicationAttemptId, Long> appMasters =
    new ConcurrentHashMap<ApplicationAttemptId, Long>(16, 0.75f, 2);

private File testWorkDir;

// Number of nm-local-dirs per nodemanager
private int numLocalDirs;
// Number of nm-log-dirs per nodemanager
private int numLogDirs;
private boolean enableAHS;
/**
 * Creates the mini-cluster and prepares a clean per-test working directory
 * under "target/&lt;testName&gt;"; no service is started until init/start.
 *
 * @param testName name of the test
 * @param numResourceManagers the number of resource managers in the cluster
 * @param numNodeManagers the number of node managers in the cluster
 * @param numLocalDirs the number of nm-local-dirs per nodemanager
 * @param numLogDirs the number of nm-log-dirs per nodemanager
 * @param enableAHS enable ApplicationHistoryServer or not
 */
public MiniYARNCluster(
    String testName, int numResourceManagers, int numNodeManagers,
    int numLocalDirs, int numLogDirs, boolean enableAHS) {
    // "$" is stripped so nested/anonymous test class names yield a valid dir name.
    super(testName.replace("$", ""));
    this.numLocalDirs = numLocalDirs;
    this.numLogDirs = numLogDirs;
    this.enableAHS = enableAHS;
    String testSubDir = testName.replace("$", "");
    File targetWorkDir = new File("target", testSubDir);
    try {
        // Remove leftovers from a previous run of the same test.
        FileContext.getLocalFSFileContext().delete(
            new Path(targetWorkDir.getAbsolutePath()), true);
    } catch (Exception e) {
        LOG.warn("COULD NOT CLEANUP", e);
        throw new YarnRuntimeException("could not cleanup test dir: "+ e, e);
    }

    if (Shell.WINDOWS) {
        // The test working directory can exceed the maximum path length supported
        // by some Windows APIs and cmd.exe (260 characters). To work around this,
        // create a symlink in temporary storage with a much shorter path,
        // targeting the full path to the test working directory. Then, use the
        // symlink as the test working directory.
        String targetPath = targetWorkDir.getAbsolutePath();
        File link = new File(System.getProperty("java.io.tmpdir"),
            String.valueOf(System.currentTimeMillis()));
        String linkPath = link.getAbsolutePath();

        try {
            FileContext.getLocalFSFileContext().delete(new Path(linkPath), true);
        } catch (IOException e) {
            throw new YarnRuntimeException("could not cleanup symlink: " + linkPath, e);
        }

        // Guarantee target exists before creating symlink.
        targetWorkDir.mkdirs();

        ShellCommandExecutor shexec = new ShellCommandExecutor(
            Shell.getSymlinkCommand(targetPath, linkPath));
        try {
            shexec.execute();
        } catch (IOException e) {
            throw new YarnRuntimeException(String.format(
                "failed to create symlink from %s to %s, shell output: %s", linkPath,
                targetPath, shexec.getOutput()), e);
        }

        this.testWorkDir = link;
    } else {
        this.testWorkDir = targetWorkDir;
    }
    resourceManagers = new ResourceManager[numResourceManagers];
    nodeManagers = new NodeManager[numNodeManagers];
}
/**
 * Same as the full constructor with the ApplicationHistoryServer disabled.
 *
 * @param testName name of the test
 * @param numResourceManagers the number of resource managers in the cluster
 * @param numNodeManagers the number of node managers in the cluster
 * @param numLocalDirs the number of nm-local-dirs per nodemanager
 * @param numLogDirs the number of nm-log-dirs per nodemanager
 */
public MiniYARNCluster(
    String testName, int numResourceManagers, int numNodeManagers,
    int numLocalDirs, int numLogDirs) {
    this(testName, numResourceManagers, numNodeManagers, numLocalDirs,
        numLogDirs, false);
}

/**
 * Convenience constructor for a single-RM (non-HA) cluster.
 *
 * @param testName name of the test
 * @param numNodeManagers the number of node managers in the cluster
 * @param numLocalDirs the number of nm-local-dirs per nodemanager
 * @param numLogDirs the number of nm-log-dirs per nodemanager
 */
public MiniYARNCluster(String testName, int numNodeManagers,
    int numLocalDirs, int numLogDirs) {
    this(testName, 1, numNodeManagers, numLocalDirs, numLogDirs);
}
@Override
public void serviceInit(Configuration conf) throws Exception {
    useFixedPorts = conf.getBoolean(
        YarnConfiguration.YARN_MINICLUSTER_FIXED_PORTS,
        YarnConfiguration.DEFAULT_YARN_MINICLUSTER_FIXED_PORTS);
    useRpc = conf.getBoolean(YarnConfiguration.YARN_MINICLUSTER_USE_RPC,
        YarnConfiguration.DEFAULT_YARN_MINICLUSTER_USE_RPC);
    failoverTimeout = conf.getInt(YarnConfiguration.RM_ZK_TIMEOUT_MS,
        YarnConfiguration.DEFAULT_RM_ZK_TIMEOUT_MS);

    // Real RPC requires stable addresses, so it is only allowed with fixed ports.
    if (useRpc && !useFixedPorts) {
        throw new YarnRuntimeException("Invalid configuration!" +
            " Minicluster can use rpc only when configured to use fixed ports");
    }
    conf.setBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, true);

    if (resourceManagers.length > 1) {
        // More than one RM => HA mode; synthesize "rm0,rm1,..." ids unless the
        // caller already configured RM_HA_IDS.
        conf.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
        if (conf.get(YarnConfiguration.RM_HA_IDS) == null) {
            // NOTE(review): this local StringBuilder shadows the String[] field
            // "rmIds"; the field is (re)assigned a few lines below.
            StringBuilder rmIds = new StringBuilder();
            for (int i = 0; i < resourceManagers.length; i++) {
                if (i != 0) {
                    rmIds.append(",");
                }
                rmIds.append("rm" + i);
            }
            conf.set(YarnConfiguration.RM_HA_IDS, rmIds.toString());
        }
        Collection<String> rmIdsCollection = HAUtil.getRMHAIds(conf);
        rmIds = rmIdsCollection.toArray(new String[rmIdsCollection.size()]);
    }

    for (int i = 0; i < resourceManagers.length; i++) {
        resourceManagers[i] = createResourceManager();
        if (!useFixedPorts) {
            // Ephemeral ports: rewrite every RM bind address to host:0.
            if (HAUtil.isHAEnabled(conf)) {
                setHARMConfiguration(i, conf);
            } else {
                setNonHARMConfiguration(conf);
            }
        }
        addService(new ResourceManagerWrapper(i));
    }
    for(int index = 0; index < nodeManagers.length; index++) {
        // Without RPC, NMs talk to the RM through in-process short-circuiting.
        nodeManagers[index] =
            useRpc ? new CustomNodeManager() : new ShortCircuitedNodeManager();
        addService(new NodeManagerWrapper(index));
    }

    if (enableAHS) {
        addService(new ApplicationHistoryServerWrapper());
    }

    super.serviceInit(
        conf instanceof YarnConfiguration ? conf : new YarnConfiguration(conf));
}
/** Binds every (non-HA) RM endpoint to an ephemeral port on the local host. */
private void setNonHARMConfiguration(Configuration conf) {
    String host = MiniYARNCluster.getHostname();
    String ephemeral = host + ":0";
    conf.set(YarnConfiguration.RM_ADDRESS, ephemeral);
    conf.set(YarnConfiguration.RM_ADMIN_ADDRESS, ephemeral);
    conf.set(YarnConfiguration.RM_SCHEDULER_ADDRESS, ephemeral);
    conf.set(YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS, ephemeral);
    WebAppUtils.setRMWebAppHostnameAndPort(conf, host, 0);
}
/** Binds every HA-suffixed service address of RM {@code index} to host:0. */
private void setHARMConfiguration(final int index, Configuration conf) {
    String ephemeral = MiniYARNCluster.getHostname() + ":0";
    for (String addressKey : YarnConfiguration.getServiceAddressConfKeys(conf)) {
        conf.set(HAUtil.addSuffix(addressKey, rmIds[index]), ephemeral);
    }
}
/**
 * Initializes the RM at {@code index} and registers a dispatcher handler that
 * keeps {@link #appMasters} in sync with AM registration/unregistration, so
 * shutdown can wait for running AMs.
 */
private synchronized void initResourceManager(int index, Configuration conf) {
    if (HAUtil.isHAEnabled(conf)) {
        // Each RM instance must know its own HA id before init.
        conf.set(YarnConfiguration.RM_HA_ID, rmIds[index]);
    }
    resourceManagers[index].init(conf);
    resourceManagers[index].getRMContext().getDispatcher().register(
        RMAppAttemptEventType.class,
        new EventHandler<RMAppAttemptEvent>() {
            public void handle(RMAppAttemptEvent event) {
                if (event instanceof RMAppAttemptRegistrationEvent) {
                    appMasters.put(event.getApplicationAttemptId(),
                        event.getTimestamp());
                } else if (event instanceof RMAppAttemptUnregistrationEvent) {
                    appMasters.remove(event.getApplicationAttemptId());
                }
            }
        });
}
/**
 * Starts the RM at {@code index} on a background thread and blocks until it
 * leaves the INITED state (polling every 1.5 s, up to 60 attempts).
 *
 * @throws YarnRuntimeException if the RM does not reach STARTED
 */
private synchronized void startResourceManager(final int index) {
    try {
        Thread rmThread = new Thread() {
            public void run() {
                resourceManagers[index].start();
            }
        };
        rmThread.setName("RM-" + index);
        rmThread.start();
        int waitCount = 0;
        while (resourceManagers[index].getServiceState() == STATE.INITED
            && waitCount++ < 60) {
            LOG.info("Waiting for RM to start...");
            Thread.sleep(1500);
        }
        if (resourceManagers[index].getServiceState() != STATE.STARTED) {
            // RM could have failed.
            throw new IOException(
                "ResourceManager failed to start. Final state is "
                    + resourceManagers[index].getServiceState());
        }
    } catch (Throwable t) {
        throw new YarnRuntimeException(t);
    }
    LOG.info("MiniYARN ResourceManager address: " +
        getConfig().get(YarnConfiguration.RM_ADDRESS));
    LOG.info("MiniYARN ResourceManager web address: " +
        WebAppUtils.getRMWebAppURLWithoutScheme(getConfig()));
}
/** Stops the RM at {@code index} (if running) and clears its slot. */
@InterfaceAudience.Private
@VisibleForTesting
public synchronized void stopResourceManager(int index) {
    ResourceManager rm = resourceManagers[index];
    if (rm == null) {
        return;
    }
    rm.stop();
    resourceManagers[index] = null;
}
/**
 * Stops the RM at {@code index} (if running) and brings up a fresh instance
 * with the cluster's current configuration.
 *
 * @param index slot of the RM to restart
 * @throws InterruptedException declared for API compatibility
 */
@InterfaceAudience.Private
@VisibleForTesting
public synchronized void restartResourceManager(int index)
    throws InterruptedException {
    if (resourceManagers[index] != null) {
        resourceManagers[index].stop();
        resourceManagers[index] = null;
    }
    // Fix: use the overridable factory instead of "new ResourceManager()" so
    // subclass customizations (e.g. the doSecureLogin() override installed by
    // createResourceManager()) survive a restart. Also dropped the unused
    // local Configuration variable.
    resourceManagers[index] = createResourceManager();
    initResourceManager(index, getConfig());
    startResourceManager(index);
}
/** @return the per-test working directory (the short symlink on Windows). */
public File getTestWorkDir() {
    return testWorkDir;
}
/**
 * In a HA cluster, go through all the RMs and find the Active RM. In a
 * non-HA cluster, return the index of the only RM.
 * <p>
 * Polls every 100 ms for at most {@code failoverTimeout} milliseconds.
 *
 * @return index of the active RM or -1 if none of them turn active
 */
@InterfaceAudience.Private
@VisibleForTesting
public int getActiveRMIndex() {
    if (resourceManagers.length == 1) {
        return 0;
    }

    int numRetriesForRMBecomingActive = failoverTimeout / 100;
    while (numRetriesForRMBecomingActive-- > 0) {
        for (int i = 0; i < resourceManagers.length; i++) {
            if (resourceManagers[i] == null) {
                // Slot can be null after stopResourceManager().
                continue;
            }
            try {
                if (HAServiceProtocol.HAServiceState.ACTIVE ==
                    resourceManagers[i].getRMContext().getRMAdminService()
                        .getServiceStatus().getState()) {
                    return i;
                }
            } catch (IOException e) {
                throw new YarnRuntimeException("Couldn't read the status of " +
                    "a ResourceManger in the HA ensemble.", e);
            }
        }
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            throw new YarnRuntimeException("Interrupted while waiting for one " +
                "of the ResourceManagers to become active");
        }
    }
    return -1;
}
/**
 * @return the active {@link ResourceManager} of the cluster,
 *         null if none of them are active.
 */
public ResourceManager getResourceManager() {
    int active = getActiveRMIndex();
    if (active == -1) {
        return null;
    }
    return this.resourceManagers[active];
}
/** @return the ResourceManager in slot {@code i}, regardless of HA state. */
public ResourceManager getResourceManager(int i) {
    return this.resourceManagers[i];
}

/** @return the NodeManager in slot {@code i}. */
public NodeManager getNodeManager(int i) {
    return this.nodeManagers[i];
}
/** @return the local host name used for every mini-cluster bind address. */
public static String getHostname() {
    try {
        return InetAddress.getLocalHost().getHostName();
    } catch (UnknownHostException cause) {
        // A host without a resolvable name cannot run the mini-cluster at all.
        throw new RuntimeException(cause);
    }
}
/**
 * Service wrapper driving the lifecycle of one {@link ResourceManager} in the
 * mini-cluster; also removes the Windows short-path symlink on stop.
 */
private class ResourceManagerWrapper extends AbstractService {
    // Index of the wrapped RM in resourceManagers[].
    private int index;

    public ResourceManagerWrapper(int i) {
        super(ResourceManagerWrapper.class.getName() + "_" + i);
        index = i;
    }

    @Override
    protected synchronized void serviceInit(Configuration conf)
        throws Exception {
        initResourceManager(index, conf);
        super.serviceInit(conf);
    }

    @Override
    protected synchronized void serviceStart() throws Exception {
        startResourceManager(index);
        LOG.info("MiniYARN ResourceManager address: " +
            getConfig().get(YarnConfiguration.RM_ADDRESS));
        LOG.info("MiniYARN ResourceManager web address: " +
            WebAppUtils.getRMWebAppURLWithoutScheme(getConfig()));
        super.serviceStart();
    }

    // Waits (bounded by timeoutMillis) for all registered AMs to unregister.
    // NOTE(review): nothing in this file notifies on appMasters, so wait(1000)
    // effectively degrades to 1-second polling — confirm that is intended.
    private void waitForAppMastersToFinish(long timeoutMillis) throws InterruptedException {
        long started = System.currentTimeMillis();
        synchronized (appMasters) {
            while (!appMasters.isEmpty() && System.currentTimeMillis() - started < timeoutMillis) {
                appMasters.wait(1000);
            }
        }
        if (!appMasters.isEmpty()) {
            LOG.warn("Stopping RM while some app masters are still alive");
        }
    }

    @Override
    protected synchronized void serviceStop() throws Exception {
        if (resourceManagers[index] != null) {
            // Give running application masters up to 5 s to finish first.
            waitForAppMastersToFinish(5000);
            resourceManagers[index].stop();
        }

        if (Shell.WINDOWS) {
            // On Windows, clean up the short temporary symlink that was created to
            // work around path length limitation.
            String testWorkDirPath = testWorkDir.getAbsolutePath();
            try {
                FileContext.getLocalFSFileContext().delete(new Path(testWorkDirPath),
                    true);
            } catch (IOException e) {
                LOG.warn("could not cleanup symlink: " +
                    testWorkDir.getAbsolutePath());
            }
        }
        super.serviceStop();
    }
}
/**
 * Service wrapper that configures (local/log dirs, ephemeral addresses,
 * disabled resource checks) and drives the lifecycle of one
 * {@link NodeManager} in the mini-cluster.
 */
private class NodeManagerWrapper extends AbstractService {
    // Index of the wrapped NM in nodeManagers[].
    int index = 0;

    public NodeManagerWrapper(int i) {
        super(NodeManagerWrapper.class.getName() + "_" + i);
        index = i;
    }

    @Override // fix: annotation was missing (all sibling wrappers annotate overrides)
    protected synchronized void serviceInit(Configuration conf)
        throws Exception {
        Configuration config = new YarnConfiguration(conf);
        // create nm-local-dirs and configure them for the nodemanager
        String localDirsString = prepareDirs("local", numLocalDirs);
        config.set(YarnConfiguration.NM_LOCAL_DIRS, localDirsString);
        // create nm-log-dirs and configure them for the nodemanager
        String logDirsString = prepareDirs("log", numLogDirs);
        config.set(YarnConfiguration.NM_LOG_DIRS, logDirsString);
        // By default AM + 2 containers
        config.setInt(YarnConfiguration.NM_PMEM_MB, 4*1024);
        config.set(YarnConfiguration.NM_ADDRESS,
            MiniYARNCluster.getHostname() + ":0");
        config.set(YarnConfiguration.NM_LOCALIZER_ADDRESS,
            MiniYARNCluster.getHostname() + ":0");
        WebAppUtils
            .setNMWebAppHostNameAndPort(config,
                MiniYARNCluster.getHostname(), 0);
        // Disable resource checks by default
        if (!config.getBoolean(
            YarnConfiguration.YARN_MINICLUSTER_CONTROL_RESOURCE_MONITORING,
            YarnConfiguration.
                DEFAULT_YARN_MINICLUSTER_CONTROL_RESOURCE_MONITORING)) {
            config.setBoolean(YarnConfiguration.NM_PMEM_CHECK_ENABLED, false);
            config.setBoolean(YarnConfiguration.NM_VMEM_CHECK_ENABLED, false);
        }
        LOG.info("Starting NM: " + index);
        nodeManagers[index].init(config);
        super.serviceInit(config);
    }

    /**
     * Create local/log directories
     * @param dirType type of directories i.e. local dirs or log dirs
     * @param numDirs number of directories
     * @return the created directories as a comma delimited String
     */
    private String prepareDirs(String dirType, int numDirs) {
        // Fix: use a StringBuilder instead of repeated String.concat in a loop.
        StringBuilder dirsString = new StringBuilder();
        for (int i = 0; i < numDirs; i++) {
            File dir = new File(testWorkDir, MiniYARNCluster.this.getName()
                + "-" + dirType + "Dir-nm-" + index + "_" + i);
            dir.mkdirs();
            LOG.info("Created " + dirType + "Dir in " + dir.getAbsolutePath());
            if (i > 0) {
                dirsString.append(",");
            }
            dirsString.append(dir.getAbsolutePath());
        }
        return dirsString.toString();
    }

    @Override // fix: annotation was missing
    protected synchronized void serviceStart() throws Exception {
        try {
            // Start asynchronously, then poll until the NM leaves INITED
            // (1 s intervals, up to 60 attempts).
            new Thread() {
                public void run() {
                    nodeManagers[index].start();
                }
            }.start();
            int waitCount = 0;
            while (nodeManagers[index].getServiceState() == STATE.INITED
                && waitCount++ < 60) {
                LOG.info("Waiting for NM " + index + " to start...");
                Thread.sleep(1000);
            }
            if (nodeManagers[index].getServiceState() != STATE.STARTED) {
                // NM could have failed (comment previously said "RM" — copy/paste).
                throw new IOException("NodeManager " + index + " failed to start");
            }
            super.serviceStart();
        } catch (Throwable t) {
            throw new YarnRuntimeException(t);
        }
    }

    @Override
    protected synchronized void serviceStop() throws Exception {
        if (nodeManagers[index] != null) {
            nodeManagers[index].stop();
        }
        super.serviceStop();
    }
}
/** NodeManager variant for tests that skips secure (keytab) login. */
private class CustomNodeManager extends NodeManager {
    @Override
    protected void doSecureLogin() throws IOException {
        // Don't try to login using keytab in the testcase.
    }
}
/**
 * NodeManager that bypasses RPC entirely: its NodeStatusUpdater obtains a
 * ResourceTracker that calls the active RM's ResourceTrackerService
 * directly, in-process.
 */
private class ShortCircuitedNodeManager extends CustomNodeManager {
    @Override
    protected NodeStatusUpdater createNodeStatusUpdater(Context context,
        Dispatcher dispatcher, NodeHealthCheckerService healthChecker) {
        return new NodeStatusUpdaterImpl(context, dispatcher,
            healthChecker, metrics) {
            @Override
            protected ResourceTracker getRMClient() {
                // Resolve the active RM's tracker service at connect time.
                final ResourceTrackerService rt =
                    getResourceManager().getResourceTrackerService();
                final RecordFactory recordFactory =
                    RecordFactoryProvider.getRecordFactory(null);

                // For in-process communication without RPC
                return new ResourceTracker() {

                    @Override
                    public NodeHeartbeatResponse nodeHeartbeat(
                        NodeHeartbeatRequest request) throws YarnException,
                        IOException {
                        NodeHeartbeatResponse response;
                        try {
                            response = rt.nodeHeartbeat(request);
                        } catch (YarnException e) {
                            // Log with the node id for debugging, then rethrow.
                            LOG.info("Exception in heartbeat from node " +
                                request.getNodeStatus().getNodeId(), e);
                            throw e;
                        }
                        return response;
                    }

                    @Override
                    public RegisterNodeManagerResponse registerNodeManager(
                        RegisterNodeManagerRequest request)
                        throws YarnException, IOException {
                        RegisterNodeManagerResponse response;
                        try {
                            response = rt.registerNodeManager(request);
                        } catch (YarnException e) {
                            // Log with the node id for debugging, then rethrow.
                            LOG.info("Exception in node registration from "
                                + request.getNodeId().toString(), e);
                            throw e;
                        }
                        return response;
                    }
                };
            }

            @Override
            protected void stopRMProxy() { }
        };
    }
}
/**
 * Wait for all the NodeManagers to connect to the ResourceManager.
 *
 * @param timeout Time to wait (sleeps in 100 ms intervals) in milliseconds.
 * @return true if all NodeManagers connect to the (Active)
 *         ResourceManager, false otherwise.
 * @throws YarnException if no active RM can be found while polling
 * @throws InterruptedException if interrupted between polls
 */
public boolean waitForNodeManagersToConnect(long timeout)
    throws YarnException, InterruptedException {
    GetClusterMetricsRequest req = GetClusterMetricsRequest.newInstance();
    for (int i = 0; i < timeout / 100; i++) {
        ResourceManager rm = getResourceManager();
        if (rm == null) {
            throw new YarnException("Can not find the active RM.");
        }
        // Connected once the RM reports as many registered NMs as we launched.
        else if (nodeManagers.length == rm.getClientRMService()
            .getClusterMetrics(req).getClusterMetrics().getNumNodeManagers()) {
            return true;
        }
        Thread.sleep(100);
    }
    return false;
}
/**
 * Service wrapper driving the lifecycle of the single
 * {@link ApplicationHistoryServer}, backed by in-memory history and timeline
 * stores so tests need no external persistence.
 */
private class ApplicationHistoryServerWrapper extends AbstractService {
    public ApplicationHistoryServerWrapper() {
        super(ApplicationHistoryServerWrapper.class.getName());
    }

    @Override
    protected synchronized void serviceInit(Configuration conf)
        throws Exception {
        appHistoryServer = new ApplicationHistoryServer();
        conf.setClass(YarnConfiguration.APPLICATION_HISTORY_STORE,
            MemoryApplicationHistoryStore.class, ApplicationHistoryStore.class);
        conf.setClass(YarnConfiguration.TIMELINE_SERVICE_STORE,
            MemoryTimelineStore.class, TimelineStore.class);
        appHistoryServer.init(conf);
        super.serviceInit(conf);
    }

    @Override
    protected synchronized void serviceStart() throws Exception {
        try {
            // Start asynchronously, then poll until the server leaves INITED
            // (1.5 s intervals, up to 60 attempts).
            new Thread() {
                public void run() {
                    appHistoryServer.start();
                } // fix: removed stray ';' (empty declaration) after this body
            }.start();
            int waitCount = 0;
            while (appHistoryServer.getServiceState() == STATE.INITED
                && waitCount++ < 60) {
                LOG.info("Waiting for Timeline Server to start...");
                Thread.sleep(1500);
            }
            if (appHistoryServer.getServiceState() != STATE.STARTED) {
                // AHS could have failed.
                throw new IOException(
                    "ApplicationHistoryServer failed to start. Final state is "
                        + appHistoryServer.getServiceState());
            }
            super.serviceStart();
        } catch (Throwable t) {
            throw new YarnRuntimeException(t);
        }
        LOG.info("MiniYARN ApplicationHistoryServer address: "
            + getConfig().get(YarnConfiguration.TIMELINE_SERVICE_ADDRESS));
        LOG.info("MiniYARN ApplicationHistoryServer web address: "
            + getConfig().get(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS));
    }

    @Override
    protected synchronized void serviceStop() throws Exception {
        if (appHistoryServer != null) {
            appHistoryServer.stop();
        }
        // Reset the singleton web app so later tests start from a clean slate.
        AHSWebApp.resetInstance();
        super.serviceStop();
    }
}
/**
 * @return the ApplicationHistoryServer; only assigned when the cluster was
 *         created with AHS enabled, otherwise null.
 */
public ApplicationHistoryServer getApplicationHistoryServer() {
    return this.appHistoryServer;
}

/**
 * Factory for RM instances; the default skips keytab login for tests.
 * Subclasses may override to customize the RMs further — restart paths must
 * go through this method, not a raw constructor.
 */
protected ResourceManager createResourceManager() {
    return new ResourceManager(){
        @Override
        protected void doSecureLogin() throws IOException {
            // Don't try to login using keytab in the testcases.
        }
    };
}

/** @return the number of resource managers in the cluster. */
public int getNumOfResourceManager() {
    return this.resourceManagers.length;
}
}
| |
/*************************GO-LICENSE-START*********************************
* Copyright 2014 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.config;
import com.rits.cloning.Cloner;
import com.thoughtworks.go.config.update.ConfigUpdateCheckFailedException;
import com.thoughtworks.go.config.validation.GoConfigValidity;
import com.thoughtworks.go.domain.AgentInstance;
import com.thoughtworks.go.listener.ConfigChangedListener;
import com.thoughtworks.go.metrics.domain.context.Context;
import com.thoughtworks.go.metrics.domain.probes.ProbeType;
import com.thoughtworks.go.metrics.service.MetricsProbeService;
import com.thoughtworks.go.presentation.TriStateSelection;
import com.thoughtworks.go.util.ExceptionUtils;
import com.thoughtworks.go.util.TriState;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static com.thoughtworks.go.util.ExceptionUtils.*;
/**
* @understands how to modify the cruise config file
*/
@Component
public class GoConfigFileDao {
// Cache layer that mediates all reads/writes of the on-disk cruise config.
private CachedGoConfig cachedConfigService;
private MetricsProbeService metricsProbeService;
// NOTE(review): presumably serializes config writes; its use is outside this
// view — confirm against the rest of the class.
private final Object writeLock;
private Cloner cloner = new Cloner();

@Autowired
public GoConfigFileDao(CachedGoConfig cachedConfigService, MetricsProbeService metricsProbeService) {
    this.cachedConfigService = cachedConfigService;
    this.metricsProbeService = metricsProbeService;
    writeLock = new Object();
}

/** @return the path of the backing cruise config file. */
public String fileLocation() {
    return cachedConfigService.getFileLocation();
}

/** Adds the given agent to the config via an update command. */
public void addAgent(final AgentConfig agentConfig) {
    updateConfig(createAddAgentCommand(agentConfig));
}

/** Replaces the configured mail host. */
public void updateMailHost(MailHost mailHost) {
    updateConfig(mailHostUpdater(mailHost));
}

/** Adds a pipeline to the named pipeline group. */
public void addPipeline(PipelineConfig pipelineConfig, String groupName) {
    updateConfig(pipelineAdder(pipelineConfig, groupName));
}

/** Adds an environment definition to the config. */
public void addEnvironment(BasicEnvironmentConfig environmentConfig) {
    updateConfig(environmentAdder(environmentConfig));
}

/** Replaces the resources of the agent identified by uuid. */
public void updateAgentResources(final String uuid, final Resources resources) {
    updateConfig(new UpdateResourcesCommand(uuid, resources));
}

/** Updates the denied flag of the agent identified by uuid. */
public void updateAgentApprovalStatus(final String uuid, final Boolean isDenied) {
    updateConfig(updateApprovalStatus(uuid, isDenied));
}

/** @return a command that sets the denied flag of the agent with the given uuid. */
public static UpdateConfigCommand updateApprovalStatus(final String uuid, final Boolean isDenied) {
    return new UpdateAgentApprovalStatus(uuid, isDenied);
}

/** @return a command that deletes the agent with the given uuid. */
public static DeleteAgent deleteAgentCommand(String uuid) {
    return new DeleteAgent(uuid);
}

/** Deletes all the given agents in a single composite config update. */
public void deleteAgents(AgentInstance... agentInstances) {
    updateConfig(commandForDeletingAgents(agentInstances));
}
CompositeConfigCommand commandForDeletingAgents(AgentInstance... agentInstances) {
GoConfigFileDao.CompositeConfigCommand command = new GoConfigFileDao.CompositeConfigCommand();
for (AgentInstance agentInstance : agentInstances) {
command.addCommand(deleteAgentCommand(agentInstance.getUuid()));
}
return command;
}
private static class UpdateAgentIp implements UpdateConfigCommand, UserAware {
private final String uuid;
private final String ipAddress;
private final String userName;
private UpdateAgentIp(String uuid, String ipAddress, String userName) {
this.uuid = uuid;
this.ipAddress = ipAddress;
this.userName = userName;
}
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
AgentConfig agentConfig = cruiseConfig.agents().getAgentByUuid(uuid);
bombIfNull(agentConfig, "Unable to set agent ipAddress; Agent [" + uuid + "] not found.");
agentConfig.setIpAddress(ipAddress);
return cruiseConfig;
}
public ConfigModifyingUser user() {
return new ConfigModifyingUser(userName);
}
}
public static class UpdateAgentHostname implements UpdateConfigCommand, UserAware {
private final String uuid;
private final String hostname;
private final String userName;
public UpdateAgentHostname(String uuid, String hostname, String userName) {
this.uuid = uuid;
this.hostname = hostname;
this.userName = userName;
}
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
AgentConfig agentConfig = cruiseConfig.agents().getAgentByUuid(uuid);
bombIfNull(agentConfig, "Unable to set agent hostname; Agent [" + uuid + "] not found.");
agentConfig.setHostName(hostname);
return cruiseConfig;
}
public ConfigModifyingUser user() {
return new ConfigModifyingUser(userName);
}
}
public void updateAgentIp(final String uuid, final String ipAddress, String userName) {
updateConfig(new UpdateAgentIp(uuid, ipAddress, userName));
}
public CruiseConfig loadForEditing() {
return cachedConfigService.loadForEditing();
}
public CruiseConfig load() {
return cachedConfigService.currentConfig();
}
public String md5OfConfigFile() {
return cachedConfigService.currentConfig().getMd5();
}
public ConfigSaveState updateConfig(UpdateConfigCommand command) {
Context context = metricsProbeService.begin(ProbeType.UPDATE_CONFIG);
try {
synchronized (writeLock) {
try {
if (command instanceof CheckedUpdateCommand) {
CheckedUpdateCommand checkedCommand = (CheckedUpdateCommand) command;
if (!checkedCommand.canContinue(cachedConfigService.currentConfig())) {
throw new ConfigUpdateCheckFailedException();
}
}
return cachedConfigService.writeWithLock(command);
} finally {
if (command instanceof ConfigAwareUpdate) {
((ConfigAwareUpdate) command).afterUpdate(clonedConfig());
}
}
}
} finally {
metricsProbeService.end(ProbeType.UPDATE_CONFIG, context);
}
}
private CruiseConfig clonedConfig() {
return cloner.deepClone(cachedConfigService.currentConfig());
}
public GoConfigValidity checkConfigFileValid() {
return cachedConfigService.checkConfigFileValid();
}
public void registerListener(ConfigChangedListener listener) {
cachedConfigService.registerListener(listener);
}
/**
* @deprecated Used only in tests
*/
public void reloadListeners() {
cachedConfigService.reloadListeners();
}
/**
* @deprecated Used only in tests
*/
public void forceReload() {
cachedConfigService.onTimer();
}
public static class CompositeConfigCommand implements UpdateConfigCommand {
private List<UpdateConfigCommand> commands = new ArrayList<UpdateConfigCommand>();
public CompositeConfigCommand(UpdateConfigCommand... commands) {
this.commands.addAll(Arrays.asList(commands));
}
public void addCommand(UpdateConfigCommand command) {
commands.add(command);
}
public List<UpdateConfigCommand> getCommands() {
return commands;
}
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
for (UpdateConfigCommand command : commands) {
cruiseConfig = command.update(cruiseConfig);
}
return cruiseConfig;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
CompositeConfigCommand command = (CompositeConfigCommand) o;
if (commands != null ? !commands.equals(command.commands) : command.commands != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
return commands != null ? commands.hashCode() : 0;
}
@Override
public String toString() {
return "CompositeConfigCommand{" +
"commands=" + commands +
'}';
}
}
public static class NoOverwriteCompositeConfigCommand extends CompositeConfigCommand implements NoOverwriteUpdateConfigCommand {
private final String md5;
public NoOverwriteCompositeConfigCommand(String md5, UpdateConfigCommand... commands) {
super(commands);
this.md5 = md5;
}
public String unmodifiedMd5() {
return md5;
}
}
public static UpdateConfigCommand createAddAgentCommand(final AgentConfig agentConfig) {
return new AddAgentCommand(agentConfig);
}
private UpdateConfigCommand pipelineAdder(final PipelineConfig pipelineConfig, final String groupName) {
return new UpdateConfigCommand() {
public CruiseConfig update(CruiseConfig cruiseConfig) {
cruiseConfig.addPipeline(groupName, pipelineConfig);
return cruiseConfig;
}
};
}
private UpdateConfigCommand environmentAdder(final BasicEnvironmentConfig environmentConfig) {
return new UpdateConfigCommand() {
public CruiseConfig update(CruiseConfig cruiseConfig) {
cruiseConfig.addEnvironment(environmentConfig);
return cruiseConfig;
}
};
}
public UpdateConfigCommand mailHostUpdater(final MailHost mailHost) {
return new UpdateConfigCommand() {
public CruiseConfig update(CruiseConfig cruiseConfig) {
cruiseConfig.server().updateMailHost(mailHost);
return cruiseConfig;
}
};
}
/**
* @understands how to add an agent to the config file
*/
private static class AddAgentCommand implements UpdateConfigCommand {
private final AgentConfig agentConfig;
public AddAgentCommand(AgentConfig agentConfig) {
this.agentConfig = agentConfig;
}
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.agents().add(agentConfig);
return cruiseConfig;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
AddAgentCommand that = (AddAgentCommand) o;
if (agentConfig != null ? !agentConfig.equals(that.agentConfig) : that.agentConfig != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
return agentConfig != null ? agentConfig.hashCode() : 0;
}
@Override
public String toString() {
return "AddAgentcommand{" +
"agentConfig=" + agentConfig +
'}';
}
}
/**
* @understands how to update the agent approval status
*/
private static class UpdateAgentApprovalStatus implements UpdateConfigCommand {
private final String uuid;
private final Boolean denied;
public UpdateAgentApprovalStatus(String uuid, Boolean denied) {
this.uuid = uuid;
this.denied = denied;
}
public CruiseConfig update(CruiseConfig cruiseConfig) {
AgentConfig agentConfig = cruiseConfig.agents().getAgentByUuid(uuid);
bombIfNull(agentConfig, "Unable to set agent approval status; Agent [" + uuid + "] not found.");
agentConfig.setDisabled(denied);
return cruiseConfig;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
UpdateAgentApprovalStatus that = (UpdateAgentApprovalStatus) o;
if (denied != null ? !denied.equals(that.denied) : that.denied != null) {
return false;
}
if (uuid != null ? !uuid.equals(that.uuid) : that.uuid != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = uuid != null ? uuid.hashCode() : 0;
result = 31 * result + (denied != null ? denied.hashCode() : 0);
return result;
}
@Override
public String toString() {
return "UpdateAgentApprovalStatus{" +
"uuid='" + uuid + '\'' +
", denied=" + denied +
'}';
}
}
/**
* @understands how to delete agent
*/
private static class DeleteAgent implements UpdateConfigCommand {
private final String uuid;
public DeleteAgent(String uuid) {
this.uuid = uuid;
}
public CruiseConfig update(CruiseConfig cruiseConfig) {
AgentConfig agentConfig = cruiseConfig.agents().getAgentByUuid(uuid);
if (agentConfig.isNull()) {
bomb("Unable to delete agent; Agent [" + uuid + "] not found.");
}
cruiseConfig.getEnvironments().removeAgentFromAllEnvironments(uuid);
cruiseConfig.agents().remove(agentConfig);
return cruiseConfig;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
DeleteAgent that = (DeleteAgent) o;
if (uuid != null ? !uuid.equals(that.uuid) : that.uuid != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
return uuid != null ? uuid.hashCode() : 0;
}
@Override
public String toString() {
return "DeleteAgent{" +
"uuid='" + uuid + '\'' +
'}';
}
}
public static class UpdateResourcesCommand implements UpdateConfigCommand {
private final String uuid;
private final Resources resources;
public UpdateResourcesCommand(String uuid, Resources resources) {
this.uuid = uuid;
this.resources = resources;
}
public CruiseConfig update(CruiseConfig cruiseConfig) {
AgentConfig agentConfig = cruiseConfig.agents().getAgentByUuid(uuid);
bombIfNull(agentConfig, "Unable to set agent resources; Agent [" + uuid + "] not found.");
agentConfig.setResources(resources);
return cruiseConfig;
}
}
public static class ModifyResourcesCommand implements UpdateConfigCommand {
private final String uuid;
private final Resource resource;
private final TriStateSelection.Action action;
public ModifyResourcesCommand(String uuid, Resource resource, TriStateSelection.Action action) {
this.uuid = uuid;
this.resource = resource;
this.action = action;
}
public CruiseConfig update(CruiseConfig cruiseConfig) {
AgentConfig agentConfig = cruiseConfig.agents().getAgentByUuid(uuid);
bombIfNull(agentConfig, "Unable to set agent resources; Agent [" + uuid + "] not found.");
if (action.equals(TriStateSelection.Action.add)) {
agentConfig.addResource(resource);
} else if (action.equals(TriStateSelection.Action.remove)) {
agentConfig.removeResource(resource);
} else if (action.equals(TriStateSelection.Action.nochange)) {
//do nothing
} else {
bomb(String.format("unsupported action '%s'", action));
}
return cruiseConfig;
}
}
public static class ModifyRoleCommand implements UpdateConfigCommand {
private String user;
private TriStateSelection roleSelection;
public ModifyRoleCommand(String user, TriStateSelection roleSelection) {
this.user = user;
this.roleSelection = roleSelection;
}
public CruiseConfig update(CruiseConfig cruiseConfig) {
RolesConfig rolesConfig = cruiseConfig.server().security().getRoles();
String roleName = roleSelection.getValue();
Role role = rolesConfig.findByName(new CaseInsensitiveString(roleName));
switch (roleSelection.getAction()) {
case add:
if (role == null) {
role = new Role(new CaseInsensitiveString(roleName), new Users());
rolesConfig.add(role);
}
if (!role.hasMember(new CaseInsensitiveString(user))) {
role.addUser(new RoleUser(new CaseInsensitiveString(user)));
}
break;
case remove:
if (role != null) {
role.removeUser(new RoleUser(new CaseInsensitiveString(user)));
}
break;
case nochange:
break;
default:
throw ExceptionUtils.bomb("unrecognized Action: " + roleSelection.getAction());
}
return cruiseConfig;
}
}
public static class ModifyAdminPrivilegeCommand implements UpdateConfigCommand {
private String user;
private TriStateSelection adminPrivilegeSelection;
public static final UserRoleMatcher ALWAYS_FALSE_MATCHER = new UserRoleMatcher() {
public boolean match(CaseInsensitiveString userName, CaseInsensitiveString roleName) {
return false;
}
};
public ModifyAdminPrivilegeCommand(String user, TriStateSelection adminPrivilege) {
this.user = user;
this.adminPrivilegeSelection = adminPrivilege;
}
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
final AdminsConfig adminsConfig = cruiseConfig.server().security().adminsConfig();
switch (adminPrivilegeSelection.getAction()) {
case add:
if (!adminsConfig.hasUser(new CaseInsensitiveString(user), ALWAYS_FALSE_MATCHER)) {
adminsConfig.add(new AdminUser(new CaseInsensitiveString(user)));
}
break;
case remove:
adminsConfig.remove(new AdminUser(new CaseInsensitiveString(user)));
break;
}
return cruiseConfig;
}
}
public static class ModifyEnvironmentCommand implements UpdateConfigCommand {
private final String uuid;
private final String environmentName;
private final TriStateSelection.Action action;
public ModifyEnvironmentCommand(String uuid, String environmentName, TriStateSelection.Action action) {
this.uuid = uuid;
this.environmentName = environmentName;
this.action = action;
}
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
AgentConfig agentConfig = cruiseConfig.agents().getAgentByUuid(uuid);
bombIfNull(agentConfig, "Unable to set agent resources; Agent [" + uuid + "] not found.");
EnvironmentConfig environmentConfig = cruiseConfig.getEnvironments().named(new CaseInsensitiveString(environmentName));
if (action.equals(TriStateSelection.Action.add)) {
environmentConfig.addAgentIfNew(uuid);
} else if (action.equals(TriStateSelection.Action.remove)) {
environmentConfig.removeAgent(uuid);
} else if (action.equals(TriStateSelection.Action.nochange)) {
//do nothing
} else {
bomb(String.format("unsupported action '%s'", action));
}
return cruiseConfig;
}
}
public GoConfigHolder loadConfigHolder() {
return cachedConfigService.loadConfigHolder();
}
}
| |
/*
* Copyright 2016 Michael Hahn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.trade.core.model.data.instance;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.mongodb.morphia.annotations.Entity;
import org.mongodb.morphia.annotations.Reference;
import org.mongodb.morphia.annotations.Transient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.statefulj.persistence.annotations.State;
import org.trade.core.model.ABaseResource;
import org.trade.core.model.data.DataObject;
import org.trade.core.model.data.ILifeCycleInstanceObject;
import org.trade.core.model.lifecycle.DataObjectInstanceLifeCycle;
import org.trade.core.model.lifecycle.LifeCycleException;
import org.trade.core.persistence.IPersistenceProvider;
import org.trade.core.persistence.local.LocalPersistenceProviderFactory;
import org.trade.core.utils.events.InstanceEvents;
import org.trade.core.utils.states.InstanceStates;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.*;
/**
* This class represents an instance of a data object within the middleware.
* <p>
* Created by hahnml on 26.10.2016.
*/
@Entity("dataObjectInstances")
public class DataObjectInstance extends ABaseResource implements ILifeCycleInstanceObject {
    private static final long serialVersionUID = 4504379941592896623L;

    // Static so the logger is neither Java-serialized nor persisted by Morphia.
    // (Previously this was an instance field marked only with Morphia's @Transient;
    // Java serialization — which this class supports via readObject — would still have
    // attempted to serialize the non-serializable SLF4J Logger.)
    private static final Logger logger = LoggerFactory.getLogger("org.trade.core.model.data.instance.DataObjectInstance");

    @JsonProperty("creationTimestamp")
    private Date creationTimestamp;

    @JsonProperty("createdBy")
    private String createdBy;

    // Current life-cycle state, managed by StatefulJ through the lifeCycle object.
    @JsonProperty("state")
    @State
    private String state;

    private transient DataObjectInstanceLifeCycle lifeCycle;

    private transient IPersistenceProvider<DataObjectInstance> persistProv;

    @JsonProperty("dataObject")
    @Reference
    private DataObject dataObject;

    @JsonProperty("dataElementInstances")
    @Reference
    private List<DataElementInstance> dataElementInstances;

    @JsonProperty("correlationProperties")
    private HashMap<String, String> correlationProperties;

    /**
     * Creates a new data object instance for the given data object.
     *
     * @param dataObject            the data object this instance belongs to
     * @param createdBy             identifier of the creator
     * @param correlationProperties correlation properties of the instance; may be null,
     *                              in which case an empty map is used
     */
    public DataObjectInstance(DataObject dataObject, String createdBy, HashMap<String, String>
            correlationProperties) {
        this.createdBy = createdBy;
        this.dataObject = dataObject;
        this.creationTimestamp = new Date();
        if (correlationProperties != null) {
            this.correlationProperties = correlationProperties;
        } else {
            this.correlationProperties = new HashMap<>();
        }
        dataElementInstances = new ArrayList<>();
        this.lifeCycle = new DataObjectInstanceLifeCycle(this);
        this.persistProv = LocalPersistenceProviderFactory.createLocalPersistenceProvider(DataObjectInstance
                .class);
    }

    /**
     * This constructor is only used by Morphia to load data value from the database.
     */
    private DataObjectInstance() {
        this.lifeCycle = new DataObjectInstanceLifeCycle(this, false);
        this.persistProv = LocalPersistenceProviderFactory.createLocalPersistenceProvider(DataObjectInstance
                .class);
    }

    public Date getCreationTimestamp() {
        return creationTimestamp;
    }

    public String getCreatedBy() {
        return createdBy;
    }

    public String getState() {
        return state;
    }

    public DataObject getDataObject() {
        return this.dataObject;
    }

    public List<DataElementInstance> getDataElementInstances() {
        return dataElementInstances;
    }

    public HashMap<String, String> getCorrelationProperties() {
        return correlationProperties;
    }

    /**
     * Registers a data element instance with this data object instance and persists the change.
     * Ignored if the element instance belongs to a different data object instance.
     */
    public void addDataElementInstance(DataElementInstance elementInstance) {
        // Check if the element instance belongs to this data object instance
        if (elementInstance.getDataObjectInstance() == this) {
            this.dataElementInstances.add(elementInstance);
            // Persist the changes at the data source
            this.storeToDS();
        }
    }

    /**
     * Removes a data element instance from this data object instance and persists the change.
     * Ignored if the element instance belongs to a different data object instance.
     */
    public void removeDataElementInstance(DataElementInstance elementInstance) {
        // Check if the element instance belongs to this data object instance
        if (elementInstance.getDataObjectInstance() == this) {
            if (this.dataElementInstances.remove(elementInstance)) {
                // Persist the changes at the data source
                this.storeToDS();
            }
        }
    }

    /**
     * Tries to move this instance to the INITIALIZED state. The transition only fires once
     * all related data element instances are initialized; otherwise the instance is moved
     * back to (or kept in) CREATED.
     *
     * @throws LifeCycleException if the instance is in a state from which initialization is not allowed
     */
    @Override
    public void initialize() throws Exception {
        if (this.isCreated() || this.isInitialized()) {
            // Check if all related data element instance are initialized or not
            boolean areInitialized = true;
            Iterator<DataElementInstance> iter = this.dataElementInstances.iterator();
            while (areInitialized && iter.hasNext()) {
                if (!iter.next().isInitialized()) {
                    // Found one element instance which is not initialized.
                    // Stop iteration and trigger state changes, if required.
                    areInitialized = false;
                }
            }
            if (areInitialized) {
                // Trigger the initialized event for the data object instance since now all related data element
                // instance are initialized successfully
                this.lifeCycle.triggerEvent(this, InstanceEvents.initialize);
            } else {
                if (this.isInitialized()) {
                    // Trigger the create event for the data object instance since at least one of its related data
                    // element instances is not initialized (anymore)
                    this.lifeCycle.triggerEvent(this, InstanceEvents.create);
                }
            }
            // Persist the changes at the data source
            this.storeToDS();
        } else {
            logger.info("The data object instance ({}) can not be initialized because it is in state '{}'.", this
                            .getIdentifier(),
                    getState());
            throw new LifeCycleException("The data object instance (" + this.getIdentifier() +
                    ") can not be initialized because it is in state '" + getState() + "'.");
        }
    }

    @Override
    public void archive() throws Exception {
        // TODO: 24.04.2017 Implement archiving of data object instances
        // Trigger the archive event for the data object instance
        this.lifeCycle.triggerEvent(this, InstanceEvents.archive);
        // Persist the changes at the data source
        this.storeToDS();
    }

    @Override
    public void unarchive() throws Exception {
        // TODO: 24.04.2017 Implement un-archiving of data object instances
        // Trigger the unarchive event for the data object instance
        this.lifeCycle.triggerEvent(this, InstanceEvents.unarchive);
        // Persist the changes at the data source
        this.storeToDS();
    }

    /**
     * Deletes this instance: detaches it from its data object, deletes all related data
     * element instances, and triggers the delete life-cycle event.
     */
    @Override
    public void delete() throws Exception {
        // Remove the data object instance from the data object
        getDataObject().removeDataObjectInstance(this);
        // By convention we also directly delete all related data element instances of the data object instance
        for (Iterator<DataElementInstance> iter = this.dataElementInstances.iterator(); iter.hasNext(); ) {
            DataElementInstance instance = iter.next();
            // Remove the element instance from the list
            iter.remove();
            // Trigger the deletion of the element instance
            instance.delete();
        }
        // Trigger the delete event for the data object instance. This will also trigger the deletion of the
        // corresponding object at the data source through the PersistableHashMap in the corresponding IDataManager
        // instance.
        this.lifeCycle.triggerEvent(this, InstanceEvents.delete);
    }

    @JsonIgnore
    @Override
    public boolean isCreated() {
        return getState() != null && this.getState().equals(InstanceStates
                .CREATED.name());
    }

    @JsonIgnore
    @Override
    public boolean isInitialized() {
        return getState() != null && this.getState().equals(InstanceStates
                .INITIALIZED.name());
    }

    @JsonIgnore
    @Override
    public boolean isArchived() {
        return getState() != null && this.getState().equals(InstanceStates
                .ARCHIVED.name());
    }

    @JsonIgnore
    @Override
    public boolean isDeleted() {
        return getState() != null && this.getState().equals(InstanceStates
                .DELETED.name());
    }

    @Override
    public void storeToDS() {
        if (this.persistProv != null) {
            try {
                this.persistProv.storeObject(this);
            } catch (Exception e) {
                logger.error("Storing data object instance '" + this.getIdentifier() + "' caused an exception.", e);
            }
        }
    }

    @Override
    public void deleteFromDS() {
        if (this.persistProv != null) {
            try {
                this.persistProv.deleteObject(this.getIdentifier());
            } catch (Exception e) {
                logger.error("Deleting data object instance '" + this.getIdentifier() + "' caused an exception.", e);
            }
        }
    }

    // Re-creates the transient life-cycle and persistence provider after Java deserialization.
    private void readObject(ObjectInputStream ois) throws IOException {
        try {
            ois.defaultReadObject();
            lifeCycle = new DataObjectInstanceLifeCycle(this, false);
            this.persistProv = LocalPersistenceProviderFactory.createLocalPersistenceProvider(DataObjectInstance
                    .class);
        } catch (ClassNotFoundException e) {
            logger.error("Class not found during deserialization of data object instance '{}'", getIdentifier());
            // Preserve the original cause instead of discarding it.
            throw new IOException("Class not found during deserialization of data object instance.", e);
        }
    }

    @Override
    public boolean equals(Object object) {
        if (object instanceof DataObjectInstance) {
            DataObjectInstance s = (DataObjectInstance) object;
            return this.identifier.equals(s.identifier);
        }
        return false;
    }

    // NOTE(review): hashCode is deliberately "query optimized" and derived from data model /
    // correlation properties rather than the identifier used by equals, so equal instances can
    // hash differently. Confirm instances are never used as keys in hash-based collections
    // across state changes.
    @Override
    public int hashCode() {
        int hashCode = 0;
        if (this.dataObject != null) {
            if (this.dataObject.getDataModel() != null) {
                // Create a query optimized hash which can be used to later identify the matching instance faster
                hashCode = Objects.hash(dataObject.getDataModel().getTargetNamespace(), dataObject.getDataModel().getName(),
                        dataObject.getName(), correlationProperties);
            } else {
                hashCode = Objects.hash(identifier, creationTimestamp, createdBy, dataObject.getName(), correlationProperties);
            }
        } else {
            hashCode = Objects.hash(identifier, creationTimestamp, createdBy);
        }
        return hashCode;
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.compute.v2019_03_01.implementation;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.AzureServiceFuture;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.ListOperationCallback;
import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.Validator;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.HTTP;
import retrofit2.http.Path;
import retrofit2.http.PUT;
import retrofit2.http.Query;
import retrofit2.http.Url;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* An instance of this class provides access to all the operations defined
* in GalleryApplications.
*/
public class GalleryApplicationsInner {
/** The Retrofit service to perform REST calls; created from the Retrofit builder in the constructor. */
private GalleryApplicationsService service;
/** The service client containing this operation class; supplies credentials, api-version and user agent. */
private ComputeManagementClientImpl client;
/**
* Initializes an instance of GalleryApplicationsInner.
*
* @param retrofit the Retrofit instance built from a Retrofit Builder.
* @param client the instance of the service client containing this operation class.
*/
public GalleryApplicationsInner(Retrofit retrofit, ComputeManagementClientImpl client) {
    // Keep a handle on the shared service client, then wire up the Retrofit proxy
    // that performs the actual REST calls.
    this.client = client;
    this.service = retrofit.create(GalleryApplicationsService.class);
}
/**
* The interface defining all the services for GalleryApplications to be
* used by Retrofit to perform actually REST calls.
*/
interface GalleryApplicationsService {
    // PUT .../galleries/{galleryName}/applications/{galleryApplicationName}
    // Long-running create-or-update of a gallery Application Definition (polled until done).
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.compute.v2019_03_01.GalleryApplications createOrUpdate" })
    @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}")
    Observable<Response<ResponseBody>> createOrUpdate(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("galleryName") String galleryName, @Path("galleryApplicationName") String galleryApplicationName, @Query("api-version") String apiVersion, @Body GalleryApplicationInner galleryApplication, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // Same PUT, but returns the initial (202) response without polling.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.compute.v2019_03_01.GalleryApplications beginCreateOrUpdate" })
    @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}")
    Observable<Response<ResponseBody>> beginCreateOrUpdate(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("galleryName") String galleryName, @Path("galleryApplicationName") String galleryApplicationName, @Query("api-version") String apiVersion, @Body GalleryApplicationInner galleryApplication, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // GET a single gallery Application Definition.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.compute.v2019_03_01.GalleryApplications get" })
    @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}")
    Observable<Response<ResponseBody>> get(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("galleryName") String galleryName, @Path("galleryApplicationName") String galleryApplicationName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // Long-running DELETE (via @HTTP because Retrofit's @DELETE disallows a request body).
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.compute.v2019_03_01.GalleryApplications delete" })
    @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}", method = "DELETE", hasBody = true)
    Observable<Response<ResponseBody>> delete(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("galleryName") String galleryName, @Path("galleryApplicationName") String galleryApplicationName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // Same DELETE, but returns the initial response without polling.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.compute.v2019_03_01.GalleryApplications beginDelete" })
    @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}", method = "DELETE", hasBody = true)
    Observable<Response<ResponseBody>> beginDelete(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("galleryName") String galleryName, @Path("galleryApplicationName") String galleryApplicationName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // GET the first page of Application Definitions in a gallery.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.compute.v2019_03_01.GalleryApplications listByGallery" })
    @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications")
    Observable<Response<ResponseBody>> listByGallery(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("galleryName") String galleryName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // GET a subsequent page using the nextLink URL returned by the previous page.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.compute.v2019_03_01.GalleryApplications listByGalleryNext" })
    @GET
    Observable<Response<ResponseBody>> listByGalleryNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
}
/**
 * Creates or updates a gallery Application Definition, blocking until the
 * long-running PUT operation reaches a terminal state.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be created.
 * @param galleryApplicationName The name of the gallery Application Definition to be created or updated. The allowed characters are alphabets and numbers with dots, dashes, and periods allowed in the middle. The maximum length is 80 characters.
 * @param galleryApplication Parameters supplied to the create or update gallery Application operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the GalleryApplicationInner object if successful.
 */
public GalleryApplicationInner createOrUpdate(String resourceGroupName, String galleryName, String galleryApplicationName, GalleryApplicationInner galleryApplication) {
    Observable<ServiceResponse<GalleryApplicationInner>> pollingResponses =
            createOrUpdateWithServiceResponseAsync(resourceGroupName, galleryName, galleryApplicationName, galleryApplication);
    // The observable emits one item per poll; the last emission is the terminal result.
    ServiceResponse<GalleryApplicationInner> terminal = pollingResponses.toBlocking().last();
    return terminal.body();
}
/**
 * Creates or updates a gallery Application Definition, delivering the outcome
 * through the supplied {@link ServiceCallback}.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be created.
 * @param galleryApplicationName The name of the gallery Application Definition to be created or updated. The allowed characters are alphabets and numbers with dots, dashes, and periods allowed in the middle. The maximum length is 80 characters.
 * @param galleryApplication Parameters supplied to the create or update gallery Application operation.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<GalleryApplicationInner> createOrUpdateAsync(String resourceGroupName, String galleryName, String galleryApplicationName, GalleryApplicationInner galleryApplication, final ServiceCallback<GalleryApplicationInner> serviceCallback) {
    Observable<ServiceResponse<GalleryApplicationInner>> responseObservable =
            createOrUpdateWithServiceResponseAsync(resourceGroupName, galleryName, galleryApplicationName, galleryApplication);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Creates or updates a gallery Application Definition.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be created.
 * @param galleryApplicationName The name of the gallery Application Definition to be created or updated. The allowed characters are alphabets and numbers with dots, dashes, and periods allowed in the middle. The maximum length is 80 characters.
 * @param galleryApplication Parameters supplied to the create or update gallery Application operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<GalleryApplicationInner> createOrUpdateAsync(String resourceGroupName, String galleryName, String galleryApplicationName, GalleryApplicationInner galleryApplication) {
    // Strip the ServiceResponse envelope, surfacing only the deserialized payload.
    Func1<ServiceResponse<GalleryApplicationInner>, GalleryApplicationInner> unwrapBody =
            new Func1<ServiceResponse<GalleryApplicationInner>, GalleryApplicationInner>() {
                @Override
                public GalleryApplicationInner call(ServiceResponse<GalleryApplicationInner> serviceResponse) {
                    return serviceResponse.body();
                }
            };
    return createOrUpdateWithServiceResponseAsync(resourceGroupName, galleryName, galleryApplicationName, galleryApplication).map(unwrapBody);
}
/**
 * Create or update a gallery Application Definition.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be created.
 * @param galleryApplicationName The name of the gallery Application Definition to be created or updated. The allowed characters are alphabets and numbers with dots, dashes, and periods allowed in the middle. The maximum length is 80 characters.
 * @param galleryApplication Parameters supplied to the create or update gallery Application operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<ServiceResponse<GalleryApplicationInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String galleryName, String galleryApplicationName, GalleryApplicationInner galleryApplication) {
// Fail fast on the client side before issuing any network call.
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (galleryName == null) {
throw new IllegalArgumentException("Parameter galleryName is required and cannot be null.");
}
if (galleryApplicationName == null) {
throw new IllegalArgumentException("Parameter galleryApplicationName is required and cannot be null.");
}
if (galleryApplication == null) {
throw new IllegalArgumentException("Parameter galleryApplication is required and cannot be null.");
}
// Structural validation of the request payload (required nested properties).
Validator.validate(galleryApplication);
final String apiVersion = "2019-03-01";
Observable<Response<ResponseBody>> observable = service.createOrUpdate(this.client.subscriptionId(), resourceGroupName, galleryName, galleryApplicationName, apiVersion, galleryApplication, this.client.acceptLanguage(), this.client.userAgent());
// PUT is a long-running operation: hand polling over to the Azure client,
// which emits intermediate states and completes with the terminal result.
return client.getAzureClient().getPutOrPatchResultAsync(observable, new TypeToken<GalleryApplicationInner>() { }.getType());
}
/**
 * Issues the initial create-or-update request for a gallery Application
 * Definition and blocks for its single response (no long-running polling).
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be created.
 * @param galleryApplicationName The name of the gallery Application Definition to be created or updated. The allowed characters are alphabets and numbers with dots, dashes, and periods allowed in the middle. The maximum length is 80 characters.
 * @param galleryApplication Parameters supplied to the create or update gallery Application operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the GalleryApplicationInner object if successful.
 */
public GalleryApplicationInner beginCreateOrUpdate(String resourceGroupName, String galleryName, String galleryApplicationName, GalleryApplicationInner galleryApplication) {
    ServiceResponse<GalleryApplicationInner> response =
            beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, galleryName, galleryApplicationName, galleryApplication).toBlocking().single();
    return response.body();
}
/**
 * Issues the initial create-or-update request for a gallery Application
 * Definition, delivering the outcome through the supplied {@link ServiceCallback}.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be created.
 * @param galleryApplicationName The name of the gallery Application Definition to be created or updated. The allowed characters are alphabets and numbers with dots, dashes, and periods allowed in the middle. The maximum length is 80 characters.
 * @param galleryApplication Parameters supplied to the create or update gallery Application operation.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<GalleryApplicationInner> beginCreateOrUpdateAsync(String resourceGroupName, String galleryName, String galleryApplicationName, GalleryApplicationInner galleryApplication, final ServiceCallback<GalleryApplicationInner> serviceCallback) {
    Observable<ServiceResponse<GalleryApplicationInner>> responseObservable =
            beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, galleryName, galleryApplicationName, galleryApplication);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Issues the initial create-or-update request for a gallery Application Definition.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be created.
 * @param galleryApplicationName The name of the gallery Application Definition to be created or updated. The allowed characters are alphabets and numbers with dots, dashes, and periods allowed in the middle. The maximum length is 80 characters.
 * @param galleryApplication Parameters supplied to the create or update gallery Application operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the GalleryApplicationInner object
 */
public Observable<GalleryApplicationInner> beginCreateOrUpdateAsync(String resourceGroupName, String galleryName, String galleryApplicationName, GalleryApplicationInner galleryApplication) {
    // Project the wrapped response down to its deserialized body.
    Func1<ServiceResponse<GalleryApplicationInner>, GalleryApplicationInner> unwrapBody =
            new Func1<ServiceResponse<GalleryApplicationInner>, GalleryApplicationInner>() {
                @Override
                public GalleryApplicationInner call(ServiceResponse<GalleryApplicationInner> serviceResponse) {
                    return serviceResponse.body();
                }
            };
    return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, galleryName, galleryApplicationName, galleryApplication).map(unwrapBody);
}
/**
 * Create or update a gallery Application Definition.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be created.
 * @param galleryApplicationName The name of the gallery Application Definition to be created or updated. The allowed characters are alphabets and numbers with dots, dashes, and periods allowed in the middle. The maximum length is 80 characters.
 * @param galleryApplication Parameters supplied to the create or update gallery Application operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the GalleryApplicationInner object
 */
public Observable<ServiceResponse<GalleryApplicationInner>> beginCreateOrUpdateWithServiceResponseAsync(String resourceGroupName, String galleryName, String galleryApplicationName, GalleryApplicationInner galleryApplication) {
// Fail fast on the client side before issuing any network call.
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (galleryName == null) {
throw new IllegalArgumentException("Parameter galleryName is required and cannot be null.");
}
if (galleryApplicationName == null) {
throw new IllegalArgumentException("Parameter galleryApplicationName is required and cannot be null.");
}
if (galleryApplication == null) {
throw new IllegalArgumentException("Parameter galleryApplication is required and cannot be null.");
}
// Structural validation of the request payload (required nested properties).
Validator.validate(galleryApplication);
final String apiVersion = "2019-03-01";
// Single round trip: deserialize the raw Retrofit response via the delegate,
// converting deserialization/HTTP failures into an error emission.
return service.beginCreateOrUpdate(this.client.subscriptionId(), resourceGroupName, galleryName, galleryApplicationName, apiVersion, galleryApplication, this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<GalleryApplicationInner>>>() {
@Override
public Observable<ServiceResponse<GalleryApplicationInner>> call(Response<ResponseBody> response) {
try {
ServiceResponse<GalleryApplicationInner> clientResponse = beginCreateOrUpdateDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
/**
 * Deserializes the raw create-or-update response.
 * 200/201/202 each carry a {@code GalleryApplicationInner} body; any other
 * status is materialized as a {@link CloudException} and thrown by the builder.
 */
private ServiceResponse<GalleryApplicationInner> beginCreateOrUpdateDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<GalleryApplicationInner, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<GalleryApplicationInner>() { }.getType())
.register(201, new TypeToken<GalleryApplicationInner>() { }.getType())
.register(202, new TypeToken<GalleryApplicationInner>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
/**
 * Retrieves information about a gallery Application Definition, blocking for
 * the single response.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery from which the Application Definitions are to be retrieved.
 * @param galleryApplicationName The name of the gallery Application Definition to be retrieved.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the GalleryApplicationInner object if successful.
 */
public GalleryApplicationInner get(String resourceGroupName, String galleryName, String galleryApplicationName) {
    ServiceResponse<GalleryApplicationInner> response =
            getWithServiceResponseAsync(resourceGroupName, galleryName, galleryApplicationName).toBlocking().single();
    return response.body();
}
/**
 * Retrieves information about a gallery Application Definition, delivering the
 * outcome through the supplied {@link ServiceCallback}.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery from which the Application Definitions are to be retrieved.
 * @param galleryApplicationName The name of the gallery Application Definition to be retrieved.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<GalleryApplicationInner> getAsync(String resourceGroupName, String galleryName, String galleryApplicationName, final ServiceCallback<GalleryApplicationInner> serviceCallback) {
    Observable<ServiceResponse<GalleryApplicationInner>> responseObservable =
            getWithServiceResponseAsync(resourceGroupName, galleryName, galleryApplicationName);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Retrieves information about a gallery Application Definition.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery from which the Application Definitions are to be retrieved.
 * @param galleryApplicationName The name of the gallery Application Definition to be retrieved.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the GalleryApplicationInner object
 */
public Observable<GalleryApplicationInner> getAsync(String resourceGroupName, String galleryName, String galleryApplicationName) {
    // Project the wrapped response down to its deserialized body.
    Func1<ServiceResponse<GalleryApplicationInner>, GalleryApplicationInner> unwrapBody =
            new Func1<ServiceResponse<GalleryApplicationInner>, GalleryApplicationInner>() {
                @Override
                public GalleryApplicationInner call(ServiceResponse<GalleryApplicationInner> serviceResponse) {
                    return serviceResponse.body();
                }
            };
    return getWithServiceResponseAsync(resourceGroupName, galleryName, galleryApplicationName).map(unwrapBody);
}
/**
 * Retrieves information about a gallery Application Definition.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery from which the Application Definitions are to be retrieved.
 * @param galleryApplicationName The name of the gallery Application Definition to be retrieved.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the GalleryApplicationInner object
 */
public Observable<ServiceResponse<GalleryApplicationInner>> getWithServiceResponseAsync(String resourceGroupName, String galleryName, String galleryApplicationName) {
// Fail fast on the client side before issuing any network call.
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (galleryName == null) {
throw new IllegalArgumentException("Parameter galleryName is required and cannot be null.");
}
if (galleryApplicationName == null) {
throw new IllegalArgumentException("Parameter galleryApplicationName is required and cannot be null.");
}
final String apiVersion = "2019-03-01";
// Single GET round trip: deserialize the raw Retrofit response via the delegate,
// converting deserialization/HTTP failures into an error emission.
return service.get(this.client.subscriptionId(), resourceGroupName, galleryName, galleryApplicationName, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<GalleryApplicationInner>>>() {
@Override
public Observable<ServiceResponse<GalleryApplicationInner>> call(Response<ResponseBody> response) {
try {
ServiceResponse<GalleryApplicationInner> clientResponse = getDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
/**
 * Deserializes the raw GET response. Only 200 carries a
 * {@code GalleryApplicationInner} body; any other status is materialized as a
 * {@link CloudException} and thrown by the builder.
 */
private ServiceResponse<GalleryApplicationInner> getDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<GalleryApplicationInner, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<GalleryApplicationInner>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
/**
 * Deletes a gallery Application, blocking until the long-running operation
 * reaches a terminal state.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be deleted.
 * @param galleryApplicationName The name of the gallery Application Definition to be deleted.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 */
public void delete(String resourceGroupName, String galleryName, String galleryApplicationName) {
    Observable<ServiceResponse<Void>> pollingResponses =
            deleteWithServiceResponseAsync(resourceGroupName, galleryName, galleryApplicationName);
    // Block on the terminal emission; the Void body is ignored.
    pollingResponses.toBlocking().last().body();
}
/**
 * Deletes a gallery Application, delivering the outcome through the supplied
 * {@link ServiceCallback}.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be deleted.
 * @param galleryApplicationName The name of the gallery Application Definition to be deleted.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<Void> deleteAsync(String resourceGroupName, String galleryName, String galleryApplicationName, final ServiceCallback<Void> serviceCallback) {
    Observable<ServiceResponse<Void>> responseObservable =
            deleteWithServiceResponseAsync(resourceGroupName, galleryName, galleryApplicationName);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Deletes a gallery Application.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be deleted.
 * @param galleryApplicationName The name of the gallery Application Definition to be deleted.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<Void> deleteAsync(String resourceGroupName, String galleryName, String galleryApplicationName) {
    // Discard the ServiceResponse envelope; the body is Void.
    Func1<ServiceResponse<Void>, Void> unwrapBody = new Func1<ServiceResponse<Void>, Void>() {
        @Override
        public Void call(ServiceResponse<Void> serviceResponse) {
            return serviceResponse.body();
        }
    };
    return deleteWithServiceResponseAsync(resourceGroupName, galleryName, galleryApplicationName).map(unwrapBody);
}
/**
 * Delete a gallery Application.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be deleted.
 * @param galleryApplicationName The name of the gallery Application Definition to be deleted.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<ServiceResponse<Void>> deleteWithServiceResponseAsync(String resourceGroupName, String galleryName, String galleryApplicationName) {
// Fail fast on the client side before issuing any network call.
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (galleryName == null) {
throw new IllegalArgumentException("Parameter galleryName is required and cannot be null.");
}
if (galleryApplicationName == null) {
throw new IllegalArgumentException("Parameter galleryApplicationName is required and cannot be null.");
}
final String apiVersion = "2019-03-01";
Observable<Response<ResponseBody>> observable = service.delete(this.client.subscriptionId(), resourceGroupName, galleryName, galleryApplicationName, apiVersion, this.client.acceptLanguage(), this.client.userAgent());
// DELETE is a long-running operation: hand polling over to the Azure client,
// which completes once the operation reaches a terminal state.
return client.getAzureClient().getPostOrDeleteResultAsync(observable, new TypeToken<Void>() { }.getType());
}
/**
 * Issues the initial delete request for a gallery Application and blocks for
 * its single response (no long-running polling).
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be deleted.
 * @param galleryApplicationName The name of the gallery Application Definition to be deleted.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 */
public void beginDelete(String resourceGroupName, String galleryName, String galleryApplicationName) {
    ServiceResponse<Void> response =
            beginDeleteWithServiceResponseAsync(resourceGroupName, galleryName, galleryApplicationName).toBlocking().single();
    response.body();
}
/**
 * Issues the initial delete request for a gallery Application, delivering the
 * outcome through the supplied {@link ServiceCallback}.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be deleted.
 * @param galleryApplicationName The name of the gallery Application Definition to be deleted.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<Void> beginDeleteAsync(String resourceGroupName, String galleryName, String galleryApplicationName, final ServiceCallback<Void> serviceCallback) {
    Observable<ServiceResponse<Void>> responseObservable =
            beginDeleteWithServiceResponseAsync(resourceGroupName, galleryName, galleryApplicationName);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Issues the initial delete request for a gallery Application.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be deleted.
 * @param galleryApplicationName The name of the gallery Application Definition to be deleted.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<Void> beginDeleteAsync(String resourceGroupName, String galleryName, String galleryApplicationName) {
    // Discard the ServiceResponse envelope; the body is Void.
    Func1<ServiceResponse<Void>, Void> unwrapBody = new Func1<ServiceResponse<Void>, Void>() {
        @Override
        public Void call(ServiceResponse<Void> serviceResponse) {
            return serviceResponse.body();
        }
    };
    return beginDeleteWithServiceResponseAsync(resourceGroupName, galleryName, galleryApplicationName).map(unwrapBody);
}
/**
 * Delete a gallery Application.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be deleted.
 * @param galleryApplicationName The name of the gallery Application Definition to be deleted.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<ServiceResponse<Void>> beginDeleteWithServiceResponseAsync(String resourceGroupName, String galleryName, String galleryApplicationName) {
// Fail fast on the client side before issuing any network call.
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (galleryName == null) {
throw new IllegalArgumentException("Parameter galleryName is required and cannot be null.");
}
if (galleryApplicationName == null) {
throw new IllegalArgumentException("Parameter galleryApplicationName is required and cannot be null.");
}
final String apiVersion = "2019-03-01";
// Single DELETE round trip: deserialize the raw Retrofit response via the
// delegate, converting deserialization/HTTP failures into an error emission.
return service.beginDelete(this.client.subscriptionId(), resourceGroupName, galleryName, galleryApplicationName, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = beginDeleteDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
/**
 * Deserializes the raw delete response. 200/202/204 are all accepted success
 * statuses with no body; any other status is materialized as a
 * {@link CloudException} and thrown by the builder.
 */
private ServiceResponse<Void> beginDeleteDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<Void, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<Void>() { }.getType())
.register(202, new TypeToken<Void>() { }.getType())
.register(204, new TypeToken<Void>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
/**
 * Lists gallery Application Definitions in a gallery, blocking for the first
 * page and fetching later pages lazily as the returned list is iterated.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery from which Application Definitions are to be listed.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PagedList&lt;GalleryApplicationInner&gt; object if successful.
 */
public PagedList<GalleryApplicationInner> listByGallery(final String resourceGroupName, final String galleryName) {
    ServiceResponse<Page<GalleryApplicationInner>> firstPage =
            listByGallerySinglePageAsync(resourceGroupName, galleryName).toBlocking().single();
    return new PagedList<GalleryApplicationInner>(firstPage.body()) {
        @Override
        public Page<GalleryApplicationInner> nextPage(String nextPageLink) {
            // Synchronously resolve the next page on demand.
            return listByGalleryNextSinglePageAsync(nextPageLink).toBlocking().single().body();
        }
    };
}
/**
 * Lists gallery Application Definitions in a gallery, streaming pages to the
 * supplied {@link ListOperationCallback}.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery from which Application Definitions are to be listed.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<GalleryApplicationInner>> listByGalleryAsync(final String resourceGroupName, final String galleryName, final ListOperationCallback<GalleryApplicationInner> serviceCallback) {
    // Continuation used by the runtime to resolve each subsequent page.
    Func1<String, Observable<ServiceResponse<Page<GalleryApplicationInner>>>> fetchNextPage =
            new Func1<String, Observable<ServiceResponse<Page<GalleryApplicationInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<GalleryApplicationInner>>> call(String nextPageLink) {
                    return listByGalleryNextSinglePageAsync(nextPageLink);
                }
            };
    return AzureServiceFuture.fromPageResponse(listByGallerySinglePageAsync(resourceGroupName, galleryName), fetchNextPage, serviceCallback);
}
/**
 * Lists gallery Application Definitions in a gallery.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery from which Application Definitions are to be listed.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;GalleryApplicationInner&gt; object
 */
public Observable<Page<GalleryApplicationInner>> listByGalleryAsync(final String resourceGroupName, final String galleryName) {
    // Strip the ServiceResponse envelope from each emitted page.
    Func1<ServiceResponse<Page<GalleryApplicationInner>>, Page<GalleryApplicationInner>> unwrapPage =
            new Func1<ServiceResponse<Page<GalleryApplicationInner>>, Page<GalleryApplicationInner>>() {
                @Override
                public Page<GalleryApplicationInner> call(ServiceResponse<Page<GalleryApplicationInner>> serviceResponse) {
                    return serviceResponse.body();
                }
            };
    return listByGalleryWithServiceResponseAsync(resourceGroupName, galleryName).map(unwrapPage);
}
/**
 * List gallery Application Definitions in a gallery.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery from which Application Definitions are to be listed.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList<GalleryApplicationInner> object
 */
public Observable<ServiceResponse<Page<GalleryApplicationInner>>> listByGalleryWithServiceResponseAsync(final String resourceGroupName, final String galleryName) {
// Emit the first page, then recursively chain each following page (via its
// nextPageLink) until a page with no continuation link terminates the stream.
return listByGallerySinglePageAsync(resourceGroupName, galleryName)
.concatMap(new Func1<ServiceResponse<Page<GalleryApplicationInner>>, Observable<ServiceResponse<Page<GalleryApplicationInner>>>>() {
@Override
public Observable<ServiceResponse<Page<GalleryApplicationInner>>> call(ServiceResponse<Page<GalleryApplicationInner>> page) {
String nextPageLink = page.body().nextPageLink();
if (nextPageLink == null) {
return Observable.just(page);
}
return Observable.just(page).concatWith(listByGalleryNextWithServiceResponseAsync(nextPageLink));
}
});
}
/**
 * List gallery Application Definitions in a gallery.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery from which Application Definitions are to be listed.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList<GalleryApplicationInner> object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<GalleryApplicationInner>>> listByGallerySinglePageAsync(final String resourceGroupName, final String galleryName) {
// Fail fast on the client side before issuing any network call.
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (galleryName == null) {
throw new IllegalArgumentException("Parameter galleryName is required and cannot be null.");
}
final String apiVersion = "2019-03-01";
// Fetch exactly one page; the wire type PageImpl1 is re-wrapped as the public
// Page interface before being emitted.
return service.listByGallery(this.client.subscriptionId(), resourceGroupName, galleryName, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<GalleryApplicationInner>>>>() {
@Override
public Observable<ServiceResponse<Page<GalleryApplicationInner>>> call(Response<ResponseBody> response) {
try {
ServiceResponse<PageImpl1<GalleryApplicationInner>> result = listByGalleryDelegate(response);
return Observable.just(new ServiceResponse<Page<GalleryApplicationInner>>(result.body(), result.response()));
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
/**
 * Deserializes the raw list response. Only 200 carries a page body
 * ({@code PageImpl1}); any other status is materialized as a
 * {@link CloudException} and thrown by the builder.
 */
private ServiceResponse<PageImpl1<GalleryApplicationInner>> listByGalleryDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<PageImpl1<GalleryApplicationInner>, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<PageImpl1<GalleryApplicationInner>>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
/**
 * Lists gallery Application Definitions starting from a continuation link,
 * blocking for that page and fetching later pages lazily during iteration.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PagedList&lt;GalleryApplicationInner&gt; object if successful.
 */
public PagedList<GalleryApplicationInner> listByGalleryNext(final String nextPageLink) {
    ServiceResponse<Page<GalleryApplicationInner>> firstPage =
            listByGalleryNextSinglePageAsync(nextPageLink).toBlocking().single();
    return new PagedList<GalleryApplicationInner>(firstPage.body()) {
        @Override
        public Page<GalleryApplicationInner> nextPage(String nextPageLink) {
            // Synchronously resolve the next page on demand.
            return listByGalleryNextSinglePageAsync(nextPageLink).toBlocking().single().body();
        }
    };
}
/**
* List gallery Application Definitions in a gallery.
*
* @param nextPageLink The NextLink from the previous successful call to List operation.
* @param serviceFuture the ServiceFuture object tracking the Retrofit calls
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
public ServiceFuture<List<GalleryApplicationInner>> listByGalleryNextAsync(final String nextPageLink, final ServiceFuture<List<GalleryApplicationInner>> serviceFuture, final ListOperationCallback<GalleryApplicationInner> serviceCallback) {
return AzureServiceFuture.fromPageResponse(
listByGalleryNextSinglePageAsync(nextPageLink),
new Func1<String, Observable<ServiceResponse<Page<GalleryApplicationInner>>>>() {
@Override
public Observable<ServiceResponse<Page<GalleryApplicationInner>>> call(String nextPageLink) {
return listByGalleryNextSinglePageAsync(nextPageLink);
}
},
serviceCallback);
}
/**
* List gallery Application Definitions in a gallery.
*
* @param nextPageLink The NextLink from the previous successful call to List operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the PagedList<GalleryApplicationInner> object
*/
public Observable<Page<GalleryApplicationInner>> listByGalleryNextAsync(final String nextPageLink) {
return listByGalleryNextWithServiceResponseAsync(nextPageLink)
.map(new Func1<ServiceResponse<Page<GalleryApplicationInner>>, Page<GalleryApplicationInner>>() {
@Override
public Page<GalleryApplicationInner> call(ServiceResponse<Page<GalleryApplicationInner>> response) {
return response.body();
}
});
}
/**
* List gallery Application Definitions in a gallery.
*
* @param nextPageLink The NextLink from the previous successful call to List operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the PagedList<GalleryApplicationInner> object
*/
public Observable<ServiceResponse<Page<GalleryApplicationInner>>> listByGalleryNextWithServiceResponseAsync(final String nextPageLink) {
return listByGalleryNextSinglePageAsync(nextPageLink)
.concatMap(new Func1<ServiceResponse<Page<GalleryApplicationInner>>, Observable<ServiceResponse<Page<GalleryApplicationInner>>>>() {
@Override
public Observable<ServiceResponse<Page<GalleryApplicationInner>>> call(ServiceResponse<Page<GalleryApplicationInner>> page) {
String nextPageLink = page.body().nextPageLink();
if (nextPageLink == null) {
return Observable.just(page);
}
return Observable.just(page).concatWith(listByGalleryNextWithServiceResponseAsync(nextPageLink));
}
});
}
/**
* List gallery Application Definitions in a gallery.
*
ServiceResponse<PageImpl1<GalleryApplicationInner>> * @param nextPageLink The NextLink from the previous successful call to List operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the PagedList<GalleryApplicationInner> object wrapped in {@link ServiceResponse} if successful.
*/
public Observable<ServiceResponse<Page<GalleryApplicationInner>>> listByGalleryNextSinglePageAsync(final String nextPageLink) {
if (nextPageLink == null) {
throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
}
String nextUrl = String.format("%s", nextPageLink);
return service.listByGalleryNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<GalleryApplicationInner>>>>() {
@Override
public Observable<ServiceResponse<Page<GalleryApplicationInner>>> call(Response<ResponseBody> response) {
try {
ServiceResponse<PageImpl1<GalleryApplicationInner>> result = listByGalleryNextDelegate(response);
return Observable.just(new ServiceResponse<Page<GalleryApplicationInner>>(result.body(), result.response()));
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
private ServiceResponse<PageImpl1<GalleryApplicationInner>> listByGalleryNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<PageImpl1<GalleryApplicationInner>, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<PageImpl1<GalleryApplicationInner>>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
}
| |
package com.coolweather.android;
import android.app.ProgressDialog;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.coolweather.android.db.City;
import com.coolweather.android.db.County;
import com.coolweather.android.db.Province;
import com.coolweather.android.util.HttpUtil;
import com.coolweather.android.util.ResponseParseUtil;
import org.litepal.crud.DataSupport;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;
/**
* Created by czhang on 7/2/17.
*/
public class ChooseAreaFragment extends Fragment {
private static final String WEATHER_SERVICE_ROOT_URL = "http://guolin.tech/api/china";
private static final String TYPE_PROVINCE = "province";
private static final String TYPE_CITY = "city";
private static final String TYPE_COUNTY = "county";
public static final int LEVEL_PROVINCE = 0;
public static final int LEVEL_CITY = 1;
public static final int LEVEL_COUNTY = 2;
private ProgressDialog progressDialog;
private TextView titleText;
private Button backButton;
private ListView listView;
private ArrayAdapter<String> adapter;
private List<String> dataList = new ArrayList<>();
private List<Province> provinceList;
private List<City> cityList;
private List<County> countyList;
private Province selectedProvince;
private City selectedCity;
private int currentLevel;
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.choose_area, container, false);
titleText = (TextView) view.findViewById(R.id.title_text);
backButton = (Button) view.findViewById(R.id.back_button);
listView = (ListView) view.findViewById(R.id.list_view);
adapter = new ArrayAdapter<>(getContext(), android.R.layout.simple_list_item_1, dataList);
listView.setAdapter(adapter);
return view;
}
@Override
public void onActivityCreated(@Nullable Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
if (currentLevel == LEVEL_PROVINCE) {
selectedProvince = provinceList.get(position);
queryCities();
} else if (currentLevel == LEVEL_CITY) {
selectedCity = cityList.get(position);
queryCounties();
} else if (currentLevel == LEVEL_COUNTY) {
String weatherId = countyList.get(position).getWeatherId();
if (getActivity() instanceof MainActivity) {
Intent intent = new Intent(getActivity(), WeatherActivity.class);
intent.putExtra("weather_id", weatherId);
startActivity(intent);
getActivity().finish();
} else if (getActivity() instanceof WeatherActivity) {
WeatherActivity activity = (WeatherActivity) getActivity();
activity.drawerLayout.closeDrawers();
activity.swipeRefresh.setRefreshing(true);
activity.requestWeather(weatherId);
}
}
}
});
backButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (currentLevel == LEVEL_COUNTY) {
queryCities();
} else if (currentLevel == LEVEL_CITY) {
queryProvinces();
}
}
});
queryProvinces();
}
private void queryProvinces() {
titleText.setText("CHINA");
backButton.setVisibility(View.GONE);
provinceList = DataSupport.findAll(Province.class);
if (provinceList.size() > 0) {
dataList.clear();
for (Province province : provinceList) {
dataList.add(province.getProvinceName());
}
adapter.notifyDataSetChanged();
listView.setSelection(0);
currentLevel = LEVEL_PROVINCE;
} else {
String address = WEATHER_SERVICE_ROOT_URL;
queryFromServer(address, TYPE_PROVINCE);
}
}
private void queryCities() {
titleText.setText(selectedProvince.getProvinceName());
backButton.setVisibility(View.VISIBLE);
cityList = DataSupport.where("provinceid=?", String.valueOf(selectedProvince.getId())).find(City.class);
if (cityList.size() > 0) {
dataList.clear();
for (City city : cityList) {
dataList.add(city.getCityName());
}
adapter.notifyDataSetChanged();
listView.setSelection(0);
currentLevel = LEVEL_CITY;
} else {
int provinceCode = selectedProvince.getProvinceCode();
String address = WEATHER_SERVICE_ROOT_URL + "/" + provinceCode;
queryFromServer(address, TYPE_CITY);
}
}
private void queryCounties() {
titleText.setText(selectedCity.getCityName());
backButton.setVisibility(View.VISIBLE);
countyList = DataSupport.where("cityid=?", String.valueOf(selectedCity.getId())).find(County.class);
if (countyList.size() > 0) {
dataList.clear();
for (County county : countyList) {
dataList.add(county.getCountyName());
}
adapter.notifyDataSetChanged();
listView.setSelection(0);
currentLevel = LEVEL_COUNTY;
} else {
int provinceCode = selectedProvince.getProvinceCode();
int cityCode = selectedCity.getCityCode();
String address = WEATHER_SERVICE_ROOT_URL + "/" + provinceCode + "/" + cityCode;
queryFromServer(address, TYPE_COUNTY);
}
}
private void queryFromServer(String address, final String type) {
showProgressDialog();
HttpUtil.sendOkHttpRequest(address, new Callback() {
@Override
public void onResponse(Call call, Response response) throws IOException {
String responseText = response.body().string();
boolean result = false;
if (TYPE_PROVINCE.equals(type)) {
result = ResponseParseUtil.handleProvinceResponse(responseText);
} else if (TYPE_CITY.equals(type)) {
result = ResponseParseUtil.handleCityResponse(responseText, selectedProvince.getId());
} else if (TYPE_COUNTY.equals(type)) {
result = ResponseParseUtil.handleCountyResponse(responseText, selectedCity.getId());
}
if (result) {
// the callback method will be run in another thread, so we must use "runOnUiThread" to change the UI
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
closeProgressDialog();
if (TYPE_PROVINCE.equals(type)) {
queryProvinces();
} else if (TYPE_CITY.equals(type)) {
queryCities();
} else if (TYPE_COUNTY.equals(type)) {
queryCounties();
}
}
});
}
}
@Override
public void onFailure(Call call, IOException e) {
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
closeProgressDialog();
Toast.makeText(getContext(), "failed to load data...", Toast.LENGTH_SHORT).show();
}
});
}
});
}
private void showProgressDialog() {
if (progressDialog == null) {
progressDialog = new ProgressDialog(getActivity());
progressDialog.setMessage("loading...");
progressDialog.setCanceledOnTouchOutside(false);
}
progressDialog.show();
}
private void closeProgressDialog() {
if (progressDialog != null) {
progressDialog.dismiss();
}
}
}
| |
/*
*
* Paros and its related class files.
*
* Paros is an HTTP/HTTPS proxy for assessing web application security.
* Copyright (C) 2003-2004 Chinotec Technologies Company
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the Clarified Artistic License
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* Clarified Artistic License for more details.
*
* You should have received a copy of the Clarified Artistic License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
// ZAP: 2011/05/15 Support for exclusions
// ZAP: 2011/09/06 Fix alert save plus concurrent mod exceptions
// ZAP: 2011/11/04 Correct getHierarchicNodeName
// ZAP: 2011/11/29 Added blank image to node names to fix redrawing issue
// ZAP: 2012/02/11 Re-ordered icons, added spider icon and notify via SiteMap
// ZAP: 2012/03/11 Issue 280: Escape URLs in sites tree
// ZAP: 2012/03/15 Changed the method toString to use the class StringBuilder
// and reworked the method toString and getIcons. Renamed the method
// getIcons to appendIcons.
// ZAP: 2012/07/29 Issue 43: Added support for Scope
// ZAP: 2012/08/29 Issue 250 Support for authentication management
// ZAP: 2012/10/02 Issue 385: Added support for Contexts
// ZAP: 2013/01/23 Ignore Active scanner history refs
// ZAP: 2013/08/23 Make sure #nodeChanged() is called after removing a custom icon
// ZAP: 2013/11/16 Issue 869: Differentiate proxied requests from (ZAP) user requests
// ZAP: 2014/03/23 Issue 1084: NullPointerException while selecting a node in the "Sites" tab
// ZAP: 2014/04/10 Do not allow to set the parent node as itself
// ZAP: 2014/04/10 Issue 1118: Alerts Tab can get out of sync
// ZAP: 2014/05/05 Issue 1181: Vulnerable pages active scanned only once
// ZAP: 2014/05/23 Issue 1209: Reliability becomes Confidence and add levels
// ZAP: 2014/06/16 Fixed an issue in SiteNode#setHistoryReference(HistoryReference) that led
// to multiple occurrences of same HistoryReference(s) in the pastHistoryList.
// ZAP: 2014/06/16 Issue 990: Allow to delete alerts through the API
// ZAP: 2014/11/19 Issue 1412: Prevent ConcurrentModificationException when icons updated frequently
// ZAP: 2014/12/17 Issue 1174: Support a Site filter
// ZAP: 2015/04/02 Issue 1582: Low memory option
// ZAP: 2015/10/21 Issue 1576: Support data driven content
// ZAP: 2016/01/26 Fixed findbugs warning
// ZAP: 2016/03/24 Do not access EDT in daemon mode
// ZAP: 2016/04/12 Notify of changes when an alert is updated
// ZAP: 2016/08/30 Use a Set instead of a List for the alerts
package org.parosproxy.paros.model;
import java.awt.EventQueue;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.Vector;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.MutableTreeNode;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.log4j.Logger;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.core.scanner.Alert;
import org.parosproxy.paros.view.View;
import org.zaproxy.zap.model.SessionStructure;
/**
 * A node in ZAP's "Sites" tree. Wraps a {@link HistoryReference} plus the
 * alerts raised against the node, and renders itself (via {@link #toString()})
 * as HTML including risk/spider/custom icons.
 * <p>
 * Alerts are propagated up the tree: adding/updating/deleting an alert on a
 * node also touches its ancestors. Icon lists are guarded by synchronizing on
 * {@link #icons}; the alert set is a synchronized set, with explicit
 * synchronization where iteration is needed.
 */
public class SiteNode extends DefaultMutableTreeNode {
    private static final long serialVersionUID = 7987615016786179150L;
    private String nodeName = null;
    // Cached regex-form hierarchic name; computed lazily in getHierarchicNodeName().
    private String hierarchicNodeName = null;
    private HistoryReference historyReference = null;
    private Vector<HistoryReference> pastHistoryList = new Vector<>(10);
    // ZAP: Support for linking Alerts to SiteNodes
    private SiteMap siteMap = null;
    private Set<Alert> alerts = Collections.synchronizedSet(new HashSet<Alert>());
    private boolean justSpidered = false;
    //private boolean justAJAXSpidered = false;
    // Custom icon resource paths and, in parallel, whether each icon is cleared
    // once the page is visited manually. Both lists are guarded by "icons".
    private ArrayList<String> icons = null;
    private ArrayList<Boolean> clearIfManual = null;
    private static Logger log = Logger.getLogger(SiteNode.class);
    private boolean isIncludedInScope = false;
    private boolean isExcludedFromScope = false;
    private boolean filtered = false;
    private boolean dataDriven = false;
    /**
     * Flag that indicates whether or not the {@link #calculateHighestAlert() highest alert needs to be calculated}, when
     * {@link #appendIcons(StringBuilder) building the string representation}.
     */
    private boolean calculateHighestAlert;
    /**
     * The {@code Alert} with highest risk (and not a false positive).
     *
     * @see #isHighestAlert(Alert)
     */
    private Alert highestAlert;
    public SiteNode(SiteMap siteMap, int type, String nodeName) {
        super();
        this.siteMap = siteMap;
        this.nodeName = nodeName;
        if (nodeName.startsWith(SessionStructure.DATA_DRIVEN_NODE_PREFIX)) {
            this.dataDriven = true;
        }
        this.icons = new ArrayList<>();
        this.clearIfManual = new ArrayList<>();
        if (type == HistoryReference.TYPE_SPIDER) {
            this.justSpidered = true;
        }
    }
    /** Replaces all custom icons (and their clear-if-manual flags) at once. */
    public void setCustomIcons(ArrayList<String> i, ArrayList<Boolean> c) {
        synchronized (this.icons) {
            this.icons.clear();
            this.icons.addAll(i);
            this.clearIfManual = c;
        }
    }
    /** Adds a custom icon (no duplicates) and repaints the node. */
    public void addCustomIcon(String resourceName, boolean clearIfManual) {
        synchronized (this.icons) {
            if (! this.icons.contains(resourceName)) {
                this.icons.add(resourceName);
                this.clearIfManual.add(clearIfManual);
                this.nodeChanged();
            }
        }
    }
    /** Removes a custom icon (and its clear-if-manual flag) and repaints the node. */
    public void removeCustomIcon(String resourceName) {
        synchronized (this.icons) {
            if (this.icons.contains(resourceName)) {
                int i = this.icons.indexOf(resourceName);
                this.icons.remove(i);
                this.clearIfManual.remove(i);
                this.nodeChanged();
            }
        }
    }
    /** Appends the HTML {@code <img>} tags for the highest-alert, spider and custom icons. */
    private void appendIcons(StringBuilder sb) {
        if (calculateHighestAlert) {
            calculateHighestAlert();
        }
        if (highestAlert != null) {
            sb.append("&nbsp;<img src=\"");
            sb.append(highestAlert.getIconUrl());
            sb.append("\">&nbsp;");
        }
        if (justSpidered) {
            sb.append("&nbsp;<img src=\"");
            sb.append(Constant.class.getResource("/resource/icon/10/spider.png"));
            sb.append("\">&nbsp;");
        }
        synchronized (this.icons) {
            if (!this.icons.isEmpty()) {
                for(String icon : this.icons) {
                    sb.append("&nbsp;<img src=\"");
                    sb.append(Constant.class.getResource(icon));
                    sb.append("\">&nbsp;");
                }
            }
        }
    }
    /**
     * Calculates the highest alert.
     * <p>
     * After a call to this method the {@link #highestAlert} will have the highest alert (or {@code null} if none) and the flag
     * {@link #calculateHighestAlert} will have the value {@code false}.
     *
     * @see #isHighestAlert(Alert)
     */
    private void calculateHighestAlert() {
        synchronized (alerts) {
            highestAlert = null;
            for (Alert alert : alerts) {
                if (isHighestAlert(alert)) {
                    highestAlert = alert;
                }
            }
            calculateHighestAlert = false;
        }
    }
    /**
     * Tells whether or not the given alert is the alert with highest risk than the current highest alert.
     * <p>
     * {@link Alert#CONFIDENCE_FALSE_POSITIVE False positive alerts} are ignored.
     *
     * @param alert the alert to check
     * @return {@code true} if it's the alert with highest risk, {@code false} otherwise.
     */
    private boolean isHighestAlert(Alert alert) {
        if (alert.getConfidence() == Alert.CONFIDENCE_FALSE_POSITIVE) {
            return false;
        }
        if (highestAlert == null) {
            return true;
        }
        return alert.getRisk() > highestAlert.getRisk();
    }
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("<html><body>");
        appendIcons(sb);
        sb.append(StringEscapeUtils.escapeHtml(nodeName));
        sb.append("</body></html>");
        return sb.toString();
    }
    /**
     * Ordering helper: {@code true} when the given name sorts before this node's name.
     */
    public boolean isParentOf (String nodeName) {
        if (nodeName == null) {
            return false;
        }
        return nodeName.compareTo(this.nodeName) < 0;
    }
    public String getNodeName() {
        return this.nodeName;
    }
    public String getCleanNodeName() {
        return getCleanNodeName(true);
    }
    /**
     * Returns the node name stripped of method prefix, parameter summary and query string.
     * Data-driven nodes are optionally rendered as a non-greedy regex group.
     */
    public String getCleanNodeName(boolean specialNodesAsRegex) {
        String name = this.getNodeName();
        if (specialNodesAsRegex && this.isDataDriven()) {
            // Non-greedy regex pattern
            name = "(.+?)";
        } else if (this.isLeaf()) {
            int colonIndex = name.indexOf(":");
            if (colonIndex > 0) {
                // Strip the GET/POST etc off
                name = name.substring(colonIndex+1);
            }
            int bracketIndex = name.lastIndexOf("(");
            if (bracketIndex > 0) {
                // Strip the param summary off
                name = name.substring(0, bracketIndex);
            }
            int quesIndex = name.indexOf("?");
            if (quesIndex > 0) {
                // Strip the parameters off
                name = name.substring(0, quesIndex);
            }
        }
        return name;
    }
    public String getHierarchicNodeName() {
        return getHierarchicNodeName(true);
    }
    /** Builds the full "/"-joined ancestor path for this node; the regex form is cached. */
    public String getHierarchicNodeName(boolean specialNodesAsRegex) {
        if (hierarchicNodeName != null && specialNodesAsRegex) {
            // The regex version is used most frequently, so cache
            return hierarchicNodeName;
        }
        if (this.isRoot()) {
            hierarchicNodeName = "";
        } else if (this.getParent().isRoot()) {
            hierarchicNodeName = this.getNodeName();
        } else {
            String name =
                this.getParent().getHierarchicNodeName(specialNodesAsRegex) + "/" +
                this.getCleanNodeName(specialNodesAsRegex);
            if (!specialNodesAsRegex) {
                // Dont cache the non regex version
                return name;
            }
            hierarchicNodeName = name;
        }
        return hierarchicNodeName;
    }
    public HistoryReference getHistoryReference() {
        return historyReference;
    }
    /**
     * Set current node reference.
     * If there is any existing reference, delete if spider record.
     * Otherwise, put into past history list.
     * @param historyReference
     */
    public void setHistoryReference(HistoryReference historyReference) {
        if (getHistoryReference() != null) {
//          if (getHistoryReference().getHistoryType() == HistoryReference.TYPE_SPIDER) {
//              getHistoryReference().delete();
//              getHistoryReference().setSiteNode(null);
//          } else if (!getPastHistoryReference().contains(historyReference)) {
//              getPastHistoryReference().add(getHistoryReference());
//          }
            if (this.justSpidered && (historyReference.getHistoryType() == HistoryReference.TYPE_PROXIED ||
                    historyReference.getHistoryType() == HistoryReference.TYPE_ZAP_USER)) {
                this.justSpidered = false;
                this.nodeChanged();
            }
            // we remove the icons of the node that has to be cleaned when manually visiting them
            if (!this.icons.isEmpty() && (historyReference.getHistoryType() == HistoryReference.TYPE_PROXIED ||
                    historyReference.getHistoryType() == HistoryReference.TYPE_ZAP_USER)) {
                synchronized (this.icons) {
                    for (int i = 0; i < this.clearIfManual.size(); ++i) {
                        if (this.clearIfManual.get(i) && this.icons.size() > i) {
                            this.icons.remove(i);
                            this.clearIfManual.remove(i);
                            // Step back one position: the element after the removed one
                            // shifted into index i and must not be skipped.
                            --i;
                        }
                    }
                }
                this.nodeChanged();
            }
            if (HistoryReference.TYPE_SCANNER == historyReference.getHistoryType()) {
                getPastHistoryReference().add(historyReference);
                return;
            }
            // above code commented as to always add all into past reference. For use in scanner
            if (!getPastHistoryReference().contains(getHistoryReference())) {
                getPastHistoryReference().add(getHistoryReference());
            }
        }
        this.historyReference = historyReference;
        this.historyReference.setSiteNode(this);
    }
    /** Notifies the site map that this node changed, always on the EDT; no-op in daemon mode. */
    private void nodeChanged() {
        if (this.siteMap == null || !View.isInitialised()) {
            return;
        }
        if (EventQueue.isDispatchThread()) {
            nodeChangedEventHandler();
        } else {
            try {
                EventQueue.invokeLater(new Runnable() {
                    @Override
                    public void run() {
                        nodeChangedEventHandler();
                    }
                });
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
    }
    private void nodeChangedEventHandler() {
        this.siteMap.nodeChanged(this);
    }
    public Vector<HistoryReference> getPastHistoryReference() {
        return pastHistoryList;
    }
    public boolean hasAlert(Alert alert) {
        if (alert == null) {
            throw new IllegalArgumentException("Alert must not be null");
        }
        return alerts.contains(alert);
    }
    /** Adds the alert to this node and propagates it up to every ancestor. */
    public void addAlert(Alert alert) {
        if (alert == null) {
            throw new IllegalArgumentException("Alert must not be null");
        }
        if (!this.alerts.add(alert)) {
            return;
        }
        if (isHighestAlert(alert)) {
            highestAlert = alert;
        }
        if (this.getParent() != null) {
            this.getParent().addAlert(alert);
        }
        if (this.siteMap != null) {
            // Adding alert might affect the nodes visibility in a filtered tree
            siteMap.applyFilter(this);
        }
        this.nodeChanged();
    }
    /** Replaces the stored alert with the same alert ID and propagates the update upwards. */
    public void updateAlert(Alert alert) {
        if (alert == null) {
            throw new IllegalArgumentException("Alert must not be null");
        }
        boolean updated = false;
        synchronized (alerts) {
            for (Iterator<Alert> it = alerts.iterator(); it.hasNext();) {
                if (it.next().getAlertId() == alert.getAlertId()) {
                    it.remove();
                    updated = true;
                    this.alerts.add(alert);
                    setCalculateHighestAlertIfSameAlert(alert);
                    break;
                }
            }
        }
        if (updated) {
            if (this.getParent() != null) {
                this.getParent().updateAlert(alert);
            }
            if (this.siteMap != null) {
                // Updating an alert might affect the nodes visibility in a filtered tree
                siteMap.applyFilter(this);
            }
            this.nodeChanged();
        }
    }
    /**
     * Gets the alerts of the node.
     * <p>
     * The returned {@code List} is a copy of the internal collection.
     *
     * @return a new {@code List} containing the {@code Alert}s
     */
    public List<Alert> getAlerts() {
        synchronized (alerts) {
            return new ArrayList<>(alerts);
        }
    }
    private void clearChildAlert (Alert alert, SiteNode child) {
        // Alerts are propagated up, which means when one is deleted we need to work out if it still
        // is present in another child node
        boolean removed = true;
        alerts.remove(alert);
        if (this.getChildCount() > 0) {
            SiteNode c = (SiteNode) this.getFirstChild();
            while (c != null) {
                if (! c.equals(child)) {
                    if (c.hasAlert(alert)) {
                        alerts.add(alert);
                        removed = false;
                        break;
                    }
                }
                c = (SiteNode) this.getChildAfter(c);
            }
        }
        if (removed) {
            setCalculateHighestAlertIfSameAlert(alert);
            nodeChanged();
            if (this.getParent() != null) {
                this.getParent().clearChildAlert(alert, this);
            }
        }
    }
    /** Deletes the alert from this node and from ancestors where no sibling still has it. */
    public void deleteAlert(Alert alert) {
        if (alert == null) {
            throw new IllegalArgumentException("Alert must not be null");
        }
        if (!alerts.remove(alert)) {
            return;
        }
        setCalculateHighestAlertIfSameAlert(alert);
        // Remove from parents, if not in siblings
        if (this.getParent() != null) {
            this.getParent().clearChildAlert(alert, this);
        }
        if (this.siteMap != null) {
            // Deleting alert might affect the nodes visibility in a filtered tree
            siteMap.applyFilter(this);
        }
        this.nodeChanged();
    }
    /**
     * Sets whether or not the highest alert needs to be calculated, based on the given alert.
     * <p>
     * The highest alert needs to be calculated if the given alert is the highest alert.
     *
     * @param alert the alert to check
     */
    private void setCalculateHighestAlertIfSameAlert(Alert alert) {
        if (highestAlert != null && highestAlert.getAlertId() == alert.getAlertId()) {
            calculateHighestAlert = true;
            highestAlert = null;
        }
    }
    /** Deletes the given alerts from this node and from ancestors where appropriate. */
    public void deleteAlerts(List<Alert> alerts) {
        if (this.alerts.removeAll(alerts)) {
            // Remove from parents, if not in siblings
            if (this.getParent() != null) {
                this.getParent().clearChildAlerts(alerts);
            }
            if (this.siteMap != null) {
                // Deleting alerts might affect the nodes visibility in a filtered tree
                siteMap.applyFilter(this);
            }
            calculateHighestAlert = true;
            this.nodeChanged();
        }
    }
    /**
     * Deletes all alerts of this node and all child nodes recursively.
     */
    public void deleteAllAlerts() {
        for(int i = 0; i < getChildCount(); i++) {
            ((SiteNode) getChildAt(i)).deleteAllAlerts();
        }
        if (!alerts.isEmpty()) {
            alerts.clear();
            // Drop the cached highest alert too, otherwise the node keeps showing
            // the risk icon of an alert that no longer exists.
            highestAlert = null;
            calculateHighestAlert = false;
            if (this.siteMap != null) {
                // Deleting alert might affect the nodes visibility in a filtered tree
                siteMap.applyFilter(this);
            }
            nodeChanged();
        }
    }
    private void clearChildAlerts(List<Alert> alerts) {
        List<Alert> alertsToRemove = new ArrayList<>(alerts);
        if (this.getChildCount() > 0) {
            SiteNode c = (SiteNode) this.getFirstChild();
            while (c != null) {
                // Keep alerts still present in any child.
                alertsToRemove.removeAll(c.alerts);
                c = (SiteNode) this.getChildAfter(c);
            }
        }
        boolean changed = this.alerts.removeAll(alertsToRemove);
        if (changed) {
            calculateHighestAlert = true;
            if (this.getParent() != null) {
                this.getParent().clearChildAlerts(alertsToRemove);
            }
            nodeChangedEventHandler();
        }
    }
    /** Whether this node's current or any past history reference has the given type. */
    public boolean hasHistoryType (int type) {
        if (this.historyReference == null) {
            return false;
        }
        if (this.historyReference.getHistoryType() == type) {
            return true;
        }
        for (HistoryReference href : this.pastHistoryList) {
            if (href.getHistoryType() == type) {
                return true;
            }
        }
        return false;
    }
    /** Whether this node's current and all past history references have ONLY the given type. */
    public boolean hasJustHistoryType (int type) {
        if (this.historyReference == null) {
            return false;
        }
        if (this.historyReference.getHistoryType() != type) {
            return false;
        }
        for (HistoryReference href : this.pastHistoryList) {
            if (href.getHistoryType() != type) {
                return false;
            }
        }
        return true;
    }
    public boolean isIncludedInScope() {
        return isIncludedInScope;
    }
    /** Sets the in-scope flag, optionally recursing into all descendants. */
    public void setIncludedInScope(boolean isIncludedInScope, boolean applyToChildNodes) {
        this.isIncludedInScope = isIncludedInScope;
        if (siteMap != null) {
            // This could have affected its visibility
            siteMap.applyFilter(this);
        }
        this.nodeChanged();
        // Recurse down
        if (this.getChildCount() > 0 && applyToChildNodes) {
            SiteNode c = (SiteNode) this.getFirstChild();
            while (c != null) {
                c.setIncludedInScope(isIncludedInScope, applyToChildNodes);
                c = (SiteNode) this.getChildAfter(c);
            }
        }
    }
    public boolean isExcludedFromScope() {
        return isExcludedFromScope;
    }
    /** Sets the excluded flag (clearing the included flag), optionally recursing into descendants. */
    public void setExcludedFromScope(boolean isExcludedFromScope, boolean applyToChildNodes) {
        this.isExcludedFromScope = isExcludedFromScope;
        if (isExcludedFromScope) {
            this.isIncludedInScope = false;
        }
        if (siteMap != null) {
            // This could have affected its visibility
            siteMap.applyFilter(this);
        }
        this.nodeChanged();
        // Recurse down
        if (this.getChildCount() > 0 && applyToChildNodes) {
            SiteNode c = (SiteNode) this.getFirstChild();
            while (c != null) {
                c.setExcludedFromScope(isExcludedFromScope, applyToChildNodes);
                c = (SiteNode) this.getChildAfter(c);
            }
        }
    }
    @Override
    public void setParent(MutableTreeNode newParent) {
        // Guard against a node being made its own parent (issue 1084).
        if (newParent == this) {
            return;
        }
        super.setParent(newParent);
    }
    /**
     * Returns this node's parent or null if this node has no parent.
     *
     * @return this node's parent SiteNode, or null if this node has no parent
     */
    @Override
    public SiteNode getParent() {
        return (SiteNode)super.getParent();
    }
    public boolean isFiltered() {
        return filtered;
    }
    protected void setFiltered(boolean filtered) {
        this.filtered = filtered;
    }
    public boolean isDataDriven() {
        return dataDriven;
    }
}
| |
package com.hue.services;
import java.io.IOException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.hue.common.CardinalityType;
import com.hue.common.ColumnKeyType;
import com.hue.common.DBType;
import com.hue.common.JoinType;
import com.hue.common.TableType;
import com.hue.connection.ConnUtils;
import com.hue.connection.SshException;
import com.hue.model.Column;
import com.hue.model.Datasource;
import com.hue.model.Join;
import com.hue.utils.CommonUtils;
import schemacrawler.schema.Catalog;
import schemacrawler.schema.ForeignKey;
import schemacrawler.schema.Schema;
import schemacrawler.schema.Table;
import schemacrawler.schemacrawler.RegularExpressionExclusionRule;
import schemacrawler.schemacrawler.RegularExpressionInclusionRule;
import schemacrawler.schemacrawler.SchemaCrawlerException;
import schemacrawler.schemacrawler.SchemaCrawlerOptions;
import schemacrawler.schemacrawler.SchemaInfoLevel;
import schemacrawler.schemacrawler.SchemaInfoLevelBuilder;
import schemacrawler.utility.SchemaCrawlerUtility;
public final class SchemaImportService {
    private static final Logger logger = LoggerFactory.getLogger(SchemaImportService.class.getName());
    // Cheap connectivity-test queries; Oracle and Derby need engine-specific forms.
    private static final String TEST_QUERY = "SELECT 1+1";
    private static final String TEST_QUERY_ORACLE = "SELECT 'X' FROM dual";
    private static final String TEST_QUERY_DERBY = "SELECT 1+1 FROM SYSIBM.SYSDUMMY1";
    // Row-count templates: double quotes for ANSI identifiers, back-ticks for MySQL-style engines.
    private static final String ROW_COUNT_QUERY = "SELECT count(*) FROM \"%s\"";
    private static final String ROW_COUNT_QUERY_WITH_SCHEMA = "SELECT count(*) FROM \"%s\".\"%s\"";
    private static final String MYSQL_ROW_COUNT_QUERY = "SELECT count(*) FROM `%s`.`%s`";
    // Driver metadata methods skipped when reflecting over DatabaseMetaData.
    private static final List<String> EXCLUDED_METHODS = Arrays.asList("getURL", "toString", "hashCode", "equals");
    // Cache of crawled tables keyed by name -- NOTE(review): mutable static shared state; verify thread-safety with callers.
    private static final Map<String, schemacrawler.schema.Table> schemaTables = new HashMap<String, schemacrawler.schema.Table>();
    // Utility class: not instantiable.
    private SchemaImportService() {
    }
// Import schema names only. No need for anything else.
/**
 * Imports only the schema names from the target database; no table or
 * column metadata is retrieved.
 *
 * @param datasource connection settings for the target database
 * @return the list of schema names visible to the configured user
 * @throws ServiceException when the connection or the catalog crawl fails
 */
public static List<String> importSchemas(Datasource datasource) throws ServiceException {
    final List<String> schemaNames = new ArrayList<>();
    try (Connection connection = ConnUtils.getSchemaCrawlerConnection(datasource)) {
        final SchemaCrawlerOptions options = getSchemaCrawlerOptions(datasource);
        // Schema names only — skip database info and table retrieval for speed.
        options.getSchemaInfoLevel().setRetrieveDatabaseInfo(false);
        options.getSchemaInfoLevel().setRetrieveTables(false);
        final Catalog database = SchemaCrawlerUtility.getCatalog(connection, options);
        // MySQL exposes databases as catalogs (no schema name); every other
        // vendor reports proper schema names.
        final boolean useCatalogName = datasource.getDatabaseType() == DBType.MYSQL;
        for (Schema schema : database.getSchemas()) {
            schemaNames.add(useCatalogName ? schema.getCatalogName() : schema.getName());
        }
    }
    catch (Exception e) {
        logger.error(e.getMessage(), e);
        throw new ServiceException(e);
    }
    return schemaNames;
}
/**
 * Returns the row count of the given table using a vendor-appropriate
 * COUNT(*) query.
 *
 * @param datasource connection settings for the target database
 * @param schemaName schema (or catalog, for MySQL/Hive) containing the table
 * @param tableName  physical table name; surrounding quotes are stripped first
 * @return the number of rows in the table
 * @throws ServiceException when the connection or the query fails
 */
public static Integer getRowCount(Datasource datasource, String schemaName, String tableName)
        throws ServiceException {
    DBType dbType = datasource.getDatabaseType();
    try (final Connection connection = ConnUtils.getNonSshConnection(datasource)) {
        tableName = CommonUtils.removeQuotes(tableName);
        final String query;
        if (dbType == DBType.MYSQL || dbType == DBType.HIVE) {
            // MySQL/Hive quote identifiers with backticks.
            query = String.format(MYSQL_ROW_COUNT_QUERY, schemaName, tableName);
        }
        else if (dbType == DBType.POSTGRESQL || dbType == DBType.REDSHIFT
                || dbType == DBType.MSSQL || dbType == DBType.DERBY_LOCAL
                || dbType == DBType.AZURE || dbType == DBType.VERTICA) {
            query = String.format(ROW_COUNT_QUERY_WITH_SCHEMA, schemaName, tableName);
        }
        else {
            // Vendors where the schema is implied by the connection.
            query = String.format(ROW_COUNT_QUERY, tableName);
        }
        logger.debug("Count Query = {}", query);
        // Close the statement/result set promptly instead of relying on the
        // connection close to release them.
        try (Statement statement = connection.createStatement();
             ResultSet rs = statement.executeQuery(query)) {
            if (!rs.next()) {
                throw new ServiceException("Row count query returned no result: " + query);
            }
            return rs.getInt(1);
        }
    }
    catch (ServiceException e) {
        throw e;
    }
    catch (Exception e) {
        throw new ServiceException(e);
    }
}
/**
 * Imports column metadata (name, data type, key type) for a single table and
 * derives the table's type. Uses SchemaCrawler first; falls back to plain
 * JDBC metadata when the crawl fails (e.g. Teradata without DBC.UDTInfo
 * permission).
 *
 * <p>Called from a multithreaded environment, so any SSH tunnel must be
 * established by the caller before invoking this method.
 *
 * @param datasource connection settings for the target database
 * @param table      model table to populate; columns are appended in place
 * @throws ServiceException when both the crawl and the JDBC fallback fail
 */
public static void importColumnsForTable(Datasource datasource, com.hue.model.Table table) throws ServiceException {
    // AA 8/19/2015 - quoting table name to prevent query errors.
    String q = "";
    try (Connection connection = ConnUtils.getNonSshConnection(datasource)) {
        q = connection.getMetaData().getIdentifierQuoteString();
        // Restrict the crawl to this table's schema and name.
        final SchemaCrawlerOptions options = getSchemaCrawlerOptions(datasource);
        String schemaName = table.getSchemaName();
        if (datasource.getDatabaseType() == DBType.MSSQL
                || datasource.getDatabaseType() == DBType.AZURE
                || datasource.getDatabaseType() == DBType.REDSHIFT
                || datasource.getDatabaseType() == DBType.VERTICA) {
            // These engines report schemas as "database.schema".
            schemaName = datasource.getDatabaseName() + "." + schemaName;
        }
        options.getSchemaInfoLevel().setRetrieveTableColumns(true);
        // FIX: setRetrieveForeignKeyDefinitions was set twice in the original.
        options.getSchemaInfoLevel().setRetrieveForeignKeyDefinitions(true);
        options.getSchemaInfoLevel().setRetrievePrimaryKeyDefinitions(true);
        options.setSchemaInclusionRule(createSchemaInclusionRule(datasource, table.getSchemaName()));
        options.setTableNamePattern(table.getPhysicalName());
        try {
            // Get the schema definition
            Catalog database = SchemaCrawlerUtility.getCatalog(connection, options);
            com.annimon.stream.Optional<? extends Schema> schemaO = database.lookupSchema(schemaName);
            if (!schemaO.isPresent()) throw new ServiceException("Schema with name " + schemaName + " not found.");
            com.annimon.stream.Optional<? extends Table> schemaTable = database.lookupTable(schemaO.get(),
                    quoteIdent(table.getPhysicalName(), q));
            if (!schemaTable.isPresent()) throw new ServiceException("Table with name " + table.getPhysicalName() + " not found in schema " + schemaName);
            // Saved schema table for join detection later (see getJoinDefs()).
            schemaTables.put(table.getName(), schemaTable.get());
            for (schemacrawler.schema.Column schemaColumn : schemaTable.get().getColumns()) {
                Column column = new Column();
                column.setName(stripQuoteIdent(CommonUtils.removeBackTicks(schemaColumn.getName()), q));
                column.setDataType(CommonUtils.mapToDataType(
                        schemaColumn.getColumnDataType().getTypeMappedClass().getName(),
                        schemaColumn.getColumnDataType().getJavaSqlType().getJavaSqlTypeName(),
                        schemaColumn.getColumnDataType().getDatabaseSpecificTypeName()));
                column.setKeyType(getKeyType(schemaColumn, schemaTable.get()));
                table.getColumns().add(column);
            }
        }
        catch (Exception e) {
            logger.error(e.getMessage(), e);
            // TH 04/22/2015, In Teradata, if user doesn't have permission
            // on DBC.UDTInfo, getColumns through JDBC will fail. Fall back
            // to retrieving columns from an empty ResultSet's metadata.
            importColumnsViaJdbc(connection, table, schemaName, q);
        }
        table.setTableType(getTableType(table));
    }
    catch (Exception e) {
        throw new ServiceException("Failed to import columns for table - " + table.getName(), e);
    }
}

/**
 * Fallback column import using plain JDBC metadata. All JDBC resources are
 * closed here instead of waiting for the connection to close.
 */
private static void importColumnsViaJdbc(Connection connection, com.hue.model.Table table,
        String schemaName, String q) throws SQLException {
    DatabaseMetaData databaseMetaData = connection.getMetaData();
    List<String> primaryKeys = new ArrayList<>();
    List<String> foreignKeys = new ArrayList<>();
    try (ResultSet primaryKeyResultSet = databaseMetaData.getPrimaryKeys(null, schemaName,
            table.getPhysicalName())) {
        while (primaryKeyResultSet.next()) {
            primaryKeys.add(primaryKeyResultSet.getString("COLUMN_NAME"));
        }
    }
    try (ResultSet foreignKeyResultSet = databaseMetaData.getImportedKeys(null, schemaName,
            table.getPhysicalName())) {
        while (foreignKeyResultSet.next()) {
            foreignKeys.add(foreignKeyResultSet.getString("FKCOLUMN_NAME"));
        }
    }
    // "where 1 = 2" returns zero rows but still exposes the column metadata.
    try (Statement statement = connection.createStatement();
         ResultSet resultSet = statement.executeQuery(
                 "select * from " + quoteIdent(table.getPhysicalName(), q) + " where 1 = 2")) {
        ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
        for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
            Column column = new Column();
            column.setName(resultSetMetaData.getColumnName(i));
            column.setDataType(CommonUtils.mapToDataType(
                    resultSetMetaData.getColumnClassName(i),
                    CommonUtils.getJavaSqlTypeName(resultSetMetaData.getColumnType(i)),
                    resultSetMetaData.getColumnTypeName(i)));
            if (primaryKeys.contains(column.getName())) {
                column.setKeyType(ColumnKeyType.PRIMARY_KEY);
            }
            else if (foreignKeys.contains(column.getName())) {
                column.setKeyType(ColumnKeyType.FOREIGN_KEY);
            }
            else {
                column.setKeyType(ColumnKeyType.NO_KEY_TYPE);
            }
            table.getColumns().add(column);
        }
    }
}
/**
 * Classifies a table as DIMENSION, BRIDGE or FACT from its key structure:
 * single PK and no FKs → dimension; all-FK (no non-key columns) → bridge;
 * FKs plus non-key columns → fact; no keys at all → fact (dimension on
 * Redshift, where FK metadata is not crawled); anything else → dimension.
 */
public static TableType getTableType(com.hue.model.Table table) {
    final List<Column> primaryKeys = table.getPrimaryKeys();
    final List<Column> foreignKeys = table.getForeignKeys();
    final List<Column> nonKeyColumns = table.getKeys(ColumnKeyType.NO_KEY_TYPE);

    if (primaryKeys.size() == 1 && foreignKeys.isEmpty()) {
        return TableType.DIMENSION;
    }
    final boolean everyColumnIsForeignKey = foreignKeys.size() == table.getColumns().size();
    if (everyColumnIsForeignKey || (!foreignKeys.isEmpty() && nonKeyColumns.isEmpty())) {
        return TableType.BRIDGE;
    }
    if (!foreignKeys.isEmpty() && !nonKeyColumns.isEmpty()) {
        return TableType.FACT;
    }
    if (foreignKeys.isEmpty() && primaryKeys.isEmpty()) {
        final Datasource ds = table.getDatasource();
        if (ds != null && ds.getDatabaseType() == DBType.REDSHIFT) {
            return TableType.DIMENSION;
        }
        return TableType.FACT;
    }
    return TableType.DIMENSION;
}
/**
 * Determines the key type of a crawled column: PRIMARY_KEY if it belongs to
 * the table's primary key, FOREIGN_KEY if any imported foreign key
 * references it, NO_KEY_TYPE otherwise.
 */
public static ColumnKeyType getKeyType(schemacrawler.schema.Column column, Table table) {
    final String columnName = column.getName();
    if (table.getPrimaryKey() != null) {
        for (schemacrawler.schema.Column pkColumn : table.getPrimaryKey().getColumns()) {
            if (columnName.equals(pkColumn.getName())) {
                return ColumnKeyType.PRIMARY_KEY;
            }
        }
    }
    for (ForeignKey foreignKey : table.getImportedForeignKeys()) {
        final boolean referenced = foreignKey.getColumnReferences().stream()
                .anyMatch(ref -> ref.getForeignKeyColumn().getName().equals(columnName));
        if (referenced) {
            return ColumnKeyType.FOREIGN_KEY;
        }
    }
    return ColumnKeyType.NO_KEY_TYPE;
}
/**
 * Wraps a physical name in the driver's identifier quote string when the
 * name contains a space; names without spaces are returned untouched.
 */
public static String quoteIdent(String physicalName, String q) {
    final boolean needsQuoting = physicalName.contains(" ");
    return needsQuoting ? q + physicalName + q : physicalName;
}
/**
 * Imports table identities (name, physical name, schema) for every schema
 * matched by the supplied crawler options. How much information is retrieved
 * is determined entirely by the options; by default column information is
 * excluded — columns, keys and row counts are loaded later on demand.
 *
 * @param datasource owning datasource, attached to each created table
 * @param connection open connection to crawl; not closed by this method
 * @param options    crawler options, typically from getSchemaCrawlerOptions()
 * @return the tables found, one model object per crawled table
 * @throws ServiceException when the catalog cannot be crawled
 */
public static List<com.hue.model.Table> importTables(Datasource datasource, Connection connection, SchemaCrawlerOptions options)
        throws ServiceException {
    List<com.hue.model.Table> availableTables = new ArrayList<com.hue.model.Table>();
    // Reset the crawled-table cache consumed later by getJoinDefs().
    schemaTables.clear();
    Catalog database;
    String q = "";
    try {
        database = SchemaCrawlerUtility.getCatalog(connection, options);
        q = connection.getMetaData().getIdentifierQuoteString();
    }
    catch (SchemaCrawlerException | SQLException e) {
        throw new ServiceException("Cannot crawl database catalog", e);
    }
    for (Schema schema : database.getSchemas()) {
        logger.debug("Import schema - {}", schema.getName());
        for (schemacrawler.schema.Table schemaTable : database.getTables(schema)) {
            com.hue.model.Table table = new com.hue.model.Table();
            table.setDatasource(datasource);
            table.setName(stripQuoteIdent(schemaTable.getName(), q));
            table.setPhysicalName(stripQuoteIdent(schemaTable.getName(), q));
            Schema tableSchema = schemaTable.getSchema();
            String schemaName = tableSchema.getName();
            if (schemaName == null) {
                // Some drivers report only a fully qualified name.
                schemaName = tableSchema.getFullName();
            }
            table.setSchemaName(schemaName);
            // TH 09/22/2014, do not load row count when importing tables.
            availableTables.add(table);
        }
    }
    return availableTables;
}
/**
 * Strips the driver's identifier quote string from a crawled name: a leading
 * quote, a quote immediately following a dot (schema.table separators), and
 * a trailing quote.
 *
 * <p>FIX: per the JDBC contract, DatabaseMetaData.getIdentifierQuoteString()
 * returns a single space when quoting is unsupported; the original built a
 * regex from it unescaped, stripping spaces (or producing a broken pattern
 * for an empty string). Such values now leave the name untouched, and the
 * quote is regex-escaped via Pattern.quote.
 */
private static String stripQuoteIdent(String name, String q) {
    if (q == null || q.trim().isEmpty()) {
        return name; // identifier quoting not supported by this driver
    }
    final String quoted = Pattern.quote(q);
    final String pattern = "^" + quoted + "|\\." + quoted + "|" + quoted + "$";
    return name.replaceAll(pattern, "");
}
/**
 * Picks the default schema for a datasource from the list of available
 * schemas: the vendor's conventional default (user name for Oracle, "dbo"
 * for SQL Server/Azure, "public" for PostgreSQL-family, otherwise the
 * database name), matched case-insensitively against the list so the exact
 * database spelling is returned. Falls back to the first schema.
 *
 * @throws ServiceException when the schema list is null or empty
 */
public static String getDefaultSchema(Datasource datasource, List<String> schemas) throws ServiceException {
    if (schemas == null || schemas.isEmpty()) {
        throw new ServiceException("No schema found in target database");
    }
    final String candidate;
    switch (datasource.getDatabaseType()) {
        case ORACLE:
            candidate = datasource.getUserName();
            break;
        case MSSQL:
        case AZURE:
            candidate = "dbo";
            break;
        case POSTGRESQL:
        case REDSHIFT:
        case VERTICA:
            candidate = "public";
            break;
        default:
            candidate = datasource.getDatabaseName();
            break;
    }
    // Return the database's exact spelling of the candidate if present.
    for (String schema : schemas) {
        if (schema.equalsIgnoreCase(candidate)) {
            return schema;
        }
    }
    return schemas.get(0);
}
/**
 * Imports table identities for the given schemas over a (possibly tunneled)
 * connection.
 *
 * @param datasource  connection settings for the target database
 * @param schemaNames schemas to include in the crawl
 * @return tables found across all requested schemas
 * @throws ServiceException wrapping any connection, driver or crawl failure
 */
public static List<com.hue.model.Table> importTablesForSchemas(Datasource datasource, List<String> schemaNames) throws ServiceException {
    try (Connection connection = ConnUtils.getConnection(datasource)) {
        final SchemaCrawlerOptions options = getSchemaCrawlerOptions(datasource);
        options.setSchemaInclusionRule(createSchemaInclusionRule(datasource, schemaNames));
        return importTables(datasource, connection, options);
    }
    catch (SQLException e) {
        throw new ServiceException("Database exception", e);
    }
    catch (SshException e) {
        throw new ServiceException("SSH connection failed.", e);
    }
    // Driver-loading and tunnel I/O failures carry no extra context — FIX:
    // collapsed two identical catch blocks into one multi-catch.
    catch (ClassNotFoundException | IOException e) {
        throw new ServiceException(e);
    }
}
/** Convenience overload of importTablesForSchemas for a single schema. */
public static List<com.hue.model.Table> importTablesForSchema(Datasource datasource, String schemaName) throws ServiceException {
    return importTablesForSchemas(datasource, java.util.Collections.singletonList(schemaName));
}
/**
 * Builds the baseline SchemaCrawler options for a datasource: "minimum" info
 * level plus foreign keys, then applies vendor-specific schema
 * inclusion/exclusion tweaks. The commented-out rules below are earlier
 * vendor filters kept for reference.
 */
public static SchemaCrawlerOptions getSchemaCrawlerOptions(Datasource datasource) {
    final SchemaCrawlerOptions options = new SchemaCrawlerOptions();
    // Set what details are required in the schema - this affects the
    // time taken to crawl the schema
    SchemaInfoLevel schemaInfoLevel = SchemaInfoLevelBuilder.minimum();
    schemaInfoLevel.setRetrieveAdditionalJdbcDriverInfo(false);
    schemaInfoLevel.setRetrieveRoutines(false);
    schemaInfoLevel.setRetrieveColumnDataTypes(false);
    schemaInfoLevel.setRetrieveTableColumns(false);
    schemaInfoLevel.setRetrieveForeignKeys(true);
    options.setSchemaInfoLevel(schemaInfoLevel);
    DBType dbType = datasource.getDatabaseType();
    // NOTE(review): FK retrieval is switched off for Redshift here, outside the
    // vendor switch below — presumably a driver limitation or performance
    // workaround; confirm before consolidating into the REDSHIFT case.
    if (dbType == DBType.REDSHIFT) {
        options.getSchemaInfoLevel().setRetrieveForeignKeys(false);
    }
    switch (dbType) {
    case POSTGRESQL:
    case REDSHIFT:
        // options.setSchemaInclusionRule(new
        // RegularExpressionExclusionRule(Pattern.compile(
        // "INFORMATION_SCHEMA|PG_CATALOG|PG_INTERNAL",
        // Pattern.CASE_INSENSITIVE)));
        break;
    case MYSQL:
        // options.setSchemaInclusionRule(new
        // RegularExpressionExclusionRule(Pattern.compile("INFORMATION_SCHEMA",
        // Pattern.CASE_INSENSITIVE)));
        break;
    case MSSQL:
    case AZURE:
        // SQL Server-family drivers report schemas as "database.schema";
        // include only schemas of the configured database.
        options.setSchemaInclusionRule(new RegularExpressionInclusionRule(Pattern.compile(
                String.format("%s\\..*", datasource.getDatabaseName()), Pattern.CASE_INSENSITIVE)));
        break;
    case TERADATA:
        // options.setSchemaInclusionRule(new
        // RegularExpressionExclusionRule(Pattern.compile(
        // "\"All\"|Crashdumps|\"DBC\"|EXTUSER|SQLJ|SYSLIB|SYSSPATIAL|SYSUDTLIB|SysAdmin|Sys_Calendar|SystemFe|TDPUSER|TDStats|TD_SYSFNLIB|dbcmngr|tdwm",
        // Pattern.CASE_INSENSITIVE)));
        // options.setSchemaInclusionRule(new
        // RegularExpressionInclusionRule(Pattern.compile(
        // datasource.getDatabaseName(), Pattern.CASE_INSENSITIVE)));
        break;
    case DERBY_LOCAL:
    case DERBY_REMOTE:
        // Exclude Derby's internal SYS* schemas.
        options.setSchemaInclusionRule(
                new RegularExpressionExclusionRule(Pattern.compile("SYS", Pattern.CASE_INSENSITIVE)));
        break;
    case ORACLE:
        // options.setSchemaInclusionRule(new
        // RegularExpressionExclusionRule(Pattern.compile(
        // "ANONYMOUS|APEX_PUBLIC_USER|CTXSYS|DBSNMP|DIP|EXFSYS|FLOWS_%|FLOWS_FILES|LBACSYS|MDDATA|MDSYS|MGMT_VIEW|OLAPSYS|ORACLE_OCM|ORDDATA|ORDPLUGINS|ORDSYS|OUTLN|OWBSYS|SI_INFORMTN_SCHEMA|SPATIAL_CSW_ADMIN_USR|SPATIAL_WFS_ADMIN_USR|SYS|SYSMAN|SYSTEM|WKPROXY|WKSYS|WK_TEST|WMSYS|XDB|XS$NULL|APPQOSSYS|RDSADMIN|\"SYSTEM\"|DBSNMP|CTXSYS|DIP|SYS|OUTLN",
        // Pattern.CASE_INSENSITIVE)));
        break;
    case HIVE:
        // Hive metadata calls are expensive/limited — keep the crawl minimal.
        schemaInfoLevel.setRetrieveForeignKeys(false);
        schemaInfoLevel.setRetrieveColumnDataTypes(false);
        schemaInfoLevel.setRetrieveDatabaseInfo(false);
        // schemaInfoLevel.setRetrieveAdditionalSchemaCrawlerInfo(false);
        schemaInfoLevel.setRetrieveAdditionalJdbcDriverInfo(false);
        break;
    case VERTICA:
    case ACCESS:
    case UNKNOWN:
    default:
        break;
    }
    return options;
}
/**
 * Verifies connectivity by executing a vendor-appropriate probe query.
 * FIX: removed the pointless {@code catch (Exception e) { throw e; }}
 * block and closed the Statement via try-with-resources.
 *
 * @param datasource connection settings for the target database
 * @return {@code true} when the probe query executes successfully
 * @throws Exception any connection or query failure, propagated unchanged
 */
public static boolean testConnection(Datasource datasource) throws Exception {
    try (Connection connection = ConnUtils.getSchemaCrawlerConnection(datasource);
         Statement statement = connection.createStatement()) {
        final String query;
        switch (datasource.getDatabaseType()) {
            case DERBY_LOCAL:
            case DERBY_REMOTE:
                // Derby requires a FROM clause (SYSIBM.SYSDUMMY1).
                query = TEST_QUERY_DERBY;
                break;
            case ORACLE:
                // Oracle requires a FROM clause (dual).
                query = TEST_QUERY_ORACLE;
                break;
            default:
                query = TEST_QUERY;
                break;
        }
        statement.execute(query);
        return true;
    }
}
// This method has to be called after the tables' columns have been imported
// (importColumnsForTable populates the schemaTables cache this relies on).
public static List<Join> getJoinDefs(List<com.hue.model.Table> tables) {
    List<Join> joinDefs = new ArrayList<Join>();
    tables.forEach(t -> {
        // Crawled counterpart of this model table; the FK (many) side of joins.
        schemacrawler.schema.Table rightSchemaTable = schemaTables.get(t.getName());
        // TH 06/15/2015, for adding additional table case, schemaTables
        // does not contain the tables which
        // already loaded.
        if (rightSchemaTable != null) {
            rightSchemaTable.getImportedForeignKeys().forEach(fk -> {
                // The referenced (primary-key) table is the join's left side.
                schemacrawler.schema.Table leftSchemaTable = fk.getColumnReferences().get(
                        0).getPrimaryKeyColumn().getParent();
                // ignore the self join
                if (!rightSchemaTable.equals(leftSchemaTable)) {
                    // Only emit a join when the referenced table is in the imported set.
                    Optional<com.hue.model.Table> leftTable = tables.stream().filter(
                            t1 -> t1.getName().equals(leftSchemaTable.getName())).findFirst();
                    if (leftTable.isPresent()) {
                        // create join def
                        Join joinDef = new Join();
                        joinDef.setCardinalityType(CardinalityType.ONE_TO_MANY);
                        joinDef.setJoinType(JoinType.INNER_JOIN);
                        joinDef.setLeft(leftTable.get());
                        joinDef.setRight(t);
                        // Build "left.pk = right.fk [and ...]" across all column
                        // pairs of this foreign key.
                        final StringBuffer formula = new StringBuffer("");
                        fk.getColumnReferences().stream().filter(
                                cr -> cr.getForeignKeyColumn().getParent().equals(rightSchemaTable)).forEach(cr -> {
                            if (!formula.toString().equals("")) {
                                formula.append(" and ");
                            }
                            formula.append(CommonUtils.getOptionallyQuotedName(leftTable.get().getName()) + ".")
                                    .append(CommonUtils.getOptionallyQuotedName(cr.getPrimaryKeyColumn().getName()))
                                    .append(" = ").append(CommonUtils.getOptionallyQuotedName(t.getName()) + ".")
                                    .append(CommonUtils.getOptionallyQuotedName(cr.getForeignKeyColumn().getName()));
                        });
                        joinDef.setSql(formula.toString());
                        joinDefs.add(joinDef);
                    }
                }
            });
        }
    });
    if (logger.isInfoEnabled()) {
        // Summarize all detected joins in a single log record.
        // NOTE(review): 'Table' below resolves to schemacrawler.schema.Table via the
        // file's imports, yet getLeft()/getRight() were set to com.hue.model.Table
        // above — verify this cast cannot fail at runtime.
        StringBuffer sb = new StringBuffer();
        joinDefs.forEach(jd -> sb.append(((Table) jd.getLeft()).getName()).append(" ").append(
                jd.getJoinType()).append(" ").append(((Table) jd.getRight()).getName()).append(" ").append(
                jd.getCardinalityType()).append(" ").append(jd.getSql()).append(
                System.lineSeparator()));
        logger.info(sb.toString());
    }
    return joinDefs;
}
/**
 * Builds a case-insensitive inclusion rule matching any of the given schema
 * names. For engines where SchemaCrawler reports "database.schema" names
 * (SQL Server, Azure, Redshift, Vertica) the database name is prepended to
 * each alternative.
 */
public static RegularExpressionInclusionRule createSchemaInclusionRule(Datasource datasource, List<String> schemaNames) {
    final DBType type = datasource.getDatabaseType();
    final boolean qualifyWithDatabase = type == DBType.MSSQL || type == DBType.REDSHIFT
            || type == DBType.VERTICA || type == DBType.AZURE;
    final StringBuilder rule = new StringBuilder();
    for (String schemaName : schemaNames) {
        if (rule.length() > 0) {
            rule.append('|');
        }
        if (qualifyWithDatabase) {
            rule.append(datasource.getDatabaseName()).append("\\.");
        }
        rule.append(schemaName);
    }
    return new RegularExpressionInclusionRule(Pattern.compile(rule.toString(), Pattern.CASE_INSENSITIVE));
}
/** Convenience overload building an inclusion rule for a single schema. */
public static RegularExpressionInclusionRule createSchemaInclusionRule(Datasource datasource, String schemaName) {
    return createSchemaInclusionRule(datasource, java.util.Collections.singletonList(schemaName));
}
/**
 * Collects scalar JDBC DatabaseMetaData properties into a map keyed by
 * method name. Only zero-argument, non-abstract getters returning int,
 * boolean or String are invoked; URL/Object-identity methods are excluded.
 *
 * @param datasource connection settings for the target database
 * @return map of metadata method name to its returned value
 * @throws ServiceException when the connection or metadata access fails
 */
public static Map<String, Object> importDbProps(Datasource datasource) throws ServiceException {
    Map<String, Object> dbProps = new HashMap<String, Object>();
    try (Connection connection = ConnUtils.getSchemaCrawlerConnection(datasource)) {
        DatabaseMetaData metaData = connection.getMetaData();
        for (Method method : metaData.getClass().getMethods()) {
            if (method.getParameterCount() != 0
                    || EXCLUDED_METHODS.contains(method.getName())
                    || Modifier.isAbstract(method.getModifiers())) {
                continue;
            }
            Class<?> returnType = method.getReturnType();
            if (returnType != int.class && returnType != boolean.class && returnType != String.class) {
                continue;
            }
            try {
                dbProps.put(method.getName(), method.invoke(metaData));
            }
            catch (Exception e) {
                // Some drivers throw for unsupported properties; skip the
                // property but keep the cause in the log (FIX: the original
                // dropped the exception entirely).
                logger.error("Failed to import property - {}", method.getName(), e);
            }
        }
    }
    catch (Exception e) {
        logger.error(e.getMessage(), e);
        throw new ServiceException(e);
    }
    return dbProps;
}
}
| |
package com.alcedomoreno.sirme.core.dao;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.support.AnnotationConfigContextLoader;
import org.springframework.transaction.annotation.Transactional;
import com.alcedomoreno.sirme.core.AppTestConfig;
import com.alcedomoreno.sirme.core.data.RoleData;
/**
 * Integration tests for the generic DAO operations, exercised through
 * {@link RolesDao}. Each test runs inside a transaction that is rolled
 * back afterwards, so the seed data (4 roles) is restored between tests.
 *
 * <p>FIXES: JUnit assertEquals argument order was (actual, expected)
 * throughout — corrected to (expected, actual) so failure messages are
 * meaningful; expected-exception blocks were missing fail() after the
 * call, so they silently passed when no exception was thrown.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(loader = AnnotationConfigContextLoader.class, classes = AppTestConfig.class)
public class AbstractDaoTest {

    @Autowired
    RolesDao rolDao;

    /** findOne succeeds for an existing id and rejects a null id. */
    @Test
    @Transactional
    public void findOneTest() {
        try {
            rolDao.findOne(1);
        } catch (Exception e) {
            fail(e.getMessage());
        }
        try {
            rolDao.findOne(null);
            fail("findOne(null) must throw IllegalArgumentException");
        } catch (Exception e) {
            assertTrue(e.getClass().equals(IllegalArgumentException.class));
        }
    }

    /** findAll must not throw. */
    @Test
    @Transactional
    public void findAllTest() {
        try {
            rolDao.findAll();
        } catch (Exception e) {
            fail(e.getMessage());
        }
    }

    /** create persists a new role; creating null is rejected. */
    @Test
    @Transactional
    public void createTest() {
        try {
            RoleData rol = new RoleData();
            rol.setIdRole(234);
            rol.setDescriptionRole("descripcion234");
            rol.setCodeRole("rolPrueba");
            rolDao.create(rol);
            assertEquals("rolPrueba", rolDao.findOne(234).getCodeRole());
        } catch (Exception e) {
            fail(e.getMessage());
        }
        try {
            rolDao.create(null);
            fail("create(null) must throw NullPointerException");
        } catch (NullPointerException e) {
            assertTrue(true);
        }
    }

    /** update changes a persisted role; updating null is rejected. */
    @Test
    @Transactional
    public void updateTest() {
        try {
            RoleData rol = new RoleData();
            rol.setIdRole(234);
            rol.setDescriptionRole("descripcion234");
            rol.setCodeRole("rolPrueba");
            rolDao.create(rol);
            assertEquals("rolPrueba", rolDao.findOne(234).getCodeRole());
            rol.setCodeRole("rolPrueba2");
            rolDao.update(rol);
            RoleData role2 = rolDao.findOne(234);
            assertEquals("rolPrueba2", role2.getCodeRole());
        } catch (Exception e) {
            fail(e.getMessage());
        }
        try {
            rolDao.update(null);
            fail("update(null) must throw NullPointerException");
        } catch (NullPointerException e) {
            assertTrue(true);
        }
    }

    /** delete of a non-persisted role leaves nothing to find. */
    @Test
    @Transactional
    public void deleteTest() {
        try {
            RoleData rol = new RoleData();
            rol.setIdRole(234);
            rol.setCodeRole("code");
            // NOTE(review): deletes a transient entity that was never persisted;
            // relies on the DAO tolerating that — verify intended semantics.
            rolDao.delete(rol);
            assertNull(rolDao.findOne(234));
        } catch (Exception e) {
            fail(e.getMessage());
        }
    }

    /** deleteById removes an existing role and rejects an unknown id. */
    @Test
    @Transactional
    public void deleteByIdTest() {
        try {
            rolDao.deleteById(1);
            assertNull(rolDao.findOne(1));
        } catch (Exception e) {
            fail(e.getMessage());
        }
        try {
            rolDao.deleteById(0);
            fail("Impossible to arrive here!");
        } catch (Exception e) {
            assertTrue(true);
        }
    }

    /** An evicted (detached, unflushed) entity is not visible via findOne. */
    @Test
    @Transactional
    public void evictTest() {
        try {
            RoleData rol = new RoleData();
            rol.setIdRole(234);
            rol.setDescriptionRole("descripcion234");
            rol.setCodeRole("rolPrueba");
            rolDao.create(rol);
            assertEquals("rolPrueba", rolDao.findOne(234).getCodeRole());
            rol.setCodeRole("CHANGED!!!!!");
            rolDao.evict(rol);
            // NOTE(review): expects the evicted, never-flushed entity to be gone;
            // confirm the DAO's evict/find semantics match this.
            rol = rolDao.findOne(234);
            assertNull(rol);
        } catch (Exception e) {
            fail(e.getMessage());
        }
    }

    /** The seed data contains exactly 4 roles. */
    @Test
    @Transactional
    public void countTest() {
        try {
            long count = rolDao.count();
            assertEquals(4, count);
        } catch (Exception e) {
            fail(e.getMessage());
        }
    }

    /** Pagination returns the requested window sizes. */
    @Test
    @Transactional
    public void findAllPaginatedTest() {
        try {
            List<RoleData> roles = rolDao.findAll(0, 1);
            assertEquals(1, roles.size());
            roles = rolDao.findAll(0, 4);
            assertEquals(4, roles.size());
            roles = rolDao.findAll(3, 4);
            assertEquals(1, roles.size());
        } catch (Exception e) {
            fail(e.getMessage());
        }
    }
}
| |
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.rest.resources;
import com.eclipsesource.json.Json;
import com.eclipsesource.json.JsonArray;
import com.eclipsesource.json.JsonObject;
import com.google.common.collect.ImmutableSet;
import org.hamcrest.Description;
import org.hamcrest.Matchers;
import org.hamcrest.TypeSafeMatcher;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.onlab.osgi.ServiceDirectory;
import org.onlab.osgi.TestServiceDirectory;
import org.onlab.rest.BaseResource;
import org.onosproject.codec.CodecService;
import org.onosproject.codec.impl.CodecManager;
import org.onosproject.codec.impl.GroupCodec;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreService;
import org.onosproject.core.DefaultApplicationId;
import org.onosproject.core.DefaultGroupId;
import org.onosproject.core.GroupId;
import org.onosproject.net.DefaultDevice;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.NetTestTools;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.group.DefaultGroupKey;
import org.onosproject.net.group.Group;
import org.onosproject.net.group.GroupBucket;
import org.onosproject.net.group.GroupBuckets;
import org.onosproject.net.group.GroupDescription;
import org.onosproject.net.group.GroupKey;
import org.onosproject.net.group.GroupService;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.anyShort;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.onosproject.net.NetTestTools.APP_ID;
/**
* Unit tests for Groups REST APIs.
*/
public class GroupsResourceTest extends ResourceTest {
final GroupService mockGroupService = createMock(GroupService.class);
CoreService mockCoreService = createMock(CoreService.class);
final DeviceService mockDeviceService = createMock(DeviceService.class);
final HashMap<DeviceId, Set<Group>> groups = new HashMap<>();
final DeviceId deviceId1 = DeviceId.deviceId("1");
final DeviceId deviceId2 = DeviceId.deviceId("2");
final DeviceId deviceId3 = DeviceId.deviceId("3");
final Device device1 = new DefaultDevice(null, deviceId1, Device.Type.OTHER,
"", "", "", "", null);
final Device device2 = new DefaultDevice(null, deviceId2, Device.Type.OTHER,
"", "", "", "", null);
final MockGroup group1 = new MockGroup(deviceId1, 1, "0x111", 1);
final MockGroup group2 = new MockGroup(deviceId1, 2, "0x222", 2);
final MockGroup group3 = new MockGroup(deviceId2, 3, "0x333", 3);
final MockGroup group4 = new MockGroup(deviceId2, 4, "0x444", 4);
final MockGroup group5 = new MockGroup(deviceId3, 5, "0x555", 5);
final MockGroup group6 = new MockGroup(deviceId3, 6, "0x666", 6);
/**
 * Mock class for a group. All derived numeric values (id, life, packets,
 * bytes, reference count) are fixed offsets from baseValue = id * 100, so
 * each mock group yields distinct, predictable numbers for JSON matching.
 */
private static class MockGroup implements Group {
    final DeviceId deviceId;
    final ApplicationId appId;
    final GroupKey appCookie;
    final long baseValue;          // id * 100; seed for all derived values
    final List<GroupBucket> bucketList;
    GroupBuckets buckets;

    // deviceId/appId/appCookie identify the group; id seeds the numeric fields.
    public MockGroup(DeviceId deviceId, int appId, String appCookie, int id) {
        this.deviceId = deviceId;
        this.appId = new DefaultApplicationId(appId, String.valueOf(appId));
        this.appCookie = new DefaultGroupKey(appCookie.getBytes());
        this.baseValue = id * 100;
        this.bucketList = new ArrayList<>();
        this.buckets = new GroupBuckets(bucketList);
    }

    @Override
    public GroupId id() {
        return new DefaultGroupId((int) baseValue + 55);
    }

    @Override
    public GroupState state() {
        return GroupState.ADDED;
    }

    @Override
    public long life() {
        return baseValue + 11;
    }

    @Override
    public long packets() {
        return baseValue + 22;
    }

    @Override
    public long bytes() {
        return baseValue + 33;
    }

    @Override
    public long referenceCount() {
        return baseValue + 44;
    }

    @Override
    public int age() {
        return 0;
    }

    @Override
    public Type type() {
        return GroupDescription.Type.ALL;
    }

    @Override
    public DeviceId deviceId() {
        return this.deviceId;
    }

    @Override
    public ApplicationId appId() {
        return this.appId;
    }

    @Override
    public GroupKey appCookie() {
        return this.appCookie;
    }

    @Override
    public Integer givenGroupId() {
        // Same value as id() — the "given" id mirrors the computed one.
        return (int) baseValue + 55;
    }

    @Override
    public GroupBuckets buckets() {
        return this.buckets;
    }
}
/**
 * Populates some groups used as testing data: groups 1-2 on device 1 and
 * groups 3-4 on device 2, and programs the mock group service to return
 * them. Groups 5-6 (device 3) are intentionally left unregistered.
 */
private void setupMockGroups() {
    final Set<Group> groups1 = new HashSet<>();
    groups1.add(group1);
    groups1.add(group2);
    final Set<Group> groups2 = new HashSet<>();
    groups2.add(group3);
    groups2.add(group4);
    groups.put(deviceId1, groups1);
    groups.put(deviceId2, groups2);
    expect(mockGroupService.getGroups(deviceId1))
            .andReturn(groups.get(deviceId1)).anyTimes();
    expect(mockGroupService.getGroups(deviceId2))
            .andReturn(groups.get(deviceId2)).anyTimes();
}
/**
 * Sets up the global values for all the tests: records device and core
 * service expectations, replays the core service, and registers the mocks
 * plus a live codec manager in the service directory.
 *
 * <p>NOTE(review): mockDeviceService and mockGroupService are only recorded
 * here, not replayed — presumably each test calls replay() after adding its
 * own expectations; confirm against the individual test methods.
 */
@Before
public void setUpTest() {
    // Mock device service
    expect(mockDeviceService.getDevice(deviceId1))
            .andReturn(device1);
    expect(mockDeviceService.getDevice(deviceId2))
            .andReturn(device2);
    expect(mockDeviceService.getDevices())
            .andReturn(ImmutableSet.of(device1, device2));
    // Mock Core Service
    expect(mockCoreService.getAppId(anyShort()))
            .andReturn(NetTestTools.APP_ID).anyTimes();
    expect(mockCoreService.registerApplication(GroupCodec.REST_APP_ID))
            .andReturn(APP_ID).anyTimes();
    replay(mockCoreService);
    // Register the services needed for the test
    final CodecManager codecService = new CodecManager();
    codecService.activate();
    ServiceDirectory testDirectory =
            new TestServiceDirectory()
                    .add(GroupService.class, mockGroupService)
                    .add(DeviceService.class, mockDeviceService)
                    .add(CodecService.class, codecService)
                    .add(CoreService.class, mockCoreService);
    BaseResource.setServiceDirectory(testDirectory);
}
/**
 * Cleans up and verifies the mocks.
 * NOTE(review): mockDeviceService is replayed by the tests but never
 * verified here -- confirm this is intentional (some tests do not consume
 * all of the single-use getDevice() expectations recorded in setUpTest()).
 */
@After
public void tearDownTest() {
    verify(mockGroupService);
    verify(mockCoreService);
}
/**
 * Hamcrest matcher that verifies a JSON-encoded group carries the same
 * id, application id, device id and buckets as a given {@link Group}.
 */
public static class GroupJsonMatcher extends TypeSafeMatcher<JsonObject> {
    private final Group group;
    private final String expectedAppId;
    private String reason = "";

    public GroupJsonMatcher(Group groupValue, String expectedAppIdValue) {
        group = groupValue;
        expectedAppId = expectedAppIdValue;
    }

    @Override
    public boolean matchesSafely(JsonObject jsonGroup) {
        // The group id must match.
        final String expectedId = group.id().toString();
        if (!jsonGroup.get("id").asString().equals(expectedId)) {
            reason = "id " + group.id().toString();
            return false;
        }
        // The application id must match.
        final String expectedApp = group.appId().name();
        if (!jsonGroup.get("appId").asString().equals(expectedApp)) {
            reason = "appId " + group.appId().name();
            return false;
        }
        // The device id must match.
        if (!jsonGroup.get("deviceId").asString().equals(group.deviceId().toString())) {
            reason = "deviceId " + group.deviceId();
            return false;
        }
        // Every bucket of the group must appear (matched by type name) in the
        // JSON bucket array, and the array sizes must agree.
        if (group.buckets().buckets() != null) {
            final JsonArray jsonBuckets = jsonGroup.get("buckets").asArray();
            if (jsonBuckets.size() != group.buckets().buckets().size()) {
                reason = "buckets array size of "
                        + Integer.toString(group.buckets().buckets().size());
                return false;
            }
            for (final GroupBucket bucket : group.buckets().buckets()) {
                boolean bucketFound = false;
                for (int index = 0; index < jsonBuckets.size(); index++) {
                    final String jsonType =
                            jsonBuckets.get(index).asObject().get("type").asString();
                    if (jsonType.equals(bucket.type().name())) {
                        bucketFound = true;
                    }
                }
                if (!bucketFound) {
                    reason = "group bucket " + bucket.toString();
                    return false;
                }
            }
        }
        return true;
    }

    @Override
    public void describeTo(Description description) {
        description.appendText(reason);
    }
}
/**
 * Factory to allocate a group matcher.
 *
 * @param group group object we are looking for
 * @param expectedAppName expected application name in the JSON representation
 * @return matcher
 */
private static GroupJsonMatcher matchesGroup(Group group, String expectedAppName) {
    return new GroupJsonMatcher(group, expectedAppName);
}
/**
 * Hamcrest matcher that checks whether a JSON array of groups contains an
 * entry whose id matches a given group; when found, the entry's remaining
 * attributes are verified via {@link GroupJsonMatcher}.
 */
public static class GroupJsonArrayMatcher extends TypeSafeMatcher<JsonArray> {
    private final Group group;
    private String reason = "";

    public GroupJsonArrayMatcher(Group groupValue) {
        group = groupValue;
    }

    @Override
    public boolean matchesSafely(JsonArray json) {
        final String expectedId = group.id().toString();
        boolean found = false;
        for (int index = 0; index < json.size(); index++) {
            final JsonObject jsonGroup = json.get(index).asObject();
            if (jsonGroup.get("id").asString().equals(expectedId)) {
                found = true;
                // We found the correct group; check the attribute values too.
                assertThat(jsonGroup, matchesGroup(group, APP_ID.name()));
            }
        }
        if (found) {
            return true;
        }
        reason = "Group with id " + group.id().toString() + " not found";
        return false;
    }

    @Override
    public void describeTo(Description description) {
        description.appendText(reason);
    }
}
/**
 * Factory to allocate a group array matcher.
 *
 * @param group group object we are looking for
 * @return matcher that succeeds when the JSON array contains the group
 */
private static GroupJsonArrayMatcher hasGroup(Group group) {
    return new GroupJsonArrayMatcher(group);
}
/**
 * Tests the result of the rest api GET when there are no groups.
 * The group service returns null for both devices; the REST layer is
 * expected to render an empty "groups" array.
 */
@Test
public void testGroupsEmptyArray() {
    expect(mockGroupService.getGroups(deviceId1)).andReturn(null).anyTimes();
    expect(mockGroupService.getGroups(deviceId2)).andReturn(null).anyTimes();
    replay(mockGroupService);
    replay(mockDeviceService);
    final WebTarget wt = target();
    final String response = wt.path("groups").request().get(String.class);
    assertThat(response, is("{\"groups\":[]}"));
}
/**
 * Tests the result of the rest api GET when there are active groups.
 * All four mock groups across both devices must appear in the response.
 */
@Test
public void testGroupsPopulatedArray() {
    setupMockGroups();
    replay(mockGroupService);
    replay(mockDeviceService);
    final WebTarget wt = target();
    final String response = wt.path("groups").request().get(String.class);
    final JsonObject result = Json.parse(response).asObject();
    assertThat(result, notNullValue());
    // The response must contain exactly one top-level member: "groups".
    assertThat(result.names(), hasSize(1));
    assertThat(result.names().get(0), is("groups"));
    final JsonArray jsonGroups = result.get("groups").asArray();
    assertThat(jsonGroups, notNullValue());
    assertThat(jsonGroups, hasGroup(group1));
    assertThat(jsonGroups, hasGroup(group2));
    assertThat(jsonGroups, hasGroup(group3));
    assertThat(jsonGroups, hasGroup(group4));
}
/**
 * Tests the result of a rest api GET for a device.
 * The anyObject() stub overrides the per-device stubs from
 * setupMockGroups(), so any device id resolves to group5/group6.
 */
@Test
public void testGroupsSingleDevice() {
    setupMockGroups();
    final Set<Group> groups = new HashSet<>();
    groups.add(group5);
    groups.add(group6);
    expect(mockGroupService.getGroups(anyObject()))
            .andReturn(groups).anyTimes();
    replay(mockGroupService);
    replay(mockDeviceService);
    final WebTarget wt = target();
    final String response = wt.path("groups/" + deviceId3).request().get(String.class);
    final JsonObject result = Json.parse(response).asObject();
    assertThat(result, notNullValue());
    assertThat(result.names(), hasSize(1));
    assertThat(result.names().get(0), is("groups"));
    final JsonArray jsonGroups = result.get("groups").asArray();
    assertThat(jsonGroups, notNullValue());
    assertThat(jsonGroups, hasGroup(group5));
    assertThat(jsonGroups, hasGroup(group6));
}
/**
 * Test the result of a rest api GET with specifying device id and appcookie.
 * The group service is stubbed to return group5 for any (device, cookie) pair.
 */
@Test
public void testGroupByDeviceIdAndAppCookie() {
    setupMockGroups();
    expect(mockGroupService.getGroup(anyObject(), anyObject()))
            .andReturn(group5).anyTimes();
    replay(mockGroupService);
    final WebTarget wt = target();
    final String response = wt.path("groups/" + deviceId3 + "/" + "0x111")
            .request().get(String.class);
    final JsonObject result = Json.parse(response).asObject();
    assertThat(result, notNullValue());
    assertThat(result.names(), hasSize(1));
    assertThat(result.names().get(0), is("groups"));
    // Despite its name, this array holds the matched groups.
    final JsonArray jsonFlows = result.get("groups").asArray();
    assertThat(jsonFlows, notNullValue());
    assertThat(jsonFlows, hasGroup(group5));
}
/**
 * Test whether the REST API returns 404 if no entry has been found.
 * getGroup() is stubbed to return null for any (device, cookie) pair.
 */
@Test
public void testGroupByDeviceIdAndAppCookieNull() {
    setupMockGroups();
    expect(mockGroupService.getGroup(anyObject(), anyObject()))
            .andReturn(null).anyTimes();
    replay(mockGroupService);
    final WebTarget wt = target();
    final Response response = wt.path("groups/" + deviceId3 + "/" + "0x222").request().get();
    assertEquals(404, response.getStatus());
}
/**
 * Tests creating a group with POST.
 * Posts the post-group.json resource and expects a 201 CREATED whose
 * Location header points under the device's groups collection.
 */
@Test
public void testPost() {
    mockGroupService.addGroup(anyObject());
    expectLastCall();
    replay(mockGroupService);
    WebTarget wt = target();
    InputStream jsonStream = GroupsResourceTest.class
            .getResourceAsStream("post-group.json");
    Response response = wt.path("groups/of:0000000000000001")
            .request(MediaType.APPLICATION_JSON_TYPE)
            .post(Entity.json(jsonStream));
    assertThat(response.getStatus(), is(HttpURLConnection.HTTP_CREATED));
    String location = response.getLocation().getPath();
    assertThat(location, Matchers.startsWith("/groups/of:0000000000000001/"));
}
/**
 * Tests deleting a group.
 * DELETE on a device/cookie path must invoke GroupService.removeGroup()
 * and answer 204 NO CONTENT.
 */
@Test
public void testDelete() {
    setupMockGroups();
    mockGroupService.removeGroup(anyObject(), anyObject(), anyObject());
    expectLastCall();
    replay(mockGroupService);
    WebTarget wt = target();
    String location = "/groups/1/0x111";
    Response deleteResponse = wt.path(location)
            .request(MediaType.APPLICATION_JSON_TYPE)
            .delete();
    assertThat(deleteResponse.getStatus(),
               is(HttpURLConnection.HTTP_NO_CONTENT));
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openejb.core.stateful;
import org.apache.openejb.ApplicationException;
import org.apache.openejb.Injection;
import org.apache.openejb.InvalidateReferenceException;
import org.apache.openejb.OpenEJBException;
import org.apache.openejb.SystemException;
import org.apache.openejb.DeploymentInfo;
import org.apache.openejb.core.BaseContext;
import org.apache.openejb.core.CoreDeploymentInfo;
import org.apache.openejb.core.CoreUserTransaction;
import org.apache.openejb.core.Operation;
import org.apache.openejb.core.ThreadContext;
import org.apache.openejb.core.interceptor.InterceptorData;
import org.apache.openejb.core.interceptor.InterceptorStack;
import org.apache.openejb.core.ivm.IntraVmCopyMonitor;
import org.apache.openejb.core.transaction.TransactionRolledbackException;
import org.apache.openejb.persistence.JtaEntityManagerRegistry;
import org.apache.openejb.spi.SecurityService;
import org.apache.openejb.util.Index;
import org.apache.openejb.util.LogCategory;
import org.apache.openejb.util.Logger;
import org.apache.xbean.recipe.ObjectRecipe;
import org.apache.xbean.recipe.Option;
import org.apache.xbean.recipe.StaticRecipe;
import org.apache.xbean.recipe.ConstructionException;
import javax.ejb.EJBException;
import javax.ejb.SessionContext;
import javax.ejb.SessionBean;
import javax.naming.Context;
import javax.naming.NamingException;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.transaction.Transaction;
import javax.transaction.TransactionManager;
import java.lang.reflect.Method;
import java.rmi.NoSuchObjectException;
import java.rmi.RemoteException;
import java.util.Hashtable;
import java.util.LinkedList;
import java.util.Map;
import java.util.HashMap;
import java.util.List;
import java.io.Serializable;
public class StatefulInstanceManager {
public static final Logger logger = Logger.getInstance(LogCategory.OPENEJB, "org.apache.openejb.util.resources");
private final long timeOut;
// queue of beans for LRU algorithm
private final BeanEntryQueue lruQueue;
private final PassivationStrategy passivator;
private final int bulkPassivationSize;
private final TransactionManager transactionManager;
private final SecurityService securityService;
private final JtaEntityManagerRegistry jtaEntityManagerRegistry;
public StatefulInstanceManager(TransactionManager transactionManager, SecurityService securityService, JtaEntityManagerRegistry jtaEntityManagerRegistry, Class passivatorClass, int timeout, int poolSize, int bulkPassivate) throws OpenEJBException {
this.transactionManager = transactionManager;
this.securityService = securityService;
this.jtaEntityManagerRegistry = jtaEntityManagerRegistry;
this.lruQueue = new BeanEntryQueue(poolSize);
if (poolSize == 0) {
this.bulkPassivationSize = 1;
} else {
this.bulkPassivationSize = Math.min(bulkPassivate, poolSize);
}
this.timeOut = timeout * 60 * 1000;
try {
passivatorClass = (passivatorClass == null) ? SimplePassivater.class : passivatorClass;
passivator = (PassivationStrategy) passivatorClass.newInstance();
} catch (Exception e) {
throw new OpenEJBException("Could not create the passivator " + passivatorClass.getName(), e);
}
}
public void deploy(CoreDeploymentInfo deploymentInfo, Index<Method, StatefulContainer.MethodType> index) throws OpenEJBException {
deploymentInfo.setContainerData(new Data(index));
}
public void undeploy(CoreDeploymentInfo deploymentInfo) throws OpenEJBException {
Data data = (Data) deploymentInfo.getContainerData();
if (data != null) {
for (BeanEntry entry: data.getBeanIndex().values()) {
lruQueue.remove(entry);
}
deploymentInfo.setContainerData(null);
}
}
Index<Method, StatefulContainer.MethodType> getMethodIndex(CoreDeploymentInfo deploymentInfo) {
Data data = (Data) deploymentInfo.getContainerData();
return data.getMethodIndex();
}
public Transaction getBeanTransaction(ThreadContext callContext) throws OpenEJBException {
BeanEntry entry = getBeanEntry(callContext);
if (entry == null) return null;
return entry.beanTransaction;
}
public void setBeanTransaction(ThreadContext callContext, Transaction beanTransaction) throws OpenEJBException {
BeanEntry entry = getBeanEntry(callContext);
entry.beanTransaction = beanTransaction;
}
public Map<EntityManagerFactory, EntityManager> getEntityManagers(ThreadContext callContext, Index<EntityManagerFactory, Map> factories) throws OpenEJBException {
BeanEntry entry = getBeanEntry(callContext);
return entry.getEntityManagers(factories);
}
public void setEntityManagers(ThreadContext callContext, Index<EntityManagerFactory, EntityManager> entityManagers) throws OpenEJBException {
BeanEntry entry = getBeanEntry(callContext);
entry.setEntityManagers(entityManagers);
}
public Object newInstance(Object primaryKey, Class beanClass) throws OpenEJBException {
Object bean = null;
ThreadContext threadContext = ThreadContext.getThreadContext();
Operation currentOperation = threadContext.getCurrentOperation();
try {
ObjectRecipe objectRecipe = new ObjectRecipe(beanClass);
objectRecipe.allow(Option.FIELD_INJECTION);
objectRecipe.allow(Option.PRIVATE_PROPERTIES);
// objectRecipe.allow(Option.IGNORE_MISSING_PROPERTIES);
ThreadContext callContext = ThreadContext.getThreadContext();
CoreDeploymentInfo deploymentInfo = callContext.getDeploymentInfo();
Context ctx = deploymentInfo.getJndiEnc();
SessionContext sessionContext;
synchronized (this) {
try {
sessionContext = (SessionContext) ctx.lookup("java:comp/EJBContext");
} catch (NamingException e1) {
sessionContext = createSessionContext();
ctx.bind("java:comp/EJBContext", sessionContext);
}
}
if (javax.ejb.SessionBean.class.isAssignableFrom(beanClass) || hasSetSessionContext(beanClass)) {
callContext.setCurrentOperation(Operation.INJECTION);
objectRecipe.setProperty("sessionContext", new StaticRecipe(sessionContext));
}
fillInjectionProperties(objectRecipe, beanClass, deploymentInfo, ctx);
bean = objectRecipe.create(beanClass.getClassLoader());
Map unsetProperties = objectRecipe.getUnsetProperties();
if (unsetProperties.size() > 0) {
for (Object property : unsetProperties.keySet()) {
logger.warning("Injection: No such property '" + property + "' in class " + beanClass.getName());
}
}
HashMap<String, Object> interceptorInstances = new HashMap<String, Object>();
for (InterceptorData interceptorData : deploymentInfo.getAllInterceptors()) {
if (interceptorData.getInterceptorClass().equals(beanClass)) continue;
Class clazz = interceptorData.getInterceptorClass();
ObjectRecipe interceptorRecipe = new ObjectRecipe(clazz);
interceptorRecipe.allow(Option.FIELD_INJECTION);
interceptorRecipe.allow(Option.PRIVATE_PROPERTIES);
interceptorRecipe.allow(Option.IGNORE_MISSING_PROPERTIES);
fillInjectionProperties(interceptorRecipe, clazz, deploymentInfo, ctx);
try {
Object interceptorInstance = interceptorRecipe.create(clazz.getClassLoader());
interceptorInstances.put(clazz.getName(), interceptorInstance);
} catch (ConstructionException e) {
throw new Exception("Failed to create interceptor: " + clazz.getName(), e);
}
}
interceptorInstances.put(beanClass.getName(), bean);
callContext.setCurrentOperation(Operation.POST_CONSTRUCT);
List<InterceptorData> callbackInterceptors = deploymentInfo.getCallbackInterceptors();
InterceptorStack interceptorStack = new InterceptorStack(bean, null, Operation.POST_CONSTRUCT, callbackInterceptors, interceptorInstances);
interceptorStack.invoke();
bean = new Instance(bean, interceptorInstances);
} catch (Throwable callbackException) {
/*
In the event of an exception, OpenEJB is required to log the exception, evict the instance,
and mark the transaction for rollback. If there is a transaction to rollback, then the a
javax.transaction.TransactionRolledbackException must be throw to the client. Otherwise a
java.rmi.RemoteException is thrown to the client.
See EJB 1.1 specification, section 12.3.2
See EJB 2.0 specification, section 18.3.3
*/
handleCallbackException(callbackException, bean, threadContext, "setSessionContext");
} finally {
threadContext.setCurrentOperation(currentOperation);
}
// add to index
BeanEntry entry = new BeanEntry(bean, primaryKey, timeOut);
getBeanIndex(threadContext).put(primaryKey, entry);
return bean;
}
private static void fillInjectionProperties(ObjectRecipe objectRecipe, Class clazz, CoreDeploymentInfo deploymentInfo, Context context) {
for (Injection injection : deploymentInfo.getInjections()) {
if (!injection.getTarget().isAssignableFrom(clazz)) continue;
try {
String jndiName = injection.getJndiName();
Object object = context.lookup("java:comp/env/" + jndiName);
if (object instanceof String) {
String string = (String) object;
// Pass it in raw so it could be potentially converted to
// another data type by an xbean-reflect property editor
objectRecipe.setProperty(injection.getTarget().getName() + "/" + injection.getName(), string);
} else {
objectRecipe.setProperty(injection.getTarget().getName() + "/" + injection.getName(), new StaticRecipe(object));
}
} catch (NamingException e) {
logger.warning("Injection data not found in enc: jndiName='" + injection.getJndiName() + "', target=" + injection.getTarget() + "/" + injection.getName());
}
}
}
private boolean hasSetSessionContext(Class beanClass) {
try {
beanClass.getMethod("setSessionContext", SessionContext.class);
return true;
} catch (NoSuchMethodException e) {
return false;
}
}
private SessionContext createSessionContext() {
StatefulUserTransaction userTransaction = new StatefulUserTransaction(new CoreUserTransaction(transactionManager), jtaEntityManagerRegistry);
return new StatefulContext(transactionManager, securityService, userTransaction);
}
public Object obtainInstance(Object primaryKey, ThreadContext callContext) throws OpenEJBException {
if (primaryKey == null) {
throw new SystemException(new NullPointerException("Cannot obtain an instance of the stateful session bean with a null session id"));
}
// look for entry in index
BeanEntry entry = getBeanIndex(callContext).get(primaryKey);
// if we didn't find the bean in the index, try to activate it
if (entry == null) {
Object bean = activateInstance(primaryKey, callContext);
return bean;
}
// if the bean is already in a transaction, just return it
if (entry.beanTransaction != null) {
return entry.bean;
}
// remove from the queue so it is not passivated while in use
BeanEntry queueEntry = lruQueue.remove(entry);
if (queueEntry != null) {
// if bean is timed out, destroy it
if (entry.isTimedOut()) {
entry = getBeanIndex(callContext).remove(entry.primaryKey);
handleTimeout(entry, callContext);
throw new InvalidateReferenceException(new NoSuchObjectException("Stateful SessionBean has timed-out"));
}
return entry.bean;
} else {
// if it is not in the queue, the bean is already being invoked
// the only reentrant/concurrent operations allowed are Session synchronization callbacks
Operation currentOperation = callContext.getCurrentOperation();
if (currentOperation != Operation.AFTER_COMPLETION && currentOperation != Operation.BEFORE_COMPLETION) {
throw new ApplicationException(new RemoteException("Concurrent calls not allowed"));
}
return entry.bean;
}
}
private Object activateInstance(Object primaryKey, ThreadContext callContext) throws SystemException, ApplicationException {
// attempt to active a passivated entity
BeanEntry entry = activate(primaryKey);
if (entry == null) {
throw new InvalidateReferenceException(new NoSuchObjectException("Not Found"));
}
if (entry.isTimedOut()) {
// Since the bean instance hasn't had its ejbActivate() method called yet,
// it is still considered to be passivated at this point. Instances that timeout
// while passivated must be evicted WITHOUT having their ejbRemove()
// method invoked. Section 6.6 of EJB 1.1 specification.
throw new InvalidateReferenceException(new NoSuchObjectException("Timed Out"));
}
// call the activate method
Operation currentOperation = callContext.getCurrentOperation();
callContext.setCurrentOperation(Operation.ACTIVATE);
try {
CoreDeploymentInfo deploymentInfo = callContext.getDeploymentInfo();
StatefulInstanceManager.Instance instance = (StatefulInstanceManager.Instance) entry.bean;
Method remove = instance.bean instanceof SessionBean? SessionBean.class.getMethod("ejbActivate"): null;
List<InterceptorData> callbackInterceptors = deploymentInfo.getCallbackInterceptors();
InterceptorStack interceptorStack = new InterceptorStack(instance.bean, remove, Operation.ACTIVATE, callbackInterceptors, instance.interceptors);
interceptorStack.invoke();
} catch (Throwable callbackException) {
/*
In the event of an exception, OpenEJB is required to log the exception, evict the instance,
and mark the transaction for rollback. If there is a transaction to rollback, then the a
javax.transaction.TransactionRolledbackException must be throw to the client. Otherwise a
java.rmi.RemoteException is thrown to the client.
See EJB 1.1 specification, section 12.3.2
*/
handleCallbackException(callbackException, entry.bean, callContext, "ejbActivate");
} finally {
callContext.setCurrentOperation(currentOperation);
}
// add it to the index
getBeanIndex(callContext).put(primaryKey, entry);
return entry.bean;
}
protected void handleTimeout(BeanEntry entry, ThreadContext threadContext) {
Operation currentOperation = threadContext.getCurrentOperation();
threadContext.setCurrentOperation(Operation.PRE_DESTROY);
BaseContext.State[] originalAllowedStates = threadContext.setCurrentAllowedStates(StatefulContext.getStates());
CoreDeploymentInfo deploymentInfo = threadContext.getDeploymentInfo();
Instance instance = (Instance) entry.bean;
try {
Method remove = instance.bean instanceof SessionBean? SessionBean.class.getMethod("ejbRemove"): null;
List<InterceptorData> callbackInterceptors = deploymentInfo.getCallbackInterceptors();
InterceptorStack interceptorStack = new InterceptorStack(instance.bean, remove, Operation.PRE_DESTROY, callbackInterceptors, instance.interceptors);
interceptorStack.invoke();
} catch (Throwable callbackException) {
/*
Exceptions are processed "quietly"; they are not reported to the client since
the timeout that caused the ejbRemove() operation did not, "technically", take
place in the context of a client call. Logically, it may have timeout sometime
before the client call.
*/
String logMessage = "An unexpected exception occured while invoking the ejbRemove method on the timed-out Stateful SessionBean instance; " + callbackException.getClass().getName() + " " + callbackException.getMessage();
/* [1] Log the exception or error */
logger.error(logMessage);
} finally {
logger.info("Removing the timed-out stateful session bean instance " + entry.primaryKey);
threadContext.setCurrentOperation(currentOperation);
threadContext.setCurrentAllowedStates(originalAllowedStates);
}
}
public void poolInstance(ThreadContext callContext, Object bean) throws OpenEJBException {
Object primaryKey = callContext.getPrimaryKey();
if (primaryKey == null || bean == null) {
throw new SystemException("Invalid arguments");
}
BeanEntry entry = getBeanIndex(callContext).get(primaryKey);
if (entry == null) {
entry = activate(primaryKey);
if (entry == null) {
throw new SystemException("Invalid primaryKey:" + primaryKey);
}
} else if (entry.bean != bean) {
throw new SystemException("Invalid ID for bean");
}
if (entry.beanTransaction == null) {
if (callContext.getCurrentOperation() != Operation.CREATE){
try {
entry.beanTransaction = transactionManager.getTransaction();
} catch (javax.transaction.SystemException se) {
throw new SystemException("TransactionManager failure", se);
}
}
// only put in LRU if no current transaction
if (entry.beanTransaction == null) {
// add it to end of Queue; the most reciently used bean
lruQueue.add(entry);
}
}
}
public Object freeInstance(ThreadContext threadContext) throws SystemException {
Object primaryKey = threadContext.getPrimaryKey();
BeanEntry entry = getBeanIndex(threadContext).remove(primaryKey);// remove frm index
if (entry == null) {
entry = activate(primaryKey);
} else {
lruQueue.remove(entry);
}
if (entry == null) {
return null;
}
return entry.bean;
}
protected void passivate() throws SystemException {
final ThreadContext threadContext = ThreadContext.getThreadContext();
Hashtable<Object, BeanEntry> stateTable = new Hashtable<Object, BeanEntry>(bulkPassivationSize);
BeanEntry currentEntry;
final Operation currentOperation = threadContext.getCurrentOperation();
final BaseContext.State[] originalAllowedStates = threadContext.setCurrentAllowedStates(StatefulContext.getStates());
CoreDeploymentInfo deploymentInfo = threadContext.getDeploymentInfo();
try {
for (int i = 0; i < bulkPassivationSize; ++i) {
currentEntry = lruQueue.first();
if (currentEntry == null) {
break;
}
getBeanIndex(threadContext).remove(currentEntry.primaryKey);
if (currentEntry.isTimedOut()) {
handleTimeout(currentEntry, threadContext);
} else {
threadContext.setCurrentOperation(Operation.PASSIVATE);
try {
StatefulInstanceManager.Instance instance = (StatefulInstanceManager.Instance) currentEntry.bean;
Method passivate = instance.bean instanceof SessionBean? SessionBean.class.getMethod("ejbPassivate"): null;
List<InterceptorData> callbackInterceptors = deploymentInfo.getCallbackInterceptors();
InterceptorStack interceptorStack = new InterceptorStack(instance.bean, passivate, Operation.PASSIVATE, callbackInterceptors, instance.interceptors);
interceptorStack.invoke();
} catch (Throwable e) {
String logMessage = "An unexpected exception occured while invoking the ejbPassivate method on the Stateful SessionBean instance; " + e.getClass().getName() + " " + e.getMessage();
/* [1] Log the exception or error */
logger.error(logMessage);
}
stateTable.put(currentEntry.primaryKey, currentEntry);
}
}
} finally {
threadContext.setCurrentOperation(currentOperation);
threadContext.setCurrentAllowedStates(originalAllowedStates);
}
/*
the IntraVmCopyMonitor.prePssivationOperation() demarcates
the begining of passivation; used by EjbHomeProxyHandler,
EjbObjectProxyHandler, IntraVmMetaData, and IntraVmHandle
to deterime how serialization for these artifacts.
*/
try {
IntraVmCopyMonitor.prePassivationOperation();
passivator.passivate(stateTable);
} finally {
IntraVmCopyMonitor.postPassivationOperation();
}
}
protected BeanEntry activate(Object primaryKey) throws SystemException {
return (BeanEntry) passivator.activate(primaryKey);
}
protected InvalidateReferenceException destroy(ThreadContext callContext, BeanEntry entry, Exception t) throws SystemException {
getBeanIndex(callContext).remove(entry.primaryKey);// remove frm index
lruQueue.remove(entry);// remove from queue
if (entry.beanTransaction != null) {
try {
entry.beanTransaction.setRollbackOnly();
} catch (javax.transaction.SystemException se) {
throw new SystemException(se);
} catch (IllegalStateException ise) {
throw new SystemException("Attempt to rollback a non-tx context", ise);
} catch (SecurityException lse) {
throw new SystemException("Container not authorized to rollback tx", lse);
}
return new InvalidateReferenceException(new TransactionRolledbackException(t));
} else if (t instanceof RemoteException) {
return new InvalidateReferenceException(t);
} else {
EJBException e = (EJBException) t;
return new InvalidateReferenceException(new RemoteException(e.getMessage(), e.getCausedByException()));
}
}
protected BeanEntry getBeanEntry(ThreadContext callContext) throws OpenEJBException {
Object primaryKey = callContext.getPrimaryKey();
if (primaryKey == null) {
throw new SystemException(new NullPointerException("The primary key is null. Cannot get the bean entry"));
}
BeanEntry entry = getBeanIndex(callContext).get(primaryKey);
if (entry == null) {
Object bean = this.obtainInstance(primaryKey, ThreadContext.getThreadContext());
this.poolInstance(callContext, bean);
entry = getBeanIndex(callContext).get(primaryKey);
}
return entry;
}
private Hashtable<Object, BeanEntry> getBeanIndex(ThreadContext threadContext) {
CoreDeploymentInfo deployment = threadContext.getDeploymentInfo();
Data data = (Data) deployment.getContainerData();
return data.getBeanIndex();
}
private static class Data {
private final Index<Method, StatefulContainer.MethodType> methodIndex;
private final Hashtable<Object, BeanEntry> beanIndex = new Hashtable<Object, BeanEntry>();
private Data(Index<Method, StatefulContainer.MethodType> methodIndex) {
this.methodIndex = methodIndex;
}
public Index<Method, StatefulContainer.MethodType> getMethodIndex() {
return methodIndex;
}
public Hashtable<Object, BeanEntry> getBeanIndex() {
return beanIndex;
}
}
class BeanEntryQueue {
private final LinkedList<BeanEntry> list;
private final int capacity;
protected BeanEntryQueue(int preferedCapacity) {
capacity = preferedCapacity;
list = new LinkedList<BeanEntry>();
}
protected synchronized BeanEntry first() {
return list.removeFirst();
}
protected synchronized void add(BeanEntry entry) throws SystemException {
entry.resetTimeOut();
list.addLast(entry);
entry.inQueue = true;
if (list.size() >= capacity) {// is the LRU QUE full?
passivate();
}
}
protected synchronized BeanEntry remove(BeanEntry entry) {
if (!entry.inQueue) {
return null;
}
if (list.remove(entry)) {
entry.inQueue = false;
return entry;
} else {
return null;
}
}
}
protected void handleCallbackException(Throwable e, Object instance, ThreadContext callContext, String callBack) throws ApplicationException, SystemException {
String remoteMessage = "An unexpected exception occured while invoking the " + callBack + " method on the Stateful SessionBean instance";
String logMessage = remoteMessage + "; " + e.getClass().getName() + " " + e.getMessage();
/* [1] Log the exception or error */
logger.error(logMessage);
/* [2] If the instance is in a transaction, mark the transaction for rollback. */
Transaction transaction = null;
try {
transaction = transactionManager.getTransaction();
} catch (Throwable t) {
logger.error("Could not retreive the current transaction from the transaction manager while handling a callback exception from the " + callBack + " method of bean " + callContext.getPrimaryKey());
}
if (transaction != null) {
markTxRollbackOnly(transaction);
}
/* [3] Discard the instance */
freeInstance(callContext);
/* [4] throw the java.rmi.RemoteException to the client */
if (transaction == null) {
throw new InvalidateReferenceException(new RemoteException(remoteMessage, e));
} else {
throw new InvalidateReferenceException(new TransactionRolledbackException(remoteMessage, e));
}
}
protected void markTxRollbackOnly(Transaction tx) throws SystemException {
try {
if (tx != null) {
tx.setRollbackOnly();
}
} catch (javax.transaction.SystemException se) {
throw new SystemException(se);
}
}
/**
 * Serializable holder pairing a bean instance with its interceptor
 * instances, keyed by name.
 */
public static class Instance implements Serializable {
    /** The session bean instance itself. */
    public final Object bean;
    /** Interceptor instances associated with the bean, keyed by name. */
    public final Map<String,Object> interceptors;

    public Instance(Object beanInstance, Map<String, Object> interceptorInstances) {
        this.interceptors = interceptorInstances;
        this.bean = beanInstance;
    }
}
}
| |
/**
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package rx.joins.operators;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;
import java.util.Arrays;
import org.junit.*;
import org.mockito.*;
import rx.*;
import rx.functions.*;
import rx.internal.util.UtilityFunctions;
import rx.joins.*;
import rx.observables.JoinObservable;
import rx.observers.TestSubscriber;
import rx.subjects.PublishSubject;
public class OperatorJoinsTest {
@Mock
Observer<Integer> observer;
/**
 * Sums its arguments at every arity the join operators support (2..9
 * arguments plus the N-ary form). Each fixed-arity overload builds on the
 * next smaller one; the N-ary form sums a boxed argument array. Addition is
 * left-associative in both forms, so overflow and null-unboxing behavior
 * are identical to summing inline.
 */
static final class Adder implements
        Func2<Integer, Integer, Integer>,
        Func3<Integer, Integer, Integer, Integer>,
        Func4<Integer, Integer, Integer, Integer, Integer>,
        Func5<Integer, Integer, Integer, Integer, Integer, Integer>,
        Func6<Integer, Integer, Integer, Integer, Integer, Integer, Integer>,
        Func7<Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer>,
        Func8<Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer>,
        Func9<Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer>,
        FuncN<Integer>
{
    @Override
    public Integer call(Integer t1, Integer t2) {
        return t1 + t2;
    }
    @Override
    public Integer call(Integer t1, Integer t2, Integer t3) {
        return call(t1, t2) + t3;
    }
    @Override
    public Integer call(Integer t1, Integer t2, Integer t3, Integer t4) {
        return call(t1, t2, t3) + t4;
    }
    @Override
    public Integer call(Integer t1, Integer t2, Integer t3, Integer t4, Integer t5) {
        return call(t1, t2, t3, t4) + t5;
    }
    @Override
    public Integer call(Integer t1, Integer t2, Integer t3, Integer t4, Integer t5, Integer t6) {
        return call(t1, t2, t3, t4, t5) + t6;
    }
    @Override
    public Integer call(Integer t1, Integer t2, Integer t3, Integer t4, Integer t5, Integer t6, Integer t7) {
        return call(t1, t2, t3, t4, t5, t6) + t7;
    }
    @Override
    public Integer call(Integer t1, Integer t2, Integer t3, Integer t4, Integer t5, Integer t6, Integer t7, Integer t8) {
        return call(t1, t2, t3, t4, t5, t6, t7) + t8;
    }
    @Override
    public Integer call(Integer t1, Integer t2, Integer t3, Integer t4, Integer t5, Integer t6, Integer t7, Integer t8, Integer t9) {
        return call(t1, t2, t3, t4, t5, t6, t7, t8) + t9;
    }
    @Override
    public Integer call(Object... args) {
        int total = 0;
        for (Object value : args) {
            total += (Integer) value;
        }
        return total;
    }
}
Adder add = new Adder();
/** Multiplies two values; used by the three-plan when() scenario. */
Func2<Integer, Integer, Integer> mul2 = new Func2<Integer, Integer, Integer>() {
    @Override
    public Integer call(Integer lhs, Integer rhs) {
        return lhs * rhs;
    }
};
/** Subtracts the second value from the first; used by the three-plan when() scenario. */
Func2<Integer, Integer, Integer> sub2 = new Func2<Integer, Integer, Integer>() {
    @Override
    public Integer call(Integer lhs, Integer rhs) {
        return lhs - rhs;
    }
};
/**
 * A function of every arity the join operators support (0..9 arguments plus
 * the N-ary form) that always throws {@link TestException}; every overload
 * delegates to the zero-argument {@link #call()}.
 */
static final class ThrowFunc<R> implements
        Func0<R>,
        Func1<Integer, R>,
        Func2<Integer, Integer, R>,
        Func3<Integer, Integer, Integer, R>,
        Func4<Integer, Integer, Integer, Integer, R>,
        Func5<Integer, Integer, Integer, Integer, Integer, R>,
        Func6<Integer, Integer, Integer, Integer, Integer, Integer, R>,
        Func7<Integer, Integer, Integer, Integer, Integer, Integer, Integer, R>,
        Func8<Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer, R>,
        Func9<Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer, R>,
        FuncN<R>
{
    // Single failure point: all other overloads funnel through here.
    @Override
    public R call() {
        throw new TestException("Forced failure");
    }
    @Override
    public R call(Integer t1) {
        return call();
    }
    @Override
    public R call(Object... args) {
        return call();
    }
    @Override
    public R call(Integer t1, Integer t2, Integer t3, Integer t4, Integer t5,
            Integer t6, Integer t7, Integer t8, Integer t9) {
        return call();
    }
    @Override
    public R call(Integer t1, Integer t2, Integer t3, Integer t4, Integer t5,
            Integer t6, Integer t7, Integer t8) {
        return call();
    }
    @Override
    public R call(Integer t1, Integer t2, Integer t3, Integer t4, Integer t5,
            Integer t6, Integer t7) {
        return call();
    }
    @Override
    public R call(Integer t1, Integer t2, Integer t3, Integer t4, Integer t5,
            Integer t6) {
        return call();
    }
    @Override
    public R call(Integer t1, Integer t2, Integer t3, Integer t4, Integer t5) {
        return call();
    }
    @Override
    public R call(Integer t1, Integer t2, Integer t3, Integer t4) {
        return call();
    }
    @Override
    public R call(Integer t1, Integer t2, Integer t3) {
        return call();
    }
    @Override
    public R call(Integer t1, Integer t2) {
        return call();
    }
}
ThrowFunc<Integer> throwFunc = new ThrowFunc<Integer>();
Observable<Integer> some = Observable.just(1);
Observable<Integer> error = Observable.error(new TestException("Forced failure"));
@Before
public void before() {
    // Populate the @Mock-annotated fields (observer) before each test.
    MockitoAnnotations.initMocks(this);
}
@Test(expected = NullPointerException.class)
public void and2ArgumentNull() {
JoinObservable.from(some).and(null);
}
@Test(expected = NullPointerException.class)
public void and3argumentNull() {
JoinObservable.from(some).and(some).and(null);
}
/**
 * Subscribes a fresh mock observer to {@code m} and asserts exactly one
 * {@code onNext(count)} followed by completion, with no error.
 */
void verifyAnd(JoinObservable<Integer> m, int count) {
    @SuppressWarnings("unchecked")
    Observer<Integer> mockObserver = mock(Observer.class);
    m.toObservable().subscribe(mockObserver);
    verify(mockObserver, times(1)).onNext(count);
    verify(mockObserver, times(1)).onCompleted();
    verify(mockObserver, never()).onError(any(Throwable.class));
}
/**
 * Subscribes a fresh mock observer to {@code m} and asserts a single
 * {@code onError(TestException)} with no emissions and no completion.
 */
void verifyError(JoinObservable<Integer> m) {
    @SuppressWarnings("unchecked")
    Observer<Integer> mockObserver = mock(Observer.class);
    m.toObservable().subscribe(mockObserver);
    verify(mockObserver, never()).onNext(any(Integer.class));
    verify(mockObserver, never()).onCompleted();
    verify(mockObserver, times(1)).onError(any(TestException.class));
}
@Test
public void and2() {
verifyAnd(JoinObservable.when(JoinObservable.from(some).and(some).then(add)), 2);
}
@Test
public void and2Error1() {
verifyError(JoinObservable.when(JoinObservable.from(error).and(some).then(add)));
}
@Test
public void and2Error2() {
verifyError(JoinObservable.when(JoinObservable.from(some).and(error).then(add)));
}
@Test
public void and3() {
verifyAnd(JoinObservable.when(JoinObservable.from(some).and(some).and(some).then(add)), 3);
}
@Test
public void and3Error1() {
verifyError(JoinObservable.when(JoinObservable.from(error).and(some).and(some).then(add)));
}
@Test
public void and3Error2() {
verifyError(JoinObservable.when(JoinObservable.from(some).and(error).and(some).then(add)));
}
@Test
public void and3Error3() {
verifyError(JoinObservable.when(JoinObservable.from(some).and(some).and(error).then(add)));
}
@Test(expected = NullPointerException.class)
public void thenArgumentNull() {
JoinObservable.from(some).then(null);
}
@Test(expected = NullPointerException.class)
public void then2ArgumentNull() {
JoinObservable.from(some).and(some).then(null);
}
@Test(expected = NullPointerException.class)
public void then3ArgumentNull() {
JoinObservable.from(some).and(some).and(some).then(null);
}
@Test(expected = NullPointerException.class)
public void then4ArgumentNull() {
JoinObservable.from(some).and(some).and(some).and(some).then(null);
}
@Test(expected = NullPointerException.class)
public void then5ArgumentNull() {
JoinObservable.from(some).and(some).and(some).and(some).and(some).then(null);
}
@Test(expected = NullPointerException.class)
public void then6ArgumentNull() {
JoinObservable.from(some).and(some).and(some).and(some).and(some).and(some).then(null);
}
@Test(expected = NullPointerException.class)
public void then7ArgumentNull() {
JoinObservable.from(some).and(some).and(some).and(some).and(some).and(some).and(some).then(null);
}
@Test(expected = NullPointerException.class)
public void then8ArgumentNull() {
JoinObservable.from(some).and(some).and(some).and(some).and(some).and(some).and(some).and(some).then(null);
}
@Test(expected = NullPointerException.class)
public void then9ArgumentNull() {
JoinObservable.from(some).and(some).and(some).and(some).and(some).and(some).and(some).and(some).and(some).then(null);
}
@Test
public void thenNArgumentNull() {
    // For every N-ary pattern length 10..99, then(null) must throw NPE.
    for (int length = 10; length < 100; length++) {
        PatternN pattern = JoinObservable.from(some).and(some)
                .and(some).and(some)
                .and(some).and(some)
                .and(some).and(some)
                .and(some).and(some);
        try {
            // Extend the base 10-source pattern up to the target length
            // inside the try, mirroring the original scope exactly.
            for (int extra = 10; extra < length; extra++) {
                pattern = pattern.and(some);
            }
            pattern.then(null);
            fail("Failed to throw exception with pattern length " + length);
        } catch (NullPointerException expected) {
            // expected, continue
        }
    }
}
@Test(expected = NullPointerException.class)
public void then10ArgumentNull() {
JoinObservable.from(some).and(some).and(some).and(some).and(some).and(some).and(some).and(some).and(some).and(some).then(null);
}
@Test
public void then1() {
verifyAnd(JoinObservable.when(JoinObservable.from(some).then(UtilityFunctions.<Integer> identity())), 1);
}
@Test
public void then1Error() {
verifyError(JoinObservable.when(JoinObservable.from(error).then(UtilityFunctions.<Integer> identity())));
}
@Test
public void then1Throws() {
verifyError(JoinObservable.when(JoinObservable.from(some).then(throwFunc)));
}
@Test
public void then2Throws() {
verifyError(JoinObservable.when(JoinObservable.from(some).and(some).then(throwFunc)));
}
@Test
public void then3Throws() {
verifyError(JoinObservable.when(JoinObservable.from(some).and(some).and(some).then(throwFunc)));
}
@Test(expected = NullPointerException.class)
public void whenArgumentNull1() {
JoinObservable.when((Plan0<Object>[]) null);
}
@Test(expected = NullPointerException.class)
public void whenArgumentNull2() {
JoinObservable.when((Iterable<Plan0<Object>>) null);
}
@Test
public void whenMultipleSymmetric() {
Observable<Integer> source1 = Observable.just(1, 2, 3);
Observable<Integer> source2 = Observable.just(4, 5, 6);
Observable<Integer> m = JoinObservable.when(JoinObservable.from(source1).and(source2).then(add)).toObservable();
m.subscribe(observer);
verify(observer, never()).onError(any(Throwable.class));
verify(observer, times(1)).onNext(1 + 4);
verify(observer, times(1)).onNext(2 + 5);
verify(observer, times(1)).onNext(3 + 6);
verify(observer, times(1)).onCompleted();
}
@Test
public void whenMultipleAsymSymmetric() {
Observable<Integer> source1 = Observable.just(1, 2, 3);
Observable<Integer> source2 = Observable.just(4, 5);
Observable<Integer> m = JoinObservable.when(JoinObservable.from(source1).and(source2).then(add)).toObservable();
m.subscribe(observer);
verify(observer, never()).onError(any(Throwable.class));
verify(observer, times(1)).onNext(1 + 4);
verify(observer, times(1)).onNext(2 + 5);
verify(observer, times(1)).onCompleted();
}
@Test
public void whenEmptyEmpty() {
Observable<Integer> source1 = Observable.empty();
Observable<Integer> source2 = Observable.empty();
Observable<Integer> m = JoinObservable.when(JoinObservable.from(source1).and(source2).then(add)).toObservable();
m.subscribe(observer);
verify(observer, never()).onError(any(Throwable.class));
verify(observer, never()).onNext(any(Integer.class));
verify(observer, times(1)).onCompleted();
}
@Test
public void whenNeverNever() {
Observable<Integer> source1 = Observable.never();
Observable<Integer> source2 = Observable.never();
Observable<Integer> m = JoinObservable.when(JoinObservable.from(source1).and(source2).then(add)).toObservable();
m.subscribe(observer);
verify(observer, never()).onError(any(Throwable.class));
verify(observer, never()).onNext(any(Integer.class));
verify(observer, never()).onCompleted();
}
@Test
public void whenThrowNonEmpty() {
Observable<Integer> source1 = Observable.empty();
Observable<Integer> source2 = Observable.error(new TestException("Forced failure"));
Observable<Integer> m = JoinObservable.when(JoinObservable.from(source1).and(source2).then(add)).toObservable();
m.subscribe(observer);
verify(observer, times(1)).onError(any(Throwable.class));
verify(observer, never()).onNext(any(Integer.class));
verify(observer, never()).onCompleted();
}
/**
 * Drives three overlapping join plans (x+y, x*z, y-z) from three
 * PublishSubjects. Each value is consumed by whichever plan pairs it first,
 * so the expected emissions are 1*7, 2*8, 3+4, 5-9 (= 7, 16, 7, -4), then
 * completion once all three sources complete.
 */
@Test
public void whenComplicated() {
    PublishSubject<Integer> xs = PublishSubject.create();
    PublishSubject<Integer> ys = PublishSubject.create();
    PublishSubject<Integer> zs = PublishSubject.create();
    Observable<Integer> m = JoinObservable.when(
            JoinObservable.from(xs).and(ys).then(add), // 1+4=5, 2+5=7, 3+6=9
            JoinObservable.from(xs).and(zs).then(mul2), // 1*7=7, 2*8=16, 3*9=27
            JoinObservable.from(ys).and(zs).then(sub2) // 4-7=-3, 5-8=-3, 6-9=-3
            ).toObservable();
    TestSubscriber<Integer> to = new TestSubscriber<Integer>(observer);
    m.subscribe(to);
    xs.onNext(1); // t == 210, xs[1], ys[], zs[]
    xs.onNext(2); // t == 220, xs[1, 2], ys[], zs[]
    zs.onNext(7); // t == 220, xs[1, 2], ys[], zs[7] triggers x and z; emit 1 * 7, remains xs[2], ys[], zs[]
    xs.onNext(3); // t == 230, xs[2,3], ys[], zs[]
    zs.onNext(8); // t == 230, xs[2,3], ys[], zs[8] triggers x and z, emit 2 * 8, remains xs[3], ys[], zs[]
    ys.onNext(4); // t == 240, xs[], ys[4], zs[] triggers x and y, emit 3 + 4, remains xs[], ys[], zs[]
    zs.onNext(9); // t == 240, xs[], ys[], zs[9]
    xs.onCompleted(); // t == 240, completed 1
    ys.onNext(5); // t == 250, xs[], ys[5], zs[9], triggers ys and zs, emits 5 - 9, remains xs[], ys[], zs[]
    ys.onNext(6); // t == 260, xs[], ys[6], zs[]
    ys.onCompleted(); // t == 270, completed 2
    zs.onCompleted(); // t == 300, completed 3, triggers when() oncompleted
    System.out.println("Events: " + to.getOnNextEvents());
    to.assertReceivedOnNext(Arrays.asList(7, 16, 7, -4));
    to.assertTerminalEvent();
    InOrder inOrder = inOrder(observer);
    inOrder.verify(observer, times(1)).onNext(1 * 7);
    inOrder.verify(observer, times(1)).onNext(2 * 8);
    inOrder.verify(observer, times(1)).onNext(3 + 4);
    inOrder.verify(observer, times(1)).onNext(5 - 9);
    inOrder.verify(observer, times(1)).onCompleted();
    verify(observer, never()).onError(any(Throwable.class));
}
// -----------------
@Test
public void and4() {
verifyAnd(JoinObservable.when(JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.then(add)), 4);
}
@Test
public void and4Error1() {
verifyError(JoinObservable.when(
JoinObservable.from(error)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and4Error2() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(error)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and4Error3() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(error)
.and(some)
.then(add)));
}
@Test
public void and4Error4() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(error)
.then(add)));
}
@Test
public void then4Throws() {
verifyError(JoinObservable.when(
JoinObservable
.from(some)
.and(some)
.and(some)
.and(some)
.then(throwFunc)));
}
// -----------------
@Test
public void and5() {
verifyAnd(JoinObservable.when(JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)), 5);
}
@Test
public void and5Error1() {
verifyError(JoinObservable.when(
JoinObservable.from(error)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and5Error2() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(error)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and5Error3() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(error)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and5Error4() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(error)
.and(some)
.then(add)));
}
@Test
public void and5Error5() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(error)
.then(add)));
}
@Test
public void then5Throws() {
verifyError(JoinObservable.when(
JoinObservable
.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(throwFunc)));
}
// -----------------
@Test
public void and6() {
verifyAnd(JoinObservable.when(JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)), 6);
}
@Test
public void and6Error1() {
verifyError(JoinObservable.when(
JoinObservable.from(error)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and6Error2() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(error)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and6Error3() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(error)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and6Error4() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(error)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and6Error5() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(error)
.and(some)
.then(add)));
}
@Test
public void and6Error6() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(error)
.then(add)));
}
@Test
public void then6Throws() {
verifyError(JoinObservable.when(
JoinObservable
.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(throwFunc)));
}
// -----------------
@Test
public void and7() {
verifyAnd(JoinObservable.when(JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)), 7);
}
@Test
public void and7Error1() {
verifyError(JoinObservable.when(
JoinObservable.from(error)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and7Error2() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(error)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and7Error3() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(error)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and7Error4() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(error)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and7Error5() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(error)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and7Error6() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(error)
.and(some)
.then(add)));
}
@Test
public void and7Error7() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(error)
.then(add)));
}
@Test
public void then7Throws() {
verifyError(JoinObservable.when(
JoinObservable
.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(throwFunc)));
}
// -----------------
@Test
public void and8() {
verifyAnd(JoinObservable.when(JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)), 8);
}
@Test
public void and8Error1() {
verifyError(JoinObservable.when(
JoinObservable.from(error)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and8Error2() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(error)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and8Error3() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(error)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and8Error4() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(error)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and8Error5() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(error)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and8Error6() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(error)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and8Error7() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(error)
.and(some)
.then(add)));
}
@Test
public void and8Error8() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(error)
.then(add)));
}
@Test
public void then8Throws() {
verifyError(JoinObservable.when(
JoinObservable
.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(throwFunc)));
}
// -----------------
@Test
public void and9() {
verifyAnd(JoinObservable.when(JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)), 9);
}
@Test
public void and9Error1() {
verifyError(JoinObservable.when(
JoinObservable.from(error)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and9Error2() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(error)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and9Error3() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(error)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and9Error4() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(error)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and9Error5() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(error)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and9Error6() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(error)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and9Error7() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(error)
.and(some)
.and(some)
.then(add)));
}
@Test
public void and9Error8() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(error)
.and(some)
.then(add)));
}
@Test
public void and9Error9() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(error)
.then(add)));
}
@Test
public void then9Throws() {
verifyError(JoinObservable.when(
JoinObservable
.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(throwFunc)));
}
// -----------------
@Test
public void andN() {
    // Exercise the N-ary pattern for every length 10..99: each joined
    // observable must emit exactly one onNext equal to the source count.
    final int base = 10;
    for (int length = base; length < 100; length++) {
        System.out.println("AndN(" + length + ")");
        PatternN pattern = JoinObservable.from(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some);
        for (int extra = base; extra < length; extra++) {
            pattern = pattern.and(some);
        }
        verifyAnd(JoinObservable.when(pattern.then(add)), length);
    }
}
/**
 * An error in the first of N (= 10) sources must propagate through the
 * joined observable.
 */
@Test
public void andNError1() {
    // Fix: the original chained only 8 and() calls (9 sources total), which
    // exercises the Pattern9 path already covered by and9Error1. A 9th
    // and(some) is needed to reach the N-ary PatternN path, matching the
    // sibling andNError2..andNErrorN tests, which all use 10 sources.
    verifyError(JoinObservable.when(
            JoinObservable.from(error)
                    .and(some)
                    .and(some)
                    .and(some)
                    .and(some)
                    .and(some)
                    .and(some)
                    .and(some)
                    .and(some)
                    .and(some)
                    .then(add)));
}
@Test
public void andNError2() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(error)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void andNError3() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(error)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void andNError4() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(error)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void andNError5() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(error)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void andNError6() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(error)
.and(some)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void andNError7() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(error)
.and(some)
.and(some)
.and(some)
.then(add)));
}
@Test
public void andNError8() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(error)
.and(some)
.and(some)
.then(add)));
}
@Test
public void andNError9() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(error)
.and(some)
.then(add)));
}
@Test
public void andNErrorN() {
verifyError(JoinObservable.when(
JoinObservable.from(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(some)
.and(error)
.then(add)));
}
@Test
public void andNErrorNRange() {
    // For every pattern length 10..99, appending an erroring source as the
    // final operand must surface the error through the join.
    for (int length = 10; length < 100; length++) {
        PatternN pattern = JoinObservable.from(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some);
        for (int extra = 10; extra < length; extra++) {
            pattern = pattern.and(some);
        }
        verifyError(JoinObservable.when(pattern.and(error).then(add)));
    }
}
@Test
public void thenNThrows() {
    // A selector that throws must fail the join at every N-ary length 10..99.
    for (int length = 10; length < 100; length++) {
        PatternN pattern = JoinObservable.from(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some)
                .and(some);
        for (int extra = 10; extra < length; extra++) {
            pattern = pattern.and(some);
        }
        verifyError(JoinObservable.when(pattern.then(throwFunc)));
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.redis.internal.commands.executor.string;
import static org.apache.geode.redis.RedisCommandArgumentsTestHelper.assertAtLeastNArgs;
import static org.apache.geode.redis.internal.RedisConstants.ERROR_BITOP_NOT_MUST_USE_SINGLE_KEY;
import static org.apache.geode.redis.internal.RedisConstants.ERROR_SYNTAX;
import static org.apache.geode.redis.internal.RedisConstants.ERROR_WRONG_TYPE;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import redis.clients.jedis.HostAndPort;
import redis.clients.jedis.JedisCluster;
import redis.clients.jedis.Protocol;
import redis.clients.jedis.args.BitOP;
import org.apache.geode.redis.RedisIntegrationTest;
import org.apache.geode.test.awaitility.GeodeAwaitility;
public abstract class AbstractBitOpIntegrationTest implements RedisIntegrationTest {
private JedisCluster jedis;
private static final int REDIS_CLIENT_TIMEOUT =
Math.toIntExact(GeodeAwaitility.getTimeout().toMillis());
private final String hashTag = "{111}";
private final String destKey = "destKey" + hashTag;
private final String srcKey = "srcKey" + hashTag;
private final String value = "value";
private final byte[] key = {1, '{', 111, '}'};
private final byte[] other = {2, '{', 111, '}'};
/** Connects a fresh cluster client to the server under test before each test. */
@Before
public void setUp() {
    jedis = new JedisCluster(new HostAndPort("localhost", getPort()), REDIS_CLIENT_TIMEOUT);
}
/** Clears all server data and closes the client after each test. */
@After
public void tearDown() {
    flushAll();
    jedis.close();
}
@Test
public void bitOp_errors_givenTooFewArguments() {
assertAtLeastNArgs(jedis, Protocol.Command.BITOP, 3);
}
@Test
public void bitop_givenInvalidOperationType_returnsSyntaxError() {
assertThatThrownBy(
() -> jedis.sendCommand(hashTag, Protocol.Command.BITOP, "invalidOp", destKey,
srcKey)).hasMessage(ERROR_SYNTAX);
}
/**
 * BITOP against a key holding a set (not a string) must fail with the
 * wrong-type error for every operation, including single-key NOT.
 */
@Test
public void bitop_givenSetFails() {
    jedis.sadd(srcKey, "m1");
    assertThatThrownBy(() -> jedis.bitop(BitOP.AND, destKey, srcKey)).hasMessage(ERROR_WRONG_TYPE);
    assertThatThrownBy(() -> jedis.bitop(BitOP.OR, destKey, srcKey)).hasMessage(ERROR_WRONG_TYPE);
    assertThatThrownBy(() -> jedis.bitop(BitOP.XOR, destKey, srcKey)).hasMessage(ERROR_WRONG_TYPE);
    assertThatThrownBy(() -> jedis.bitop(BitOP.NOT, destKey, srcKey)).hasMessage(ERROR_WRONG_TYPE);
}
@Test
public void bitopNOT_givenMoreThanOneSourceKey_returnsError() {
assertThatThrownBy(
() -> jedis.sendCommand(
hashTag, Protocol.Command.BITOP, "NOT", destKey, srcKey, "srcKey2" + hashTag))
.hasMessage(ERROR_BITOP_NOT_MUST_USE_SINGLE_KEY);
}
@Test
public void bitopNOT_givenNothingLeavesKeyUnset() {
assertThat(jedis.bitop(BitOP.NOT, destKey, srcKey)).isEqualTo(0);
assertThat(jedis.exists(destKey)).isFalse();
}
@Test
public void bitopNOT_givenNothingDeletesKey() {
jedis.set(destKey, value);
assertThat(jedis.bitop(BitOP.NOT, destKey, srcKey)).isEqualTo(0);
assertThat(jedis.exists(destKey)).isFalse();
}
@Test
public void bitopNOT_givenNothingDeletesSet() {
jedis.sadd(destKey, value);
assertThat(jedis.bitop(BitOP.NOT, destKey, srcKey)).isEqualTo(0);
assertThat(jedis.exists(destKey)).isFalse();
}
@Test
public void bitopNOT_givenEmptyStringDeletesKey() {
jedis.set(destKey, value);
jedis.set(srcKey, "");
assertThat(jedis.bitop(BitOP.NOT, destKey, srcKey)).isEqualTo(0);
assertThat(jedis.exists(destKey)).isFalse();
}
@Test
public void bitopNOT_givenEmptyStringDeletesSet() {
jedis.sadd(destKey, value);
jedis.set(srcKey, "");
assertThat(jedis.bitop(BitOP.NOT, destKey, srcKey)).isEqualTo(0);
assertThat(jedis.exists(destKey)).isFalse();
}
/** BITOP NOT with the same key as source and destination negates the value in place. */
@Test
public void bitopNOT_negatesSelf() {
    // Local renamed from "key" so it no longer shadows the field of the same
    // name; the byte values are unchanged, so the Redis key is identical.
    byte[] selfKey = {1, 2, 3};
    byte[] bytes = {1};
    jedis.set(selfKey, bytes);
    assertThat(jedis.bitop(BitOP.NOT, selfKey, selfKey)).isEqualTo(1);
    assertThat(jedis.strlen(selfKey)).isEqualTo(1);
    byte[] newbytes = jedis.get(selfKey);
    // NOT of 0x01 is 0xFE
    assertThat(newbytes[0]).isEqualTo((byte) 0xFE);
}
/** BITOP NOT writes its result even when the destination key does not yet exist. */
@Test
public void bitopNOT_createsNonExistingKey() {
    byte[] bytes = {1};
    jedis.set(other, bytes);
    assertThat(jedis.bitop(BitOP.NOT, key, other)).isEqualTo(1);
    assertThat(jedis.strlen(key)).isEqualTo(1);
    byte[] newbytes = jedis.get(key);
    // NOT of 0x01 is 0xFE
    assertThat(newbytes[0]).isEqualTo((byte) 0xFE);
}
@Test
public void bitopAND_givenSelfAndOther() {
byte[] bytes = {1};
byte[] otherBytes = {-1};
jedis.set(key, bytes);
jedis.set(other, otherBytes);
assertThat(jedis.bitop(BitOP.AND, key, key, other)).isEqualTo(1);
assertThat(jedis.strlen(key)).isEqualTo(1);
byte[] newbytes = jedis.get(key);
assertThat(newbytes[0]).isEqualTo((byte) 1);
}
@Test
public void bitopAND_givenSelfAndLongerOther() {
byte[] bytes = {1};
byte[] otherBytes = {-1, 3};
jedis.set(key, bytes);
jedis.set(other, otherBytes);
assertThat(jedis.bitop(BitOP.AND, key, key, other)).isEqualTo(2);
assertThat(jedis.strlen(key)).isEqualTo(2);
byte[] newbytes = jedis.get(key);
assertThat(newbytes[0]).isEqualTo((byte) 1);
assertThat(newbytes[1]).isEqualTo((byte) 0);
}
@Test
public void bitopOR_givenSelfAndOther() {
byte[] bytes = {1};
byte[] otherBytes = {8};
jedis.set(key, bytes);
jedis.set(other, otherBytes);
assertThat(jedis.bitop(BitOP.OR, key, key, other)).isEqualTo(1);
assertThat(jedis.strlen(key)).isEqualTo(1);
byte[] newbytes = jedis.get(key);
assertThat(newbytes[0]).isEqualTo((byte) 9);
}
@Test
public void bitopOR_givenSelfAndLongerOther() {
    // Shorter operand is zero-padded; OR with 0 preserves the longer operand's
    // trailing bytes.
    jedis.set(key, new byte[] {1});
    jedis.set(other, new byte[] {-1, 3});
    assertThat(jedis.bitop(BitOP.OR, key, key, other)).isEqualTo(2);
    assertThat(jedis.strlen(key)).isEqualTo(2);
    byte[] result = jedis.get(key);
    assertThat(result[0]).isEqualTo((byte) -1); // 0x01 | 0xFF
    assertThat(result[1]).isEqualTo((byte) 3);  // padded 0x00 | 0x03
}
@Test
public void bitopXOR_givenSelfAndOther() {
    // 0b1001 ^ 0b1000 == 0b0001.
    jedis.set(key, new byte[] {9});
    jedis.set(other, new byte[] {8});
    assertThat(jedis.bitop(BitOP.XOR, key, key, other)).isEqualTo(1);
    assertThat(jedis.strlen(key)).isEqualTo(1);
    byte[] result = jedis.get(key);
    assertThat(result[0]).isEqualTo((byte) 1);
}
@Test
public void bitopXOR_givenSelfAndLongerOther() {
    // Shorter operand is zero-padded; XOR with 0 preserves the longer
    // operand's trailing bytes.
    jedis.set(key, new byte[] {1});
    jedis.set(other, new byte[] {-1, 3});
    assertThat(jedis.bitop(BitOP.XOR, key, key, other)).isEqualTo(2);
    assertThat(jedis.strlen(key)).isEqualTo(2);
    byte[] result = jedis.get(key);
    assertThat(result[0]).isEqualTo((byte) 0xFE); // 0x01 ^ 0xFF
    assertThat(result[1]).isEqualTo((byte) 3);    // padded 0x00 ^ 0x03
}
}
| |
/*
* Pore
* Copyright (c) 2014-2016, Lapis <https://github.com/LapisBlue>
*
* The MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package blue.lapis.pore.impl.entity;
import blue.lapis.pore.Pore;
import blue.lapis.pore.converter.vector.LocationConverter;
import blue.lapis.pore.converter.vector.VectorConverter;
import blue.lapis.pore.converter.wrapper.WrapperConverter;
import blue.lapis.pore.impl.PoreWorld;
import blue.lapis.pore.util.PoreText;
import blue.lapis.pore.util.PoreWrapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.NotImplementedException;
import org.bukkit.EntityEffect;
import org.bukkit.Location;
import org.bukkit.Server;
import org.bukkit.World;
import org.bukkit.entity.EntityType;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.player.PlayerTeleportEvent;
import org.bukkit.metadata.MetadataValue;
import org.bukkit.permissions.Permission;
import org.bukkit.permissions.PermissionAttachment;
import org.bukkit.permissions.PermissionAttachmentInfo;
import org.bukkit.plugin.Plugin;
import org.bukkit.util.Vector;
import org.spongepowered.api.data.key.Keys;
import org.spongepowered.api.data.manipulator.DataManipulator;
import org.spongepowered.api.entity.Entity;
import org.spongepowered.api.entity.EntitySnapshot;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
//TODO: Determine if metadata methods should be implemented manually
public class PoreEntity extends PoreWrapper<Entity> implements org.bukkit.entity.Entity {

    /**
     * Wraps the given Sponge {@link Entity} in its Bukkit-facing Pore
     * counterpart via the wrapper-converter registry.
     *
     * @param handle the Sponge entity to wrap
     * @return the Pore wrapper for {@code handle}
     */
    public static PoreEntity of(Entity handle) {
        return WrapperConverter.of(PoreEntity.class, handle);
    }

    /**
     * Resolves the live entity referenced by the given snapshot.
     *
     * @param snapshot the snapshot to resolve
     * @return the wrapped live entity, or {@code null} if the snapshot carries
     *         no UUID or the entity is no longer present in its extent
     */
    public static PoreEntity of(EntitySnapshot snapshot) {
        Optional<UUID> uuid = snapshot.getUniqueId();
        if (!uuid.isPresent()) {
            // Previously uuid.get() was called unchecked and would throw
            // NoSuchElementException for a UUID-less snapshot; treat that case
            // the same as a missing entity.
            return null;
        }
        Optional<Entity> entity = snapshot.getTransform().get().getExtent().getEntity(uuid.get());
        if (!entity.isPresent()) {
            return null;
        }
        return PoreEntity.of(entity.get());
    }

    protected PoreEntity(Entity handle) {
        super(handle);
    }

    /** Returns true if the wrapped entity currently holds the given data manipulator. */
    protected <T extends DataManipulator<T, ?>> boolean hasData(Class<T> key) {
        return getHandle().get(key).isPresent();
    }

    @Override
    public EntityType getType() {
        // Subclasses for concrete entity types are expected to override this.
        return EntityType.UNKNOWN;
    }

    @Override
    public Location getLocation() {
        return LocationConverter.fromTransform(getHandle().getTransform());
    }

    @Override
    public Location getLocation(Location loc) {
        // Copies this entity's position into the supplied Location object.
        return LocationConverter.apply(loc, getHandle().getTransform());
    }

    @Override
    public Vector getVelocity() {
        return getHandle().get(Keys.VELOCITY).map(VectorConverter::createBukkitVector)
                .orElseGet(() -> new Vector(0, 0, 0));
    }

    @Override
    public void setVelocity(Vector velocity) {
        getHandle().offer(Keys.VELOCITY, VectorConverter.create3d(velocity));
    }

    @Override
    public boolean isOnGround() {
        return getHandle().isOnGround();
    }

    @Override
    public World getWorld() {
        return PoreWorld.of(getHandle().getWorld());
    }

    @Override
    public boolean teleport(Location location) {
        return this.teleport(location, PlayerTeleportEvent.TeleportCause.PLUGIN);
    }

    @Override
    public boolean teleport(Location location, PlayerTeleportEvent.TeleportCause cause) {
        // Bukkit semantics: a vehicle carrying a passenger, or a dead entity,
        // cannot be teleported.
        if (this.getPassenger() != null || this.isDead()) {
            return false;
        }
        // NOTE(review): eject() removes Keys.PASSENGER even when no passenger
        // is present; if Sponge reports that removal as unsuccessful the
        // teleport is skipped — confirm remove() semantics on an absent key.
        if (this.eject()) {
            getHandle().setTransform(LocationConverter.toTransform(location));
            // CraftBukkit apparently does not throw an event when this method is called
            return true;
        }
        return false;
    }

    @Override
    public boolean teleport(org.bukkit.entity.Entity destination) {
        return this.teleport(destination.getLocation());
    }

    @Override
    public boolean teleport(org.bukkit.entity.Entity destination, PlayerTeleportEvent.TeleportCause cause) {
        return this.teleport(destination.getLocation(), cause);
    }

    @Override
    public List<org.bukkit.entity.Entity> getNearbyEntities(double x, double y, double z) {
        // TODO: Optimize this with the SpongeAPI method
        List<org.bukkit.entity.Entity> worldEntities = getWorld().getEntities();
        List<org.bukkit.entity.Entity> nearby = Lists.newArrayList();
        Location here = this.getLocation(); // hoisted: loop-invariant
        for (org.bukkit.entity.Entity e : worldEntities) {
            // Bukkit's contract excludes the entity itself from its own
            // neighborhood; without this check it always matched (distance 0).
            if (e.getUniqueId().equals(this.getUniqueId())) {
                continue;
            }
            Location loc = e.getLocation();
            if (Math.abs(loc.getX() - here.getX()) <= x
                    && Math.abs(loc.getY() - here.getY()) <= y
                    && Math.abs(loc.getZ() - here.getZ()) <= z) {
                nearby.add(e);
            }
        }
        return nearby;
    }

    @Override
    public int getEntityId() { // note to self - this is the ID of the entity in the world, and unrelated to
        // its UUID
        throw new NotImplementedException("TODO");
    }

    @Override
    public int getFireTicks() {
        // NOTE(review): Optional.get() without a presence check — presumably
        // Sponge always exposes FIRE_TICKS; verify, else this can throw.
        return getHandle().get(Keys.FIRE_TICKS).get();
    }

    @Override
    public void setFireTicks(int ticks) {
        getHandle().offer(Keys.FIRE_TICKS, ticks);
    }

    @Override
    public int getMaxFireTicks() {
        return getHandle().getValue(Keys.FIRE_TICKS).get().getMaxValue();
    }

    @Override
    public void remove() {
        getHandle().remove();
    }

    @Override
    public boolean isDead() {
        return getHandle().isRemoved();
    }

    @Override
    public boolean isValid() {
        return getHandle().isLoaded();
    }

    @Override
    public void sendMessage(String message) {
        // silently fail
    }

    @Override
    public void sendMessage(String[] messages) {
        // silently fail (non-player entities cannot receive chat)
    }

    @Override
    public Server getServer() {
        return Pore.getServer();
    }

    @Override
    public String getName() {
        throw new NotImplementedException("TODO");
    }

    @Override
    public org.bukkit.entity.Entity getPassenger() {
        return getHandle().get(Keys.PASSENGER).map(PoreEntity::of).orElse(null);
    }

    @Override
    public boolean setPassenger(final org.bukkit.entity.Entity passenger) {
        if (passenger != null) {
            return getHandle().offer(Keys.PASSENGER, ((PoreEntity) passenger).getHandle().createSnapshot())
                    .isSuccessful();
        } else {
            // null clears the passenger slot
            return getHandle().remove(Keys.PASSENGER).isSuccessful();
        }
    }

    @Override
    public boolean isEmpty() {
        // "empty" in Bukkit terms == no passenger riding this entity
        return !getHandle().get(Keys.PASSENGER).isPresent();
    }

    @Override
    public boolean eject() {
        return setPassenger(null);
    }

    @Override
    public float getFallDistance() {
        return getHandle().get(Keys.FALL_DISTANCE).get();
    }

    @Override
    public void setFallDistance(float distance) {
        getHandle().offer(Keys.FALL_DISTANCE, distance);
    }

    @Override
    public void setLastDamageCause(EntityDamageEvent event) {
        //TODO: Sponge counterpart planned for 1.1
    }

    @Override
    public EntityDamageEvent getLastDamageCause() {
        // TODO: Sponge counterpart planned for 1.1
        return null;
    }

    @Override
    public UUID getUniqueId() {
        return getHandle().getUniqueId();
    }

    @Override
    public int getTicksLived() {
        throw new NotImplementedException("TODO");
    }

    @Override
    public void setTicksLived(int value) {
        throw new NotImplementedException("TODO");
    }

    @Override
    public void playEffect(EntityEffect type) {
        throw new NotImplementedException("TODO");
    }

    @Override
    public boolean isInsideVehicle() {
        return getHandle().get(Keys.VEHICLE).isPresent();
    }

    @Override
    public boolean leaveVehicle() {
        return isInsideVehicle() && getHandle().remove(Keys.VEHICLE).isSuccessful();
    }

    @Override
    public org.bukkit.entity.Entity getVehicle() {
        return getHandle().get(Keys.VEHICLE).map(PoreEntity::of).orElse(null);
    }

    @Override
    @SuppressWarnings("deprecation")
    public String getCustomName() {
        return getHandle().get(Keys.DISPLAY_NAME).map(PoreText::convert).orElse(null);
    }

    @SuppressWarnings("deprecation")
    @Override
    public void setCustomName(String name) {
        getHandle().offer(Keys.DISPLAY_NAME, PoreText.convert(name));
    }

    @Override
    public boolean isCustomNameVisible() {
        // Absent data means "not visible"; orElse replaces the manual
        // isPresent()/get() ternary.
        return getHandle().get(Keys.CUSTOM_NAME_VISIBLE).orElse(false);
    }

    @Override
    public void setCustomNameVisible(boolean flag) {
        getHandle().offer(Keys.CUSTOM_NAME_VISIBLE, flag);
    }

    @Override
    public void setMetadata(String s, MetadataValue metadataValue) {
        // TODO
    }

    @Override
    public List<MetadataValue> getMetadata(String s) {
        return ImmutableList.of(); // TODO
    }

    @Override
    public boolean hasMetadata(String s) {
        return false; // TODO
    }

    @Override
    public void removeMetadata(String s, Plugin plugin) {
        // TODO
    }

    // Permission methods below are deliberate no-ops: plain entities have no
    // permissions in Bukkit, so everything reports "not set"/"not granted".

    @Override
    public boolean isPermissionSet(String name) {
        return false;
    }

    @Override
    public boolean isPermissionSet(Permission perm) {
        return false;
    }

    @Override
    public boolean hasPermission(String name) {
        return false;
    }

    @Override
    public boolean hasPermission(Permission perm) {
        return false;
    }

    @Override
    public PermissionAttachment addAttachment(Plugin plugin, String name, boolean value) {
        throw new NotImplementedException("TODO");
    }

    @Override
    public PermissionAttachment addAttachment(Plugin plugin) {
        throw new NotImplementedException("TODO");
    }

    @Override
    public PermissionAttachment addAttachment(Plugin plugin, String name, boolean value, int ticks) {
        throw new NotImplementedException("TODO");
    }

    @Override
    public PermissionAttachment addAttachment(Plugin plugin, int ticks) {
        throw new NotImplementedException("TODO");
    }

    @Override
    public void removeAttachment(PermissionAttachment attachment) {
    }

    @Override
    public void recalculatePermissions() {
    }

    @Override
    public Set<PermissionAttachmentInfo> getEffectivePermissions() {
        return ImmutableSet.of();
    }

    @Override
    public boolean isOp() {
        return false;
    }

    @Override
    public void setOp(boolean value) {
        // do nothing
    }
}
| |
/*
Derby - Class com.pivotal.gemfirexd.internal.impl.services.locks.LockSet
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.pivotal.gemfirexd.internal.impl.services.locks;
import com.pivotal.gemfirexd.internal.iapi.error.StandardException;
import com.pivotal.gemfirexd.internal.iapi.reference.Property;
import com.pivotal.gemfirexd.internal.iapi.reference.SQLState;
import com.pivotal.gemfirexd.internal.iapi.services.diag.DiagnosticUtil;
import com.pivotal.gemfirexd.internal.iapi.services.locks.C_LockFactory;
import com.pivotal.gemfirexd.internal.iapi.services.locks.CompatibilitySpace;
import com.pivotal.gemfirexd.internal.iapi.services.locks.Latch;
import com.pivotal.gemfirexd.internal.iapi.services.locks.Lockable;
import com.pivotal.gemfirexd.internal.iapi.services.sanity.SanityManager;
import java.util.HashMap;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.Map;
/**
A LockSet is a complete lock table. A lock table is a hash table
keyed by a Lockable and with a LockControl as the data element.
<P>
A LockControl contains information about the locks held on a Lockable.
<BR>
MT - Mutable - Container Object : All non-private methods of this class are
thread safe unless otherwise stated by their javadoc comments.
<BR>
All searching of
the hashtable is performed using java synchroization(this).
<BR>
The class creates ActiveLock and LockControl objects.
LockControl objects are never passed out of this class, All the methods of
LockControl are called while being synchronized on this, thus providing the
single threading that LockControl required.
Methods of Lockables are only called by this class or LockControl, and
always while being synchronized on this, thus providing the single
threading that Lockable requires.
@see LockControl
*/
final class LockSet implements LockTable {
    /*
    ** Fields
    */
    private final SinglePool factory;

    /** Hash table which maps <code>Lockable</code> objects to
     * <code>Lock</code>s. */
    private final HashMap locks;

    /**
        Timeout for deadlocks, in ms.
        <BR>
        MT - immutable
    */
    private int deadlockTimeout = Property.DEADLOCK_TIMEOUT_DEFAULT * 1000;
    private int waitTimeout = Property.WAIT_TIMEOUT_DEFAULT * 1000;

    //EXCLUDE-START-lockdiag-
    // this variable is set and read without synchronization.
    // Only one thread should be setting it at one time.
    private boolean deadlockTrace;
    //EXCLUDE-END-lockdiag-

    // The number of waiters for locks
    private int blockCount;

    /*
    ** Constructor
    */
    protected LockSet(SinglePool factory) {
        this.factory = factory;
        locks = new HashMap();
    }

    /*
    ** Public Methods
    */

    /**
     * Lock an object within a specific compatibility space.
     *
     * @param compatibilitySpace Compatibility space.
     * @param ref Lockable reference.
     * @param qualifier Qualifier.
     * @param timeout Timeout in milli-seconds
     *
     * @return Object that represents the lock.
     *
     * @exception StandardException Standard Derby policy.
     */
    public Lock lockObject(CompatibilitySpace compatibilitySpace, Lockable ref,
                           Object qualifier, int timeout)
        throws StandardException
    {
        if (SanityManager.DEBUG) {
            if (SanityManager.TraceMemoryLeak) {
                if (locks.size() > 1000)
                    System.out.println("TraceMemoryLeak:LockSet: " +
                                       locks.size());
            }
        }

        Control gc;
        LockControl control;
        Lock lockItem;
        String lockDebug = null;

        synchronized (this) {
            gc = getControl(ref);

            if (gc == null) {
                // object is not locked, can be granted
                Lock gl = new Lock(compatibilitySpace, ref, qualifier);
                gl.grant();
                locks.put(ref, gl);
                return gl;
            }

            control = gc.getLockControl();
            if (control != gc) {
                locks.put(ref, control);
            }

            if (SanityManager.DEBUG) {
                SanityManager.ASSERT(ref.equals(control.getLockable()));
                // ASSERT item is in the list
                if (getControl(control.getLockable()) != control)
                {
                    SanityManager.THROWASSERT(
                        "lockObject mismatched lock items " +
                        getControl(control.getLockable()) + " " + control);
                }
            }

            lockItem = control.addLock(this, compatibilitySpace, qualifier);

            if (lockItem.getCount() != 0) {
                return lockItem;
            }

            if (AbstractPool.noLockWait(timeout, compatibilitySpace)) {
                // remove all trace of lock
                control.giveUpWait(lockItem, this);

                if (SanityManager.DEBUG)
                {
                    if (SanityManager.DEBUG_ON("DeadlockTrace"))
                    {
                        SanityManager.showTrace(new Throwable());
                        // The following dumps the lock table as it
                        // exists at the time a timeout is about to
                        // cause a deadlock exception to be thrown.
                        lockDebug =
                            DiagnosticUtil.toDiagString(lockItem) +
                            "\nCould not grant lock with zero timeout, here's the table" +
                            this.toDebugString();
                    }
                }

                return null;
            }
        } // synchronized block

        boolean deadlockWait = false;
        int actualTimeout;

        if (timeout == C_LockFactory.WAIT_FOREVER)
        {
            // always check for deadlocks as there should not be any
            deadlockWait = true;
            if ((actualTimeout = deadlockTimeout) == C_LockFactory.WAIT_FOREVER)
                actualTimeout = Property.DEADLOCK_TIMEOUT_DEFAULT * 1000;
        }
        else
        {
            if (timeout == C_LockFactory.TIMED_WAIT)
                timeout = actualTimeout = waitTimeout;
            else
                actualTimeout = timeout;

            // five possible cases
            // i) timeout -1, deadlock -1 ->
            //     just wait forever, no deadlock check
            // ii) timeout >= 0, deadlock -1 ->
            //     just wait for timeout, no deadlock check
            // iii) timeout -1, deadlock >= 0 ->
            //     wait for deadlock, then deadlock check,
            //     then infinite timeout
            // iv) timeout >=0, deadlock < timeout ->
            //     wait for deadlock, then deadlock check,
            //     then wait for (timeout - deadlock)
            // v) timeout >=0, deadlock >= timeout ->
            //     just wait for timeout, no deadlock check
            if (deadlockTimeout >= 0) {
                if (actualTimeout < 0) {
                    // infinite wait but perform a deadlock check first
                    deadlockWait = true;
                    actualTimeout = deadlockTimeout;
                } else if (deadlockTimeout < actualTimeout) {
                    // deadlock wait followed by a timeout wait
                    deadlockWait = true;
                    actualTimeout = deadlockTimeout;
                    // leave timeout as the remaining time
                    timeout -= deadlockTimeout;
                }
            }
        }

        ActiveLock waitingLock = (ActiveLock) lockItem;
        lockItem = null;

        int earlyWakeupCount = 0;
        long startWaitTime = 0;

forever: for (;;) {
            byte wakeupReason = waitingLock.waitForGrant(actualTimeout);
            ActiveLock nextWaitingLock = null;
            Object[] deadlockData = null;

            try {
                boolean willQuitWait;
                Enumeration timeoutLockTable = null;
                long currentTime = 0;

                synchronized (this) {
                    if (control.isGrantable(
                            control.firstWaiter() == waitingLock,
                            compatibilitySpace,
                            qualifier)) {
                        // Yes, we are granted, put us on the granted queue.
                        control.grant(waitingLock);

                        // Remove from the waiting queue & get next waiter
                        nextWaitingLock =
                            control.getNextWaiter(waitingLock, true, this);
                        return waitingLock;
                    }

                    // try again later
                    waitingLock.clearPotentiallyGranted();

                    willQuitWait =
                        (wakeupReason != Constants.WAITING_LOCK_GRANT);

                    if (((wakeupReason == Constants.WAITING_LOCK_IN_WAIT) &&
                            deadlockWait) ||
                        (wakeupReason == Constants.WAITING_LOCK_DEADLOCK))
                    {
                        // check for a deadlock, even if we were woken up
                        // because we were selected as a victim we still
                        // check because the situation may have changed.
                        deadlockData =
                            Deadlock.look(
                                factory, this, control, waitingLock,
                                wakeupReason);

                        if (deadlockData == null) {
                            // we don't have a deadlock
                            deadlockWait = false;

                            actualTimeout = timeout;
                            startWaitTime = 0;
                            willQuitWait = false;
                        } else {
                            willQuitWait = true;
                        }
                    }

                    nextWaitingLock =
                        control.getNextWaiter(
                            waitingLock, willQuitWait, this);

                    // If we were not woken by another then we have
                    // timed out. Either deadlock out or timeout
                    if (willQuitWait) {

                        if (SanityManager.DEBUG)
                        {
                            if (SanityManager.DEBUG_ON("DeadlockTrace"))
                            {
                                SanityManager.showTrace(new Throwable());
                                // The following dumps the lock table as it
                                // exists at the time a timeout is about to
                                // cause a deadlock exception to be thrown.
                                lockDebug =
                                    DiagnosticUtil.toDiagString(waitingLock) +
                                    "\nGot deadlock/timeout, here's the table" +
                                    this.toDebugString();
                            }
                        }

                        if (deadlockTrace && (deadlockData == null))
                        {
                            // if ending lock request due to lock timeout
                            // want a copy of the LockTable and the time,
                            // in case of deadlock deadlockData has the
                            // info we need.
                            currentTime = System.currentTimeMillis();
                            timeoutLockTable =
                                factory.makeVirtualLockTable();
                        }
                    }
                } // synchronized block

                // need to do this outside of the synchronized block as the
                // message text building (timeouts and deadlocks) may
                // involve getting locks to look up table names from
                // identifiers.
                if (willQuitWait)
                {
                    if (SanityManager.DEBUG)
                    {
                        if (lockDebug != null)
                        {
                            String type =
                                ((deadlockData != null) ?
                                     "deadlock:" : "timeout:");

                            SanityManager.DEBUG_PRINT(
                                type,
                                "wait on lockitem caused " + type +
                                lockDebug);
                        }
                    }

                    if (deadlockData == null)
                    {
                        // ending wait because of lock timeout.
                        if (deadlockTrace)
                        {
                            // Turn ON gemfirexd.locks.deadlockTrace to build
                            // the lockTable.
                            throw Timeout.buildException(
                                waitingLock, timeoutLockTable, currentTime);
                        }
                        else
                        {
                            StandardException se =
                                StandardException.newException(
                                    SQLState.LOCK_TIMEOUT);
                            throw se;
                        }
                    }
                    else
                    {
                        // ending wait because of lock deadlock.
                        throw Deadlock.buildException(
                            factory, deadlockData);
                    }
                }
            } finally {
                if (nextWaitingLock != null) {
                    nextWaitingLock.wakeUp(Constants.WAITING_LOCK_GRANT);
                    nextWaitingLock = null;
                }
            }

            if (actualTimeout != C_LockFactory.WAIT_FOREVER) {
                if (wakeupReason != Constants.WAITING_LOCK_IN_WAIT)
                    earlyWakeupCount++;

                if (earlyWakeupCount > 5) {
                    long now = System.currentTimeMillis();
                    if (startWaitTime != 0) {
                        long sleepTime = now - startWaitTime;
                        actualTimeout -= sleepTime;
                    }
                    startWaitTime = now;
                }
            }
        } // for(;;)
    }

    /**
        Unlock an object, previously locked by lockObject().

        If unlockCount is not zero then the lock will be unlocked
        that many times, otherwise the unlock count is taken from
        item.
    */
    public void unlock(Latch item, int unlockCount) {
        if (SanityManager.DEBUG) {
            if (SanityManager.DEBUG_ON(Constants.LOCK_TRACE)) {
                /*
                ** I don't like checking the trace flag twice, but SanityManager
                ** doesn't provide a way to get to the debug trace stream
                ** directly.
                */
                SanityManager.DEBUG(
                    Constants.LOCK_TRACE,
                    "Release lock: " + DiagnosticUtil.toDiagString(item));
            }
        }

        boolean tryGrant = false;
        ActiveLock nextGrant = null;

        synchronized (this) {
            Control control = getControl(item.getLockable());

            if (SanityManager.DEBUG) {
                // only valid Lock's expected
                if (item.getLockable() == null)
                {
                    SanityManager.THROWASSERT(
                        "item.getLockable() = null." +
                        "unlockCount " + unlockCount +
                        "item = " + DiagnosticUtil.toDiagString(item));
                }

                // only valid Lock's expected
                if (control == null)
                {
                    SanityManager.THROWASSERT(
                        "control = null." +
                        "unlockCount " + unlockCount +
                        "item = " + DiagnosticUtil.toDiagString(item));
                }

                if (getControl(control.getLockable()) != control)
                {
                    SanityManager.THROWASSERT(
                        "unlock mismatched lock items " +
                        getControl(control.getLockable()) + " " + control);
                }

                if ((unlockCount != 0) && (unlockCount > item.getCount()))
                    SanityManager.THROWASSERT("unlockCount " + unlockCount +
                        " larger than actual lock count " + item.getCount() + " item " + item);
            }

            tryGrant = control.unlock(item, unlockCount);
            item = null;

            boolean mayBeEmpty = true;
            if (tryGrant) {
                nextGrant = control.firstWaiter();
                if (nextGrant != null) {
                    mayBeEmpty = false;
                    if (!nextGrant.setPotentiallyGranted())
                        nextGrant = null;
                }
            }

            if (mayBeEmpty) {
                if (control.isEmpty()) {
                    // no-one granted, no-one waiting, remove lock control
                    locks.remove(control.getLockable());
                }
                return;
            }
        } // synchronized (this)

        if (tryGrant && (nextGrant != null)) {
            nextGrant.wakeUp(Constants.WAITING_LOCK_GRANT);
        }
    }

    /**
     * Unlock an object once if it is present in the specified group. Also
     * remove the object from the group.
     *
     * @param space the compatibility space
     * @param ref a reference to the locked object
     * @param qualifier qualifier of the lock
     * @param group a map representing the locks in a group
     * @return the corresponding lock in the group map, or <code>null</code> if
     * the object was not unlocked
     */
    public synchronized Lock unlockReference(CompatibilitySpace space,
                                             Lockable ref, Object qualifier,
                                             Map group) {
        Control control = getControl(ref);
        if (control == null) {
            return null;
        }

        Lock setLock = control.getLock(space, qualifier);
        if (setLock == null) {
            return null;
        }

        Lock lockInGroup = (Lock) group.remove(setLock);
        if (lockInGroup != null) {
            unlock(lockInGroup, 1);
        }

        return lockInGroup;
    }

    /**
     * {@inheritDoc}
     */
    public boolean zeroDurationLockObject(
        CompatibilitySpace space, Lockable ref, Object qualifier, int timeout)
            throws StandardException {

        if (SanityManager.DEBUG) {
            if (SanityManager.DEBUG_ON(Constants.LOCK_TRACE)) {
                D_LockControl.debugLock(
                    "Zero Duration Lock Request before Grant: ",
                    space, null, ref, qualifier, timeout);
                if (SanityManager.DEBUG_ON(Constants.LOCK_STACK_TRACE)) {
                    // The following will print the stack trace of the lock
                    // request to the log.
                    Throwable t = new Throwable();
                    java.io.PrintWriter istream =
                        SanityManager.GET_DEBUG_STREAM();
                    istream.println("Stack trace of lock request:");
                    t.printStackTrace(istream);
                }
            }
        }

        // Very fast zeroDurationLockObject() for unlocked objects.
        // If no entry exists in the lock manager for this reference
        // then it must be unlocked.
        // If the object is locked then we perform a grantable
        // check, skipping over any waiters.
        // If the caller wants to wait and the lock cannot
        // be granted then we do the slow join the queue and
        // release the lock method.
        synchronized (this) {
            Control control = getControl(ref);
            if (control == null) {
                return true;
            }

            // If we are grantable, ignoring waiting locks then
            // we can also grant this request now, as skipping
            // over the waiters won't block them as we release
            // the lock right away.
            if (control.isGrantable(true, space, qualifier)) {
                return true;
            }

            // can't be granted and are not willing to wait.
            if (AbstractPool.noLockWait(timeout, space)) {
                return false;
            }
        }

        Lock lock = lockObject(space, ref, qualifier, timeout);

        if (SanityManager.DEBUG) {
            if (SanityManager.DEBUG_ON(Constants.LOCK_TRACE)) {
                D_LockControl.debugLock(
                    "Zero Lock Request Granted: ",
                    space, null, ref, qualifier, timeout);
            }
        }

        // and simply unlock it once
        unlock(lock, 1);

        return true;
    }

    /**
     * Set the deadlock timeout.
     *
     * @param timeout deadlock timeout in milliseconds
     */
    public void setDeadlockTimeout(int timeout) {
        deadlockTimeout = timeout;
    }

    /**
     * Set the wait timeout.
     *
     * @param timeout wait timeout in milliseconds
     */
    public void setWaitTimeout(int timeout) {
        waitTimeout = timeout;
    }

    /*
    ** Non public methods
    */

    //EXCLUDE-START-lockdiag-
    public void setDeadlockTrace(boolean val)
    {
        // set this without synchronization
        deadlockTrace = val;
    }
    //EXCLUDE-END-lockdiag-

    public String toDebugString()
    {
        if (SanityManager.DEBUG)
        {
            // Use a StringBuilder instead of repeated String concatenation,
            // and actually advance the index: previously 'i' was never
            // incremented, so every entry printed as "lock[0]".
            StringBuilder sb = new StringBuilder();
            int i = 0;

            for (Iterator it = locks.values().iterator(); it.hasNext(); )
            {
                sb.append("\n  lock[").append(i).append("]: ").append(
                    DiagnosticUtil.toDiagString(it.next()));
                i++;
            }

            return sb.toString();
        }
        else
        {
            return null;
        }
    }

    /**
     * Add all waiters in this lock table to a <code>Map</code> object.
     * <br>
     * MT - must be synchronized on this <code>LockSet</code> object.
     */
    public void addWaiters(Map waiters) {
        for (Iterator it = locks.values().iterator(); it.hasNext(); ) {
            Control control = (Control) it.next();
            control.addWaiters(waiters);
        }
    }

    //EXCLUDE-START-lockdiag-
    /**
     * make a shallow clone of myself and my lock controls
     */
    public synchronized Map shallowClone()
    {
        HashMap clone = new HashMap();

        for (Iterator it = locks.keySet().iterator(); it.hasNext(); )
        {
            Lockable lockable = (Lockable) it.next();
            Control control = getControl(lockable);

            clone.put(lockable, control.shallowClone());
        }

        return clone;
    }
    //EXCLUDE-END-lockdiag-

    /*
    ** Support for anyoneBlocked(). These methods assume that caller
    ** is synchronized on this LockSet object.
    */

    /**
     * Increase blockCount by one.
     * <BR>
     * MT - must be synchronized on this <code>LockSet</code> object.
     */
    public void oneMoreWaiter() {
        blockCount++;
    }

    /**
     * Decrease blockCount by one.
     * <BR>
     * MT - must be synchronized on this <code>LockSet</code> object.
     */
    public void oneLessWaiter() {
        blockCount--;
    }

    public synchronized boolean anyoneBlocked() {
        if (SanityManager.DEBUG) {
            SanityManager.ASSERT(
                blockCount >= 0, "blockCount should not be negative");
        }

        return blockCount != 0;
    }

    /**
     * Get the <code>Control</code> for an object in the lock table.
     * <br>
     * MT - must be synchronized on this <code>LockSet</code> object.
     */
    private final Control getControl(Lockable ref) {
        return (Control) locks.get(ref);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.common.lucene.search.function.WeightFactorFunction;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.ScrollContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QueryPhaseExecutionException;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.search.slice.SliceBuilder;
import org.elasticsearch.search.sort.SortAndFormats;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Default concrete {@link SearchContext} implementation backing a single shard-level search
 * request. It owns the engine searcher for the lifetime of the request, holds the per-phase
 * results (dfs / query / fetch), and accumulates all the mutable parameters (query, filters,
 * sort, pagination, highlighting, aggregations, ...) parsed from the search source before
 * the phases execute.
 */
final class DefaultSearchContext extends SearchContext {
    private final long id;
    private final ShardSearchRequest request;
    private final SearchShardTarget shardTarget;
    private final Counter timeEstimateCounter;
    private SearchType searchType;
    private final Engine.Searcher engineSearcher;
    private final BigArrays bigArrays;
    private final IndexShard indexShard;
    private final IndexService indexService;
    private final ContextIndexSearcher searcher;
    private final DfsSearchResult dfsResult;
    private final QuerySearchResult queryResult;
    private final FetchSearchResult fetchResult;
    private float queryBoost = 1.0f;
    private TimeValue timeout;
    // terminate after count
    private int terminateAfter = DEFAULT_TERMINATE_AFTER;
    private List<String> groupStats;
    private ScrollContext scrollContext;
    private boolean explain;
    private boolean version = false; // by default, we don't return versions
    private StoredFieldsContext storedFields;
    private ScriptFieldsContext scriptFields;
    private FetchSourceContext fetchSourceContext;
    private DocValueFieldsContext docValueFieldsContext;
    private int from = -1;
    private int size = -1;
    private SortAndFormats sort;
    private Float minimumScore;
    private boolean trackScores = false; // when sorting, track scores as well...
    private FieldDoc searchAfter;
    // filter for sliced scroll
    private SliceBuilder sliceBuilder;
    /**
     * The original query as sent by the user without the types and aliases
     * applied. Putting things in here leaks them into highlighting so don't add
     * things like the type filter or alias filters.
     */
    private ParsedQuery originalQuery;
    /**
     * The query to actually execute.
     */
    private Query query;
    private ParsedQuery postFilter;
    private Query aliasFilter;
    private int[] docIdsToLoad;
    private int docsIdsToLoadFrom;
    private int docsIdsToLoadSize;
    private SearchContextAggregations aggregations;
    private SearchContextHighlight highlight;
    private SuggestionSearchContext suggest;
    private List<RescoreSearchContext> rescore;
    private volatile long keepAlive;
    private final long originNanoTime = System.nanoTime();
    private volatile long lastAccessTime = -1;
    private Profilers profilers;
    private final Map<String, SearchExtBuilder> searchExtBuilders = new HashMap<>();
    private final Map<Class<?>, Collector> queryCollectors = new HashMap<>();
    private final QueryShardContext queryShardContext;
    private FetchPhase fetchPhase;
    /**
     * Creates the context for one shard-level request. Note that the context takes ownership
     * of {@code engineSearcher} and releases it in {@link #doClose()}.
     */
    DefaultSearchContext(long id, ShardSearchRequest request, SearchShardTarget shardTarget, Engine.Searcher engineSearcher,
                         IndexService indexService, IndexShard indexShard,
                         BigArrays bigArrays, Counter timeEstimateCounter, ParseFieldMatcher parseFieldMatcher, TimeValue timeout,
                         FetchPhase fetchPhase) {
        super(parseFieldMatcher);
        this.id = id;
        this.request = request;
        this.fetchPhase = fetchPhase;
        this.searchType = request.searchType();
        this.shardTarget = shardTarget;
        this.engineSearcher = engineSearcher;
        // SearchContexts use a BigArrays that can circuit break
        this.bigArrays = bigArrays.withCircuitBreaking();
        this.dfsResult = new DfsSearchResult(id, shardTarget);
        this.queryResult = new QuerySearchResult(id, shardTarget);
        this.fetchResult = new FetchSearchResult(id, shardTarget);
        this.indexShard = indexShard;
        this.indexService = indexService;
        this.searcher = new ContextIndexSearcher(engineSearcher, indexService.cache().query(), indexShard.getQueryCachingPolicy());
        this.timeEstimateCounter = timeEstimateCounter;
        this.timeout = timeout;
        queryShardContext = indexService.newQueryShardContext(request.shardId().id(), searcher.getIndexReader(), request::nowInMillis);
        queryShardContext.setTypes(request.types());
    }
    @Override
    public void doClose() {
        // release the wrapped searcher and the underlying engine searcher acquired for this request
        Releasables.close(searcher, engineSearcher);
    }
    /**
     * Should be called before executing the main query and after all other parameters have been set.
     */
    @Override
    public void preProcess(boolean rewrite) {
        if (hasOnlySuggest() ) {
            return;
        }
        // validate from/size against the index's max result window (scroll requests get a
        // different error message since size alone controls their batch size)
        long from = from() == -1 ? 0 : from();
        long size = size() == -1 ? 10 : size();
        long resultWindow = from + size;
        int maxResultWindow = indexService.getIndexSettings().getMaxResultWindow();
        if (resultWindow > maxResultWindow) {
            if (scrollContext == null) {
                throw new QueryPhaseExecutionException(this,
                    "Result window is too large, from + size must be less than or equal to: [" + maxResultWindow + "] but was ["
                        + resultWindow + "]. See the scroll api for a more efficient way to request large data sets. "
                        + "This limit can be set by changing the [" + IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey()
                        + "] index level setting.");
            }
            throw new QueryPhaseExecutionException(this,
                "Batch size is too large, size must be less than or equal to: [" + maxResultWindow + "] but was [" + resultWindow
                    + "]. Scroll batch sizes cost as much memory as result windows so they are controlled by the ["
                    + IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey() + "] index level setting.");
        }
        // validate each rescore window against the index's max rescore window
        if (rescore != null) {
            int maxWindow = indexService.getIndexSettings().getMaxRescoreWindow();
            for (RescoreSearchContext rescoreContext: rescore) {
                if (rescoreContext.window() > maxWindow) {
                    throw new QueryPhaseExecutionException(this, "Rescore window [" + rescoreContext.window() + "] is too large. It must "
                        + "be less than [" + maxWindow + "]. This prevents allocating massive heaps for storing the results to be "
                        + "rescored. This limit can be set by changing the [" + IndexSettings.MAX_RESCORE_WINDOW_SETTING.getKey()
                        + "] index level setting.");
                }
            }
        }
        // validate the number of scroll slices against the index's limit
        if (sliceBuilder != null) {
            int sliceLimit = indexService.getIndexSettings().getMaxSlicesPerScroll();
            int numSlices = sliceBuilder.getMax();
            if (numSlices > sliceLimit) {
                throw new QueryPhaseExecutionException(this, "The number of slices [" + numSlices + "] is too large. It must "
                    + "be less than [" + sliceLimit + "]. This limit can be set by changing the [" +
                    IndexSettings.MAX_SLICES_PER_SCROLL.getKey() + "] index level setting.");
            }
        }
        // initialize the filtering alias based on the provided filters
        try {
            final QueryBuilder queryBuilder = request.filteringAliases();
            aliasFilter = queryBuilder == null ? null : queryBuilder.toFilter(queryShardContext);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
        // no query given: default to match_all
        if (query() == null) {
            parsedQuery(ParsedQuery.parsedMatchAllQuery());
        }
        // a non-default index boost is applied by wrapping the query in a function-score query
        if (queryBoost() != AbstractQueryBuilder.DEFAULT_BOOST) {
            parsedQuery(new ParsedQuery(new FunctionScoreQuery(query(), new WeightFactorFunction(queryBoost)), parsedQuery()));
        }
        // fold type/alias/slice filters into the executable query, then optionally rewrite it
        this.query = buildFilteredQuery();
        if (rewrite) {
            try {
                this.query = searcher.rewrite(query);
            } catch (IOException e) {
                throw new QueryPhaseExecutionException(this, "Failed to rewrite main query", e);
            }
        }
    }
    /**
     * Combines the user query with the search filter (types/alias/slice/nested). Returns the
     * original query unchanged when there is nothing to filter by.
     */
    private Query buildFilteredQuery() {
        final Query searchFilter = searchFilter(queryShardContext.getTypes());
        if (searchFilter == null) {
            return originalQuery.query();
        }
        final Query result;
        if (Queries.isConstantMatchAllQuery(query())) {
            // match_all + filter collapses to a constant-score over just the filter
            result = new ConstantScoreQuery(searchFilter);
        } else {
            result = new BooleanQuery.Builder()
                .add(query, Occur.MUST)
                .add(searchFilter, Occur.FILTER)
                .build();
        }
        return result;
    }
    /**
     * Builds the non-scoring filter for this request: type/alias/nested restrictions plus the
     * sliced-scroll filter, if any. Returns {@code null} when no filtering is needed.
     */
    @Override
    @Nullable
    public Query searchFilter(String[] types) {
        Query typesFilter = createSearchFilter(types, aliasFilter, mapperService().hasNested());
        if (sliceBuilder == null) {
            return typesFilter;
        }
        Query sliceFilter = sliceBuilder.toFilter(queryShardContext, shardTarget().getShardId().getId(),
            queryShardContext.getIndexSettings().getNumberOfShards());
        if (typesFilter == null) {
            return sliceFilter;
        }
        return new BooleanQuery.Builder()
            .add(typesFilter, Occur.FILTER)
            .add(sliceFilter, Occur.FILTER)
            .build();
    }
    // extracted to static helper method to make writing unit tests easier:
    static Query createSearchFilter(String[] types, Query aliasFilter, boolean hasNestedFields) {
        Query typesFilter = null;
        if (types != null && types.length >= 1) {
            BytesRef[] typesBytes = new BytesRef[types.length];
            for (int i = 0; i < typesBytes.length; i++) {
                typesBytes[i] = new BytesRef(types[i]);
            }
            typesFilter = new TypeFieldMapper.TypesQuery(typesBytes);
        }
        if (typesFilter == null && aliasFilter == null && hasNestedFields == false) {
            // nothing to restrict by
            return null;
        }
        BooleanQuery.Builder bq = new BooleanQuery.Builder();
        if (typesFilter != null) {
            // filtering by type already excludes nested docs
            bq.add(typesFilter, Occur.FILTER);
        } else if (hasNestedFields) {
            bq.add(Queries.newNonNestedFilter(), Occur.FILTER);
        }
        if (aliasFilter != null) {
            bq.add(aliasFilter, Occur.FILTER);
        }
        return bq.build();
    }
    // ---- the remainder of the class is simple accessors/mutators over the state above ----
    @Override
    public long id() {
        return this.id;
    }
    @Override
    public String source() {
        return engineSearcher.source();
    }
    @Override
    public ShardSearchRequest request() {
        return this.request;
    }
    @Override
    public SearchType searchType() {
        return this.searchType;
    }
    @Override
    public SearchShardTarget shardTarget() {
        return this.shardTarget;
    }
    @Override
    public int numberOfShards() {
        return request.numberOfShards();
    }
    @Override
    public float queryBoost() {
        return queryBoost;
    }
    @Override
    public SearchContext queryBoost(float queryBoost) {
        this.queryBoost = queryBoost;
        return this;
    }
    @Override
    public long getOriginNanoTime() {
        return originNanoTime;
    }
    @Override
    public ScrollContext scrollContext() {
        return this.scrollContext;
    }
    @Override
    public SearchContext scrollContext(ScrollContext scrollContext) {
        this.scrollContext = scrollContext;
        return this;
    }
    @Override
    public SearchContextAggregations aggregations() {
        return aggregations;
    }
    @Override
    public SearchContext aggregations(SearchContextAggregations aggregations) {
        this.aggregations = aggregations;
        return this;
    }
    @Override
    public void addSearchExt(SearchExtBuilder searchExtBuilder) {
        //it's ok to use the writeable name here given that we enforce it to be the same as the name of the element that gets
        //parsed by the corresponding parser. There is one single name and one single way to retrieve the parsed object from the context.
        searchExtBuilders.put(searchExtBuilder.getWriteableName(), searchExtBuilder);
    }
    @Override
    public SearchExtBuilder getSearchExt(String name) {
        return searchExtBuilders.get(name);
    }
    @Override
    public SearchContextHighlight highlight() {
        return highlight;
    }
    @Override
    public void highlight(SearchContextHighlight highlight) {
        this.highlight = highlight;
    }
    @Override
    public SuggestionSearchContext suggest() {
        return suggest;
    }
    @Override
    public void suggest(SuggestionSearchContext suggest) {
        this.suggest = suggest;
    }
    @Override
    public List<RescoreSearchContext> rescore() {
        if (rescore == null) {
            return Collections.emptyList();
        }
        return rescore;
    }
    @Override
    public void addRescore(RescoreSearchContext rescore) {
        if (this.rescore == null) {
            this.rescore = new ArrayList<>();
        }
        this.rescore.add(rescore);
    }
    @Override
    public boolean hasScriptFields() {
        return scriptFields != null;
    }
    @Override
    public ScriptFieldsContext scriptFields() {
        // lazily initialized on first access
        if (scriptFields == null) {
            scriptFields = new ScriptFieldsContext();
        }
        return this.scriptFields;
    }
    /**
     * A shortcut function to see whether there is a fetchSourceContext and it says the source is requested.
     */
    @Override
    public boolean sourceRequested() {
        return fetchSourceContext != null && fetchSourceContext.fetchSource();
    }
    @Override
    public boolean hasFetchSourceContext() {
        return fetchSourceContext != null;
    }
    @Override
    public FetchSourceContext fetchSourceContext() {
        return this.fetchSourceContext;
    }
    @Override
    public SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext) {
        this.fetchSourceContext = fetchSourceContext;
        return this;
    }
    @Override
    public DocValueFieldsContext docValueFieldsContext() {
        return docValueFieldsContext;
    }
    @Override
    public SearchContext docValueFieldsContext(DocValueFieldsContext docValueFieldsContext) {
        this.docValueFieldsContext = docValueFieldsContext;
        return this;
    }
    @Override
    public ContextIndexSearcher searcher() {
        return this.searcher;
    }
    @Override
    public IndexShard indexShard() {
        return this.indexShard;
    }
    @Override
    public MapperService mapperService() {
        return indexService.mapperService();
    }
    @Override
    public SimilarityService similarityService() {
        return indexService.similarityService();
    }
    @Override
    public BigArrays bigArrays() {
        return bigArrays;
    }
    @Override
    public BitsetFilterCache bitsetFilterCache() {
        return indexService.cache().bitsetFilterCache();
    }
    @Override
    public IndexFieldDataService fieldData() {
        return indexService.fieldData();
    }
    @Override
    public TimeValue timeout() {
        return timeout;
    }
    @Override
    public void timeout(TimeValue timeout) {
        this.timeout = timeout;
    }
    @Override
    public int terminateAfter() {
        return terminateAfter;
    }
    @Override
    public void terminateAfter(int terminateAfter) {
        this.terminateAfter = terminateAfter;
    }
    @Override
    public SearchContext minimumScore(float minimumScore) {
        this.minimumScore = minimumScore;
        return this;
    }
    @Override
    public Float minimumScore() {
        return this.minimumScore;
    }
    @Override
    public SearchContext sort(SortAndFormats sort) {
        this.sort = sort;
        return this;
    }
    @Override
    public SortAndFormats sort() {
        return this.sort;
    }
    @Override
    public SearchContext trackScores(boolean trackScores) {
        this.trackScores = trackScores;
        return this;
    }
    @Override
    public boolean trackScores() {
        return this.trackScores;
    }
    @Override
    public SearchContext searchAfter(FieldDoc searchAfter) {
        this.searchAfter = searchAfter;
        return this;
    }
    @Override
    public FieldDoc searchAfter() {
        return searchAfter;
    }
    public SearchContext sliceBuilder(SliceBuilder sliceBuilder) {
        this.sliceBuilder = sliceBuilder;
        return this;
    }
    @Override
    public SearchContext parsedPostFilter(ParsedQuery postFilter) {
        this.postFilter = postFilter;
        return this;
    }
    @Override
    public ParsedQuery parsedPostFilter() {
        return this.postFilter;
    }
    @Override
    public Query aliasFilter() {
        return aliasFilter;
    }
    @Override
    public SearchContext parsedQuery(ParsedQuery query) {
        // keeps originalQuery and the executable query in sync until preProcess folds in filters
        this.originalQuery = query;
        this.query = query.query();
        return this;
    }
    @Override
    public ParsedQuery parsedQuery() {
        return this.originalQuery;
    }
    /**
     * The query to execute, in its rewritten form.
     */
    @Override
    public Query query() {
        return this.query;
    }
    @Override
    public int from() {
        return from;
    }
    @Override
    public SearchContext from(int from) {
        this.from = from;
        return this;
    }
    @Override
    public int size() {
        return size;
    }
    @Override
    public SearchContext size(int size) {
        this.size = size;
        return this;
    }
    @Override
    public boolean hasStoredFields() {
        return storedFields != null && storedFields.fieldNames() != null;
    }
    @Override
    public boolean hasStoredFieldsContext() {
        return storedFields != null;
    }
    @Override
    public StoredFieldsContext storedFieldsContext() {
        return storedFields;
    }
    @Override
    public SearchContext storedFieldsContext(StoredFieldsContext storedFieldsContext) {
        this.storedFields = storedFieldsContext;
        return this;
    }
    @Override
    public boolean storedFieldsRequested() {
        // no stored-fields context at all means the default behavior: fetch fields
        return storedFields == null || storedFields.fetchFields();
    }
    @Override
    public boolean explain() {
        return explain;
    }
    @Override
    public void explain(boolean explain) {
        this.explain = explain;
    }
    @Override
    @Nullable
    public List<String> groupStats() {
        return this.groupStats;
    }
    @Override
    public void groupStats(List<String> groupStats) {
        this.groupStats = groupStats;
    }
    @Override
    public boolean version() {
        return version;
    }
    @Override
    public void version(boolean version) {
        this.version = version;
    }
    @Override
    public int[] docIdsToLoad() {
        return docIdsToLoad;
    }
    @Override
    public int docIdsToLoadFrom() {
        return docsIdsToLoadFrom;
    }
    @Override
    public int docIdsToLoadSize() {
        return docsIdsToLoadSize;
    }
    @Override
    public SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int docsIdsToLoadSize) {
        this.docIdsToLoad = docIdsToLoad;
        this.docsIdsToLoadFrom = docsIdsToLoadFrom;
        this.docsIdsToLoadSize = docsIdsToLoadSize;
        return this;
    }
    @Override
    public void accessed(long accessTime) {
        this.lastAccessTime = accessTime;
    }
    @Override
    public long lastAccessTime() {
        return this.lastAccessTime;
    }
    @Override
    public long keepAlive() {
        return this.keepAlive;
    }
    @Override
    public void keepAlive(long keepAlive) {
        this.keepAlive = keepAlive;
    }
    @Override
    public DfsSearchResult dfsResult() {
        return dfsResult;
    }
    @Override
    public QuerySearchResult queryResult() {
        return queryResult;
    }
    @Override
    public FetchPhase fetchPhase() {
        return fetchPhase;
    }
    @Override
    public FetchSearchResult fetchResult() {
        return fetchResult;
    }
    @Override
    public MappedFieldType smartNameFieldType(String name) {
        return mapperService().fullName(name);
    }
    @Override
    public ObjectMapper getObjectMapper(String name) {
        return mapperService().getObjectMapper(name);
    }
    @Override
    public Counter timeEstimateCounter() {
        return timeEstimateCounter;
    }
    @Override
    public Map<Class<?>, Collector> queryCollectors() {
        return queryCollectors;
    }
    @Override
    public QueryShardContext getQueryShardContext() {
        return queryShardContext;
    }
    @Override
    public Profilers getProfilers() {
        return profilers;
    }
    public void setProfilers(Profilers profilers) {
        this.profilers = profilers;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.bwcompat;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
import org.elasticsearch.action.admin.indices.segments.IndexSegments;
import org.elasticsearch.action.admin.indices.segments.IndexShardSegments;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse;
import org.elasticsearch.action.admin.indices.segments.ShardSegments;
import org.elasticsearch.action.admin.indices.upgrade.UpgradeIT;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.gateway.MetaDataStateFormat;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.MergePolicyConfig;
import org.elasticsearch.index.engine.Segment;
import org.elasticsearch.index.mapper.string.StringFieldMapperPositionIncrementGapTests;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.node.Node;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import org.hamcrest.Matchers;
import org.junit.AfterClass;
import org.junit.Before;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.DirectoryStream;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
// needs at least 2 nodes since it bumps replicas to 1
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0)
@LuceneTestCase.SuppressFileSystems("ExtrasFS")
public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
// TODO: test for proper exception on unsupported indexes (maybe via separate test?)
// We have a 0.20.6.zip etc for this.
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return pluginList(InternalSettingsPlugin.class);
}
    // File names of the "index-*.zip" bwc index archives; refreshed before each test.
    List<String> indexes;
    // File names of the "unsupported-*.zip" archives — presumably versions too old to open; usage not shown in this chunk.
    List<String> unsupportedIndexes;
    // Shared across tests: target "indices" directories (set up in setupCluster, cleared in tearDownStatics).
    static Path singleDataPath;
    static Path[] multiDataPath;
@Before
public void initIndexesList() throws Exception {
indexes = loadIndexesList("index");
unsupportedIndexes = loadIndexesList("unsupported");
}
private List<String> loadIndexesList(String prefix) throws IOException {
List<String> indexes = new ArrayList<>();
try (DirectoryStream<Path> stream = Files.newDirectoryStream(getBwcIndicesPath(), prefix + "-*.zip")) {
for (Path path : stream) {
indexes.add(path.getFileName().toString());
}
}
Collections.sort(indexes);
return indexes;
}
    /** Clears the static data-path references once the class finishes so the temp dirs can be reclaimed. */
    @AfterClass
    public static void tearDownStatics() {
        singleDataPath = null;
        multiDataPath = null;
    }
    /**
     * Node settings for every node in this test cluster: merging is disabled so imported
     * segments keep their original (old) Lucene version, and recovery throttling is relaxed
     * so replica creation finishes quickly.
     */
    @Override
    public Settings nodeSettings(int ord) {
        return Settings.builder()
            .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) // disable merging so no segments will be upgraded
            .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 30) // speed up recoveries
            .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 30)
            .build();
    }
    /**
     * Starts the test cluster: one plain node (for replicas), one data node with a single data
     * path and one with two data paths, then records the (verified empty) "indices" directories
     * of the two data nodes so {@link #loadIndex} can copy old indices into them later.
     * All three nodes are started asynchronously and joined at the end.
     */
    void setupCluster() throws Exception {
        InternalTestCluster.Async<List<String>> replicas = internalCluster().startNodesAsync(1); // for replicas
        Path baseTempDir = createTempDir();
        // start single data path node
        Settings.Builder nodeSettings = Settings.builder()
            .put(Environment.PATH_DATA_SETTING.getKey(), baseTempDir.resolve("single-path").toAbsolutePath())
            .put(Node.NODE_MASTER_SETTING.getKey(), false); // workaround for dangling index loading issue when node is master
        InternalTestCluster.Async<String> singleDataPathNode = internalCluster().startNodeAsync(nodeSettings.build());
        // start multi data path node
        nodeSettings = Settings.builder()
            .put(Environment.PATH_DATA_SETTING.getKey(), baseTempDir.resolve("multi-path1").toAbsolutePath() + "," + baseTempDir.resolve("multi-path2").toAbsolutePath())
            .put(Node.NODE_MASTER_SETTING.getKey(), false); // workaround for dangling index loading issue when node is master
        InternalTestCluster.Async<String> multiDataPathNode = internalCluster().startNodeAsync(nodeSettings.build());
        // find single data path dir
        Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, singleDataPathNode.get()).nodeDataPaths();
        assertEquals(1, nodePaths.length);
        singleDataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER);
        assertFalse(Files.exists(singleDataPath));
        Files.createDirectories(singleDataPath);
        logger.info("--> Single data path: " + singleDataPath.toString());
        // find multi data path dirs
        nodePaths = internalCluster().getInstance(NodeEnvironment.class, multiDataPathNode.get()).nodeDataPaths();
        assertEquals(2, nodePaths.length);
        multiDataPath = new Path[] {nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER),
                                    nodePaths[1].resolve(NodeEnvironment.INDICES_FOLDER)};
        assertFalse(Files.exists(multiDataPath[0]));
        assertFalse(Files.exists(multiDataPath[1]));
        Files.createDirectories(multiDataPath[0]);
        Files.createDirectories(multiDataPath[1]);
        logger.info("--> Multi data paths: " + multiDataPath[0].toString() + ", " + multiDataPath[1].toString());
        replicas.get(); // wait for replicas
    }
    /**
     * Unzips the given bwc archive into a temp dir and moves the contained index directory
     * into either the single- or the multi-data-path node (chosen at random).
     *
     * @param indexFile archive file name, e.g. {@code index-1.7.0.zip} or {@code unsupported-0.20.6.zip}
     * @return the derived index name, e.g. {@code index-1.7.0}
     */
    String loadIndex(String indexFile) throws Exception {
        Path unzipDir = createTempDir();
        Path unzipDataDir = unzipDir.resolve("data");
        // "unsupported-" archives are renamed so the resulting index name is uniform
        String indexName = indexFile.replace(".zip", "").toLowerCase(Locale.ROOT).replace("unsupported-", "index-");
        // decompress the index
        Path backwardsIndex = getBwcIndicesPath().resolve(indexFile);
        try (InputStream stream = Files.newInputStream(backwardsIndex)) {
            TestUtil.unzip(stream, unzipDir);
        }
        // check the archive contains exactly one cluster directory
        assertTrue(Files.exists(unzipDataDir));
        Path[] list = FileSystemUtils.files(unzipDataDir);
        if (list.length != 1) {
            throw new IllegalStateException("Backwards index must contain exactly one cluster");
        }
        // the bwc scripts packs the indices under this path
        Path src = list[0].resolve("nodes/0/indices/" + indexName);
        assertTrue("[" + indexFile + "] missing index dir: " + src.toString(), Files.exists(src));
        if (randomBoolean()) {
            logger.info("--> injecting index [{}] into single data path", indexName);
            copyIndex(logger, src, indexName, singleDataPath);
        } else {
            logger.info("--> injecting index [{}] into multi data path", indexName);
            copyIndex(logger, src, indexName, multiDataPath);
        }
        return indexName;
    }
    /**
     * Makes the copied-in index visible to the cluster by forcing a reroute (which republishes
     * the cluster state and picks up dangling indices), then waits for it to go green.
     */
    void importIndex(String indexName) throws IOException {
        // force reloading dangling indices with a cluster state republish
        client().admin().cluster().prepareReroute().get();
        ensureGreen(indexName);
    }
    // Copies the index from src into the dest paths: the directory structure is mirrored into
    // every dest, but all regular files are moved into one dest chosen at random up front
    // (simulating how a multi-data-path node lays files out).
    public static void copyIndex(final ESLogger logger, final Path src, final String indexName, final Path... dests) throws IOException {
        // single random destination for every file in this copy
        Path destinationDataPath = dests[randomInt(dests.length - 1)];
        for (Path dest : dests) {
            Path indexDir = dest.resolve(indexName);
            assertFalse(Files.exists(indexDir));
            Files.createDirectories(indexDir);
        }
        Files.walkFileTree(src, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
                // mirror the directory into every destination
                Path relativeDir = src.relativize(dir);
                for (Path dest : dests) {
                    Path destDir = dest.resolve(indexName).resolve(relativeDir);
                    Files.createDirectories(destDir);
                }
                return FileVisitResult.CONTINUE;
            }
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                if (file.getFileName().toString().equals(IndexWriter.WRITE_LOCK_NAME)) {
                    // skip lock file, we don't need it
                    logger.trace("Skipping lock file: " + file.toString());
                    return FileVisitResult.CONTINUE;
                }
                Path relativeFile = src.relativize(file);
                Path destFile = destinationDataPath.resolve(indexName).resolve(relativeFile);
                logger.trace("--> Moving " + relativeFile.toString() + " to " + destFile.toString());
                Files.move(file, destFile);
                assertFalse(Files.exists(file));
                assertTrue(Files.exists(destFile));
                return FileVisitResult.CONTINUE;
            }
        });
    }
    /** Deletes the imported index once its assertions have run. */
    void unloadIndex(String indexName) throws Exception {
        assertAcked(client().admin().indices().prepareDelete(indexName).get());
    }
public void testAllVersionsTested() throws Exception {
SortedSet<String> expectedVersions = new TreeSet<>();
for (Version v : VersionUtils.allVersions()) {
if (v.snapshot()) continue; // snapshots are unreleased, so there is no backcompat yet
if (v.onOrBefore(Version.V_2_0_0_beta1)) continue; // we can only test back one major lucene version
if (v.equals(Version.CURRENT)) continue; // the current version is always compatible with itself
expectedVersions.add("index-" + v.toString() + ".zip");
}
for (String index : indexes) {
if (expectedVersions.remove(index) == false) {
logger.warn("Old indexes tests contain extra index: " + index);
}
}
if (expectedVersions.isEmpty() == false) {
StringBuilder msg = new StringBuilder("Old index tests are missing indexes:");
for (String expected : expectedVersions) {
msg.append("\n" + expected);
}
fail(msg.toString());
}
}
public void testOldIndexes() throws Exception {
setupCluster();
Collections.shuffle(indexes, random());
for (String index : indexes) {
long startTime = System.currentTimeMillis();
logger.info("--> Testing old index " + index);
assertOldIndexWorks(index);
logger.info("--> Done testing " + index + ", took " + ((System.currentTimeMillis() - startTime) / 1000.0) + " seconds");
}
}
    /**
     * Imports one old index and runs every per-index assertion: segment/version sanity,
     * search, aggregations, realtime get, replica creation, upgrade, delete-by-query
     * translog replay and position-increment-gap defaults. The index is deleted afterwards.
     */
    void assertOldIndexWorks(String index) throws Exception {
        Version version = extractVersion(index);
        String indexName = loadIndex(index);
        importIndex(indexName);
        assertIndexSanity(indexName, version);
        assertBasicSearchWorks(indexName);
        assertBasicAggregationWorks(indexName);
        assertRealtimeGetWorks(indexName);
        assertNewReplicasWork(indexName);
        assertUpgradeWorks(indexName, isLatestLuceneVersion(version));
        assertDeleteByQueryWorked(indexName, version);
        assertPositionIncrementGapDefaults(indexName, version);
        unloadIndex(indexName);
    }
Version extractVersion(String index) {
return Version.fromString(index.substring(index.indexOf('-') + 1, index.lastIndexOf('.')));
}
boolean isLatestLuceneVersion(Version version) {
return version.luceneVersion.major == Version.CURRENT.luceneVersion.major &&
version.luceneVersion.minor == Version.CURRENT.luceneVersion.minor;
}
void assertIndexSanity(String indexName, Version indexCreated) {
GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex().addIndices(indexName).get();
assertEquals(1, getIndexResponse.indices().length);
assertEquals(indexName, getIndexResponse.indices()[0]);
Version actualVersionCreated = Version.indexCreated(getIndexResponse.getSettings().get(indexName));
assertEquals(indexCreated, actualVersionCreated);
ensureYellow(indexName);
IndicesSegmentResponse segmentsResponse = client().admin().indices().prepareSegments(indexName).get();
IndexSegments segments = segmentsResponse.getIndices().get(indexName);
for (IndexShardSegments indexShardSegments : segments) {
for (ShardSegments shardSegments : indexShardSegments) {
for (Segment segment : shardSegments) {
assertEquals(indexCreated.luceneVersion, segment.version);
}
}
}
SearchResponse test = client().prepareSearch(indexName).get();
assertThat(test.getHits().getTotalHits(), greaterThanOrEqualTo(1l));
}
void assertBasicSearchWorks(String indexName) {
logger.info("--> testing basic search");
SearchRequestBuilder searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery());
SearchResponse searchRsp = searchReq.get();
ElasticsearchAssertions.assertNoFailures(searchRsp);
long numDocs = searchRsp.getHits().getTotalHits();
logger.info("Found " + numDocs + " in old index");
logger.info("--> testing basic search with sort");
searchReq.addSort("long_sort", SortOrder.ASC);
ElasticsearchAssertions.assertNoFailures(searchReq.get());
logger.info("--> testing exists filter");
searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.existsQuery("string"));
searchRsp = searchReq.get();
ElasticsearchAssertions.assertNoFailures(searchRsp);
assertEquals(numDocs, searchRsp.getHits().getTotalHits());
}
void assertBasicAggregationWorks(String indexName) {
// histogram on a long
SearchResponse searchRsp = client().prepareSearch(indexName).addAggregation(AggregationBuilders.histogram("histo").field("long_sort").interval(10)).get();
ElasticsearchAssertions.assertSearchResponse(searchRsp);
Histogram histo = searchRsp.getAggregations().get("histo");
assertNotNull(histo);
long totalCount = 0;
for (Histogram.Bucket bucket : histo.getBuckets()) {
totalCount += bucket.getDocCount();
}
assertEquals(totalCount, searchRsp.getHits().getTotalHits());
// terms on a boolean
searchRsp = client().prepareSearch(indexName).addAggregation(AggregationBuilders.terms("bool_terms").field("bool")).get();
Terms terms = searchRsp.getAggregations().get("bool_terms");
totalCount = 0;
for (Terms.Bucket bucket : terms.getBuckets()) {
totalCount += bucket.getDocCount();
}
assertEquals(totalCount, searchRsp.getHits().getTotalHits());
}
/**
 * Verifies realtime GET: with automatic refresh disabled, an update must
 * still be visible through the GET API. Restores the default refresh
 * interval afterwards so later checks are unaffected.
 */
void assertRealtimeGetWorks(String indexName) {
    // disable refresh so the update below is only reachable via realtime GET
    assertAcked(client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder()
            .put("refresh_interval", -1)
            .build()));
    SearchHit anyHit = client().prepareSearch(indexName)
            .setQuery(QueryBuilders.matchAllQuery()).get().getHits().getAt(0);
    String docId = anyHit.getId();
    // foo is new, it is not a field in the generated index
    client().prepareUpdate(indexName, "doc", docId).setDoc("foo", "bar").get();
    GetResponse getRsp = client().prepareGet(indexName, "doc", docId).get();
    Map<String, Object> source = getRsp.getSourceAsMap();
    assertThat(source, Matchers.hasKey("foo"));
    // restore the default refresh behavior
    assertAcked(client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder()
            .put("refresh_interval", IndexSettings.DEFAULT_REFRESH_INTERVAL)
            .build()));
}
/**
 * Bumps the index to one replica and waits (up to two minutes) for the
 * cluster to go green, i.e. for the new replica copies to be allocated.
 */
void assertNewReplicasWork(String indexName) throws Exception {
    final int replicaCount = 1;
    final long start = System.currentTimeMillis();
    logger.debug("--> creating [{}] replicas for index [{}]", replicaCount, indexName);
    Settings.Builder replicaSettings = Settings.builder().put("number_of_replicas", replicaCount);
    assertAcked(client().admin().indices().prepareUpdateSettings(indexName)
            .setSettings(replicaSettings).execute().actionGet());
    ensureGreen(TimeValue.timeValueMinutes(2), indexName);
    logger.debug("--> index [{}] is green, took [{}]", indexName,
            TimeValue.timeValueMillis(System.currentTimeMillis() - start));
    logger.debug("--> recovery status:\n{}",
            XContentHelper.toString(client().admin().indices().prepareRecoveries(indexName).get()));
    // TODO: do something with the replicas! query? index?
}
// #10067: create-bwc-index.py deleted any doc with long_sort:[10-20]
void assertDeleteByQueryWorked(String indexName, Version version) throws Exception {
    // only versions strictly between 1.0.0.Beta2 and 2.0.0-beta1 carry a
    // delete-by-query operation in the translog for these docs
    boolean hasTranslogDeleteByQuery =
            version.after(Version.V_1_0_0_Beta2) && version.before(Version.V_2_0_0_beta1);
    if (!hasTranslogDeleteByQuery) {
        // TODO: remove this once #10262 is fixed
        return;
    }
    // these documents are supposed to be deleted by a delete by query operation in the translog
    long remaining = client().prepareSearch(indexName)
            .setQuery(QueryBuilders.queryStringQuery("long_sort:[10 TO 20]"))
            .get().getHits().getTotalHits();
    assertEquals(0, remaining);
}
/**
 * Checks the default position_increment_gap for string fields: one hundred
 * from 2.0.0-beta1 onwards, zero before that.
 */
void assertPositionIncrementGapDefaults(String indexName, Version version) throws Exception {
    if (version.onOrAfter(Version.V_2_0_0_beta1)) {
        StringFieldMapperPositionIncrementGapTests.assertGapIsOneHundred(client(), indexName, "doc");
    } else {
        StringFieldMapperPositionIncrementGapTests.assertGapIsZero(client(), indexName, "doc");
    }
}
/**
 * Runs the upgrade API against the index and verifies it ends up upgraded.
 * Unless the index is already on the latest format, also asserts that it
 * reports as not-upgraded beforehand.
 */
void assertUpgradeWorks(String indexName, boolean alreadyLatest) throws Exception {
    if (!alreadyLatest) {
        UpgradeIT.assertNotUpgraded(client(), indexName);
    }
    assertNoFailures(client().admin().indices().prepareUpgrade(indexName).get());
    UpgradeIT.assertUpgraded(client(), indexName);
}
/**
 * Unzips a backwards-compatibility index archive into a fresh temp directory
 * and returns the path of its single node directory ("nodes/0") inside the
 * extracted data dir.
 *
 * @throws IllegalStateException if the archive contains more than one cluster
 */
private Path getNodeDir(String indexFile) throws IOException {
    Path unzipDir = createTempDir();
    Path unzipDataDir = unzipDir.resolve("data");
    // decompress the archived index into the temp dir
    Path backwardsIndex = getBwcIndicesPath().resolve(indexFile);
    try (InputStream stream = Files.newInputStream(backwardsIndex)) {
        TestUtil.unzip(stream, unzipDir);
    }
    assertTrue(Files.exists(unzipDataDir));
    // the data dir must contain exactly one cluster directory
    Path[] clusterDirs = FileSystemUtils.files(unzipDataDir);
    if (clusterDirs.length != 1) {
        throw new IllegalStateException("Backwards index must contain exactly one cluster");
    }
    // the bwc scripts packs the indices under this path
    return clusterDirs[0].resolve("nodes/0/");
}
/**
 * Loads the on-disk global and per-index cluster state of every bundled
 * backwards-compatibility index, verifying both can still be parsed.
 */
public void testOldClusterStates() throws Exception {
    // dangling indices do not load the global state, only the per-index states
    // so we make sure we can read them separately
    // read-only codec for "global-" state files; serialization is unsupported here
    MetaDataStateFormat<MetaData> globalFormat = new MetaDataStateFormat<MetaData>(XContentType.JSON, "global-") {
        @Override
        public void toXContent(XContentBuilder builder, MetaData state) throws IOException {
            throw new UnsupportedOperationException();
        }
        @Override
        public MetaData fromXContent(XContentParser parser) throws IOException {
            return MetaData.Builder.fromXContent(parser);
        }
    };
    // read-only codec for the per-index "state-" files
    MetaDataStateFormat<IndexMetaData> indexFormat = new MetaDataStateFormat<IndexMetaData>(XContentType.JSON, "state-") {
        @Override
        public void toXContent(XContentBuilder builder, IndexMetaData state) throws IOException {
            throw new UnsupportedOperationException();
        }
        @Override
        public IndexMetaData fromXContent(XContentParser parser) throws IOException {
            return IndexMetaData.Builder.fromXContent(parser);
        }
    };
    // randomize order so failures do not depend on a fixed iteration order
    Collections.shuffle(indexes, random());
    for (String indexFile : indexes) {
        // zip name (e.g. "unsupported-X.zip") maps to the on-disk index name
        String indexName = indexFile.replace(".zip", "").toLowerCase(Locale.ROOT).replace("unsupported-", "index-");
        Path nodeDir = getNodeDir(indexFile);
        logger.info("Parsing cluster state files from index [" + indexName + "]");
        assertNotNull(globalFormat.loadLatestState(logger, nodeDir)); // no exception
        Path indexDir = nodeDir.resolve("indices").resolve(indexName);
        assertNotNull(indexFormat.loadLatestState(logger, indexDir)); // no exception
    }
}
}
| |
package com.hannesdorfmann.mosby3.sample.mail.details;
import android.animation.ObjectAnimator;
import android.animation.PropertyValuesHolder;
import android.annotation.TargetApi;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.ActivityOptionsCompat;
import android.transition.TransitionInflater;
import android.transition.TransitionSet;
import android.view.View;
import android.view.ViewTreeObserver;
import android.view.Window;
import android.view.animation.OvershootInterpolator;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.hannesdorfmann.fragmentargs.annotation.Arg;
import com.hannesdorfmann.mosby3.sample.mail.IntentStarter;
import com.hannesdorfmann.mosby3.sample.mail.MailApplication;
import com.hannesdorfmann.mosby3.sample.mail.R;
import com.hannesdorfmann.mosby3.sample.mail.base.view.AuthFragment;
import com.hannesdorfmann.mosby3.sample.mail.base.view.viewstate.AuthParcelableDataViewState;
import com.hannesdorfmann.mosby3.sample.mail.base.view.viewstate.AuthViewState;
import com.hannesdorfmann.mosby3.sample.mail.label.LabelLayout;
import com.hannesdorfmann.mosby3.sample.mail.model.mail.Mail;
import com.hannesdorfmann.mosby3.sample.mail.ui.transition.ExcludedExplodeTransition;
import com.hannesdorfmann.mosby3.sample.mail.ui.transition.ExplodeFadeEnterTransition;
import com.hannesdorfmann.mosby3.sample.mail.ui.transition.TextSizeEnterSharedElementCallback;
import com.hannesdorfmann.mosby3.sample.mail.ui.transition.TextSizeTransition;
import com.hannesdorfmann.mosby3.sample.mail.ui.view.StarView;
import com.melnykov.fab.FloatingActionButton;
import com.melnykov.fab.ObservableScrollView;
import java.text.Format;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import javax.inject.Inject;
import butterknife.BindView;
import butterknife.OnClick;
/**
* @author Hannes Dorfmann
*/
public class DetailsFragment extends AuthFragment<TextView, Mail, DetailsView, DetailsPresenter>
        implements DetailsView, View.OnClickListener {

    // Fragment arguments (injected by FragmentArgs); mirror the mail's header
    // fields so the UI can be shown before the full Mail has loaded.
    @Arg int mailId;
    @Arg String subject;
    @Arg int senderProfilePic;
    @Arg String senderName;
    @Arg String senderEmail;
    @Arg long date;
    @Arg boolean starred;

    @Inject IntentStarter intentStarter;

    // Views bound by ButterKnife. NOTE(review): the mail body is shown in
    // "contentView", which is presumably declared in the AuthFragment base
    // class (not visible here) — confirm.
    @BindView(R.id.senderPic) ImageView senderImageView;
    @BindView(R.id.subject) TextView subjectView;
    @BindView(R.id.date) TextView dateView;
    @BindView(R.id.starButton) StarView starView;
    @BindView(R.id.replay) FloatingActionButton replayView;
    @BindView(R.id.senderName) TextView senderNameView;
    @BindView(R.id.senderMail) TextView senderMailView;
    @BindView(R.id.separatorLine) View separatorLine;
    @BindView(R.id.label) LabelLayout labelView;
    @BindView(R.id.scrollView) ObservableScrollView scrollView;

    private DetailsComponent detailsComponent;

    // Date formatter for the header, e.g. "3. Jan"
    Format format = new SimpleDateFormat("d. MMM", Locale.getDefault());

    // The loaded data; null until the presenter delivers it via setData()
    private Mail mail;

    @Override public AuthViewState<Mail, DetailsView> createViewState() {
        return new AuthParcelableDataViewState<>();
    }

    @Override protected int getLayoutRes() {
        return R.layout.fragment_mail_details;
    }

    // Populates the header views from the fragment arguments immediately
    // (before the Mail is loaded) and, on API 21+ phones, postpones the
    // shared-element enter transition until the first layout pass.
    @TargetApi(21) @Override
    public void onViewCreated(final View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        replayView.attachToScrollView(scrollView);
        starView.setOnClickListener(this);
        subjectView.setText(subject);
        senderImageView.setImageResource(senderProfilePic);
        senderNameView.setText(senderName);
        senderMailView.setText(senderEmail);
        starView.setStarred(starred);
        dateView.setText(format.format(new Date(date)));
        senderImageView.setOnClickListener(new View.OnClickListener() {
            @Override public void onClick(View v) {
                // only navigate once the full Mail (with sender) is available
                if (mail != null) {
                    intentStarter.showProfile(getActivity(), mail.getSender());
                }
            }
        });
        // Shared element animation
        if (Build.VERSION.SDK_INT >= 21 && !isTablet()) {
            initTransitions();
            // start the postponed transition once the view hierarchy is measured
            view.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
                @Override public boolean onPreDraw() {
                    view.getViewTreeObserver().removeOnPreDrawListener(this);
                    getActivity().startPostponedEnterTransition();
                    return true;
                }
            });
        }
    }

    // Configures window enter/exit/reenter/return transitions plus a combined
    // move + text-size shared-element transition for the subject view.
    @TargetApi(21) private void initTransitions() {
        Window window = getActivity().getWindow();
        window.setEnterTransition(
                new ExplodeFadeEnterTransition(senderNameView, senderMailView, separatorLine));
        window.setExitTransition(new ExcludedExplodeTransition());
        window.setReenterTransition(new ExcludedExplodeTransition());
        window.setReturnTransition(new ExcludedExplodeTransition());
        TransitionSet textSizeSet = new TransitionSet();
        textSizeSet.addTransition(
                TransitionInflater.from(getActivity()).inflateTransition(android.R.transition.move));
        TextSizeTransition textSizeTransition = new TextSizeTransition();
        textSizeTransition.addTarget(R.id.subject);
        textSizeTransition.addTarget(getString(R.string.shared_mail_subject));
        textSizeSet.addTransition(textSizeTransition);
        textSizeSet.setOrdering(TransitionSet.ORDERING_TOGETHER);
        window.setSharedElementEnterTransition(textSizeSet);
        getActivity().setEnterSharedElementCallback(
                new TextSizeEnterSharedElementCallback(getActivity()));
    }

    private boolean isTablet() {
        return getResources().getBoolean(R.bool.tablet);
    }

    @Override public Mail getData() {
        return mail;
    }

    @Override public DetailsPresenter createPresenter() {
        return detailsComponent.presenter();
    }

    // Called by the presenter with the fully loaded Mail; replaces the
    // argument-based placeholder content and reveals label/reply controls.
    @Override public void setData(Mail data) {
        this.mail = data;
        senderImageView.setImageResource(data.getSender().getImageRes());
        senderNameView.setText(data.getSender().getName());
        senderMailView.setText(data.getSender().getEmail());
        subjectView.setText(data.getSubject());
        // NOTE(review): the body text is concatenated four times — looks like
        // deliberate padding for this sample app; confirm it is intentional
        contentView.setText(data.getText() + data.getText() + data.getText() + data.getText());
        starView.setStarred(data.isStarred());
        dateView.setText(format.format(data.getDate()));
        labelView.setMail(data);
        labelView.setVisibility(View.VISIBLE);
        replayView.setVisibility(View.VISIBLE);
        // Animate only if not restoring
        if (!isRestoringViewState()) {
            labelView.setAlpha(0f);
            labelView.animate().alpha(1f).setDuration(150).start();
            PropertyValuesHolder holderX = PropertyValuesHolder.ofFloat("scaleX", 0, 1);
            PropertyValuesHolder holderY = PropertyValuesHolder.ofFloat("scaleY", 0, 1);
            ObjectAnimator animator = ObjectAnimator.ofPropertyValuesHolder(replayView, holderX, holderY);
            animator.setInterpolator(new OvershootInterpolator());
            animator.start();
        }
    }

    @Override public void loadData(boolean pullToRefresh) {
        presenter.loadMail(mailId);
    }

    public int getMailId() {
        return mailId;
    }

    // Click handler for the star toggle (registered in onViewCreated)
    @Override public void onClick(View v) {
        if (mail != null) {
            presenter.starMail(mail, !mail.isStarred());
        } else {
            Toast.makeText(getActivity(), R.string.error_wait_mail_loaded, Toast.LENGTH_SHORT).show();
        }
    }

    // NOTE(review): unlike onClick(), the two callbacks below dereference
    // "mail" without a null check — presumably the presenter only fires them
    // after the mail is loaded; confirm.
    @Override public void markMailAsStared(int mailId) {
        if (mail.getId() == mailId) {
            mail.setStarred(true);
            starView.setStarred(true);
        }
    }

    @Override public void markMailAsUnstared(int mailId) {
        if (mail.getId() == mailId) {
            mail.setStarred(false);
            starView.setStarred(false);
        }
    }

    // Shows a toast for a failed star/unstar, formatting the sender name into
    // the given message resource.
    private void showStarErrorToast(int messageRes, Mail mail) {
        Toast.makeText(getActivity(), String.format(getString(messageRes), mail.getSender().getName()),
                Toast.LENGTH_SHORT).show();
    }

    @Override public void showStaringFailed(Mail mail) {
        showStarErrorToast(R.string.error_staring_mail, mail);
    }

    @Override public void showUnstaringFailed(Mail mail) {
        showStarErrorToast(R.string.error_unstaring_mail, mail);
    }

    // Opens the write-mail screen with a shared-element transition on the FAB
    @OnClick(R.id.replay) public void onReplayClicked() {
        ActivityOptionsCompat options =
                ActivityOptionsCompat.makeSceneTransitionAnimation(getActivity(), replayView,
                        getString(R.string.shared_write_action));
        intentStarter.showWriteMail(getActivity(), mail, options.toBundle());
    }

    @Override public void markMailAsRead(Mail mail, boolean read) {
        // TODO: currently there is no UI component that shows if that mail has been read or not
    }

    @Override protected void injectDependencies() {
        detailsComponent =
                DaggerDetailsComponent.builder().mailAppComponent(MailApplication.getMailComponents()).build();
        detailsComponent.inject(this);
    }
}
| |
/*
* The MIT License
*
* Copyright 2016 Focused Ultrasound Foundation.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.fusfoundation.kranion;
import org.fusfoundation.kranion.controller.DefaultController;
import org.fusfoundation.kranion.controller.Controller;
import org.fusfoundation.kranion.plugin.PluginFinder;
import org.fusfoundation.kranion.plugin.Plugin;
import org.fusfoundation.kranion.view.View;
import org.fusfoundation.kranion.view.DefaultView;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.util.glu.GLU.*;
import org.lwjgl.BufferUtils;
import java.nio.*;
import java.io.IOException;
import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.lwjgl.LWJGLException;
import org.lwjgl.input.Keyboard;
import org.lwjgl.input.Mouse;
import org.lwjgl.opengl.*;
import org.lwjgl.PointerBuffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.List;
import org.lwjgl.opencl.CL;
import org.lwjgl.opencl.CLDevice;
import org.lwjgl.opencl.CLPlatform;
import org.lwjgl.opencl.CLContext;
import static org.lwjgl.opencl.CL10.*;
import static org.lwjgl.opencl.CL10GL.*;
import java.io.File;
import java.util.StringTokenizer;
import java.util.List;
import org.fusfoundation.kranion.model.*;
import org.fusfoundation.kranion.model.image.*;
import static org.lwjgl.opengl.GL20.GL_FRAGMENT_SHADER;
import static org.lwjgl.opengl.GL20.GL_VERTEX_SHADER;
import static org.lwjgl.opengl.GL20.glUniform1f;
import static org.lwjgl.opengl.GL20.glUniform3f;
import static org.lwjgl.opengl.GL20.glGetUniformLocation;
import java.awt.SplashScreen;
import java.awt.geom.Rectangle2D;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.awt.Font;
import java.awt.Color;
import java.awt.image.BufferedImage;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import javax.imageio.ImageIO;
import org.lwjgl.opencl.CL10GL;
import org.lwjgl.opencl.CLCapabilities;
import org.lwjgl.opencl.CLCommandQueue;
import org.lwjgl.opencl.CLDeviceCapabilities;
import static org.lwjgl.opengl.GL20.glGetUniformLocation;
import javax.swing.*;
public class Main implements ProgressListener {
// MVC roots: the single model/controller/view triple driven by the main loop
private Model model;
private Controller controller;
private View view;
private int currentBuffer = 0;
private boolean bufferNeedsRendering[] = new boolean[3];
// fallback window size used when no suitable display mode is found
public static final int DISPLAY_HEIGHT = 1024;
public static final int DISPLAY_WIDTH = 1680;
// detected GL major.minor version; -1 until checkGLSupport() runs
public static float OpenGLVersion = -1f;
// shared OpenCL context/queue; stay null when no GL-sharing device is found
public static CLContext CLcontext = null;
public static CLCommandQueue CLqueue = null;
public static final boolean GL_DEBUG = false;
private float viewportAspect = 1f;
// plugins discovered in create(); workers registered via addBackgroundWorker()
private static List<Plugin> plugins = new ArrayList<>();
private static List<BackgroundWorker> workers = new ArrayList<>();
// queue for marshalling callables onto the main (GL) thread
private static CrossThreadCallableManager callQ = new CrossThreadCallableManager();
// Submits the callable to the main thread's call queue and returns its result.
public static Object callCrossThreadCallable(CrossThreadCallable c) {
    return callQ.call(c);
}
// Registers a worker to receive doWorkStep() calls from the main loop.
public static void addBackgroundWorker(BackgroundWorker w) {
    workers.add(w);
}
// Unregisters a previously added background worker.
public static void removeBackgroundWorker(BackgroundWorker w) {
    workers.remove(w);
}
/**
 * Starts every registered background worker whose name equals {@code name}.
 *
 * @param name exact worker name to match
 */
public static void startBackgroundWorker(String name) {
    for (BackgroundWorker w : workers) {
        // equals() instead of compareTo()==0: same match, clearer intent,
        // and no NPE when a worker reports a null name
        if (w.getName() != null && w.getName().equals(name)) {
            w.start();
        }
    }
}
/**
 * Stops every registered background worker whose name equals {@code name}.
 *
 * @param name exact worker name to match
 */
public static void stopBackgroundWorker(String name) {
    for (BackgroundWorker w : workers) {
        // equals() instead of compareTo()==0: same match, clearer intent,
        // and no NPE when a worker reports a null name
        if (w.getName() != null && w.getName().equals(name)) {
            w.stop();
        }
    }
}
// Gives each registered background worker one unit of work per main-loop pass.
private void doBackgroundWorkers() {
    for (BackgroundWorker worker : workers) {
        worker.doWorkStep();
    }
}
// Global accessor for the application's current data model.
public static Model getModel() { return Main.main.model; }
/**
 * Replaces the application's current model, rewiring the controller, view,
 * plugins, and background workers to the new instance, and transferring
 * property-change listeners from the old model before clearing it.
 *
 * @param model the new model; if null, a fresh empty Model is installed
 */
public static void setModel(Model model) {
    if (model == null) {
        model = new Model();
    }
    // let plugins drop resources tied to the old model before it goes away
    for (int i = 0; i < plugins.size(); i++) {
        plugins.get(i).release();
    }
    Model oldModel = Main.main.model;
    // Fix: the old code called Main.main.model.clearImages() before the
    // oldModel null check below, which could NPE when no model was set yet.
    if (oldModel != null) {
        oldModel.clearImages();
    }
    Main.main.model = model;
    Main.main.controller.setModel(model);
    Main.main.view.setModel(model);
    // disconnect old model if it exists
    if (oldModel != null) {
        // transfer model listeners to the new model
        Main.main.model.copyPropertyChangeListeners(oldModel);
        // remove listeners from the old model and clear it
        oldModel.clear();
    }
    // reconnect plugins to the new model
    for (int i = 0; i < plugins.size(); i++) {
        plugins.get(i).init(Main.main.controller);
    }
    // point background workers at the new model
    for (BackgroundWorker w : workers) {
        w.setModel(model);
    }
}
// Returns the live list of plugins loaded at startup (not a defensive copy).
public static List<Plugin> getPlugins() {
    return plugins;
}
/**
 * Pushes the current model into every renderable control that supports
 * model binding. No-op when no model is loaded.
 */
public static void updateAllControlBindings() {
    if (Main.main.model == null) {
        return;
    }
    Iterator<Renderable> controls = Renderable.iterator();
    while (controls.hasNext()) {
        Renderable control = controls.next();
        if (control instanceof GUIControlModelBinding) {
            ((GUIControlModelBinding) control).doBinding(Main.main.model);
        }
    }
}
// Global accessor for the application's current view.
public static View getView() { return Main.main.view; }
/**
 * Swaps in a new view: detaches the model from the previous view (if any),
 * then attaches the current model to the replacement.
 */
public static void setView(View view) {
    View previous = Main.main.view;
    if (previous != null) {
        previous.setModel(null);
    }
    view.setModel(Main.main.model);
    Main.main.view = view;
}
// Hard wired logging level for now. TODO: make this runtime configurable
static {
    try {
        Logger.getGlobal().setLevel(Level.WARNING);
        // append WARNING-and-above records to errors.log in the working directory
        Logger.getGlobal().addHandler(new FileHandler("errors.log", true));
    } catch (IOException ex) {
        // can't open the log file; fall back to default handlers only
        Logger.getGlobal().log(Level.WARNING, ex.toString(), ex);
    }
}
// Singleton instance created in main(); backs the static accessors above.
private static Main main;
// Thread id captured at class load — presumably the main thread; update()
// compares against it to detect calls from the main loop's own thread.
private static long mainThreadId = Thread.currentThread().getId();
/**
 * Application entry point: builds the singleton, logs build/JVM info,
 * creates the window and GL/CL contexts, dismisses the AWT splash screen,
 * then enters the render loop. Any uncaught exception is logged, and the
 * display is always torn down in the finally block.
 */
public static void main(String[] args) {
    try {
        main = new Main();
        Logger.getGlobal().log(Level.INFO, "This is build " + getRbTok("app.version") + ":" + getRbTok("app.build"));
        Logger.getGlobal().log(Level.INFO, "JVM version: " + System.getProperty("java.vm.name") + " " + System.getProperty("java.vendor") + " " + System.getProperty("java.version"));
        main.create();
        // the splash (if any) is no longer needed once the GL window exists
        SplashScreen splash = SplashScreen.getSplashScreen();
        if (splash != null) {
            splash.close();
        }
        main.run();
    } catch (Exception ex) {
        Logger.getGlobal().log(Level.SEVERE, ex.toString(), ex);
    } finally {
        if (main != null) {
            main.destroy();
        }
    }
}
// Construction is trivial; all real setup happens in create().
public Main() {
}
/**
 * Sets the window title: "Kranion - <title>" for a non-empty title,
 * plain "Kranion" otherwise.
 */
public static void setTitle(String title) {
    Display.setTitle(title == null || title.length() == 0 ? "Kranion" : "Kranion - " + title);
}
public static void update() {
    // If the caller is in the same thread and thus blocking the main loop,
    // we do one cycle of the main loop without input checking.
    // Main idea is to allow a progress indicator to be updated while
    // some lengthy process is running in the main thread calling this update()
    // function periodically.
    // Calls from other threads are intentionally ignored: only the main
    // thread may touch the GL context.
    if (Thread.currentThread().getId() == mainThreadId) {
        main.handleResize();
        main.nextFrame();
        Main.checkForGLError();
    }
}
/**
 * One-time startup: wires the MVC triple, picks a display mode, creates the
 * LWJGL window with its GL context, probes GL/CL capabilities, initializes
 * input devices and GL state, and loads plugins. Order matters: GL/CL
 * probing and initGL() require the Display to exist first.
 */
public void create() throws LWJGLException {
    model = new Model();
    controller = new DefaultController();
    controller.setModel(model);
    view = new DefaultView();
    view.setModel(model);
    view.setPropertyPrefix("Model.Attribute");
    view.setController(controller);
    view.setTag("DefaultView");
    controller.setView(view);
    //Display
    // Mode selection: prefer 2560x1440x32@60; otherwise track the widest
    // 32bpp mode at >=60Hz, stopping early once DISPLAY_WIDTH is reached.
    // NOTE(review): the 2560x1440 branch does not break, so a later, wider
    // mode can overwrite the preferred pick — confirm this is intended.
    DisplayMode[] modes = Display.getAvailableDisplayModes();
    DisplayMode chosenMode = null;
    int maxDisplayWidth = 0;
    for (int i = 0; i < modes.length; i++) {
        DisplayMode current = modes[i];
        Logger.getGlobal().log(Level.INFO, current.getWidth() + "x" + current.getHeight() + "x"
                + current.getBitsPerPixel() + " " + current.getFrequency() + "Hz");
        if (current.getBitsPerPixel() == 32 && current.getWidth() == 2560 && current.getHeight() == 1440 && current.getFrequency() == 60) {
            chosenMode = current;
        }
        else if (current.getBitsPerPixel() == 32 && current.getFrequency() >= 60) {
            if (current.getWidth() > maxDisplayWidth) {
                maxDisplayWidth = current.getWidth();
                chosenMode = current;
                if (maxDisplayWidth >= DISPLAY_WIDTH) {
                    break;
                }
            }
        }
    }
    DisplayMode mode = null;
    if (chosenMode == null) {
        Logger.getGlobal().log(Level.INFO, "Didn't find a display mode we like. Trying default...");
        mode = new DisplayMode(DISPLAY_WIDTH, DISPLAY_HEIGHT);
    }
    else {
        mode = chosenMode;
    }
    // mode = chosenMode;
    Logger.getGlobal().log(Level.INFO, "Display: " + mode.getBitsPerPixel() + " bpp");
    Display.setDisplayMode(mode);
    // Display.setFullscreen(true);
    Logger.getGlobal().log(Level.INFO, "Display: mode set");
    Display.setResizable(true);
    Display.setTitle("Kranion");
    // System.out.println("Setting pixel format...");
    // 24-bit color, 8-bit alpha, 24-bit depth, 8-bit stencil, 1 aux buffer
    PixelFormat pixelFormat = new PixelFormat(24, 8, 24, 8, 1);
    org.lwjgl.opengl.ContextAttribs contextAtribs = new ContextAttribs(2, 1);
    contextAtribs.withForwardCompatible(true);
    // window icons at 32/64/256 px; failure here is cosmetic only
    try {
        ByteBuffer[] list = new ByteBuffer[3];
        InputStream rstm = this.getClass().getResourceAsStream("/org/fusfoundation/kranion/images/icon32.png");
        BufferedImage img = ImageIO.read(rstm);
        list[0] = this.convertToByteBuffer(img);
        rstm = this.getClass().getResourceAsStream("/org/fusfoundation/kranion/images/icon64.png");
        img = ImageIO.read(rstm);
        list[1] = this.convertToByteBuffer(img);
        rstm = this.getClass().getResourceAsStream("/org/fusfoundation/kranion/images/icon256.png");
        img = ImageIO.read(rstm);
        list[2] = this.convertToByteBuffer(img);
        Display.setIcon(list);
    } catch (Exception e) {
        Logger.getGlobal().log(Level.WARNING, "Failed to set window icon.");
    }
    // System.out.println("Creating display...");
    Display.create(pixelFormat, contextAtribs);
    //Display.create();
    Logger.getGlobal().log(Level.INFO, "GL Vendor: " + org.lwjgl.opengl.GL11.glGetString(org.lwjgl.opengl.GL11.GL_VENDOR));
    Logger.getGlobal().log(Level.INFO, "GL Version: " + org.lwjgl.opengl.GL11.glGetString(org.lwjgl.opengl.GL11.GL_VERSION));
    Logger.getGlobal().log(Level.INFO, "GLSL Language Version: " + org.lwjgl.opengl.GL11.glGetString(org.lwjgl.opengl.GL20.GL_SHADING_LANGUAGE_VERSION));
    Logger.getGlobal().log(Level.INFO, "GL Renderer: " + org.lwjgl.opengl.GL11.glGetString(org.lwjgl.opengl.GL11.GL_RENDERER));
    checkGLSupport();
    checkCLSupport();
    //Keyboard
    Keyboard.create();
    //Mouse
    Mouse.setGrabbed(false);
    Mouse.create();
    //OpenGL
    initGL();
    resizeGL();
    view.create();
    // load plugins
    try {
        PluginFinder pluginFinder = new PluginFinder();
        pluginFinder.search("plugins");
        plugins = pluginFinder.getPluginCollection();
        for (int i = 0; i < plugins.size(); i++) {
            plugins.get(i).init(controller);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    resizeGL();
    // resizeGL();
}
/**
 * Converts an image into a packed RGBA byte buffer, row-major from the
 * top-left pixel — the layout Display.setIcon() expects.
 */
private ByteBuffer convertToByteBuffer(BufferedImage image) {
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] rgba = new byte[width * height * 4];
    int out = 0;
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            int argb = image.getRGB(x, y);
            rgba[out++] = (byte) (argb >> 16);  // red
            rgba[out++] = (byte) (argb >> 8);   // green
            rgba[out++] = (byte) argb;          // blue
            rgba[out++] = (byte) (argb >>> 24); // alpha
        }
    }
    return ByteBuffer.wrap(rgba);
}
/**
 * Probes OpenCL: iterates every platform's GPU devices looking for one that
 * can create a GL-shareable CL context and command queue. On success the
 * results are stored in the static CLcontext/CLqueue fields; on total
 * failure both stay null and only a warning is logged.
 */
private void checkCLSupport() {
    try {
        // Initialize OpenCL and create a context and command queue
        CL.create();
        // System.out.println("\n****************");
        Logger.getGlobal().log(Level.INFO, "CL created");
        Logger.getGlobal().log(Level.INFO, CLPlatform.getPlatforms().size() + " platforms found.");
        CLPlatform platform = null;
        List<CLDevice> devices = null;
        CLDevice selectedDevice = null;
        IntBuffer errcode_ret = BufferUtils.createIntBuffer(1);
        boolean success = false;
        for (int p = 0; p < CLPlatform.getPlatforms().size(); p++) {
            platform = CLPlatform.getPlatforms().get(p);
            Logger.getGlobal().log(Level.INFO, "Platform : " + platform.getInfoString(CL_PLATFORM_NAME) + " version: " + platform.getInfoString(CL_PLATFORM_VERSION));
            // PointerBuffer ctxProps = BufferUtils.createPointerBuffer(3);
            // ctxProps.put(CL_CONTEXT_PLATFORM).put(platform).put(0).flip();
            devices = platform.getDevices(CL_DEVICE_TYPE_GPU);
            for (int d = 0; d < devices.size(); d++) {
                Logger.getGlobal().log(Level.INFO, devices.size() + " GPU devices found.");
                // long context = clCreateContext(platform, devices, null, null, null);
                //CLContext context = org.lwjgl.opencl.CLContext.createFromType(platform, Thread.currentThread().getId(), null, Display.getDrawable(), errcode_ret);
                // try each GPU device individually: pass Display.getDrawable()
                // so the context can share buffers with the GL context
                try {
                    List<CLDevice> deviceCandidate = new ArrayList<CLDevice>();
                    deviceCandidate.add(devices.get(d));
                    CLcontext = org.lwjgl.opencl.CLContext.create(platform, deviceCandidate, null, Display.getDrawable(), errcode_ret);
                    org.lwjgl.opencl.Util.checkCLError(errcode_ret.get(0));
                } catch (Exception e) {
                    Logger.getGlobal().log(Level.WARNING, "Couldn't create shareable OpenCL context.");
                }
                if (errcode_ret.get(0) == CL_SUCCESS) {
                    Logger.getGlobal().log(Level.INFO, "CL context created");
                    selectedDevice = devices.get(d);
                    success = true;
                }
                if (success && selectedDevice != null) {
                    Logger.getGlobal().log(Level.INFO, "Device : " + selectedDevice.getInfoString(CL_DEVICE_NAME));
                    Logger.getGlobal().log(Level.INFO, "Device CL extensions: " + selectedDevice.getInfoString(CL_DEVICE_EXTENSIONS));
                    // a context alone is not enough — we also need a command
                    // queue on the selected device; failure resets success
                    try {
                        CLDeviceCapabilities abilities = CLCapabilities.getDeviceCapabilities(selectedDevice);
                        Logger.getGlobal().log(Level.INFO, "3d_image_writes: " + abilities.CL_KHR_3d_image_writes);
                        Logger.getGlobal().log(Level.INFO, "gl_sharing: " + abilities.CL_KHR_gl_sharing);
                        errcode_ret.clear();
                        org.lwjgl.opencl.CLCommandQueue queue = org.lwjgl.opencl.CL10.clCreateCommandQueue(CLcontext, selectedDevice, 0 /*org.lwjgl.opencl.CL10.CL_QUEUE_PROFILING_ENABLE*/, errcode_ret);
                        // checkCLError throw a CLExcepetion if the error code does not equal CL_SUCCESS. This exception should be caught and all currently created resources released. See later.
                        org.lwjgl.opencl.Util.checkCLError(errcode_ret.get(0));
                        CLqueue = queue;
                    } catch (Exception e) {
                        Logger.getGlobal().log(Level.WARNING, "Failed to create a sharable CL ccommand queue with this device.");
                        success = false;
                    }
                }
            }
            if (success) {
                break;
            }
        }
        // org.lwjgl.opencl.CL10.clReleaseContext(CLcontext);
        // org.lwjgl.opencl.CL.destroy();
        // System.out.println("CL context released, CL shutdown");
        // System.out.println("****************\n");
    } catch (Exception e) {
        e.printStackTrace();
        Logger.getGlobal().log(Level.WARNING, "*** Problem initializing OpenCL", e);
    }
}
/**
 * Logs GL capabilities and parses the "major.minor" prefix of the GL version
 * string into the static OpenGLVersion field. Shows a warning dialog when
 * the version is below 4.5 — note the app deliberately keeps running
 * (the System.exit is commented out).
 */
private void checkGLSupport() {
    String vendor = org.lwjgl.opengl.GL11.glGetString(org.lwjgl.opengl.GL11.GL_VENDOR);
    String version = org.lwjgl.opengl.GL11.glGetString(org.lwjgl.opengl.GL11.GL_VERSION);
    int nMaxTexUnits = org.lwjgl.opengl.GL11.glGetInteger(org.lwjgl.opengl.GL20.GL_MAX_TEXTURE_IMAGE_UNITS);
    int nMaxCombinedTexUnits = org.lwjgl.opengl.GL11.glGetInteger(org.lwjgl.opengl.GL20.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS);
    try {
        float versionVal = 0f;
        Logger.getGlobal().log(Level.INFO, " Texture unit count = " + nMaxTexUnits + ", Max combined textures = " + nMaxCombinedTexUnits);
        // version strings look like "4.6.0 <vendor info>"; take the first two
        // dot-separated tokens as major and minor
        StringTokenizer tok = new StringTokenizer(version, ". ");
        if (tok.hasMoreElements()) {
            versionVal = Float.parseFloat(tok.nextToken());
        }
        if (tok.hasMoreElements()) {
            versionVal += Float.parseFloat(tok.nextToken()) / 10f;
        }
        Main.OpenGLVersion = versionVal;
        if (versionVal < 4.5f) {
            JOptionPane.showMessageDialog(null, "OpenGL 4.5 or later required.\n\nYou have:\n" + vendor + "\n" + version);
            // System.exit(1);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
// Tears down input devices and the window on shutdown.
public void destroy() {
    // Methods already check if created before destroying.
    Mouse.destroy();
    Keyboard.destroy();
    Display.destroy();
}
//----------- Variables added for Lighting Test -----------//
// Fixed-function light/material parameter buffers, filled once by
// initLightArrays() and fed to GL in initGL().
private FloatBuffer matSpecular;
private FloatBuffer lightPosition;
private FloatBuffer whiteLight;
private FloatBuffer lModelAmbient;
//------- Added for Lighting Test----------//
// Allocates and fills the RGBA/positional float buffers used by initGL().
private void initLightArrays() {
    // dim white specular highlight
    matSpecular = BufferUtils.createFloatBuffer(4);
    matSpecular.put(0.3f).put(0.3f).put(0.3f).put(1.0f).flip();
    // directional light (w == 0) coming from above/front
    lightPosition = BufferUtils.createFloatBuffer(4);
    lightPosition.put(150.0f).put(150.0f).put(400.01f).put(0.0f).flip();
    //lightPosition.put(0.0f).put(700.0f).put(700.01f).put(0.0f).flip();
    whiteLight = BufferUtils.createFloatBuffer(4);
    whiteLight.put(1.0f).put(1.0f).put(1.0f).put(1.0f).flip();
    // low-level global ambient term
    lModelAmbient = BufferUtils.createFloatBuffer(4);
    lModelAmbient.put(0.1f).put(0.1f).put(0.1f).put(1.0f).flip();
}
/**
 * One-time fixed-function GL state setup: depth test, smooth shading,
 * a single white light (LIGHT0), and color-material so glColor drives the
 * ambient/diffuse material. Requires a current GL context.
 */
public void initGL() {
    initLightArrays();
    //2D Initialization
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_LIGHTING);
    // glEnable(GL_POLYGON_SMOOTH); // EVIL
    // glHint(GL_POLYGON_SMOOTH_HINT, GL_NICEST);
    glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
    glShadeModel(GL_SMOOTH);
    glMaterial(GL_FRONT_AND_BACK, GL_SPECULAR, matSpecular); // sets specular material color
    glMaterialf(GL_FRONT_AND_BACK, GL_SHININESS, 50.0f); // sets shininess
    // lighting setup
    glLight(GL_LIGHT0, GL_POSITION, lightPosition); // sets light position
    glLight(GL_LIGHT0, GL_SPECULAR, whiteLight); // sets specular light to white
    glLight(GL_LIGHT0, GL_DIFFUSE, whiteLight); // sets diffuse light to white
    glLightModel(GL_LIGHT_MODEL_AMBIENT, lModelAmbient); // global ambient light
    glEnable(GL_LIGHTING); // enables lighting
    // enables light0
    glEnable(GL_LIGHT0);
    glLightModeli(GL_LIGHT_MODEL_TWO_SIDE, GL_TRUE);
    glLightModeli(GL_LIGHT_MODEL_LOCAL_VIEWER, GL_TRUE);
    glEnable(GL_COLOR_MATERIAL); // enables opengl to use glColor3f to define material color
    glColorMaterial(GL_FRONT_AND_BACK, GL_AMBIENT_AND_DIFFUSE); // tell opengl glColor3f effects the ambient and diffuse properties of material
}
/**
 * Rebuilds the viewport and perspective projection for the current window
 * size and asks the view to re-layout. No-ops while the window is minimized
 * (zero-sized), which would otherwise produce a degenerate projection.
 */
public void resizeGL() throws org.lwjgl.LWJGLException {
    //2D Scene
    Logger.getGlobal().log(Level.INFO, "Viewport: " + Display.getWidth() + ", " + Display.getHeight());
    if (Display.getWidth() <= 0 || Display.getHeight() <= 0) {
        return;
    }
    glViewport(0, 0, Display.getWidth(), Display.getHeight());
    // trackball.set(Display.getWidth() / 2, Display.getHeight() / 2, Display.getHeight() / 2f);
    // registerBall.set(Display.getWidth() / 2, Display.getHeight() / 2, Display.getHeight() / 2f);
    // registerBall2.set(Display.getWidth() / 2, Display.getHeight() / 2, Display.getHeight() / 2f);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    // 40-degree vertical FOV; near/far planes at 100 / 100000 units
    viewportAspect = (float) Display.getWidth() / (float) Display.getHeight();
    gluPerspective(40.0f, viewportAspect, 100.0f, 100000.0f);
    // org.lwjgl.util.vector.Matrix4f perspective = new org.lwjgl.util.vector.Matrix4f();
    // FloatBuffer matbuf = BufferUtils.createFloatBuffer(16);
    // glGetFloat(GL_PROJECTION_MATRIX, matbuf);
    // perspective.load(matbuf);
    // System.out.println("MAIN GL perspective mat:\n" + perspective);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    view.doLayout();
}
/**
 * Main application loop. Shows a research-use-only disclaimer first; if the
 * user accepts, runs the per-frame message loop until a window-close request
 * is confirmed via view.okToExit(). On exit, releases the view and any
 * OpenCL context/resources.
 */
public void run() {
    if (view.doOkCancelMessageBox("RESEARCH USE ONLY", "Kranion is intended for research purposes only.")) {
        while (true) {
            messageLoop();
            if (Display.isCloseRequested()) {
                // Only exit once the user (or the view) confirms.
                if (view.okToExit()) {
                    break;
                }
            }
        }
    }
    view.release();
    if (CLcontext != null) {
        org.lwjgl.opencl.CL10.clReleaseContext(CLcontext);
    }
    org.lwjgl.opencl.CL.destroy();
}
/**
 * If the display window was resized since the last check, reconfigures the
 * GL viewport/projection via resizeGL(). A resize failure is treated as
 * fatal and terminates the application.
 */
public void handleResize() {
    if (Display.wasResized()) {
        try {
            resizeGL();
        } catch (org.lwjgl.LWJGLException e) {
            // Fixed message typo ("handlResize") and raised the level to SEVERE,
            // since this failure aborts the whole application below.
            Logger.getGlobal().log(Level.SEVERE, "Main.handleResize error.", e);
            System.exit(0);
        }
    }
}
/**
 * Pumps the event loop for exactly one frame on behalf of callers (such as
 * modal dialog boxes) that need the main thread — the one holding the GL
 * context — to keep servicing input and rendering.
 */
public static void processNextFrame() {
    final Main instance = Main.main;
    if (instance == null) {
        return;
    }
    instance.messageLoop();
}
/**
 * One iteration of the per-frame event loop: handles window resizes,
 * services cross-thread method-call requests queued on callQ, processes
 * input, renders the next frame, and checks for GL errors.
 */
protected void messageLoop() {
handleResize();
// If another thread has called a method requiring access to GL context data,
// process method calls on the main thread (which has a GL context);
// messageLoop() gets called per frame so should provide adaquate latency to cross thread calls
// which block until the requested method runs on this thread and passes back the result, if any.
//
// The intention is to support an interpreter running in another thread making method calls into this class.
callQ.processWaitingCalls();
view.processInput();
nextFrame();
Main.checkForGLError();
}
/**
 * Renders (or skips) one frame and swaps the display buffers.
 * Tracks dirtiness per swap-chain buffer (3 entries in bufferNeedsRendering)
 * so a dirty view is re-rendered into each buffer once; when nothing needs
 * rendering, background workers get a time slice instead. When idle, the
 * loop sleeps briefly; when rendering, it syncs to ~60 FPS.
 */
public void nextFrame() {
    boolean wasRendered = false;
    if (Display.isVisible()) {
        // A dirty view invalidates all three buffers of the swap chain.
        if (view.getIsDirty()) {
            bufferNeedsRendering[0] = true;
            bufferNeedsRendering[1] = true;
            bufferNeedsRendering[2] = true;
        }
        if (bufferNeedsRendering[currentBuffer]) {
            bufferNeedsRendering[currentBuffer] = false;
            // Render the scene
            view.render();
            wasRendered = true;
        } else {
            doBackgroundWorkers();
        }
    }
    // This needs to be here for window resizing to get handled correctly
    // swap display buffers
    Display.update();
    // Display.processMessages(); // Unnecessary
    currentBuffer = (currentBuffer + 1) % 3; // keep track of front/back buffers
    try {
        if (!wasRendered) { // if no rendering updates, sleep a bit
            Thread.sleep(50);
        } else {
            Display.sync(60); // not sure this makes any difference
        }
    } catch (InterruptedException e) {
        // Fix: restore the interrupt flag instead of silently swallowing it,
        // so callers up the stack can still observe the interruption.
        Thread.currentThread().interrupt();
    } catch (Exception ignored) {
        // Best-effort frame pacing; other timing failures here are non-fatal.
    }
}
/**
 * Polls for a pending OpenGL error (logging it via checkForGLError) and
 * escalates it to an exception.
 *
 * @throws org.lwjgl.opengl.OpenGLException when glGetError() reports an error
 */
public static void checkForGLErrorAndThrow() {
    final int glError = checkForGLError();
    if (glError == GL_NO_ERROR) {
        return;
    }
    throw new org.lwjgl.opengl.OpenGLException(glError);
}
/**
 * Prints the calling thread's stack to stdout, one tab-indented frame per
 * line, skipping the two frames for Thread.getStackTrace() and this helper.
 */
public static void printStackTrace() {
    final StackTraceElement[] frames = Thread.currentThread().getStackTrace();
    int index = 2; // skip getStackTrace() and printStackTrace() frames
    while (index < frames.length) {
        System.out.println("\t" + frames[index]);
        index++;
    }
}
/**
 * Polls OpenGL for a pending error, logging a warning with the symbolic
 * error name when one is found.
 *
 * @return the raw glGetError() code (GL_NO_ERROR when none pending)
 */
public static int checkForGLError() {
    int error = glGetError();
    if (error != GL_NO_ERROR) {
        Logger.getGlobal().log(Level.WARNING, "GL ERROR DETECTED.");
        Logger.getGlobal().log(Level.WARNING, glErrorName(error));
        // printStackTrace();
    }
    return error;
}

/**
 * Maps a GL error code to its symbolic name for logging.
 * Extracted so the logging above is a single call instead of a switch of
 * near-identical log statements.
 *
 * @param error value returned by glGetError()
 * @return the GL constant name, or "UNKNOWN GL ERROR: <code>" if unrecognized
 */
private static String glErrorName(int error) {
    switch (error) {
        case GL_INVALID_ENUM:
            return "GL_INVALID_ENUM";
        case GL_INVALID_VALUE:
            return "GL_INVALID_VALUE";
        case GL_INVALID_OPERATION:
            return "GL_INVALID_OPERATION";
        case org.lwjgl.opengl.GL30.GL_INVALID_FRAMEBUFFER_OPERATION:
            return "GL_INVALID_FRAMEBUFFER_OPERATION";
        case GL_OUT_OF_MEMORY:
            return "GL_OUT_OF_MEMORY";
        case GL_STACK_OVERFLOW:
            return "GL_STACK_OVERFLOW";
        case GL_STACK_UNDERFLOW:
            return "GL_STACK_UNDERFLOW";
        default:
            return "UNKNOWN GL ERROR: " + error;
    }
}
/**
 * Progress callback; intentionally a no-op in the main application
 * (the debug print below is kept for reference).
 * NOTE(review): presumably implements a progress-listener interface declared
 * elsewhere in the project — confirm against the interface's contract.
 */
@Override
public void percentDone(String msg, int percent) {
// System.out.println(msg + " - " + percent + "%");
}
/**
 * Wrapper around GL11.glPushMatrix that, when GL_DEBUG is set and the push
 * raised a GL error (e.g. matrix stack overflow), logs the current and
 * maximum modelview/projection stack depths to aid diagnosis.
 */
public static void glPushMatrix() {
    org.lwjgl.opengl.GL11.glPushMatrix();
    if (!GL_DEBUG || Main.checkForGLError() == GL_NO_ERROR) {
        return;
    }
    Logger.getGlobal().log(Level.WARNING, "MODELVIEW stack depth: " + glGetInteger(GL_MODELVIEW_STACK_DEPTH));
    Logger.getGlobal().log(Level.WARNING, "MODELVIEW max stack depth: " + glGetInteger(GL_MAX_MODELVIEW_STACK_DEPTH));
    Logger.getGlobal().log(Level.WARNING, "PROJECTIONVIEW stack depth: " + glGetInteger(GL_PROJECTION_STACK_DEPTH));
    Logger.getGlobal().log(Level.WARNING, "PROJECTIONVIEW max stack depth: " + glGetInteger(GL_MAX_PROJECTION_STACK_DEPTH));
}
/**
 * Wrapper around GL11.glPopMatrix that, when GL_DEBUG is set and the pop
 * raised a GL error (e.g. matrix stack underflow), logs the current and
 * maximum modelview/projection stack depths to aid diagnosis.
 */
public static void glPopMatrix() {
    org.lwjgl.opengl.GL11.glPopMatrix();
    if (!GL_DEBUG || Main.checkForGLError() == GL_NO_ERROR) {
        return;
    }
    Logger.getGlobal().log(Level.WARNING, "MODELVIEW stack depth: " + glGetInteger(GL_MODELVIEW_STACK_DEPTH));
    Logger.getGlobal().log(Level.WARNING, "MODELVIEW max stack depth: " + glGetInteger(GL_MAX_MODELVIEW_STACK_DEPTH));
    Logger.getGlobal().log(Level.WARNING, "PROJECTIONVIEW stack depth: " + glGetInteger(GL_PROJECTION_STACK_DEPTH));
    Logger.getGlobal().log(Level.WARNING, "PROJECTIONVIEW max stack depth: " + glGetInteger(GL_MAX_PROJECTION_STACK_DEPTH));
}
/**
 * Wrapper around GL11.glPushAttrib that, when GL_DEBUG is set and the push
 * raised a GL error, logs the current and maximum attribute stack depths.
 *
 * @param bitmask attribute groups to push (GL_*_BIT flags)
 */
public static void glPushAttrib(int bitmask) {
    org.lwjgl.opengl.GL11.glPushAttrib(bitmask);
    if (!GL_DEBUG || Main.checkForGLError() == GL_NO_ERROR) {
        return;
    }
    Logger.getGlobal().log(Level.WARNING, "ATTRIB stack depth: " + glGetInteger(GL_ATTRIB_STACK_DEPTH));
    Logger.getGlobal().log(Level.WARNING, "ATTRIB max stack depth: " + glGetInteger(GL_MAX_ATTRIB_STACK_DEPTH));
}
/**
 * Wrapper around GL11.glPopAttrib that, when GL_DEBUG is set and the pop
 * raised a GL error, logs the current and maximum attribute stack depths.
 */
public static void glPopAttrib() {
    org.lwjgl.opengl.GL11.glPopAttrib();
    if (!GL_DEBUG || Main.checkForGLError() == GL_NO_ERROR) {
        return;
    }
    Logger.getGlobal().log(Level.WARNING, "ATTRIB stack depth: " + glGetInteger(GL_ATTRIB_STACK_DEPTH));
    Logger.getGlobal().log(Level.WARNING, "ATTRIB max stack depth: " + glGetInteger(GL_MAX_ATTRIB_STACK_DEPTH));
}
/** Backing bundle for getRbTok(); loaded once from "version" on the classpath. */
final static ResourceBundle appResourceBundle
        = ResourceBundle.getBundle("version");

/**
 * Looks up a string in the application's "version" resource bundle.
 *
 * @param propToken property key to look up
 * @return the bundle value, or "" (after printing a note to stderr) when
 *         the key is not present in the property file
 */
public static final String getRbTok(String propToken) {
    String value = "";
    try {
        value = appResourceBundle.getString(propToken);
    } catch (MissingResourceException e) {
        // Identical output text to before; plain '+' instead of concat() chains.
        System.err.println("Token " + propToken + " not in Propertyfile!");
    }
    return value;
}
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.app.catalog.resources;
import org.apache.airavata.common.exception.ApplicationSettingsException;
import org.apache.airavata.registry.core.app.catalog.model.ComputeResource;
import org.apache.airavata.registry.core.app.catalog.model.ComputeResourcePreference;
import org.apache.airavata.registry.core.app.catalog.model.ComputeResourcePreferencePK;
import org.apache.airavata.registry.core.app.catalog.model.GatewayProfile;
import org.apache.airavata.registry.core.app.catalog.util.AppCatalogJPAUtils;
import org.apache.airavata.registry.core.app.catalog.util.AppCatalogQueryGenerator;
import org.apache.airavata.registry.core.app.catalog.util.AppCatalogResourceType;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * App-catalog resource for a gateway's per-compute-host preferences:
 * preferred job-submission and data-movement protocols, batch queue,
 * scratch/project settings, login name, credential-store token, QoS and a
 * reservation window. Wraps the {@link ComputeResourcePreference} JPA entity,
 * which is keyed by the composite (gatewayId, resourceId) pair; identifier
 * maps passed to remove/get/isExists must carry both keys.
 *
 * NOTE(review): the post-commit "if (em.isOpen()) ... close" blocks in the
 * CRUD methods duplicate the cleanup performed by each finally clause;
 * presumably harmless, but confirm before simplifying.
 */
public class ComputeHostPreferenceResource extends AppCatAbstractResource {
private final static Logger logger = LoggerFactory.getLogger(ComputeHostPreferenceResource.class);
// Composite-key fields: a preference row is identified by (gatewayId, resourceId).
private String gatewayId;
private String resourceId;
private boolean overrideByAiravata;
private String preferredJobProtocol;
private String preferedDMProtocol;
private String batchQueue;
private String scratchLocation;
private String projectNumber;
private String loginUserName;
private String resourceCSToken;
private String usageReportingGatewayId;
private String qualityOfService;
private String reservation;
private Timestamp reservationStartTime;
private Timestamp reservationEndTime;
// Related resources resolved from the entity's associations.
private GatewayProfileResource gatewayProfile;
private ComputeResourceResource computeHostResource;
// ---- Plain accessors ----
public String getLoginUserName() {
return loginUserName;
}
public void setLoginUserName(String loginUserName) {
this.loginUserName = loginUserName;
}
public String getGatewayId() {
return gatewayId;
}
public void setGatewayId(String gatewayId) {
this.gatewayId = gatewayId;
}
public String getResourceId() {
return resourceId;
}
public void setResourceId(String resourceId) {
this.resourceId = resourceId;
}
public boolean getOverrideByAiravata() {
return overrideByAiravata;
}
public void setOverrideByAiravata(boolean overrideByAiravata) {
this.overrideByAiravata = overrideByAiravata;
}
public String getPreferredJobProtocol() {
return preferredJobProtocol;
}
public void setPreferredJobProtocol(String preferredJobProtocol) {
this.preferredJobProtocol = preferredJobProtocol;
}
public String getPreferedDMProtocol() {
return preferedDMProtocol;
}
public void setPreferedDMProtocol(String preferedDMProtocol) {
this.preferedDMProtocol = preferedDMProtocol;
}
public String getBatchQueue() {
return batchQueue;
}
public void setBatchQueue(String batchQueue) {
this.batchQueue = batchQueue;
}
public String getScratchLocation() {
return scratchLocation;
}
public void setScratchLocation(String scratchLocation) {
this.scratchLocation = scratchLocation;
}
public String getProjectNumber() {
return projectNumber;
}
public void setProjectNumber(String projectNumber) {
this.projectNumber = projectNumber;
}
public GatewayProfileResource getGatewayProfile() {
return gatewayProfile;
}
public void setGatewayProfile(GatewayProfileResource gatewayProfile) {
this.gatewayProfile = gatewayProfile;
}
// Alias for setGatewayProfile; both assign the same field.
public void setUserResourceProfile(GatewayProfileResource gatewayProfile) {
this.gatewayProfile = gatewayProfile;
}
public ComputeResourceResource getComputeHostResource() {
return computeHostResource;
}
public void setComputeHostResource(ComputeResourceResource computeHostResource) {
this.computeHostResource = computeHostResource;
}
public String getResourceCSToken() {
return resourceCSToken;
}
public void setResourceCSToken(String resourceCSToken) {
this.resourceCSToken = resourceCSToken;
}
public String getUsageReportingGatewayId() {
return usageReportingGatewayId;
}
public void setUsageReportingGatewayId(String usageReportingGatewayId) {
this.usageReportingGatewayId = usageReportingGatewayId;
}
public String getQualityOfService() {
return qualityOfService;
}
public void setQualityOfService(String qualityOfService) {
this.qualityOfService = qualityOfService;
}
public String getReservation() {
return reservation;
}
public void setReservation(String reservation) {
this.reservation = reservation;
}
public Timestamp getReservationStartTime() {
return reservationStartTime;
}
public void setReservationStartTime(Timestamp reservationStartTime) {
this.reservationStartTime = reservationStartTime;
}
public Timestamp getReservationEndTime() {
return reservationEndTime;
}
public void setReservationEndTime(Timestamp reservationEndTime) {
this.reservationEndTime = reservationEndTime;
}
/**
 * Deletes the preference row addressed by the identifier map, which must
 * contain RESOURCE_ID and GATEWAY_ID entries.
 *
 * @param identifier a Map of field name -> value for the composite key
 * @throws AppCatalogException if the identifier is not a map or JPA access fails
 */
@Override
public void remove(Object identifier) throws AppCatalogException {
HashMap<String, String> ids;
if (identifier instanceof Map) {
ids = (HashMap) identifier;
} else {
logger.error("Identifier should be a map with the field name and it's value");
throw new AppCatalogException("Identifier should be a map with the field name and it's value");
}
EntityManager em = null;
try {
em = AppCatalogJPAUtils.getEntityManager();
em.getTransaction().begin();
AppCatalogQueryGenerator generator = new AppCatalogQueryGenerator(COMPUTE_RESOURCE_PREFERENCE);
generator.setParameter(ComputeResourcePreferenceConstants.RESOURCE_ID, ids.get(ComputeResourcePreferenceConstants.RESOURCE_ID));
generator.setParameter(ComputeResourcePreferenceConstants.GATEWAY_ID, ids.get(ComputeResourcePreferenceConstants.GATEWAY_ID));
Query q = generator.deleteQuery(em);
q.executeUpdate();
em.getTransaction().commit();
if (em.isOpen()) {
if (em.getTransaction().isActive()){
em.getTransaction().rollback();
}
em.close();
}
} catch (ApplicationSettingsException e) {
logger.error(e.getMessage(), e);
throw new AppCatalogException(e);
} finally {
if (em != null && em.isOpen()) {
if (em.getTransaction().isActive()) {
em.getTransaction().rollback();
}
em.close();
}
}
}
/**
 * Fetches the single preference row addressed by the identifier map
 * (GATEWAY_ID and RESOURCE_ID entries) and wraps it as a resource.
 *
 * @param identifier a Map of field name -> value for the composite key
 * @return the matching ComputeHostPreferenceResource
 * @throws AppCatalogException if the identifier is not a map or JPA access fails
 */
@Override
public AppCatalogResource get(Object identifier) throws AppCatalogException {
HashMap<String, String> ids;
if (identifier instanceof Map) {
ids = (HashMap) identifier;
} else {
logger.error("Identifier should be a map with the field name and it's value");
throw new AppCatalogException("Identifier should be a map with the field name and it's value");
}
EntityManager em = null;
try {
em = AppCatalogJPAUtils.getEntityManager();
em.getTransaction().begin();
AppCatalogQueryGenerator generator = new AppCatalogQueryGenerator(COMPUTE_RESOURCE_PREFERENCE);
generator.setParameter(ComputeResourcePreferenceConstants.GATEWAY_ID, ids.get(ComputeResourcePreferenceConstants.GATEWAY_ID));
generator.setParameter(ComputeResourcePreferenceConstants.RESOURCE_ID, ids.get(ComputeResourcePreferenceConstants.RESOURCE_ID));
Query q = generator.selectQuery(em);
ComputeResourcePreference preference = (ComputeResourcePreference) q.getSingleResult();
ComputeHostPreferenceResource preferenceResource =
(ComputeHostPreferenceResource) AppCatalogJPAUtils.getResource(AppCatalogResourceType.COMPUTE_RESOURCE_PREFERENCE, preference);
em.getTransaction().commit();
if (em.isOpen()) {
if (em.getTransaction().isActive()){
em.getTransaction().rollback();
}
em.close();
}
return preferenceResource;
} catch (ApplicationSettingsException e) {
logger.error(e.getMessage(), e);
throw new AppCatalogException(e);
} finally {
if (em != null && em.isOpen()) {
if (em.getTransaction().isActive()) {
em.getTransaction().rollback();
}
em.close();
}
}
}
/**
 * Lists preference resources matching a single-field filter. Supported
 * fields: RESOURCE_ID, GATEWAY_ID, PREFERED_JOB_SUB_PROTOCOL and
 * PREFERED_DATA_MOVE_PROTOCOL; any other field name raises
 * IllegalArgumentException.
 *
 * @param fieldName column to filter on (one of the supported constants)
 * @param value     value to match
 * @return matching resources (possibly empty)
 * @throws AppCatalogException on JPA access failure
 */
@Override
public List<AppCatalogResource> get(String fieldName, Object value) throws AppCatalogException {
List<AppCatalogResource> preferenceResourceList = new ArrayList<AppCatalogResource>();
EntityManager em = null;
try {
em = AppCatalogJPAUtils.getEntityManager();
em.getTransaction().begin();
Query q;
AppCatalogQueryGenerator generator = new AppCatalogQueryGenerator(COMPUTE_RESOURCE_PREFERENCE);
List results;
if (fieldName.equals(ComputeResourcePreferenceConstants.RESOURCE_ID)) {
generator.setParameter(ComputeResourcePreferenceConstants.RESOURCE_ID, value);
q = generator.selectQuery(em);
results = q.getResultList();
if (results.size() != 0) {
for (Object result : results) {
ComputeResourcePreference preference = (ComputeResourcePreference) result;
if (preference.getComputeHostResource()!=null) {
ComputeHostPreferenceResource preferenceResource = (ComputeHostPreferenceResource) AppCatalogJPAUtils
.getResource(
AppCatalogResourceType.COMPUTE_RESOURCE_PREFERENCE,
preference);
preferenceResourceList.add(preferenceResource);
}
}
}
} else if (fieldName.equals(ComputeResourcePreferenceConstants.GATEWAY_ID)) {
generator.setParameter(ComputeResourcePreferenceConstants.GATEWAY_ID, value);
q = generator.selectQuery(em);
results = q.getResultList();
if (results.size() != 0) {
for (Object result : results) {
ComputeResourcePreference preference = (ComputeResourcePreference) result;
if (preference.getComputeHostResource()!=null) {
ComputeHostPreferenceResource preferenceResource =
(ComputeHostPreferenceResource) AppCatalogJPAUtils.getResource(AppCatalogResourceType.COMPUTE_RESOURCE_PREFERENCE, preference);
preferenceResourceList.add(preferenceResource);
}
}
}
} else if (fieldName.equals(ComputeResourcePreferenceConstants.PREFERED_JOB_SUB_PROTOCOL)) {
generator.setParameter(ComputeResourcePreferenceConstants.PREFERED_JOB_SUB_PROTOCOL, value);
q = generator.selectQuery(em);
results = q.getResultList();
if (results.size() != 0) {
for (Object result : results) {
ComputeResourcePreference preference = (ComputeResourcePreference) result;
if (preference.getComputeHostResource()!=null) {
ComputeHostPreferenceResource preferenceResource = (ComputeHostPreferenceResource) AppCatalogJPAUtils
.getResource(
AppCatalogResourceType.COMPUTE_RESOURCE_PREFERENCE,
preference);
preferenceResourceList.add(preferenceResource);
}
}
}
} else if (fieldName.equals(ComputeResourcePreferenceConstants.PREFERED_DATA_MOVE_PROTOCOL)) {
generator.setParameter(ComputeResourcePreferenceConstants.PREFERED_DATA_MOVE_PROTOCOL, value);
q = generator.selectQuery(em);
results = q.getResultList();
if (results.size() != 0) {
for (Object result : results) {
ComputeResourcePreference preference = (ComputeResourcePreference) result;
// NOTE(review): this branch filters on getResourceId() != null while the
// other branches use getComputeHostResource() != null — confirm intended.
if (preference.getResourceId()!=null) {
ComputeHostPreferenceResource preferenceResource = (ComputeHostPreferenceResource) AppCatalogJPAUtils
.getResource(
AppCatalogResourceType.COMPUTE_RESOURCE_PREFERENCE,
preference);
preferenceResourceList.add(preferenceResource);
}
}
}
} else {
em.getTransaction().commit();
if (em.isOpen()) {
if (em.getTransaction().isActive()){
em.getTransaction().rollback();
}
em.close();
}
logger.error("Unsupported field name for Compute host preference Resource.", new IllegalArgumentException());
throw new IllegalArgumentException("Unsupported field name for Compute host preference Resource.");
}
em.getTransaction().commit();
if (em.isOpen()) {
if (em.getTransaction().isActive()){
em.getTransaction().rollback();
}
em.close();
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
throw new AppCatalogException(e);
} finally {
if (em != null && em.isOpen()) {
if (em.getTransaction().isActive()) {
em.getTransaction().rollback();
}
em.close();
}
}
return preferenceResourceList;
}
/** Not supported for this resource type; always returns null. */
@Override
public List<AppCatalogResource> getAll() throws AppCatalogException {
return null;
}
/** Not supported for this resource type; always returns null. */
@Override
public List<String> getAllIds() throws AppCatalogException {
return null;
}
/**
 * Unsupported: this entity has a composite primary key, so a flat id list
 * cannot be produced.
 *
 * @throws AppCatalogException always
 */
@Override
public List<String> getIds(String fieldName, Object value) throws AppCatalogException {
logger.error("Unsupported for objects with a composite identifier");
throw new AppCatalogException("Unsupported for objects with a composite identifier");
}
/**
 * Upserts this resource's state: looks up an existing row by the composite
 * PK (gatewayId, resourceId), then either merges the updated entity or
 * persists a new one. The associated ComputeResource and GatewayProfile
 * entities are re-resolved by id inside the write transaction.
 *
 * @throws AppCatalogException on JPA access failure
 */
@Override
public void save() throws AppCatalogException {
EntityManager em = null;
try {
em = AppCatalogJPAUtils.getEntityManager();
ComputeResourcePreference existingPreference = em.find(ComputeResourcePreference.class, new ComputeResourcePreferencePK(gatewayId, resourceId));
if (em.isOpen()) {
if (em.getTransaction().isActive()){
em.getTransaction().rollback();
}
em.close();
}
em = AppCatalogJPAUtils.getEntityManager();
em.getTransaction().begin();
ComputeResource computeResource = em.find(ComputeResource.class, resourceId);
GatewayProfile gatewayProf = em.find(GatewayProfile.class, gatewayId);
if (existingPreference != null) {
existingPreference.setResourceId(resourceId);
existingPreference.setGatewayId(gatewayId);
existingPreference.setComputeHostResource(computeResource);
existingPreference.setGatewayProfile(gatewayProf);
existingPreference.setOverrideByAiravata(overrideByAiravata);
existingPreference.setPreferedJobSubmissionProtocol(preferredJobProtocol);
existingPreference.setPreferedDataMoveProtocol(preferedDMProtocol);
existingPreference.setScratchLocation(scratchLocation);
existingPreference.setProjectNumber(projectNumber);
existingPreference.setBatchQueue(batchQueue);
existingPreference.setLoginUserName(loginUserName);
existingPreference.setComputeResourceCSToken(resourceCSToken);
existingPreference.setUsageReportingGWId(usageReportingGatewayId);
existingPreference.setQualityOfService(qualityOfService);
existingPreference.setReservation(reservation);
existingPreference.setReservationStartTime(reservationStartTime);
existingPreference.setReservationEndTime(reservationEndTime);
em.merge(existingPreference);
} else {
ComputeResourcePreference resourcePreference = new ComputeResourcePreference();
resourcePreference.setResourceId(resourceId);
resourcePreference.setGatewayId(gatewayId);
resourcePreference.setComputeHostResource(computeResource);
resourcePreference.setGatewayProfile(gatewayProf);
resourcePreference.setOverrideByAiravata(overrideByAiravata);
resourcePreference.setPreferedJobSubmissionProtocol(preferredJobProtocol);
resourcePreference.setPreferedDataMoveProtocol(preferedDMProtocol);
resourcePreference.setScratchLocation(scratchLocation);
resourcePreference.setProjectNumber(projectNumber);
resourcePreference.setBatchQueue(batchQueue);
resourcePreference.setLoginUserName(loginUserName);
resourcePreference.setComputeResourceCSToken(resourceCSToken);
resourcePreference.setUsageReportingGWId(usageReportingGatewayId);
resourcePreference.setQualityOfService(qualityOfService);
resourcePreference.setReservation(reservation);
resourcePreference.setReservationStartTime(reservationStartTime);
resourcePreference.setReservationEndTime(reservationEndTime);
em.persist(resourcePreference);
}
em.getTransaction().commit();
if (em.isOpen()) {
if (em.getTransaction().isActive()){
em.getTransaction().rollback();
}
em.close();
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
throw new AppCatalogException(e);
} finally {
if (em != null && em.isOpen()) {
if (em.getTransaction().isActive()) {
em.getTransaction().rollback();
}
em.close();
}
}
}
/**
 * Checks whether a preference row exists for the identifier map's
 * GATEWAY_ID/RESOURCE_ID composite key.
 *
 * @param identifier a Map of field name -> value for the composite key
 * @return true when a matching row exists
 * @throws AppCatalogException if the identifier is not a map or JPA access fails
 */
@Override
public boolean isExists(Object identifier) throws AppCatalogException {
HashMap<String, String> ids;
if (identifier instanceof Map) {
ids = (HashMap) identifier;
} else {
logger.error("Identifier should be a map with the field name and it's value");
throw new AppCatalogException("Identifier should be a map with the field name and it's value");
}
EntityManager em = null;
try {
em = AppCatalogJPAUtils.getEntityManager();
ComputeResourcePreference existingPreference = em.find(ComputeResourcePreference.class,
new ComputeResourcePreferencePK(ids.get(ComputeResourcePreferenceConstants.GATEWAY_ID),
ids.get(ComputeResourcePreferenceConstants.RESOURCE_ID)));
if (em.isOpen()) {
if (em.getTransaction().isActive()){
em.getTransaction().rollback();
}
em.close();
}
return existingPreference != null;
}catch (Exception e) {
logger.error(e.getMessage(), e);
throw new AppCatalogException(e);
} finally {
if (em != null && em.isOpen()) {
if (em.getTransaction().isActive()) {
em.getTransaction().rollback();
}
em.close();
}
}
}
}
| |
package org.basex.query.util.fingertree;
import java.util.*;
import org.basex.util.*;
/**
* A builder for {@link FingerTree}s from leaf nodes.
*
* @author BaseX Team 2005-22, BSD License
* @author Leo Woerteler
*
* @param <E> element type
*/
@SuppressWarnings("unchecked")
public final class FingerTreeBuilder<E> implements Iterable<E> {
/** The root node, {@code null} if the tree is empty. */
private BufferNode<E, E> root;
/**
* Checks if this builder is empty, i.e. if no leaf nodes were added to it.
* @return {@code true} if the builder is empty, {@code false} otherwise
*/
public boolean isEmpty() {
return root == null;
}
/**
* Adds a leaf node to the front of the tree.
* @param leaf the leaf node to add
*/
public void prepend(final Node<E, E> leaf) {
if(root == null) {
root = new BufferNode<>(leaf);
} else {
root.prepend(leaf);
}
}
/**
* Adds a leaf node to the back of the tree.
* @param leaf the leaf node to add
*/
public void append(final Node<E, E> leaf) {
if(root == null) {
root = new BufferNode<>(leaf);
} else {
root.append(leaf);
}
}
/**
* Appends another finger tree to this builder.
* @param tree finger tree to append
*/
public void append(final FingerTree<E, E> tree) {
if(!tree.isEmpty()) {
if(root == null) {
root = new BufferNode<>(tree);
} else {
root.append(tree);
}
}
}
/**
* Builds a finger tree from the current state of this builder.
* @return the resulting finger tree
*/
public FingerTree<E, E> freeze() {
return root == null ? FingerTree.empty() : root.freeze();
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder(Util.className(this)).append('[');
final Iterator<E> iter = iterator();
if(iter.hasNext()) {
sb.append(iter.next());
while(iter.hasNext()) sb.append(", ").append(iter.next());
}
return sb.append(']').toString();
}
@Override
public Iterator<E> iterator() {
if(root == null) return Collections.emptyIterator();
return new BufferNodeIterator<>(root);
}
/**
* Node of the middle tree.
*
* @param <N> node type
* @param <E> element type
*/
private static class BufferNode<N, E> {
/** Size of inner nodes to create. */
private static final int NODE_SIZE = FingerTree.MAX_ARITY;
/** Maximum number of elements in a digit. */
private static final int MAX_DIGIT = NODE_SIZE + 1;
/** Maximum number of nodes in the digits. */
private static final int CAP = 2 * MAX_DIGIT;
/** Ring buffer for nodes in the digits. */
final Node<N, E>[] nodes = new Node[CAP];
/** Number of elements in left digit. */
int inLeft;
/** Position of middle between left and right digit in buffer. */
int midPos = MAX_DIGIT;
/** Number of elements in right digit. */
int inRight;
/**
* Root node of middle tree, either a {@code FingerTree<Node<N, E>, E>} or a
* {@code BufferNode<Node<N, E>, E>}.
*/
Object middle;
/**
* Constructs a buffered tree containing the given single node.
* @param node the initial node
*/
BufferNode(final Node<N, E> node) {
prepend(node);
}
/**
* Constructs a buffered tree containing the same contents as the given tree.
* @param tree the tree to take the contents of
*/
BufferNode(final FingerTree<N, E> tree) {
if(tree instanceof SingletonTree) {
prepend(((SingletonTree<N, E>) tree).elem);
} else {
final DeepTree<N, E> deep = (DeepTree<N, E>) tree;
for(int i = deep.left.length; --i >= 0;) prepend(deep.left[i]);
final FingerTree<Node<N, E>, E> mid = deep.middle;
if(!mid.isEmpty()) middle = mid;
for(final Node<N, E> node : deep.right) append(node);
}
}
/**
* Adds a node to the front of this tree.
* @param node the node to add
*/
void prepend(final Node<N, E> node) {
if(inLeft < MAX_DIGIT) {
nodes[(midPos - inLeft - 1 + CAP) % CAP] = node;
inLeft++;
} else if(middle == null && inRight < MAX_DIGIT) {
midPos = (midPos - 1 + CAP) % CAP;
nodes[(midPos - inLeft + CAP) % CAP] = node;
inRight++;
} else {
final int l = (midPos - inLeft + CAP) % CAP;
final Node<Node<N, E>, E> next = new InnerNode<>(copy(l + 1, inLeft - 1));
nodes[(midPos - 1 + CAP) % CAP] = nodes[l];
nodes[(midPos - 2 + CAP) % CAP] = node;
inLeft = 2;
if(middle == null) middle = new BufferNode<>(next);
else midBuffer().prepend(next);
}
}
/**
* Adds a node to the back of this tree.
* @param node the node to add
*/
void append(final Node<N, E> node) {
if(inRight < MAX_DIGIT) {
nodes[(midPos + inRight) % CAP] = node;
inRight++;
} else if(middle == null && inLeft < MAX_DIGIT) {
midPos = (midPos + 1) % CAP;
nodes[(midPos + inRight - 1) % CAP] = node;
inLeft++;
} else {
final Node<Node<N, E>, E> next = new InnerNode<>(copy(midPos, inRight - 1));
nodes[midPos] = nodes[(midPos + inRight - 1) % CAP];
nodes[(midPos + 1) % CAP] = node;
inRight = 2;
if(middle == null) middle = new BufferNode<>(next);
else midBuffer().append(next);
}
}
/**
* Appends the contents of the given tree to this buffer.
* @param tree finger tree to append
*/
void append(final FingerTree<N, E> tree) {
if(!(tree instanceof DeepTree)) {
if(tree instanceof SingletonTree) append(((SingletonTree<N, E>) tree).elem);
return;
}
final DeepTree<N, E> deep = (DeepTree<N, E>) tree;
final Node<N, E>[] ls = deep.left, rs = deep.right;
final int ll = ls.length;
final FingerTree<Node<N, E>, E> mid = deep.middle;
if(mid.isEmpty()) {
// add digits
for(final Node<N, E> l : ls) append(l);
for(final Node<N, E> r : rs) append(r);
} else if(middle == null) {
// cache previous contents and re-add them afterwards
final int n = inLeft + inRight;
final Node<N, E>[] buff = new Node[n + ll];
copyInto(midPos - inLeft, buff, 0, n);
Array.copyFromStart(ls, ll, buff, n);
inLeft = inRight = 0;
middle = mid;
for(int i = buff.length; --i >= 0;) prepend(buff[i]);
for(final Node<N, E> r : rs) append(r);
} else {
// inner digits have to be merged
final int n = inRight + ll;
final Node<N, E>[] buff = new Node[n];
copyInto(midPos, buff, 0, inRight);
Array.copyFromStart(ls, ll, buff, inRight);
inRight = 0;
for(int k = (n + NODE_SIZE - 1) / NODE_SIZE, p = 0; k > 0; k--) {
final int inNode = (n - p + k - 1) / k;
final Node<N, E>[] out = new Node[inNode];
Array.copyToStart(buff, p, inNode, out);
final Node<Node<N, E>, E> sub = new InnerNode<>(out);
if(middle == null) middle = new BufferNode<>(sub);
else midBuffer().append(sub);
p += inNode;
}
if(middle == null) middle = mid;
else midBuffer().append(mid);
for(final Node<N, E> r : rs) append(r);
}
}
/**
* Creates an {@link FingerTree} containing the elements of this builder.
* @return the finger tree
*/
FingerTree<N, E> freeze() {
final int n = inLeft + inRight;
if(n == 1) return new SingletonTree<>(nodes[(midPos + inRight - 1 + CAP) % CAP]);
final int a = middle == null ? n / 2 : inLeft, l = midPos - inLeft;
final Node<N, E>[] left = copy(l, a), right = copy(l + a, n - a);
if(middle == null) return DeepTree.get(left, right);
if(middle instanceof FingerTree) {
final FingerTree<Node<N, E>, E> tree = (FingerTree<Node<N, E>, E>) middle;
return DeepTree.get(left, tree, right);
}
final BufferNode<Node<N, E>, E> buffer = (BufferNode<Node<N, E>, E>) middle;
return DeepTree.get(left, buffer.freeze(), right);
}
/**
* Returns the node at the given position in this node's ring buffer.
* @param pos position
* @return node at that position
*/
Node<N, E> get(final int pos) {
return nodes[((midPos + pos) % CAP + CAP) % CAP];
}
/**
* Returns the middle tree as a buffer node.
* @return middle buffer node (can be {@code null})
*/
private BufferNode<Node<N, E>, E> midBuffer() {
if(middle == null) return null;
if(middle instanceof BufferNode) return (BufferNode<Node<N, E>, E>) middle;
final BufferNode<Node<N, E>, E> mid = new BufferNode<>((FingerTree<Node<N, E>, E>) middle);
middle = mid;
return mid;
}
/**
* Copies the elements in the given range from the ring buffer into an array.
* @param start start of the range
* @param len length of the range
* @return array containing all nodes in the range
*/
private Node<N, E>[] copy(final int start, final int len) {
final Node<N, E>[] out = new Node[len];
copyInto(start, out, 0, len);
return out;
}
/**
* Copies the nodes in the given range of the ring buffer into the given array.
* @param start start position of the range in the ring buffer
* @param arr output array
* @param pos start position in the output array
* @param len length of the range
*/
private void copyInto(final int start, final Node<N, E>[] arr, final int pos, final int len) {
  // p: physical start index inside the ring buffer;
  // k: number of slots from p up to the physical end of the array.
  final int p = (start % CAP + CAP) % CAP, k = CAP - p;
  if(len <= k) {
    // the range does not wrap around the end of the ring buffer
    Array.copy(nodes, p, len, arr, pos);
  } else {
    // the range wraps: copy the tail segment first, then the segment that
    // starts at physical index 0 (Array.copy/copyFromStart are project
    // utilities -- assumed (src, srcPos, length, dst, dstPos) semantics).
    Array.copy(nodes, p, k, arr, pos);
    Array.copyFromStart(nodes, len - k, arr, pos + k);
  }
}
}
/**
* Iterator over the elements in this builder.
* @param <E> element type
*/
private static class BufferNodeIterator<E> implements Iterator<E> {
  /** Stack of buffer nodes (the chain of nested middle parts). */
  private BufferNode<?, E>[] stack = new BufferNode[8];
  /** Stack of position inside the buffer nodes.
   *  Positions are relative to a node's midpoint: negative values index the
   *  left digit, 0 marks the middle part, positive values the right digit. */
  private int[] poss = new int[8];
  /** Stack top. */
  private int top;
  /** Iterator over the current tree node; {@code null} once exhausted. */
  private Iterator<E> sub;
  /**
   * Constructor.
   * @param root buffer node
   */
  BufferNodeIterator(final BufferNode<E, E> root) {
    stack[0] = root;
    // start at the first (left-most) node of the left digit
    final int pos = -root.inLeft;
    poss[0] = pos;
    sub = new FingerTreeIterator<>(root.get(pos), 0);
  }
  @Override
  public boolean hasNext() {
    return sub != null;
  }
  @Override
  public E next() {
    final E out = sub.next();
    if(sub.hasNext()) return out;
    // sub-iterator empty: advance to the next node before returning
    sub = null;
    final BufferNode<?, E> buffer = stack[top];
    poss[top]++;
    if(poss[top] < 0) {
      // still inside the left digit of the current buffer node
      sub = new FingerTreeIterator<>(buffer.get(poss[top]), 0);
      return out;
    }
    if(poss[top] == 0) {
      // left digit exhausted: descend into the middle part, if any
      final Object mid = buffer.middle;
      if(mid != null) {
        if(mid instanceof FingerTree) {
          sub = ((FingerTree<?, E>) mid).iterator();
        } else {
          // middle is itself a buffer node: push it onto the stack
          final BufferNode<?, E> buff = (BufferNode<?, E>) mid;
          if(++top == stack.length) {
            // grow both parallel stacks in lock step
            stack = Arrays.copyOf(stack, 2 * top);
            poss = Arrays.copyOf(poss, 2 * top);
          }
          stack[top] = buff;
          poss[top] = -buff.inLeft;
          sub = new FingerTreeIterator<>(buff.get(poss[top]), 0);
        }
        return out;
      }
      // no middle part: fall through to the right digit
      poss[top]++;
    }
    if(poss[top] <= buffer.inRight) {
      // inside the right digit; positions are 1-based here, so subtract 1
      // to get the logical ring position (0 .. inRight-1)
      sub = new FingerTreeIterator<>(buffer.get(poss[top] - 1), 0);
      return out;
    }
    // current buffer node exhausted: pop it and resume in its parent, whose
    // right digit starts at position 0 (its middle part was just finished)
    stack[top] = null;
    if(--top >= 0) {
      sub = new FingerTreeIterator<>(stack[top].get(0), 0);
      poss[top]++;
    }
    return out;
  }
  @Override
  public void remove() {
    // structural modification through this iterator is not supported
    throw Util.notExpected();
  }
}
}
| |
package tools;
/*
* Extremely Compiler Collection
* Copyright (c) 2015-2020, Jianping Zeng.
* All rights reserved.
* This software is subjected to the protection of BSD 3.0 Licence.
* For more details, please refers to the LICENSE file.
*/
import java.io.*;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
/**
* @author Jianping Zeng
* @version 0.4
*/
/**
 * A read-only, in-memory snapshot of the bytes of a source file, stdin, or
 * a string, together with a movable read cursor ({@code bufferStart}) that
 * the lexer advances as it consumes characters.
 */
public class MemoryBuffer implements Cloneable {
  /** The underlying content; note {@link #getCharBuffer()} exposes the live array. */
  private final byte[] buffer;
  /** Current read position within {@link #buffer}. */
  private int bufferStart;
  /** Identifier of the source: a file path, or "-" for stdin. */
  private String filename;
  /** Whether the content was read from a regular file (not stdin or memory). */
  private boolean isRegular;

  private MemoryBuffer(byte[] buffer) {
    // Defensive copy so later mutation of the caller's array cannot change
    // this buffer's content.
    this.buffer = Arrays.copyOf(buffer, buffer.length);
  }

  public String getFilename() { return filename; }

  public void setFilename(String filename) { this.filename = filename; }

  /** Returns the live backing array; callers must not mutate it. */
  public byte[] getCharBuffer() { return buffer; }

  public int length() { return buffer.length; }

  /**
   * Determines whether this memory buffer was read from a regular file
   * (as opposed to stdin or an in-memory string).
   *
   * @return true for a regular file source
   */
  public boolean isRegular() {
    return isRegular;
  }

  public void setRegular(boolean regular) {
    isRegular = regular;
  }

  public int getBufferStart() {
    return bufferStart;
  }

  /** Advances the read cursor by one byte. */
  public void advance() {
    ++bufferStart;
  }

  /**
   * Obtains the current character indexed by {@linkplain #bufferStart}
   * from the buffer.
   *
   * @return the byte at the current cursor position
   */
  public byte getCurChar() {
    return buffer[bufferStart];
  }

  /**
   * Returns the byte at position {@code i}, or 0 when {@code i} lies before
   * the current cursor or beyond the end of the buffer.
   */
  public byte getCharAt(int i) {
    int len = length();
    if (i >= getBufferStart() && i < len)
      return buffer[i];
    return 0;
  }

  /**
   * Returns the characters in the half-open range [lineStart, lineEnd)
   * decoded as a String (one char per byte).
   */
  public String getSubString(int lineStart, int lineEnd) {
    Util.assertion(0 <= lineStart && lineStart <= lineEnd && lineEnd < buffer.length);
    if (lineEnd == lineStart) return "";
    StringBuilder sb = new StringBuilder(lineEnd - lineStart);
    for (int i = lineStart; i < lineEnd; i++)
      sb.append((char) buffer[i]);
    return sb.toString();
  }

  /**
   * Checks if the other MemoryBuffer is within the current MemoryBuffer,
   * i.e. both share identical content and the other's cursor lies at or
   * after this one's cursor and within the content bounds.
   *
   * @param other the buffer to test
   * @return true when {@code other} is contained in this buffer
   */
  public boolean contains(MemoryBuffer other) {
    boolean b1 = Arrays.equals(buffer, other.getCharBuffer()),
        b2 = other.getBufferStart() >= bufferStart,
        b3 = other.getBufferStart() <= buffer.length; // must be less and equal.
    return b1 && b2 && b3;
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == null) return false;
    if (obj == this) return true;
    if (getClass() != obj.getClass())
      return false;
    MemoryBuffer memBuf = (MemoryBuffer) obj;
    return bufferStart == memBuf.bufferStart && Arrays
        .equals(buffer, memBuf.buffer);
  }

  @Override
  public int hashCode() {
    // Added to satisfy the equals/hashCode contract: equal buffers (same
    // content and same cursor) must hash identically.
    return 31 * Arrays.hashCode(buffer) + bufferStart;
  }

  @Override
  public MemoryBuffer clone() {
    try {
      return (MemoryBuffer) super.clone();
    } catch (CloneNotSupportedException e) {
      // cannot happen: this class implements Cloneable
      return null;
    }
  }

  public void setBufferStart(int bufferStart) {
    this.bufferStart = bufferStart;
  }

  /**
   * Reads the entire content of the named file (or stdin when the name is
   * "-") into a new MemoryBuffer.
   *
   * @param filename file path, or "-" for stdin
   * @return a buffer holding exactly the bytes that were read
   * @throws CompilerException when the input cannot be read
   */
  private static MemoryBuffer read(String filename) {
    try (BufferedInputStream reader = new BufferedInputStream(filename.equals("-") ?
        System.in : new FileInputStream(filename))) {
      // Accumulate the stream in a ByteArrayOutputStream. The previous
      // hand-rolled growth re-sized the array on every read, capped each
      // read at the size of the previous chunk, and carried a stale comment
      // about a '\0' terminator that was never actually allocated.
      ByteArrayOutputStream content = new ByteArrayOutputStream(1024);
      byte[] chunk = new byte[1024];
      int n;
      while ((n = reader.read(chunk)) >= 0)
        content.write(chunk, 0, n);
      MemoryBuffer buffer = new MemoryBuffer(content.toByteArray());
      buffer.setRegular(!filename.equals("-"));
      buffer.setFilename(filename);
      return buffer;
    } catch (Exception e) {
      throw new CompilerException(String.format("error when reading input file '%s'\n", filename));
    }
  }

  /**
   * Reads the named file; exits the process with status 1 when the file
   * does not exist (pre-existing behavior, kept for callers).
   */
  public static MemoryBuffer getFile(String filename) {
    Path path = Paths.get(filename);
    if (!Files.exists(path)) {
      System.err.println("File '" + filename + "' does not exist");
      System.exit(1);
    }
    return read(filename);
  }

  public static MemoryBuffer getSTDIN() { return read("-"); }

  /**
   * Reads the named file, or stdin when the name is "-" or empty.
   */
  public static MemoryBuffer getFileOrSTDIN(String filename) {
    if (!filename.equals("-") && !filename.isEmpty())
      return getFile(filename);
    MemoryBuffer m = getSTDIN();
    if (m != null) return m;
    // If stdin was empty, m is null. Cons up an empty memory buffer now.
    return MemoryBuffer.getMemBuffer("", "<stdin>");
  }

  public String getBufferIdentifier() {
    return getFilename();
  }

  /** A MemoryBuffer backed by an in-memory byte array rather than a file. */
  static class MemoryBufferMem extends MemoryBuffer {
    public MemoryBufferMem(byte[] buffer, String fid) {
      super(buffer);
      setFilename(fid);
    }

    /**
     * Always false: an in-memory buffer never comes from a regular file.
     *
     * @return false
     */
    @Override
    public boolean isRegular() { return false; }
  }

  /** Wraps an ASCII string in an in-memory buffer with the given name. */
  public static MemoryBuffer getMemBuffer(String buffer, String name) {
    byte[] temp = buffer.getBytes(StandardCharsets.US_ASCII);
    return new MemoryBuffer.MemoryBufferMem(temp, name);
  }

  /** Wraps a byte array in an in-memory buffer with the given name. */
  public static MemoryBuffer getMemBuffer(byte[] buffer, String name) {
    return new MemoryBuffer.MemoryBufferMem(buffer, name);
  }
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* CreateVpnGatewayType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.1 Built on : Oct 19, 2009 (10:59:34 EDT)
*/
package com.amazon.ec2;
/**
* CreateVpnGatewayType bean class
*/
public class CreateVpnGatewayType
        implements org.apache.axis2.databinding.ADBBean{
    /* This type was generated from the piece of schema that had
       name = CreateVpnGatewayType
       Namespace URI = http://ec2.amazonaws.com/doc/2010-11-15/
       Namespace Prefix = ns1
       NOTE: auto-generated Axis2 ADB code -- regenerate from the WSDL rather
       than hand-editing the serialization logic below.
     */

    /**
     * Returns the conventional "ns1" prefix for the EC2 namespace, or a
     * fresh unique prefix for any other namespace.
     */
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if(namespace.equals("http://ec2.amazonaws.com/doc/2010-11-15/")){
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for Type
     */
    protected java.lang.String localType ;

    /**
     * Auto generated getter method
     * @return java.lang.String
     */
    public java.lang.String getType(){
        return localType;
    }

    /**
     * Auto generated setter method
     * @param param Type
     */
    public void setType(java.lang.String param){
        this.localType=param;
    }

    /**
     * field for AvailabilityZone
     */
    protected java.lang.String localAvailabilityZone ;

    /* This tracker boolean will be used to detect whether the user called the set method
     * for this attribute. It will be used to determine whether to include this field
     * in the serialized XML
     */
    protected boolean localAvailabilityZoneTracker = false ;

    /**
     * Auto generated getter method
     * @return java.lang.String
     */
    public java.lang.String getAvailabilityZone(){
        return localAvailabilityZone;
    }

    /**
     * Auto generated setter method
     * @param param AvailabilityZone
     */
    public void setAvailabilityZone(java.lang.String param){
        if (param != null){
            //update the setting tracker
            localAvailabilityZoneTracker = true;
        } else {
            localAvailabilityZoneTracker = false;
        }
        this.localAvailabilityZone=param;
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try{
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        }catch(java.lang.IllegalArgumentException e){
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Builds an OMElement whose content is produced lazily by this bean's
     * serialize method.
     * @param parentQName
     * @param factory
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement (
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
                    public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                        CreateVpnGatewayType.this.serialize(parentQName,factory,xmlWriter);
                    }
                };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                parentQName,factory,dataSource);
    }

    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        serialize(parentQName,factory,xmlWriter,false);
    }

    /**
     * Writes this bean as XML: the mandatory "type" element always, the
     * optional "availabilityZone" element only when its setter was called
     * (tracked by localAvailabilityZoneTracker).
     */
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        java.lang.String prefix = null;
        java.lang.String namespace = null;
        prefix = parentQName.getPrefix();
        namespace = parentQName.getNamespaceURI();
        // open the outer element, binding the namespace prefix if needed
        if ((namespace != null) && (namespace.trim().length() > 0)) {
            java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null) {
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            } else {
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                }
                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        } else {
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }
        // optionally emit an xsi:type attribute for polymorphic payloads
        if (serializeType){
            java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2010-11-15/");
            if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
                writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                        namespacePrefix+":CreateVpnGatewayType",
                        xmlWriter);
            } else {
                writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                        "CreateVpnGatewayType",
                        xmlWriter);
            }
        }
        // mandatory "type" element
        namespace = "http://ec2.amazonaws.com/doc/2010-11-15/";
        if (! namespace.equals("")) {
            prefix = xmlWriter.getPrefix(namespace);
            if (prefix == null) {
                prefix = generatePrefix(namespace);
                xmlWriter.writeStartElement(prefix,"type", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            } else {
                xmlWriter.writeStartElement(namespace,"type");
            }
        } else {
            xmlWriter.writeStartElement("type");
        }
        if (localType==null){
            // write the nil attribute
            throw new org.apache.axis2.databinding.ADBException("type cannot be null!!");
        }else{
            xmlWriter.writeCharacters(localType);
        }
        xmlWriter.writeEndElement();
        // optional "availabilityZone" element, emitted only when set
        if (localAvailabilityZoneTracker){
            namespace = "http://ec2.amazonaws.com/doc/2010-11-15/";
            if (! namespace.equals("")) {
                prefix = xmlWriter.getPrefix(namespace);
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                    xmlWriter.writeStartElement(prefix,"availabilityZone", namespace);
                    xmlWriter.writeNamespace(prefix, namespace);
                    xmlWriter.setPrefix(prefix, namespace);
                } else {
                    xmlWriter.writeStartElement(namespace,"availabilityZone");
                }
            } else {
                xmlWriter.writeStartElement("availabilityZone");
            }
            if (localAvailabilityZone==null){
                // write the nil attribute
                throw new org.apache.axis2.databinding.ADBException("availabilityZone cannot be null!!");
            }else{
                xmlWriter.writeCharacters(localAvailabilityZone);
            }
            xmlWriter.writeEndElement();
        }
        xmlWriter.writeEndElement();
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
                                java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace,attName,attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace,java.lang.String attName,
                                java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (namespace.equals(""))
        {
            xmlWriter.writeAttribute(attName,attValue);
        }
        else
        {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace,attName,attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute (prefix:localPart),
     * registering prefixes for both the attribute and its value as needed.
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                     javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
                            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix,namespaceURI);
            }
            if (prefix.trim().length() > 0){
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (qnames != null) {
            // we have to store this data until the last moment since it is not possible to write any
            // namespace data after writing the character data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;
            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix,namespaceURI);
                    }
                    if (prefix.trim().length() > 0){
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            // keep generating until the prefix is unused in the current context
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     *
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException{
        // elementList alternates QName / value pairs consumed by the ADB reader
        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();
        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/",
                "type"));
        if (localType != null){
            elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localType));
        } else {
            throw new org.apache.axis2.databinding.ADBException("type cannot be null!!");
        }
        if (localAvailabilityZoneTracker){
            elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/",
                    "availabilityZone"));
            if (localAvailabilityZone != null){
                elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localAvailabilityZone));
            } else {
                throw new org.apache.axis2.databinding.ADBException("availabilityZone cannot be null!!");
            }
        }
        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory{

        /**
         * static method to create the object
         * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         * If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static CreateVpnGatewayType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
            CreateVpnGatewayType object =
                    new CreateVpnGatewayType();
            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix ="";
            java.lang.String namespaceuri ="";
            try {
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                // honor xsi:type: delegate to the mapped subtype parser if the
                // payload declares a more specific type
                if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
                    java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                            "type");
                    if (fullTypeName!=null){
                        java.lang.String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1){
                            nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix==null?"":nsPrefix;
                        java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
                        if (!"CreateVpnGatewayType".equals(type)){
                            //find namespace for the prefix
                            java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (CreateVpnGatewayType)com.amazon.ec2.ExtensionMapper.getTypeObject(
                                    nsUri,type,reader);
                        }
                    }
                }
                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();
                reader.next();
                // mandatory "type" element
                while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
                if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/","type").equals(reader.getName())){
                    java.lang.String content = reader.getElementText();
                    object.setType(
                            org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                    reader.next();
                } // End of if for expected property start element
                else{
                    // A start element we are not expecting indicates an invalid parameter was passed
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                }
                // optional "availabilityZone" element
                while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
                if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/","availabilityZone").equals(reader.getName())){
                    java.lang.String content = reader.getElementText();
                    object.setAvailabilityZone(
                            org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                    reader.next();
                } // End of if for expected property start element
                else {
                    // element is optional: absence is fine
                }
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                if (reader.isStartElement())
                    // A start element we are not expecting indicates a trailing invalid property
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }
            return object;
        }
    }//end of factory class
}
| |
/*
Copyright 2016 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package com.gs.fw.common.mithra.finder;
import java.math.*;
import java.sql.*;
import java.util.Date;
import java.util.*;
import com.gs.fw.common.mithra.attribute.*;
import com.gs.fw.common.mithra.databasetype.*;
import com.gs.fw.common.mithra.tempobject.*;
import com.gs.fw.common.mithra.util.*;
import com.gs.fw.common.mithra.util.Time;
/**
 * Base class for set-based atomic operations, i.e. SQL clauses of the form
 * {@code attribute IN (...)} or {@code attribute NOT IN (...)}. Concrete
 * subclasses hold the typed value set; this class renders the clause and
 * binds the parameters, optionally via a temp table for very large sets.
 */
public abstract class AtomicSetBasedOperation extends AbstractAtomicOperation implements SetBasedAtomicOperation
{
    private static final long serialVersionUID = 3420203156804055576L;

    /** Batch size used when inserting large sets into a tuple temp table. */
    public static final int IN_CLAUSE_BULK_INSERT_THRESHOLD = 1000;
    /** Sets larger than this may be split across unions / a sub-select. */
    private static final int POSSIBLE_SPLIT_THRESHOLD = 10;

    protected AtomicSetBasedOperation(Attribute attribute)
    {
        super(attribute);
    }

    protected AtomicSetBasedOperation()
    {
    }

    /** @return true for the negated form, {@code NOT IN (...)} */
    protected abstract boolean isNot();

    /**
     * Appends this operation's "[NOT] IN (...)" clause to the query.
     * For large sets on a splittable query, the placeholder list is deferred
     * and only the clause position is recorded.
     */
    public void generateSql(SqlQuery query)
    {
        // Source attributes are never rendered into SQL.
        if (this.getAttribute().isSourceAttribute()) return;
        String fullyQualifiedColumnName = this.getAttribute().getFullyQualifiedLeftHandExpression(query);
        query.appendWhereClause(fullyQualifiedColumnName);
        if (isNot()) query.appendWhereClause("not");
        query.appendWhereClause("in (");
        if (query.mayNeedToSplit() && this.getSetSize() > POSSIBLE_SPLIT_THRESHOLD)
        {
            // Defer the placeholder list: the query may later split this set
            // across unions or replace it with a temp-table sub-select.
            query.appendWhereClause(")");
            query.setSetBasedClausePosition(this);
        }
        else
        {
            // Emit one '?' placeholder per set element: "?,?,...,?)".
            StringBuilder buffer = new StringBuilder(this.getSetSize() * 2);
            for (int i = 0; i < this.getSetSize() - 1; i++)
            {
                buffer.append("?,");
            }
            buffer.append("?)");
            query.appendWhereClause(buffer);
        }
        query.addSqlParameterSetter(this);
    }

    /**
     * Binds this set's values to the prepared statement, restricting to the
     * current union's chunk when the query split the set across unions.
     *
     * @return number of parameters actually bound
     */
    public int setSqlParameters(PreparedStatement pstmt, int startIndex, SqlQuery query) throws SQLException
    {
        int start = 0;
        int valuesSet = 0;
        int numberToSet = this.getSetSize();
        populateCopiedArray();
        if (query.hasChunkedUnions(this))
        {
            // Work out which slice of the set belongs to the current union.
            int union = query.getCurrentUnionNumber();
            int numberOfChunksPerIn = query.getNumberOfChunksPerIn();
            int numberOfQuestions = this.getSetSize() / numberOfChunksPerIn;
            if ((this.getSetSize() % numberOfChunksPerIn) > 0) numberOfQuestions++;
            start = numberOfQuestions * union;
            numberToSet = numberOfQuestions;
            if (start + numberToSet > this.getSetSize()) numberToSet = this.getSetSize() - start;
        }
        if (!query.isSubSelectInstead(this))
        {
            valuesSet += this.setSqlParameters(pstmt, startIndex+valuesSet, query.getTimeZone(), start, numberToSet, query.getDatabaseType());
        }
        return valuesSet;
    }

    public String getSubSelectStringForTupleTempContext(TupleTempContext tempContext, Object source, PersisterId persisterId)
    {
        return "select c0 from "+tempContext.getFullyQualifiedTableName(source, persisterId);
    }

    protected abstract int setSqlParameters(PreparedStatement pstmt, int startIndex, TimeZone timeZone, int setStart, int numberToSet, DatabaseType databaseType) throws SQLException;

    /** @return number of values in this operation's set */
    public abstract int getSetSize();

    protected abstract void populateCopiedArray();

    // Typed set-value accessors. Subclasses are expected to override the one
    // matching their element type -- TODO confirm; the rest fail fast.
    // (UnsupportedOperationException is a RuntimeException, so existing
    // callers that catch RuntimeException are unaffected.)

    public boolean getSetValueAsBoolean(int index)
    {
        throw new UnsupportedOperationException("not implemented");
    }

    public byte getSetValueAsByte(int index)
    {
        throw new UnsupportedOperationException("not implemented");
    }

    public byte[] getSetValueAsByteArray(int index)
    {
        throw new UnsupportedOperationException("not implemented");
    }

    public BigDecimal getSetValueAsBigDecimal(int index)
    {
        throw new UnsupportedOperationException("not implemented");
    }

    public char getSetValueAsChar(int index)
    {
        throw new UnsupportedOperationException("not implemented");
    }

    public Date getSetValueAsDate(int index)
    {
        throw new UnsupportedOperationException("not implemented");
    }

    public Time getSetValueAsTime(int index)
    {
        throw new UnsupportedOperationException("not implemented");
    }

    public double getSetValueAsDouble(int index)
    {
        throw new UnsupportedOperationException("not implemented");
    }

    public float getSetValueAsFloat(int index)
    {
        throw new UnsupportedOperationException("not implemented");
    }

    public int getSetValueAsInt(int index)
    {
        throw new UnsupportedOperationException("not implemented");
    }

    public long getSetValueAsLong(int index)
    {
        throw new UnsupportedOperationException("not implemented");
    }

    public short getSetValueAsShort(int index)
    {
        throw new UnsupportedOperationException("not implemented");
    }

    public String getSetValueAsString(int index)
    {
        throw new UnsupportedOperationException("not implemented");
    }

    public Timestamp getSetValueAsTimestamp(int index)
    {
        throw new UnsupportedOperationException("not implemented");
    }

    /**
     * Creates a tuple temp context for this set (plus the source attribute,
     * when the finder has one) and bulk-inserts the set values into it, so
     * the IN clause can become a sub-select against the temp table.
     * The context is destroyed on insert failure before rethrowing.
     */
    public TupleTempContext createTempContextAndInsert(SqlQuery query)
    {
        this.populateCopiedArray();
        Attribute[] prototypeAttributes;
        Attribute sourceAttr = query.getAnalyzedOperation().getOriginalOperation().getResultObjectPortal().getFinder().getSourceAttribute();
        Object source = null;
        int[] maxLengths;
        if (sourceAttr != null)
        {
            prototypeAttributes = new Attribute[2];
            maxLengths = new int[2];
            prototypeAttributes[1] = sourceAttr;
            source = query.getSourceAttributeValueForCurrentSource();
        }
        else
        {
            prototypeAttributes = new Attribute[1];
            maxLengths = new int[1];
        }
        prototypeAttributes[0] = this.getAttribute();
        maxLengths[0] = this.getMaxLength();
        TupleTempContext tempContext = new TupleTempContext(prototypeAttributes, sourceAttr, maxLengths, true);
        try
        {
            tempContext.insert(this, query.getAnalyzedOperation().getOriginalOperation().getResultObjectPortal(),
                    IN_CLAUSE_BULK_INSERT_THRESHOLD, source, query.isParallel());
        }
        catch (RuntimeException e)
        {
            tempContext.destroy();
            throw e;
        }
        return tempContext;
    }

    /** @return max column length hint for the temp table; 0 means unbounded/default */
    protected int getMaxLength()
    {
        return 0;
    }

    public void zToString(ToStringContext toStringContext)
    {
        this.getAttribute().zAppendToString(toStringContext);
        if (isNot()) toStringContext.append("not");
        toStringContext.append("in");
        this.appendSetToString(toStringContext);
    }

    protected abstract void appendSetToString(ToStringContext toStringContext);
}
| |
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.madvoc;
import jodd.introspector.ClassDescriptor;
import jodd.introspector.ClassIntrospector;
import jodd.introspector.MethodDescriptor;
import jodd.io.findfile.ClassScanner;
import jodd.madvoc.component.ActionConfigManager;
import jodd.madvoc.component.ActionsManager;
import jodd.madvoc.component.MadvocComponentLifecycle;
import jodd.madvoc.component.MadvocContainer;
import jodd.madvoc.meta.Action;
import jodd.madvoc.meta.MadvocAction;
import jodd.madvoc.meta.MadvocComponent;
import jodd.petite.meta.PetiteInject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
/**
* Default Madvoc configurator uses auto-magic to configure {@link WebApp}.
* It searches the class path for all classes which names ends with 'Action' and 'Result'
* suffixes. Each such class will be loaded and introspected to determine
* if it represents valid Madvoc entity and then registered into the web application.
* <p>
* Action class is scanned for the {@link MadvocAction}. All public methods with {@link Action}
* are registered as Madvoc actions.
*/
public class AutomagicMadvocConfigurator implements MadvocComponentLifecycle.Init, MadvocComponentLifecycle.Start {
private static final Logger log = LoggerFactory.getLogger(AutomagicMadvocConfigurator.class);

/** Scanner used to walk the class path for Madvoc actions and components. */
private final ClassScanner classScanner;

@PetiteInject
protected ActionConfigManager actionConfigManager;
@PetiteInject
protected ActionsManager actionsManager;
@PetiteInject
protected MadvocContainer madvocContainer;

protected String actionClassSuffix; // default action class suffix, for class path search
/** Total configuration time in milliseconds (scanning plus registration). */
protected long elapsed;

/** Bytecode signature used to detect classes annotated with {@link MadvocComponent}. */
protected static final byte[] MADVOC_COMPONENT_ANNOTATION = ClassScanner.bytecodeSignatureOfType(MadvocComponent.class);

/** Deferred work collected during scanning; executed in {@code start()}. */
protected List<Runnable> webappConfigurations = new ArrayList<>();
/** Deferred component registrations; executed in {@code init()}. */
protected List<Runnable> madvocComponents = new ArrayList<>();
public AutomagicMadvocConfigurator() {
actionClassSuffix = "Action";
classScanner = new ClassScanner();
classScanner.detectEntriesMode(true);
classScanner.scanDefaultClasspath();
registerAsConsumer(classScanner);
}
public AutomagicMadvocConfigurator(final ClassScanner classScanner) {
actionClassSuffix = "Action";
this.classScanner = classScanner;
registerAsConsumer(classScanner);
}
@Override
public void init() {
final long startTime = System.currentTimeMillis();
try {
log.info("Scanning...");
classScanner.start();
} catch (final Exception ex) {
throw new MadvocException("Scan classpath error", ex);
}
madvocComponents.forEach(Runnable::run);
log.info("Scanning is complete.");
elapsed = System.currentTimeMillis() - startTime;
}
@Override
public void start() {
final long startTime = System.currentTimeMillis();
webappConfigurations.forEach(Runnable::run);
elapsed += (System.currentTimeMillis() - startTime);
log.info(createInfoMessage());
}
protected String createInfoMessage() {
return "Madvoc configured in " + elapsed + " ms. Total actions: " + actionsManager.getActionsCount();
}
/**
* Parses class name that matches madvoc-related names.
*/
protected void registerAsConsumer(final ClassScanner classScanner) {
classScanner.registerEntryConsumer(classPathEntry -> {
final String entryName = classPathEntry.name();
if (entryName.endsWith(actionClassSuffix)) {
try {
acceptActionClass(classPathEntry.loadClass());
} catch (final Exception ex) {
log.debug("Invalid Madvoc action, ignoring: " + entryName);
}
}
else if (classPathEntry.isTypeSignatureInUse(MADVOC_COMPONENT_ANNOTATION)) {
try {
acceptMadvocComponentClass(classPathEntry.loadClass());
} catch (final Exception ex) {
log.debug("Invalid Madvoc component ignoring: {}" + entryName);
}
}
});
}
// ---------------------------------------------------------------- class check
/**
* Determines if class should be examined for Madvoc annotations.
* Array, anonymous, primitive, interfaces and so on should be
* ignored. Sometimes, checking may fail due to e.g. <code>NoClassDefFoundError</code>;
* we should continue searching anyway.
*/
protected boolean checkClass(final Class clazz) {
try {
if (clazz.isAnonymousClass()) {
return false;
}
if (clazz.isArray() || clazz.isEnum()) {
return false;
}
if (clazz.isInterface()) {
return false;
}
if (clazz.isLocalClass()) {
return false;
}
if ((clazz.isMemberClass() ^ Modifier.isStatic(clazz.getModifiers()))) {
return false;
}
if (clazz.isPrimitive()) {
return false;
}
final int modifiers = clazz.getModifiers();
if (Modifier.isAbstract(modifiers)) {
return false;
}
return true;
} catch (final Throwable ignore) {
return false;
}
}
// ---------------------------------------------------------------- handlers
/**
* Builds action runtime configuration on founded action class.
* Action classes are annotated with {@link jodd.madvoc.meta.MadvocAction} annotation.
*/
@SuppressWarnings("NonConstantStringShouldBeStringBuffer")
protected void acceptActionClass(final Class<?> actionClass) {
if (actionClass == null) {
return;
}
if (!checkClass(actionClass)) {
return;
}
if (actionClass.getAnnotation(MadvocAction.class) == null) {
return;
}
final ClassDescriptor cd = ClassIntrospector.get().lookup(actionClass);
final MethodDescriptor[] allMethodDescriptors = cd.getAllMethodDescriptors();
for (final MethodDescriptor methodDescriptor : allMethodDescriptors) {
if (!methodDescriptor.isPublic()) {
continue;
}
// just public methods
final Method method = methodDescriptor.getMethod();
final boolean hasAnnotation = actionConfigManager.hasActionAnnotationOn(method);
if (!hasAnnotation) {
continue;
}
webappConfigurations.add(() -> actionsManager.registerAction(actionClass, method, null));
}
}
/**
* Registers new Madvoc component.
*/
protected void acceptMadvocComponentClass(final Class componentClass) {
if (componentClass == null) {
return;
}
if (!checkClass(componentClass)) {
return;
}
madvocComponents.add(() -> madvocContainer.registerComponent(componentClass));
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui.picker;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.util.Alarm;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.image.BufferedImage;
/**
 * Base implementation of a color pipette: shows a small always-on-top picker
 * dialog that follows the mouse and samples the pixel color under the cursor.
 */
public abstract class ColorPipetteBase implements ColorPipette {
  private final Alarm myColorListenersNotifier = new Alarm(Alarm.ThreadToUse.SWING_THREAD, this);

  protected final JComponent myParent;
  private final ColorListener myColorListener;

  /** Robot for pixel sampling; may be {@code null} if AWT robot creation failed. */
  protected final Robot myRobot;

  private JDialog myPickerFrame;
  private Color myCurrentColor;
  private Color myInitialColor;

  public ColorPipetteBase(@NotNull JComponent parent, @NotNull ColorListener colorListener) {
    myParent = parent;
    myColorListener = colorListener;
    myRobot = createRobot();
  }

  public static boolean canUseMacPipette() {
    return SystemInfo.isMac && Registry.is("ide.mac.new.color.picker");
  }

  /**
   * Picks the color currently under the mouse pointer, closes the picker and
   * notifies the listener.
   */
  @Override
  public void pickAndClose() {
    PointerInfo pointerInfo = MouseInfo.getPointerInfo();
    if (pointerInfo == null) {
      // FIX: getPointerInfo() may return null (e.g. no pointer device); the original
      // code would throw NPE here. Just close the picker and keep the old color.
      cancelPipette();
      return;
    }
    Color pixelColor = getPixelColor(pointerInfo.getLocation());
    cancelPipette();
    notifyListener(pixelColor, 0);
    setInitialColor(pixelColor);
  }

  /**
   * Samples the screen color at the given location; on Mac a screen capture is
   * used instead of the robot (which cannot see through the picker window).
   */
  protected Color getPixelColor(Point location) {
    if (SystemInfo.isMac) {
      BufferedImage image = MacColorPipette.captureScreen(myPickerFrame, new Rectangle(location.x, location.y, 1, 1));
      if (image != null) {
        //noinspection UseJBColor
        return new Color(image.getRGB(0, 0));
      }
    }
    return myRobot.getPixelColor(location.x, location.y);
  }

  @Nullable
  protected Color getInitialColor() {
    return myInitialColor;
  }

  @Override
  public void setInitialColor(@Nullable Color initialColor) {
    myInitialColor = initialColor;
    setColor(initialColor);
  }

  protected void setColor(@Nullable Color color) {
    myCurrentColor = color;
  }

  @Nullable
  @Override
  public Color getColor() {
    return myCurrentColor;
  }

  @Override
  public Dialog show() {
    Dialog picker = getOrCreatePickerDialog();
    updateLocation();
    picker.setVisible(true);
    return picker;
  }

  /**
   * Centers the picker dialog on the current mouse location.
   *
   * @return the mouse location, or {@code null} if no pointer is available
   */
  @Nullable
  protected Point updateLocation() {
    PointerInfo pointerInfo = MouseInfo.getPointerInfo();
    if (pointerInfo == null) return null;

    Point mouseLocation = pointerInfo.getLocation();
    Dialog pickerDialog = getPickerDialog();
    if (pickerDialog != null && mouseLocation != null) {
      pickerDialog.setLocation(mouseLocation.x - pickerDialog.getWidth() / 2, mouseLocation.y - pickerDialog.getHeight() / 2);
    }
    return mouseLocation;
  }

  @Nullable
  protected Dialog getPickerDialog() {
    return myPickerFrame;
  }

  @NotNull
  protected Dialog getOrCreatePickerDialog() {
    if (myPickerFrame == null) {
      Window owner = SwingUtilities.getWindowAncestor(myParent);
      if (owner instanceof Dialog) {
        myPickerFrame = new JDialog((Dialog)owner);
      }
      else if (owner instanceof Frame) {
        myPickerFrame = new JDialog((Frame)owner);
      }
      else {
        myPickerFrame = new JDialog(new JFrame());
      }
      myPickerFrame.setTitle("intellijPickerDialog");

      // FIX: listener registration and window configuration used to run on EVERY
      // call, stacking duplicate mouse/key listeners each time the dialog was
      // requested. Configure the dialog only once, when it is created.
      myPickerFrame.addMouseListener(new MouseAdapter() {
        @Override
        public void mousePressed(MouseEvent e) {
          e.consume();
          pickAndClose();
        }

        @Override
        public void mouseClicked(MouseEvent e) {
          e.consume();
        }
      });

      myPickerFrame.addKeyListener(new KeyAdapter() {
        @Override
        public void keyPressed(KeyEvent e) {
          switch (e.getKeyCode()) {
            case KeyEvent.VK_ESCAPE:
              cancelPipette();
              break;
            case KeyEvent.VK_ENTER:
              pickAndClose();
              break;
          }
        }
      });

      myPickerFrame.setUndecorated(true);
      myPickerFrame.setAlwaysOnTop(!SystemInfo.isJavaVersionAtLeast("1.8.0") || canUseMacPipette());

      JRootPane rootPane = myPickerFrame.getRootPane();
      rootPane.putClientProperty("Window.shadow", Boolean.FALSE);
    }
    return myPickerFrame;
  }

  /**
   * Schedules a listener notification on the Swing thread, replacing any
   * pending one.
   */
  protected void notifyListener(@NotNull final Color c, int delayMillis) {
    if (!myColorListenersNotifier.isDisposed()) {
      myColorListenersNotifier.cancelAllRequests();
      myColorListenersNotifier.addRequest(() -> myColorListener.colorChanged(c, this), delayMillis);
    }
  }

  @Override
  public boolean imageUpdate(Image image, int i, int i1, int i2, int i3, int i4) {
    return false;
  }

  /**
   * Hides the picker and re-notifies the listener with the initial color, if any.
   */
  @Override
  public void cancelPipette() {
    Dialog pickerDialog = getPickerDialog();
    if (pickerDialog != null) {
      pickerDialog.setVisible(false);
    }

    Color initialColor = getInitialColor();
    if (initialColor != null) {
      notifyListener(initialColor, 0);
    }
  }

  @Override
  public void dispose() {
    UIUtil.dispose(myPickerFrame);
    myPickerFrame = null;
    setInitialColor(null);
    setColor(null);
  }

  @Nullable
  private static Robot createRobot() {
    try {
      return new Robot();
    }
    catch (AWTException e) {
      // headless or restricted environment: pixel sampling unavailable
      return null;
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.java.util.emitter.core;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.lifecycle.Lifecycle;
import org.apache.druid.java.util.emitter.service.UnitEvent;
import org.asynchttpclient.ListenableFuture;
import org.asynchttpclient.Request;
import org.asynchttpclient.Response;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
 * Tests for {@code ParametrizedUriEmitter}: verifies that events are routed to
 * URIs built from the configured pattern and event fields.
 */
public class ParametrizedUriEmitterTest
{
  private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

  private MockHttpClient httpClient;
  private Lifecycle lifecycle;

  @Before
  public void setUp()
  {
    httpClient = new MockHttpClient();
  }

  @After
  public void tearDown()
  {
    if (lifecycle != null) {
      lifecycle.stop();
    }
  }

  /**
   * Creates and starts a parametrized emitter for the given URI pattern.
   */
  private Emitter parametrizedEmitter(String uriPattern) throws Exception
  {
    final Properties props = new Properties();
    props.setProperty("org.apache.druid.java.util.emitter.type", "parametrized");
    props.setProperty("org.apache.druid.java.util.emitter.recipientBaseUrlPattern", uriPattern);
    props.setProperty(
        "org.apache.druid.java.util.emitter.httpEmitting.flushTimeOut",
        String.valueOf(BaseHttpEmittingConfig.TEST_FLUSH_TIMEOUT_MILLIS)
    );
    lifecycle = new Lifecycle();
    Emitter emitter = Emitters.create(props, httpClient, lifecycle);
    Assert.assertEquals(ParametrizedUriEmitter.class, emitter.getClass());
    lifecycle.start();
    return emitter;
  }

  @Test
  public void testParametrizedEmitterCreated() throws Exception
  {
    parametrizedEmitter("http://example.com/");
  }

  @Test
  public void testEmitterWithFeedUriExtractor() throws Exception
  {
    Emitter emitter = parametrizedEmitter("http://example.com/{feed}");
    final List<UnitEvent> events = Arrays.asList(
        new UnitEvent("test", 1),
        new UnitEvent("test", 2)
    );
    httpClient.setGoHandler(
        new GoHandler()
        {
          @Override
          public ListenableFuture<Response> go(Request request) throws JsonProcessingException
          {
            // both events share feed "test", so a single batched request is expected
            Assert.assertEquals("http://example.com/test", request.getUrl());
            Assert.assertEquals(
                StringUtils.format(
                    "[%s,%s]\n",
                    JSON_MAPPER.writeValueAsString(events.get(0)),
                    JSON_MAPPER.writeValueAsString(events.get(1))
                ),
                StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
            );
            return GoHandlers.immediateFuture(EmitterTest.okResponse());
          }
        }.times(1)
    );

    for (UnitEvent event : events) {
      emitter.emit(event);
    }
    emitter.flush();
    Assert.assertTrue(httpClient.succeeded());
  }

  @Test
  public void testEmitterWithMultipleFeeds() throws Exception
  {
    Emitter emitter = parametrizedEmitter("http://example.com/{feed}");
    final List<UnitEvent> events = Arrays.asList(
        new UnitEvent("test1", 1),
        new UnitEvent("test2", 2)
    );
    final Map<String, String> results = new HashMap<>();
    httpClient.setGoHandler(
        new GoHandler()
        {
          @Override
          protected ListenableFuture<Response> go(Request request)
          {
            // distinct feeds go to distinct URLs; collect the payload per URL
            results.put(
                request.getUrl(),
                StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
            );
            return GoHandlers.immediateFuture(EmitterTest.okResponse());
          }
        }.times(2)
    );

    for (UnitEvent event : events) {
      emitter.emit(event);
    }
    emitter.flush();
    Assert.assertTrue(httpClient.succeeded());
    Map<String, String> expected = ImmutableMap.of(
        "http://example.com/test1", StringUtils.format("[%s]\n", JSON_MAPPER.writeValueAsString(events.get(0))),
        "http://example.com/test2", StringUtils.format("[%s]\n", JSON_MAPPER.writeValueAsString(events.get(1)))
    );
    Assert.assertEquals(expected, results);
  }

  @Test
  public void testEmitterWithParametrizedUriExtractor() throws Exception
  {
    Emitter emitter = parametrizedEmitter("http://example.com/{key1}/{key2}");
    final List<UnitEvent> events = Arrays.asList(
        new UnitEvent("test", 1, ImmutableMap.of("key1", "val1", "key2", "val2")),
        new UnitEvent("test", 2, ImmutableMap.of("key1", "val1", "key2", "val2"))
    );
    httpClient.setGoHandler(
        new GoHandler()
        {
          @Override
          protected ListenableFuture<Response> go(Request request) throws JsonProcessingException
          {
            Assert.assertEquals("http://example.com/val1/val2", request.getUrl());
            Assert.assertEquals(
                StringUtils.format(
                    "[%s,%s]\n",
                    JSON_MAPPER.writeValueAsString(events.get(0)),
                    JSON_MAPPER.writeValueAsString(events.get(1))
                ),
                StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
            );
            return GoHandlers.immediateFuture(EmitterTest.okResponse());
          }
        }.times(1)
    );

    for (UnitEvent event : events) {
      emitter.emit(event);
    }
    emitter.flush();
    Assert.assertTrue(httpClient.succeeded());
  }

  @Test
  public void failEmitMalformedEvent() throws Exception
  {
    Emitter emitter = parametrizedEmitter("http://example.com/{keyNotSetInEvents}");
    Event event = new UnitEvent("test", 1);
    httpClient.setGoHandler(GoHandlers.failingHandler());
    try {
      emitter.emit(event);
      emitter.flush();
      // FIX: without this the test silently passed when no exception was thrown at all
      Assert.fail("Expected IllegalArgumentException for URI key missing from the event");
    }
    catch (IllegalArgumentException e) {
      // FIX: assertEquals takes (expected, actual) — the arguments were reversed
      Assert.assertEquals(
          StringUtils.format(
              "ParametrizedUriExtractor with pattern http://example.com/{keyNotSetInEvents} requires keyNotSetInEvents to be set in event, but found %s",
              event.toMap()
          ),
          e.getMessage()
      );
    }
  }
}
| |
/*******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.changefileencoding;
import java.util.List;
import java.util.Map;
import org.pentaho.di.core.CheckResult;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Counter;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.shared.SharedObjectInterface;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.w3c.dom.Node;
/**
 * Step metadata for the "Change file encoding" transformation step: holds the
 * step settings (dynamic source/target filename fields, source/target
 * encodings, result-file bookkeeping flags) and (de)serializes them to XML
 * and to the repository.
 */
public class ChangeFileEncodingMeta extends BaseStepMeta implements StepMetaInterface
{
    private static Class<?> PKG = ChangeFileEncoding.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$

    // Add source/target files to the transformation's result file list?
    private boolean addsourceresultfilenames;
    private boolean addtargetresultfilenames;

    /** dynamic filename */
    private String filenamefield;

    private String targetfilenamefield;
    private String targetencoding;
    private String sourceencoding;
    private boolean createparentfolder;

    public ChangeFileEncodingMeta()
    {
        super(); // allocate BaseStepMeta
    }

    /**
     * @return Returns the filenamefield.
     */
    public String getDynamicFilenameField()
    {
        return filenamefield;
    }

    /**
     * @param filenamefield The filenamefield to set.
     */
    public void setDynamicFilenameField(String filenamefield)
    {
        this.filenamefield = filenamefield;
    }

    /**
     * @return Returns the targetfilenamefield.
     */
    public String getTargetFilenameField()
    {
        return targetfilenamefield;
    }

    /**
     * @param targetfilenamefield The targetfilenamefield to set.
     */
    public void setTargetFilenameField(String targetfilenamefield)
    {
        this.targetfilenamefield = targetfilenamefield;
    }

    /**
     * @return Returns the sourceencoding.
     */
    public String getSourceEncoding()
    {
        return sourceencoding;
    }

    /**
     * @param encoding The sourceencoding to set.
     */
    public void setSourceEncoding(String encoding)
    {
        this.sourceencoding = encoding;
    }

    /**
     * @return Returns the targetencoding.
     */
    public String getTargetEncoding()
    {
        return targetencoding;
    }

    /**
     * @param encoding The targetencoding to set.
     */
    public void setTargetEncoding(String encoding)
    {
        this.targetencoding = encoding;
    }

    /** @return whether source files are added to the result file list. */
    public boolean addSourceResultFilenames()
    {
        return addsourceresultfilenames;
    }

    /** @param addresultfilenames whether to add source files to the result file list. */
    public void setaddSourceResultFilenames(boolean addresultfilenames)
    {
        this.addsourceresultfilenames = addresultfilenames;
    }

    /** @return whether target files are added to the result file list. */
    public boolean addTargetResultFilenames()
    {
        return addtargetresultfilenames;
    }

    /** @param addresultfilenames whether to add target files to the result file list. */
    public void setaddTargetResultFilenames(boolean addresultfilenames)
    {
        this.addtargetresultfilenames = addresultfilenames;
    }

    /** @return whether the target file's parent folder is created if missing. */
    public boolean isCreateParentFolder()
    {
        return createparentfolder;
    }

    /** @param createparentfolder whether to create the target file's parent folder. */
    public void setCreateParentFolder(boolean createparentfolder)
    {
        this.createparentfolder = createparentfolder;
    }

    public void loadXML(Node stepnode, List<DatabaseMeta> databases, Map<String, Counter> counters)
        throws KettleXMLException
    {
        readData(stepnode, databases);
    }

    public Object clone()
    {
        // all fields are immutable (Strings/booleans), so the shallow clone is sufficient
        ChangeFileEncodingMeta retval = (ChangeFileEncodingMeta) super.clone();
        return retval;
    }

    public void setDefault()
    {
        addsourceresultfilenames = false;
        addtargetresultfilenames = false;
        targetfilenamefield = null;
        // FIX: removed a stray double semicolon after this statement
        sourceencoding = System.getProperty("file.encoding");
        targetencoding = null;
        createparentfolder = false;
    }

    public String getXML()
    {
        // FIX: StringBuffer replaced with StringBuilder (no synchronization needed)
        // and the mixed "+"/append style unified; the emitted XML is unchanged.
        StringBuilder retval = new StringBuilder();

        retval.append(" ").append(XMLHandler.addTagValue("filenamefield", filenamefield)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append(" ").append(XMLHandler.addTagValue("targetfilenamefield", targetfilenamefield));
        retval.append(" ").append(XMLHandler.addTagValue("sourceencoding", sourceencoding));
        retval.append(" ").append(XMLHandler.addTagValue("targetencoding", targetencoding));
        retval.append(" ").append(XMLHandler.addTagValue("addsourceresultfilenames", addsourceresultfilenames));
        retval.append(" ").append(XMLHandler.addTagValue("addtargetresultfilenames", addtargetresultfilenames));
        retval.append(" ").append(XMLHandler.addTagValue("createparentfolder", createparentfolder));

        return retval.toString();
    }

    /**
     * Reads the step settings from the given XML node.
     */
    private void readData(Node stepnode, List<? extends SharedObjectInterface> databases)
        throws KettleXMLException
    {
        try
        {
            filenamefield = XMLHandler.getTagValue(stepnode, "filenamefield"); //$NON-NLS-1$
            targetfilenamefield = XMLHandler.getTagValue(stepnode, "targetfilenamefield");
            sourceencoding = XMLHandler.getTagValue(stepnode, "sourceencoding");
            targetencoding = XMLHandler.getTagValue(stepnode, "targetencoding");
            addsourceresultfilenames = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, "addsourceresultfilenames"));
            addtargetresultfilenames = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, "addtargetresultfilenames"));
            createparentfolder = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, "createparentfolder"));
        }
        catch (Exception e)
        {
            throw new KettleXMLException(BaseMessages.getString(PKG, "ChangeFileEncodingMeta.Exception.UnableToReadStepInfo"), e); //$NON-NLS-1$
        }
    }

    public void readRep(Repository rep, ObjectId id_step, List<DatabaseMeta> databases, Map<String, Counter> counters)
        throws KettleException
    {
        try
        {
            filenamefield = rep.getStepAttributeString(id_step, "filenamefield"); //$NON-NLS-1$
            targetfilenamefield = rep.getStepAttributeString(id_step, "targetfilenamefield");
            sourceencoding = rep.getStepAttributeString(id_step, "sourceencoding");
            targetencoding = rep.getStepAttributeString(id_step, "targetencoding");
            addsourceresultfilenames = rep.getStepAttributeBoolean(id_step, "addsourceresultfilenames");
            addtargetresultfilenames = rep.getStepAttributeBoolean(id_step, "addtargetresultfilenames");
            createparentfolder = rep.getStepAttributeBoolean(id_step, "createparentfolder");
        }
        catch (Exception e)
        {
            throw new KettleException(BaseMessages.getString(PKG, "ChangeFileEncodingMeta.Exception.UnexpectedErrorReadingStepInfo"), e); //$NON-NLS-1$
        }
    }

    public void saveRep(Repository rep, ObjectId id_transformation, ObjectId id_step) throws KettleException
    {
        try
        {
            rep.saveStepAttribute(id_transformation, id_step, "filenamefield", filenamefield); //$NON-NLS-1$
            rep.saveStepAttribute(id_transformation, id_step, "targetfilenamefield", targetfilenamefield);
            rep.saveStepAttribute(id_transformation, id_step, "sourceencoding", sourceencoding);
            rep.saveStepAttribute(id_transformation, id_step, "targetencoding", targetencoding);
            rep.saveStepAttribute(id_transformation, id_step, "addsourceresultfilenames", addsourceresultfilenames);
            rep.saveStepAttribute(id_transformation, id_step, "addtargetresultfilenames", addtargetresultfilenames);
            rep.saveStepAttribute(id_transformation, id_step, "createparentfolder", createparentfolder);
        }
        catch (Exception e)
        {
            throw new KettleException(BaseMessages.getString(PKG, "ChangeFileEncodingMeta.Exception.UnableToSaveStepInfo") + id_step, e); //$NON-NLS-1$
        }
    }

    public void check(List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String input[], String output[], RowMetaInterface info)
    {
        CheckResult cr;
        String error_message = ""; //$NON-NLS-1$

        // source filename field must be set
        if (Const.isEmpty(filenamefield))
        {
            error_message = BaseMessages.getString(PKG, "ChangeFileEncodingMeta.CheckResult.FileFieldMissing"); //$NON-NLS-1$
            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepMeta);
            remarks.add(cr);
        }
        else
        {
            error_message = BaseMessages.getString(PKG, "ChangeFileEncodingMeta.CheckResult.FileFieldOK"); //$NON-NLS-1$
            cr = new CheckResult(CheckResult.TYPE_RESULT_OK, error_message, stepMeta);
            remarks.add(cr);
        }

        // target filename field must be set
        if (Const.isEmpty(targetfilenamefield))
        {
            error_message = BaseMessages.getString(PKG, "ChangeFileEncodingMeta.CheckResult.TargetFileFieldMissing"); //$NON-NLS-1$
            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepMeta);
            remarks.add(cr);
        }
        else
        {
            error_message = BaseMessages.getString(PKG, "ChangeFileEncodingMeta.CheckResult.TargetFileFieldOK"); //$NON-NLS-1$
            cr = new CheckResult(CheckResult.TYPE_RESULT_OK, error_message, stepMeta);
            remarks.add(cr);
        }

        // both encodings are checked after variable substitution
        String realSourceEncoding = transMeta.environmentSubstitute(getSourceEncoding());
        if (Const.isEmpty(realSourceEncoding))
        {
            error_message = BaseMessages.getString(PKG, "ChangeFileEncodingMeta.CheckResult.SourceEncodingMissing"); //$NON-NLS-1$
            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepMeta);
            remarks.add(cr);
        }
        else
        {
            error_message = BaseMessages.getString(PKG, "ChangeFileEncodingMeta.CheckResult.SourceEncodingOK"); //$NON-NLS-1$
            cr = new CheckResult(CheckResult.TYPE_RESULT_OK, error_message, stepMeta);
            remarks.add(cr);
        }

        String realTargetEncoding = transMeta.environmentSubstitute(getTargetEncoding());
        if (Const.isEmpty(realTargetEncoding))
        {
            error_message = BaseMessages.getString(PKG, "ChangeFileEncodingMeta.CheckResult.TargetEncodingMissing"); //$NON-NLS-1$
            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepMeta);
            remarks.add(cr);
        }
        else
        {
            error_message = BaseMessages.getString(PKG, "ChangeFileEncodingMeta.CheckResult.TargetEncodingOK"); //$NON-NLS-1$
            cr = new CheckResult(CheckResult.TYPE_RESULT_OK, error_message, stepMeta);
            remarks.add(cr);
        }

        // See if we have input streams leading to this step!
        if (input.length > 0)
        {
            cr = new CheckResult(CheckResult.TYPE_RESULT_OK, BaseMessages.getString(PKG, "ChangeFileEncodingMeta.CheckResult.ReceivingInfoFromOtherSteps"), stepMeta); //$NON-NLS-1$
            remarks.add(cr);
        }
        else
        {
            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "ChangeFileEncodingMeta.CheckResult.NoInpuReceived"), stepMeta); //$NON-NLS-1$
            remarks.add(cr);
        }
    }

    public StepInterface getStep(StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans)
    {
        return new ChangeFileEncoding(stepMeta, stepDataInterface, cnr, transMeta, trans);
    }

    public StepDataInterface getStepData()
    {
        return new ChangeFileEncodingData();
    }

    public boolean supportsErrorHandling()
    {
        return true;
    }
}
| |
/*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.tests.tb3;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.lang.reflect.Field;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicHttpEntityEnclosingRequest;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.openqa.selenium.By;
import org.openqa.selenium.Dimension;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.interactions.Actions;
import org.openqa.selenium.interactions.HasInputDevices;
import org.openqa.selenium.interactions.Keyboard;
import org.openqa.selenium.interactions.Mouse;
import org.openqa.selenium.interactions.internal.Coordinates;
import org.openqa.selenium.internal.Locatable;
import org.openqa.selenium.internal.WrapsElement;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.HttpCommandExecutor;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.support.ui.ExpectedCondition;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
import com.vaadin.server.LegacyApplication;
import com.vaadin.server.UIProvider;
import com.vaadin.testbench.TestBenchDriverProxy;
import com.vaadin.testbench.TestBenchElement;
import com.vaadin.testbench.annotations.BrowserConfiguration;
import com.vaadin.testbench.elements.CheckBoxElement;
import com.vaadin.testbench.elements.LabelElement;
import com.vaadin.testbench.elements.TableElement;
import com.vaadin.testbench.elements.VerticalLayoutElement;
import com.vaadin.testbench.parallel.Browser;
import com.vaadin.testbench.parallel.BrowserUtil;
import com.vaadin.testbench.parallel.ParallelTest;
import com.vaadin.ui.UI;
import elemental.json.JsonObject;
import elemental.json.impl.JsonUtil;
/**
* Base class for TestBench 3+ tests. All TB3+ tests in the project should
* extend this class.
*
* Provides:
* <ul>
* <li>Helpers for browser selection</li>
* <li>Hub connection setup and teardown</li>
* <li>Automatic generation of URL for a given test on the development server
* using {@link #getUIClass()} or by automatically finding an enclosing UI class
* and based on requested features, e.g. {@link #isDebug()},
* {@link #isPush()}</li>
* <li>Generic helpers for creating TB3+ tests</li>
* </ul>
*
* @author Vaadin Ltd
*/
@RunWith(TB3Runner.class)
public abstract class AbstractTB3Test extends ParallelTest {
@Rule
public TestName testName = new TestName();
@Rule
public RetryOnFail retry = new RetryOnFail();
/**
* Height of the screenshots we want to capture
*/
private static final int SCREENSHOT_HEIGHT = 850;
/**
* Width of the screenshots we want to capture
*/
private static final int SCREENSHOT_WIDTH = 1500;
/**
* Timeout used by the TB grid
*/
private static final int BROWSER_TIMEOUT_IN_MS = 30 * 1000;
protected static DesiredCapabilities PHANTOMJS2() {
DesiredCapabilities phantomjs2 = new VaadinBrowserFactory()
.create(Browser.PHANTOMJS, "2");
// Hack for the test cluster
phantomjs2.setCapability("phantomjs.binary.path",
"/usr/bin/phantomjs2");
return phantomjs2;
}
private boolean debug = false;
private boolean push = false;
static {
com.vaadin.testbench.Parameters
.setScreenshotComparisonCursorDetection(true);
}
/**
* Connect to the hub using a remote web driver, set the canvas size and
* opens the initial URL as specified by {@link #getTestUrl()}
*
* @throws Exception
*/
@Override
public void setup() throws Exception {
super.setup();
int w = SCREENSHOT_WIDTH;
int h = SCREENSHOT_HEIGHT;
try {
testBench().resizeViewPortTo(w, h);
} catch (UnsupportedOperationException e) {
// Opera does not support this...
}
}
/**
* Method for closing the tested application.
*/
protected void closeApplication() {
if (driver != null) {
try {
openTestURL("closeApplication");
} catch (Exception e) {
e.printStackTrace();
}
}
}
protected WebElement getTooltipErrorElement() {
WebElement tooltip = getDriver()
.findElement(com.vaadin.testbench.By.className("v-tooltip"));
return tooltip.findElement(By.className("v-errormessage"));
}
protected WebElement getTooltipElement() {
return getDriver().findElement(
com.vaadin.testbench.By.className("v-tooltip-text"));
}
protected Coordinates getCoordinates(TestBenchElement element) {
return ((Locatable) element.getWrappedElement()).getCoordinates();
}
/**
 * Checks whether a debug window message with the exact given text is
 * currently present.
 *
 * @param message
 *            the exact message text to look for
 * @return true if such a debug message exists, false otherwise
 */
private boolean hasDebugMessage(String message) {
    try {
        return getDebugMessage(message) != null;
    } catch (NoSuchElementException e) {
        // findElement throws instead of returning null when the message is
        // missing, so without this catch the method could never return
        // false when used outside an ExpectedCondition
        return false;
    }
}
/**
 * Finds the debug window message element whose text exactly matches the
 * given message.
 *
 * @param message
 *            the exact message text to look for
 * @return the matching debug message element (never null)
 * @throws NoSuchElementException
 *             if no such message is present -- findElement throws rather
 *             than returning null
 */
private WebElement getDebugMessage(String message) {
    return driver.findElement(By.xpath(String.format(
            "//span[@class='v-debugwindow-message' and text()='%s']",
            message)));
}
protected void waitForDebugMessage(final String expectedMessage) {
waitForDebugMessage(expectedMessage, 30);
}
/**
 * Waits until a debug window message with the given exact text appears.
 *
 * @param expectedMessage
 *            the message text to wait for
 * @param timeout
 *            maximum time to wait, in seconds
 */
protected void waitForDebugMessage(final String expectedMessage,
        int timeout) {
    waitUntil(webDriver -> hasDebugMessage(expectedMessage), timeout);
}
/**
 * Clicks the debug window's "Clear log" button, emptying the message log.
 */
protected void clearDebugMessages() {
    By clearButton = By.xpath(
            "//button[@class='v-debugwindow-button' and @title='Clear log']");
    driver.findElement(clearButton).click();
}
/**
 * Waits until the given row of the table can be located (i.e. has been
 * rendered after scrolling).
 *
 * @param table
 *            the table whose row to wait for
 * @param row
 *            index of the row to wait for
 */
protected void waitUntilRowIsVisible(final TableElement table,
        final int row) {
    waitUntil(input -> {
        try {
            return table.getCell(row, 0) != null;
        } catch (NoSuchElementException e) {
            return false;
        }
    });
}
/**
 * Scrolls the table's scrollable area down by the given number of rows and
 * then waits until the given row has been rendered.
 *
 * @param table
 *            the table to scroll
 * @param rows
 *            number of rows to scroll down by (assumes a row height of 30
 *            pixels -- TODO confirm this matches the theme in use)
 * @param rowToWait
 *            index of the row to wait for after scrolling
 */
protected void scrollTable(TableElement table, int rows, int rowToWait) {
    testBenchElement(table.findElement(By.className("v-scrollable")))
            .scroll(rows * 30);
    waitUntilRowIsVisible(table, rowToWait);
}
/**
* Opens the given test (defined by {@link #getTestUrl()}, optionally with
* debug window and/or push (depending on {@link #isDebug()} and
* {@link #isPush()}.
*/
protected void openTestURL() {
openTestURL(new String[0]);
}
/**
* Opens the given test (defined by {@link #getTestUrl()}, optionally with
* debug window and/or push (depending on {@link #isDebug()} and
* {@link #isPush()}.
*/
protected void openTestURL(String... parameters) {
openTestURL(getUIClass(), parameters);
}
/**
* Opens the given test (defined by {@link #getTestUrl()}, optionally with
* debug window and/or push (depending on {@link #isDebug()} and
* {@link #isPush()}.
*/
protected void openTestURL(Class<?> uiClass, String... parameters) {
openTestURL(uiClass, new HashSet<>(Arrays.asList(parameters)));
}
/**
 * Opens the test URL for the given UI class, appending the given URL
 * parameters as a query string.
 *
 * NOTE: this method mutates the given parameter set: "debug" is added when
 * {@link #isDebug()} is true and "restartApplication" is added for
 * LegacyApplication classes.
 *
 * @param uiClass
 *            the UI (or UIProvider/LegacyApplication) class to open
 * @param parameters
 *            URL query parameters to append; may be modified by this call
 */
private void openTestURL(Class<?> uiClass, Set<String> parameters) {
    String url = getTestURL(uiClass);
    if (isDebug()) {
        parameters.add("debug");
    }
    if (LegacyApplication.class.isAssignableFrom(uiClass)) {
        parameters.add("restartApplication");
    }
    if (parameters.size() > 0) {
        url += "?" + StringUtils.join(parameters, "&");
    }
    driver.get(url);
}
/**
* Returns the full URL to be used for the test
*
* @return the full URL for the test
*/
protected String getTestUrl() {
return StringUtils.strip(getBaseURL(), "/") + getDeploymentPath();
}
/**
* Returns the full URL to be used for the test for the provided UI class.
*
* @return the full URL for the test
*/
protected String getTestURL(Class<?> uiClass) {
return StringUtils.strip(getBaseURL(), "/")
+ getDeploymentPath(uiClass);
}
/**
* Used to determine what URL to initially open for the test
*
* @return the host name of development server
*/
protected abstract String getDeploymentHostname();
/**
* Used to determine what port the test is running on
*
* @return The port the test is running on, by default 8888
*/
protected abstract int getDeploymentPort();
/**
* Produces a collection of browsers to run the test on. This method is
* executed by the test runner when determining how many test methods to
* invoke and with what parameters. For each returned value a test method is
* run and before running that,
* {@link #setDesiredCapabilities(DesiredCapabilities)} is invoked with the
* value returned by this method.
*
* This method is not static to allow overriding it in sub classes. By
* default runs the test only on Firefox
*
* @return The browsers to run the test on
*/
@BrowserConfiguration
public List<DesiredCapabilities> getBrowsersToTest() {
return Collections
.singletonList(Browser.FIREFOX.getDesiredCapabilities());
}
/**
* Finds an element based on the part of a TB2 style locator following the
* :: (e.g. vaadin=runLabelModes::PID_Scheckboxaction-Enabled/domChild[0] ->
* PID_Scheckboxaction-Enabled/domChild[0]).
*
* @param vaadinLocator
* The part following :: of the vaadin locator string
* @return
*/
protected WebElement vaadinElement(String vaadinLocator) {
return driver.findElement(vaadinLocator(vaadinLocator));
}
/**
 * Uses JavaScript to determine the currently focused element.
 *
 * @return the focused element, or null if none could be determined
 */
protected WebElement getFocusedElement() {
    Object active = executeScript("return document.activeElement");
    return active == null ? null : (WebElement) active;
}
/**
* Executes the given Javascript
*
* @param script
* the script to execute
* @return whatever
* {@link org.openqa.selenium.JavascriptExecutor#executeScript(String, Object...)}
* returns
*/
protected Object executeScript(String script, Object... args) {
return ((JavascriptExecutor) getDriver()).executeScript(script, args);
}
/**
* Find a Vaadin element based on its id given using Component.setId
*
* @param id
* The id to locate
* @return
*/
public WebElement vaadinElementById(String id) {
return driver.findElement(By.id(id));
}
/**
* Finds a {@link By} locator based on the part of a TB2 style locator
* following the :: (e.g.
* vaadin=runLabelModes::PID_Scheckboxaction-Enabled/domChild[0] ->
* PID_Scheckboxaction-Enabled/domChild[0]).
*
* @param vaadinLocator
* The part following :: of the vaadin locator string
* @return
*/
public org.openqa.selenium.By vaadinLocator(String vaadinLocator) {
String base = getApplicationId(getDeploymentPath());
base += "::";
return com.vaadin.testbench.By.vaadin(base + vaadinLocator);
}
/**
* Constructs a {@link By} locator for the id given using Component.setId
*
* @param id
* The id to locate
* @return a locator for the given id
*/
public By vaadinLocatorById(String id) {
return vaadinLocator("PID_S" + id);
}
/**
 * Waits up to 10s for the given condition to become true. Use e.g. as
 * {@code waitUntil(ExpectedConditions.textToBePresentInElement(by, text))}.
 *
 * @param condition
 *            the condition to wait for to become true
 */
protected <T> void waitUntil(ExpectedCondition<T> condition) {
    waitUntil(condition, 10);
}
/**
 * Waits the given number of seconds for the given condition to become
 * true. Use e.g. as
 * {@code waitUntil(ExpectedConditions.textToBePresentInElement(by, text), 5)}.
 *
 * @param condition
 *            the condition to wait for to become true
 * @param timeoutInSeconds
 *            maximum time to wait, in seconds
 */
protected <T> void waitUntil(ExpectedCondition<T> condition,
        long timeoutInSeconds) {
    new WebDriverWait(driver, timeoutInSeconds).until(condition);
}
/**
 * Waits up to 10s for the given condition to become false. Use e.g. as
 * {@code waitUntilNot(ExpectedConditions.textToBePresentInElement(by, text))}.
 *
 * @param condition
 *            the condition to wait for to become false
 */
protected <T> void waitUntilNot(ExpectedCondition<T> condition) {
    waitUntilNot(condition, 10);
}
/**
 * Waits the given number of seconds for the given condition to become
 * false. Use e.g. as
 * {@code waitUntilNot(ExpectedConditions.textToBePresentInElement(by, text), 5)}.
 *
 * @param condition
 *            the condition to wait for to become false
 * @param timeoutInSeconds
 *            maximum time to wait, in seconds
 */
protected <T> void waitUntilNot(ExpectedCondition<T> condition,
        long timeoutInSeconds) {
    waitUntil(ExpectedConditions.not(condition), timeoutInSeconds);
}
protected void waitForElementPresent(final By by) {
waitUntil(ExpectedConditions.presenceOfElementLocated(by));
}
/**
 * Waits until no element matching the given locator is present in the DOM.
 *
 * @param by
 *            the locator for the element
 */
protected void waitForElementNotPresent(final By by) {
    waitUntil(input -> input.findElements(by).isEmpty());
}
protected void waitForElementVisible(final By by) {
waitUntil(ExpectedConditions.visibilityOfElementLocated(by));
}
/**
 * Checks if the given element has the given class name.
 *
 * Matches only full class names, i.e. hasCssClass(element, "foo") does not
 * match class="foobar".
 *
 * @param element
 *            the element whose class attribute to inspect
 * @param className
 *            the class name to look for
 * @return true if the element carries exactly the given class name
 */
protected boolean hasCssClass(WebElement element, String className) {
    String classAttribute = element.getAttribute("class");
    if (classAttribute == null || classAttribute.isEmpty()) {
        // No classes at all: only an "empty" query matches
        return className == null || className.isEmpty();
    }
    boolean found = false;
    for (String candidate : classAttribute.split(" ")) {
        if (className.equals(candidate)) {
            found = true;
            break;
        }
    }
    return found;
}
/**
* For tests extending AbstractTestUIWithLog, returns the element for the
* Nth log row
*
* @param rowNr
* The log row to retrieve
* @return the Nth log row
*/
protected WebElement getLogRowElement(int rowNr) {
return vaadinElementById("Log_row_" + rowNr);
}
/**
* For tests extending AbstractTestUIWithLog, returns the text in the Nth
* log row
*
* @param rowNr
* The log row to retrieve text for
* @return the text in the log row
*/
protected String getLogRow(int rowNr) {
return getLogRowElement(rowNr).getText();
}
/**
 * Asserts that {@literal a} is {@literal >=} {@literal b}.
 *
 * @param message
 *            The message to include in the {@link AssertionError}
 * @param a
 *            the actual value
 * @param b
 *            the value to compare against
 * @throws AssertionError
 *             If comparison fails
 */
public static final <T> void assertGreaterOrEqual(String message,
        Comparable<T> a, T b) throws AssertionError {
    if (a.compareTo(b) < 0) {
        throw new AssertionError(decorate(message, a, b));
    }
}
/**
 * Asserts that {@literal a} is {@literal >} {@literal b}.
 *
 * @param message
 *            The message to include in the {@link AssertionError}
 * @param a
 *            the actual value
 * @param b
 *            the value to compare against
 * @throws AssertionError
 *             If comparison fails
 */
public static final <T> void assertGreater(String message, Comparable<T> a,
        T b) throws AssertionError {
    if (a.compareTo(b) <= 0) {
        throw new AssertionError(decorate(message, a, b));
    }
}
/**
 * Asserts that {@literal a} is {@literal <=} {@literal b}.
 *
 * @param message
 *            The message to include in the {@link AssertionError}
 * @param a
 *            the actual value
 * @param b
 *            the value to compare against
 * @throws AssertionError
 *             If comparison fails
 */
public static final <T> void assertLessThanOrEqual(String message,
        Comparable<T> a, T b) throws AssertionError {
    if (a.compareTo(b) <= 0) {
        return;
    }
    throw new AssertionError(decorate(message, a, b));
}
/**
 * Asserts that {@literal a} is {@literal <} {@literal b}.
 *
 * @param message
 *            The message to include in the {@link AssertionError}
 * @param a
 *            the actual value
 * @param b
 *            the value to compare against
 * @throws AssertionError
 *             If comparison fails
 */
public static final <T> void assertLessThan(String message, Comparable<T> a,
        T b) throws AssertionError {
    if (a.compareTo(b) < 0) {
        return;
    }
    throw new AssertionError(decorate(message, a, b));
}
/**
 * Replaces the {0} and {1} placeholders in the message with the string
 * representations of a and b.
 *
 * @param message
 *            the message template, optionally containing {0} and {1}
 * @param a
 *            value substituted for {0}
 * @param b
 *            value substituted for {1}
 * @return the message with placeholders replaced
 */
private static <T> String decorate(String message, Comparable<T> a, T b) {
    // String.valueOf avoids a NullPointerException when a or b is null
    message = message.replace("{0}", String.valueOf(a));
    message = message.replace("{1}", String.valueOf(b));
    return message;
}
/**
 * Returns the path that should be used for the test. The path contains the
 * full path (appended to hostname+port) and must start with a slash.
 *
 * @return The URL path to the UI class to test
 * @throws IllegalArgumentException
 *             if the UI class for the test cannot be determined
 */
protected String getDeploymentPath() {
    Class<?> uiClass = getUIClass();
    if (uiClass != null) {
        return getDeploymentPath(uiClass);
    }
    // In practice getUIClass() throws instead of returning null, so this
    // is a defensive fallback
    throw new IllegalArgumentException("Unable to determine path for "
            + getClass().getCanonicalName());
}
/**
* Returns the UI class the current test is connected to (or in special
* cases UIProvider or LegacyApplication). Uses the enclosing class if the
* test class is a static inner class to a UI class.
*
* Test which are not enclosed by a UI class must implement this method and
* return the UI class they want to test.
*
* Note that this method will update the test name to the enclosing class to
* be compatible with TB2 screenshot naming
*
* @return the UI class the current test is connected to
*/
protected Class<?> getUIClass() {
try {
// Convention: SomeUITest uses the SomeUI UI class
String uiClassName = getClass().getName().replaceFirst("Test$", "");
Class<?> cls = Class.forName(uiClassName);
if (isSupportedRunnerClass(cls)) {
return cls;
}
} catch (Exception e) {
}
throw new RuntimeException(
"Could not determine UI class. Ensure the test is named UIClassTest and is in the same package as the UIClass");
}
/**
 * Tells whether the given class can be served by ApplicationServletRunner,
 * i.e. is a UI, a UIProvider or a (deprecated) LegacyApplication.
 *
 * @return true if the given class is supported by ApplicationServletRunner
 */
@SuppressWarnings("deprecation")
private boolean isSupportedRunnerClass(Class<?> cls) {
    return UI.class.isAssignableFrom(cls)
            || UIProvider.class.isAssignableFrom(cls)
            || LegacyApplication.class.isAssignableFrom(cls);
}
/**
* Returns whether to run the test in debug mode (with the debug console
* open) or not
*
* @return true to run with the debug window open, false by default
*/
protected final boolean isDebug() {
return debug;
}
/**
* Sets whether to run the test in debug mode (with the debug console open)
* or not.
*
* @param debug
* true to open debug window, false otherwise
*/
protected final void setDebug(boolean debug) {
this.debug = debug;
}
/**
* Returns whether to run the test with push enabled (using /run-push) or
* not. Note that push tests can and should typically be created using @Push
* on the UI instead of overriding this method
*
* @return true if /run-push is used, false otherwise
*/
protected final boolean isPush() {
return push;
}
/**
* Sets whether to run the test with push enabled (using /run-push) or not.
* Note that push tests can and should typically be created using @Push on
* the UI instead of overriding this method
*
* @param push
* true to use /run-push in the test, false otherwise
*/
protected final void setPush(boolean push) {
this.push = push;
}
/**
 * Returns the path for the given UI class when deployed on the test server.
 * The path contains the full path (appended to hostname+port) and must
 * start with a slash.
 *
 * This method takes into account {@link #isPush()} when the path is
 * generated (debug mode is applied as a URL parameter elsewhere, not here).
 *
 * @param uiClass
 *            the UI, UIProvider or LegacyApplication class to get the path
 *            for
 * @return The path to the given UI class
 * @throws IllegalArgumentException
 *             if the given class is not a UI, UIProvider or
 *             LegacyApplication
 */
protected String getDeploymentPath(Class<?> uiClass) {
    String runPath = "/run";
    if (isPush()) {
        runPath = "/run-push";
    }
    if (UI.class.isAssignableFrom(uiClass)
            || UIProvider.class.isAssignableFrom(uiClass)
            || LegacyApplication.class.isAssignableFrom(uiClass)) {
        return runPath + "/" + uiClass.getCanonicalName();
    } else {
        throw new IllegalArgumentException(
                "Unable to determine path for enclosing class "
                        + uiClass.getCanonicalName());
    }
}
/**
* Used to determine what URL to initially open for the test
*
* @return The base URL for the test. Does not include a trailing slash.
*/
protected String getBaseURL() {
return "http://" + getDeploymentHostname() + ":" + getDeploymentPort();
}
/**
 * Generates the application id based on the URL in a way compatible with
 * VaadinServletService.
 *
 * @param pathWithQueryParameters
 *            The path part of the URL, possibly still containing query
 *            parameters
 * @return The application ID string used in Vaadin locators
 */
private String getApplicationId(String pathWithQueryParameters) {
    // Strip everything starting from the first '?'
    String path = pathWithQueryParameters.replaceAll("\\?.*", "");
    // An empty path maps to the special ROOT id
    if (path.isEmpty()) {
        return "ROOT";
    }
    // Retain only ASCII letters and digits
    return path.replaceAll("[^a-zA-Z0-9]", "");
}
/**
 * Sleeps for the given number of ms but ensures that the browser connection
 * does not time out by periodically touching the driver.
 *
 * @param timeoutMillis
 *            Number of ms to wait
 * @throws RuntimeException
 *             wrapping an InterruptedException if the sleep is interrupted
 */
protected void sleep(int timeoutMillis) {
    while (timeoutMillis > 0) {
        int delay = Math.min(BROWSER_TIMEOUT_IN_MS, timeoutMillis);
        try {
            Thread.sleep(delay);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so code further up the stack can
            // still observe the interruption
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
        timeoutMillis -= delay;
        // Do something to keep the connection alive
        getDriver().getTitle();
    }
}
/**
* Called by the test runner whenever there is an exception in the test that
* will cause termination of the test
*
* @param t
* the throwable which caused the termination
*/
public void onUncaughtException(Throwable t) {
// Do nothing by default
}
/**
* Returns the mouse object for doing mouse commands
*
* @return Returns the mouse
*/
public Mouse getMouse() {
return ((HasInputDevices) getDriver()).getMouse();
}
/**
* Returns the keyboard object for controlling keyboard events
*
* @return Return the keyboard
*/
public Keyboard getKeyboard() {
return ((HasInputDevices) getDriver()).getKeyboard();
}
public void hitButton(String id) {
driver.findElement(By.id(id)).click();
}
protected void openDebugLogTab() {
waitUntil(new ExpectedCondition<Boolean>() {
@Override
public Boolean apply(WebDriver input) {
WebElement element = getDebugLogButton();
return element != null;
}
}, 15);
getDebugLogButton().click();
}
private WebElement getDebugLogButton() {
return findElement(By.xpath("//button[@title='Debug message log']"));
}
/**
 * Asserts that the debug window contains no log rows of the given level.
 *
 * @param level
 *            the log level that must not appear in the debug window
 */
protected void assertNoDebugMessage(Level level) {
    // Debug rows are rendered as class="v-debugwindow-row <level name>"
    List<WebElement> logElements = driver.findElements(By.xpath(String
            .format("//div[@class='v-debugwindow-row %s']/span[@class='v-debugwindow-message']",
                    level.getName())));
    if (!logElements.isEmpty()) {
        // StringBuilder avoids quadratic string concatenation in the loop
        StringBuilder logRows = new StringBuilder();
        for (WebElement e : logElements) {
            logRows.append('\n').append(e.getText());
        }
        Assert.fail("Found debug messages with level " + level.getName()
                + ": " + logRows);
    }
}
/**
* Should the "require window focus" be enabled for Internet Explorer.
* RequireWindowFocus makes tests more stable but seems to be broken with
* certain commands such as sendKeys. Therefore it is not enabled by default
* for all tests
*
* @return true, to use the "require window focus" feature, false otherwise
*/
protected boolean requireWindowFocusForIE() {
return false;
}
/**
* Should the "enable persistent hover" be enabled for Internet Explorer.
*
* Persistent hovering causes continuous firing of mouse over events at the
* last location the mouse cursor has been moved to. This is to avoid
* problems where the real mouse cursor is inside the browser window and
* Internet Explorer uses that location for some undefined operation
* (http://
* jimevansmusic.blogspot.fi/2012/06/whats-wrong-with-internet-explorer
* .html)
*
* @return true, to use the "persistent hover" feature, false otherwise
*/
protected boolean usePersistentHoverForIE() {
return true;
}
/**
* Should the "native events" be enabled for Internet Explorer.
* <p>
* Native events sometimes cause failure in clicking on buttons/checkboxes
* but are possibly needed for some operations.
*
* @return true, to use "native events", false to use generated Javascript
* events
*/
protected boolean useNativeEventsForIE() {
return true;
}
// FIXME: Remove this once TB4 getRemoteControlName works properly
private RemoteWebDriver getRemoteDriver() {
WebDriver d = getDriver();
if (d instanceof TestBenchDriverProxy) {
try {
Field f = TestBenchDriverProxy.class
.getDeclaredField("actualDriver");
f.setAccessible(true);
return (RemoteWebDriver) f.get(d);
} catch (Exception e) {
e.printStackTrace();
}
}
if (d instanceof RemoteWebDriver) {
return (RemoteWebDriver) d;
}
return null;
}
// FIXME: Remove this once TB4 getRemoteControlName works properly
protected String getRemoteControlName() {
try {
RemoteWebDriver d = getRemoteDriver();
if (d == null) {
return null;
}
HttpCommandExecutor ce = (HttpCommandExecutor) d
.getCommandExecutor();
String hostName = ce.getAddressOfRemoteServer().getHost();
int port = ce.getAddressOfRemoteServer().getPort();
HttpHost host = new HttpHost(hostName, port);
try (DefaultHttpClient client = new DefaultHttpClient()) {
URL sessionURL = new URL("http://" + hostName + ":" + port
+ "/grid/api/testsession?session=" + d.getSessionId());
BasicHttpEntityEnclosingRequest r = new BasicHttpEntityEnclosingRequest(
"POST", sessionURL.toExternalForm());
HttpResponse response = client.execute(host, r);
JsonObject object = extractObject(response);
URL myURL = new URL(object.getString("proxyId"));
if ((myURL.getHost() != null) && (myURL.getPort() != -1)) {
return myURL.getHost();
}
}
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
/**
 * Checks whether any row in the test UI's log contains the given string.
 *
 * @param string
 *            the substring to search for
 * @return true if some log row contains the string, false otherwise
 */
protected boolean logContainsText(String string) {
    for (String row : getLogs()) {
        if (row.contains(string)) {
            return true;
        }
    }
    return false;
}
protected List<String> getLogs() {
VerticalLayoutElement log = $(VerticalLayoutElement.class).id("Log");
List<LabelElement> logLabels = log.$(LabelElement.class).all();
List<String> logTexts = new ArrayList<>();
for (LabelElement label : logLabels) {
logTexts.add(label.getText());
}
return logTexts;
}
/**
 * Parses the entity body of the given HTTP response as a JSON object.
 *
 * @param resp
 *            the response whose entity body to parse
 * @return the parsed JSON object
 * @throws IOException
 *             if reading the response body fails
 */
private static JsonObject extractObject(HttpResponse resp)
        throws IOException {
    StringWriter writer = new StringWriter();
    // try-with-resources closes the entity stream even if copying fails
    try (InputStream contents = resp.getEntity().getContent()) {
        IOUtils.copy(contents, writer, "UTF8");
    }
    return JsonUtil.parse(writer.toString());
}
protected void click(CheckBoxElement checkbox) {
WebElement cb = checkbox.findElement(By.xpath("input"));
if (BrowserUtil.isChrome(getDesiredCapabilities())) {
testBenchElement(cb).click(0, 0);
} else if (BrowserUtil.isFirefox(getDesiredCapabilities())) {
// Firefox workaround
getCommandExecutor().executeScript("arguments[0].click()", cb);
} else {
cb.click();
}
}
protected void clickElement(WebElement element) {
if (BrowserUtil.isFirefox(getDesiredCapabilities())) {
// Workaround for Selenium/TB and Firefox 45 issue
((TestBenchElement) (element)).clickHiddenElement();
} else {
element.click();
}
}
protected void contextClickElement(WebElement element) {
if (BrowserUtil.isFirefox(getDesiredCapabilities())) {
// Workaround for Selenium/TB and Firefox 45 issue
getCommandExecutor().executeScript(
"var ev = document.createEvent('HTMLEvents'); ev.initEvent('contextmenu', true, false); arguments[0].dispatchEvent(ev);",
element);
} else {
new Actions(getDriver()).contextClick(element).perform();
}
}
protected boolean isLoadingIndicatorVisible() {
WebElement loadingIndicator = findElement(
By.className("v-loading-indicator"));
return loadingIndicator.isDisplayed();
}
protected void waitUntilLoadingIndicatorVisible() {
waitUntil(input -> isLoadingIndicatorVisible());
}
protected void waitUntilLoadingIndicatorNotVisible() {
waitUntil(input -> !isLoadingIndicatorVisible());
}
/**
* Selects a menu item. By default, this will click on the menu item.
*
* @param menuCaption
* caption of the menu item
*/
protected void selectMenu(String menuCaption) {
selectMenu(menuCaption, true);
}
/**
* Selects a menu item.
*
* @param menuCaption
* caption of the menu item
* @param click
* <code>true</code> if should click the menu item;
* <code>false</code> if not
*/
protected void selectMenu(String menuCaption, boolean click) {
WebElement menuElement = getMenuElement(menuCaption);
Dimension size = menuElement.getSize();
new Actions(getDriver())
.moveToElement(menuElement, size.width - 10, size.height / 2)
.perform();
if (click) {
new Actions(getDriver()).click().perform();
}
}
/**
 * Finds the menu item from the DOM based on menu item caption.
 *
 * NOTE(review): the caption is embedded verbatim inside an XPath string
 * literal, so captions containing a single quote (') will produce an
 * invalid locator.
 *
 * @param menuCaption
 *            caption of the menu item
 * @return the found menu item
 * @throws NoSuchElementException
 *             if menu item is not found
 */
protected WebElement getMenuElement(String menuCaption)
        throws NoSuchElementException {
    // Need the parent span to obtain the correct size
    return getDriver().findElement(
            By.xpath("//span[text() = '" + menuCaption + "']/.."));
}
/**
* Selects a submenu described by a path of menus from the first MenuBar in
* the UI.
*
* @param menuCaptions
* array of menu captions
*/
protected void selectMenuPath(String... menuCaptions) {
selectMenu(menuCaptions[0], true);
// Move to the menu item opened below the menu bar.
new Actions(getDriver())
.moveByOffset(0,
getMenuElement(menuCaptions[0]).getSize().getHeight())
.perform();
for (int i = 1; i < menuCaptions.length - 1; i++) {
selectMenu(menuCaptions[i]);
new Actions(getDriver()).moveByOffset(40, 0).build().perform();
}
selectMenu(menuCaptions[menuCaptions.length - 1], true);
}
/**
* Asserts that an element is present
*
* @param by
*            the locator for the element
*/
protected void assertElementPresent(By by) {
Assert.assertTrue("Element is not present", isElementPresent(by));
}
/**
* Asserts that an element is not present
*
* @param by
*            the locator for the element
*/
protected void assertElementNotPresent(By by) {
Assert.assertFalse("Element is present", isElementPresent(by));
}
/**
* Asserts that no error notifications are shown. Requires the use of
* "?debug" as exceptions are otherwise not shown as notifications.
*/
protected void assertNoErrorNotifications() {
Assert.assertFalse(
"Error notification with client side exception is shown",
isNotificationPresent("error"));
}
/**
* Asserts that no system notifications are shown.
*/
protected void assertNoSystemNotifications() {
Assert.assertFalse(
"Error notification with system error exception is shown",
isNotificationPresent("system"));
}
/**
* Asserts that a system notification is shown.
*/
protected void assertSystemNotification() {
Assert.assertTrue(
"Error notification with system error exception is not shown",
isNotificationPresent("system"));
}
private boolean isNotificationPresent(String type) {
if ("error".equals(type)) {
Assert.assertTrue(
"Debug window must be open to be able to see error notifications",
isDebugWindowOpen());
}
return isElementPresent(By.className("v-Notification-" + type));
}
private boolean isDebugWindowOpen() {
return isElementPresent(By.className("v-debugwindow"));
}
protected void assertNoHorizontalScrollbar(WebElement element,
String errorMessage) {
assertHasHorizontalScrollbar(element, errorMessage, false);
}
protected void assertHorizontalScrollbar(WebElement element,
String errorMessage) {
assertHasHorizontalScrollbar(element, errorMessage, true);
}
private void assertHasHorizontalScrollbar(WebElement element,
String errorMessage, boolean expected) {
// IE rounds clientWidth/clientHeight down and scrollHeight/scrollWidth
// up, so using clientWidth/clientHeight will fail if the element height
// is not an integer
int clientWidth = getClientWidth(element);
int scrollWidth = getScrollWidth(element);
boolean hasScrollbar = scrollWidth > clientWidth;
String message = "The element should";
if (!expected) {
message += " not";
}
message += " have a horizontal scrollbar (scrollWidth: " + scrollWidth
+ ", clientWidth: " + clientWidth + "): " + errorMessage;
Assert.assertEquals(message, expected, hasScrollbar);
}
protected void assertNoVerticalScrollbar(WebElement element,
String errorMessage) {
// IE rounds clientWidth/clientHeight down and scrollHeight/scrollWidth
// up, so using clientWidth/clientHeight will fail if the element height
// is not an integer
int clientHeight = getClientHeight(element);
int scrollHeight = getScrollHeight(element);
boolean hasScrollbar = scrollHeight > clientHeight;
Assert.assertFalse(
"The element should not have a vertical scrollbar (scrollHeight: "
+ scrollHeight + ", clientHeight: " + clientHeight
+ "): " + errorMessage,
hasScrollbar);
}
protected int getScrollHeight(WebElement element) {
return ((Number) executeScript("return arguments[0].scrollHeight;",
element)).intValue();
}
protected int getScrollWidth(WebElement element) {
return ((Number) executeScript("return arguments[0].scrollWidth;",
element)).intValue();
}
/**
* Returns client height rounded up instead of as double because of IE9
* issues: https://dev.vaadin.com/ticket/18469
*/
protected int getClientHeight(WebElement e) {
String script = "var cs = window.getComputedStyle(arguments[0]);"
+ "return Math.ceil(parseFloat(cs.height)+parseFloat(cs.paddingTop)+parseFloat(cs.paddingBottom));";
return ((Number) executeScript(script, e)).intValue();
}
/**
* Returns client width rounded up instead of as double because of IE9
* issues: https://dev.vaadin.com/ticket/18469
*/
protected int getClientWidth(WebElement e) {
String script = "var cs = window.getComputedStyle(arguments[0]);"
+ "var h = parseFloat(cs.width)+parseFloat(cs.paddingLeft)+parseFloat(cs.paddingRight);"
+ "return Math.ceil(h);";
return ((Number) executeScript(script, e)).intValue();
}
protected void assertElementsEquals(WebElement expectedElement,
WebElement actualElement) {
while (expectedElement instanceof WrapsElement) {
expectedElement = ((WrapsElement) expectedElement)
.getWrappedElement();
}
while (actualElement instanceof WrapsElement) {
actualElement = ((WrapsElement) actualElement).getWrappedElement();
}
Assert.assertEquals(expectedElement, actualElement);
}
protected WebElement getActiveElement() {
return (WebElement) executeScript("return document.activeElement;");
}
protected void waitForThemeToChange(final String theme) {
final WebElement rootDiv = findElement(
By.xpath("//div[contains(@class,'v-app')]"));
waitUntil(new ExpectedCondition<Boolean>() {
@Override
public Boolean apply(WebDriver input) {
String rootClass = rootDiv.getAttribute("class").trim();
return rootClass.contains(theme);
}
}, 30);
}
}
| |
//
// Copyright 2018 SenX S.A.S.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package io.warp10.script.functions;
import io.warp10.WarpDist;
import io.warp10.continuum.TimeSource;
import io.warp10.continuum.Tokens;
import io.warp10.continuum.egress.EgressFetchHandler;
import io.warp10.continuum.gts.GTSDecoder;
import io.warp10.continuum.gts.GTSHelper;
import io.warp10.continuum.gts.GeoTimeSerie;
import io.warp10.continuum.gts.GeoTimeSerie.TYPE;
import io.warp10.continuum.sensision.SensisionConstants;
import io.warp10.continuum.store.Constants;
import io.warp10.continuum.store.DirectoryClient;
import io.warp10.continuum.store.GTSDecoderIterator;
import io.warp10.continuum.store.MetadataIterator;
import io.warp10.continuum.store.StoreClient;
import io.warp10.continuum.store.thrift.data.DirectoryRequest;
import io.warp10.continuum.store.thrift.data.MetaSet;
import io.warp10.continuum.store.thrift.data.Metadata;
import io.warp10.crypto.CryptoUtils;
import io.warp10.crypto.KeyStore;
import io.warp10.crypto.OrderPreservingBase64;
import io.warp10.crypto.SipHashInline;
import io.warp10.quasar.token.thrift.data.ReadToken;
import io.warp10.script.NamedWarpScriptFunction;
import io.warp10.script.WarpScriptStackFunction;
import io.warp10.script.WarpScriptException;
import io.warp10.script.WarpScriptStack;
import io.warp10.sensision.Sensision;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import java.util.zip.GZIPInputStream;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.commons.lang3.JavaVersion;
import org.apache.commons.lang3.SystemUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TCompactProtocol;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import com.geoxp.GeoXPLib.GeoXPShape;
import com.google.common.base.Charsets;
/**
* Fetch GeoTimeSeries from continuum
* FIXME(hbs): we need to retrieve an OAuth token, where do we put it?
*
* The top of the stack must contain a list of the following parameters
*
* @param token The OAuth 2.0 token to use for data retrieval
* @param classSelector Class selector.
* @param labelsSelectors Map of label name to label selector.
* @param now Most recent timestamp to consider (in us since the Epoch)
* @param timespan Width of time period to consider (in us). Timestamps at or before now - timespan will be ignored.
*
* The last two parameters can be replaced by String parameters representing the end and start ISO8601 timestamps
*/
public class FETCH extends NamedWarpScriptFunction implements WarpScriptStackFunction {

  /** Parameter map key: class (name) selector. */
  public static final String PARAM_CLASS = "class";

  /**
   * Extra classes to retrieve after Directory have been called
   */
  public static final String PARAM_EXTRA = "extra";

  /** Parameter map key: map of label name to label selector. */
  public static final String PARAM_LABELS = "labels";

  /** Parameter map key: combined class+labels selector string. */
  public static final String PARAM_SELECTOR = "selector";

  /** Parameter map key: list of selector strings. */
  public static final String PARAM_SELECTORS = "selectors";

  /** Parameter map key: list of pre-parsed (class, labels) selector pairs. */
  public static final String PARAM_SELECTOR_PAIRS = "selpairs";

  /** Parameter map key: read token used for data retrieval. */
  public static final String PARAM_TOKEN = "token";

  /** Parameter map key: most recent timestamp to consider. */
  public static final String PARAM_END = "end";

  /** Parameter map key: oldest timestamp to consider. */
  public static final String PARAM_START = "start";

  /** Parameter map key: maximum number of datapoints to fetch per GTS (count based fetch). */
  public static final String PARAM_COUNT = "count";

  /** Parameter map key: width of the time period to consider (duration based fetch). */
  public static final String PARAM_TIMESPAN = "timespan";

  /** Parameter map key: forced value type for decoded GTS (LONG/DOUBLE/STRING/BOOLEAN). */
  public static final String PARAM_TYPE = "type";

  /** Parameter map key: boolean flag forwarded to the store fetch ('write timestamp' mode). */
  public static final String PARAM_WRITE_TIMESTAMP = "wtimestamp";

  /** Parameter map key: whether to expose a UUID attribute on fetched GTS. */
  public static final String PARAM_SHOWUUID = "showuuid";

  /** Parameter map key: attribute name used to ventilate values per type (incompatible with PARAM_TYPE). */
  public static final String PARAM_TYPEATTR = "typeattr";

  /** Parameter map key: serialized MetaSet restricting what can be fetched. */
  public static final String PARAM_METASET = "metaset";

  /** Parameter map key: explicit list of Geo Time Series whose Metadata should be fetched. */
  public static final String PARAM_GTS = "gts";

  /** Parameter map key: directory filter, activity after this time (converted to ms). */
  public static final String PARAM_ACTIVE_AFTER = "active.after";

  /** Parameter map key: directory filter, quietness after this time (converted to ms). */
  public static final String PARAM_QUIET_AFTER = "quiet.after";

  /** Name of the read token hook executed after the fetch completes. */
  public static final String POSTFETCH_HOOK = "postfetch";

  // ISO8601 parser used for String start/end timestamps on pre-Java 8 runtimes.
  private DateTimeFormatter fmt = ISODateTimeFormat.dateTimeParser();

  // Used to explode a 5 element parameter list pushed onto the stack.
  private WarpScriptStackFunction listTo = new LISTTO("");

  // Whether to fetch from the archive store rather than the live store.
  private final boolean fromArchive;

  // If non null, the type all fetched GTS are forcibly decoded as.
  private final TYPE forcedType;

  // SipHash keys for class and labels ids, null when no KeyStore is available.
  private final long[] SIPHASH_CLASS;
  private final long[] SIPHASH_LABELS;

  // AES key used to decrypt MetaSets, null when no KeyStore is available.
  private final byte[] AES_METASET;

  /**
   * Create a FETCH function instance.
   *
   * @param name Name of the WarpScript function.
   * @param fromArchive Whether data should be fetched from the archive.
   * @param type Forced type for fetched GTS, or null to keep the stored type.
   */
  public FETCH(String name, boolean fromArchive, TYPE type) {
    super(name);
    this.fromArchive = fromArchive;
    this.forcedType = type;

    KeyStore ks = null;

    try {
      ks = WarpDist.getKeyStore();
    } catch (Throwable t) {
      // Catch NoClassDefFound
    }

    if (null != ks) {
      this.SIPHASH_CLASS = SipHashInline.getKey(ks.getKey(KeyStore.SIPHASH_CLASS));
      this.SIPHASH_LABELS = SipHashInline.getKey(ks.getKey(KeyStore.SIPHASH_LABELS));
      this.AES_METASET = ks.getKey(KeyStore.AES_METASETS);
    } else {
      // No key material available (e.g. running outside a full Warp instance),
      // MetaSet decryption and extra class id computation will be unavailable.
      this.SIPHASH_CLASS = null;
      this.SIPHASH_LABELS = null;
      this.AES_METASET = null;
    }
  }

  /**
   * Execute the fetch: consume the parameters from the stack, resolve the
   * Metadata to fetch (from a MetaSet, an explicit GTS list or the Directory),
   * fetch the datapoints in batches and push the resulting list of GTS onto
   * the stack.
   *
   * @param stack The WarpScript stack to operate on.
   * @return The stack, with a List of GeoTimeSerie pushed on top.
   * @throws WarpScriptException on invalid parameters, exceeded limits or I/O errors.
   */
  @Override
  public Object apply(WarpScriptStack stack) throws WarpScriptException {
    //
    // Extract parameters from the stack
    //

    Object top = stack.peek();

    //
    // Handle the new (as of 20150805) parameter passing mechanism as a map
    //

    Map<String,Object> params = null;

    if (top instanceof Map) {
      stack.pop();
      params = paramsFromMap(stack, (Map<String,Object>) top);
    }

    if (top instanceof List) {
      if (5 != ((List) top).size()) {
        stack.drop();
        throw new WarpScriptException(getName() + " expects 5 parameters.");
      }

      //
      // Explode list and remove its size
      //

      listTo.apply(stack);
      stack.drop();
    }

    if (null == params) {
      // Legacy mechanism: individual parameters on the stack
      // (token, class selector, labels selector, start, stop).
      params = new HashMap<String, Object>();

      //
      // Extract time span
      //

      Object oStop = stack.pop();
      Object oStart = stack.pop();

      long endts;
      long timespan;

      if (oStart instanceof String && oStop instanceof String) {
        long start;
        long stop;

        if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_8)) {
          start = io.warp10.script.unary.TOTIMESTAMP.parseTimestamp(oStart.toString());
        } else {
          start = fmt.parseDateTime((String) oStart).getMillis() * Constants.TIME_UNITS_PER_MS;
        }

        if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_8)) {
          stop = io.warp10.script.unary.TOTIMESTAMP.parseTimestamp(oStop.toString());
        } else {
          stop = fmt.parseDateTime((String) oStop).getMillis() * Constants.TIME_UNITS_PER_MS;
        }

        // Normalize so 'endts' is the most recent bound whatever order was given.
        if (start < stop) {
          endts = stop;
          timespan = stop - start;
        } else {
          endts = start;
          timespan = start - stop;
        }
      } else if (oStart instanceof Long && oStop instanceof Long) {
        // Numeric form: first popped-under value is the end timestamp, the
        // second is the timespan (negative timespan means a datapoint count).
        endts = (long) oStart;
        timespan = (long) oStop;
      } else {
        throw new WarpScriptException("Invalid timespan specification.");
      }

      params.put(PARAM_END, endts);

      if (timespan < 0) {
        // Make sure negation will be positive
        if (Long.MIN_VALUE == timespan) {
          timespan++; // It's ok to modify a bit the count of points as it is impossible to return Long.MAX_VALUE points
        }
        params.put(PARAM_COUNT, -timespan);
      } else {
        params.put(PARAM_TIMESPAN, timespan);
      }

      //
      // Extract labels selector
      //

      Object oLabelsSelector = stack.pop();

      if (!(oLabelsSelector instanceof Map)) {
        throw new WarpScriptException("Label selectors must be a map.");
      }

      // Defensive copy: the token's label selectors will be merged in later.
      Map<String,String> labelSelectors = new HashMap<String,String>((Map<String,String>) oLabelsSelector);

      params.put(PARAM_LABELS, labelSelectors);

      //
      // Extract class selector
      //

      Object oClassSelector = stack.pop();

      if (!(oClassSelector instanceof String)) {
        throw new WarpScriptException("Class selector must be a string.");
      }

      String classSelector = (String) oClassSelector;

      params.put(PARAM_CLASS, classSelector);

      //
      // Extract token
      //

      Object oToken = stack.pop();

      if (!(oToken instanceof String)) {
        throw new WarpScriptException("Token must be a string.");
      }

      String token = (String) oToken;

      params.put(PARAM_TOKEN, token);
    }

    StoreClient gtsStore = stack.getStoreClient();

    DirectoryClient directoryClient = stack.getDirectoryClient();

    GeoTimeSerie base = null;
    GeoTimeSerie[] bases = null;
    String typelabel = (String) params.get(PARAM_TYPEATTR);

    if (null != typelabel) {
      // One in-progress GTS per value type (DOUBLE/LONG/BOOLEAN/STRING)
      // when ventilating datapoints per type.
      bases = new GeoTimeSerie[4];
    }

    ReadToken rtoken = Tokens.extractReadToken(params.get(PARAM_TOKEN).toString());

    List<String> clsSels = new ArrayList<String>();
    List<Map<String,String>> lblsSels = new ArrayList<Map<String,String>>();

    MetaSet metaset = null;

    List<Metadata> metadatas = null;
    Iterator<Metadata> iter = null;

    if (params.containsKey(PARAM_METASET)) {
      // MetaSet carries its own pre-resolved list of Metadata.
      metaset = (MetaSet) params.get(PARAM_METASET);
      iter = metaset.getMetadatas().iterator();
    } else if (params.containsKey(PARAM_GTS)) {
      List<Metadata> metas = (List<Metadata>) params.get(PARAM_GTS);

      // Force the token's producer/owner/application onto the provided
      // Metadata so the fetch cannot escape the token's scope. The token
      // must designate exactly one of each (selector of the form '=value').
      for (Metadata m: metas) {
        if (null == m.getLabels()) {
          m.setLabels(new HashMap<String,String>());
        }
        m.getLabels().remove(Constants.PRODUCER_LABEL);
        m.getLabels().remove(Constants.OWNER_LABEL);
        m.getLabels().remove(Constants.APPLICATION_LABEL);
        m.getLabels().putAll(Tokens.labelSelectorsFromReadToken(rtoken));

        // Strip the leading '=' of each exact selector, reject anything else.
        if (m.getLabels().containsKey(Constants.PRODUCER_LABEL) && '=' == m.getLabels().get(Constants.PRODUCER_LABEL).charAt(0)) {
          m.getLabels().put(Constants.PRODUCER_LABEL, m.getLabels().get(Constants.PRODUCER_LABEL).substring(1));
        } else if (m.getLabels().containsKey(Constants.PRODUCER_LABEL)) {
          throw new WarpScriptException(getName() + " provided token is incompatible with '" + PARAM_GTS + "' parameter, expecting a single producer.");
        }

        if (m.getLabels().containsKey(Constants.OWNER_LABEL) && '=' == m.getLabels().get(Constants.OWNER_LABEL).charAt(0)) {
          m.getLabels().put(Constants.OWNER_LABEL, m.getLabels().get(Constants.OWNER_LABEL).substring(1));
        } else {
          throw new WarpScriptException(getName() + " provided token is incompatible with '" + PARAM_GTS + "' parameter, expecting a single owner.");
        }

        if (m.getLabels().containsKey(Constants.APPLICATION_LABEL) && '=' == m.getLabels().get(Constants.APPLICATION_LABEL).charAt(0)) {
          m.getLabels().put(Constants.APPLICATION_LABEL, m.getLabels().get(Constants.APPLICATION_LABEL).substring(1));
        } else {
          throw new WarpScriptException(getName() + " provided token is incompatible with '" + PARAM_GTS + "' parameter, expecting a single application.");
        }
      }

      iter = ((List<Metadata>) params.get(PARAM_GTS)).iterator();
    } else {
      // Resolve the Metadata via the Directory from the selectors.
      if (params.containsKey(PARAM_SELECTOR_PAIRS)) {
        for (Pair<Object,Object> pair: (List<Pair<Object,Object>>) params.get(PARAM_SELECTOR_PAIRS)) {
          clsSels.add(pair.getLeft().toString());
          Map<String,String> labelSelectors = (Map<String,String>) pair.getRight();
          labelSelectors.putAll(Tokens.labelSelectorsFromReadToken(rtoken));
          lblsSels.add((Map<String,String>) labelSelectors);
        }
      } else {
        Map<String,String> labelSelectors = (Map<String,String>) params.get(PARAM_LABELS);
        labelSelectors.putAll(Tokens.labelSelectorsFromReadToken(rtoken));
        clsSels.add(params.get(PARAM_CLASS).toString());
        lblsSels.add(labelSelectors);
      }

      DirectoryRequest drequest = new DirectoryRequest();
      drequest.setClassSelectors(clsSels);
      drequest.setLabelsSelectors(lblsSels);

      if (params.containsKey(PARAM_ACTIVE_AFTER)) {
        drequest.setActiveAfter((long) params.get(PARAM_ACTIVE_AFTER));
      }

      if (params.containsKey(PARAM_QUIET_AFTER)) {
        drequest.setQuietAfter((long) params.get(PARAM_QUIET_AFTER));
      }

      try {
        metadatas = directoryClient.find(drequest);
        iter = metadatas.iterator();
      } catch (IOException ioe) {
        // Fall back to a streaming iterator when the directory cannot
        // materialize the full result list.
        try {
          iter = directoryClient.iterator(drequest);
        } catch (Exception e) {
          throw new WarpScriptException(e);
        }
      }
    }

    // From here on 'metadatas' is reused as a batch buffer of at most
    // FETCH_BATCHSIZE entries; 'iter' still walks the full result set.
    metadatas = new ArrayList<Metadata>();

    List<GeoTimeSerie> series = new ArrayList<GeoTimeSerie>();

    // Per-stack accounting of fetched datapoints and GTS against the limits.
    AtomicLong fetched = (AtomicLong) stack.getAttribute(WarpScriptStack.ATTRIBUTE_FETCH_COUNT);
    long fetchLimit = (long) stack.getAttribute(WarpScriptStack.ATTRIBUTE_FETCH_LIMIT);
    long gtsLimit = (long) stack.getAttribute(WarpScriptStack.ATTRIBUTE_GTS_LIMIT);

    AtomicLong gtscount = (AtomicLong) stack.getAttribute(WarpScriptStack.ATTRIBUTE_GTS_COUNT);

    // Variables to keep track of the last Metadata and fetched count
    Metadata lastMetadata = null;
    long lastCount = 0L;

    try {
      while(iter.hasNext()) {
        metadatas.add(iter.next());

        if (gtscount.incrementAndGet() > gtsLimit) {
          throw new WarpScriptException(getName() + " exceeded limit of " + gtsLimit + " Geo Time Series, current count is " + gtscount);
        }

        // Accumulate a full batch before issuing a store fetch.
        if (metadatas.size() < EgressFetchHandler.FETCH_BATCHSIZE && iter.hasNext()) {
          continue;
        }

        //
        // Generate extra Metadata if PARAM_EXTRA is set
        //

        if (params.containsKey(PARAM_EXTRA)) {

          Set<Metadata> withextra = new HashSet<Metadata>();

          withextra.addAll(metadatas);

          for (Metadata meta: metadatas) {
            for (String cls: (Set<String>) params.get(PARAM_EXTRA)) {
              // The following is safe, the constructor allocates new maps
              Metadata metadata = new Metadata(meta);
              metadata.setName(cls);
              metadata.setClassId(GTSHelper.classId(this.SIPHASH_CLASS, cls));
              metadata.setLabelsId(GTSHelper.labelsId(this.SIPHASH_LABELS, metadata.getLabels()));
              withextra.add(metadata);
            }
          }

          metadatas.clear();
          metadatas.addAll(withextra);
        }

        //
        // We assume that GTS will be fetched in a continuous way, i.e. without having a GTSDecoder from one
        // then one from another, then one from the first one.
        //

        // A negative 'timespan' means 'fetch at most -timespan datapoints'.
        long timespan = params.containsKey(PARAM_TIMESPAN) ? (long) params.get(PARAM_TIMESPAN) : - ((long) params.get(PARAM_COUNT));

        TYPE type = (TYPE) params.get(PARAM_TYPE);

        if (null != this.forcedType) {
          if (null != type) {
            throw new WarpScriptException(getName() + " type of fetched GTS cannot be changed.");
          }
          type = this.forcedType;
        }

        boolean writeTimestamp = Boolean.TRUE.equals(params.get(PARAM_WRITE_TIMESTAMP));

        boolean showUUID = Boolean.TRUE.equals(params.get(PARAM_SHOWUUID));

        TYPE lastType = TYPE.UNDEFINED;

        try (GTSDecoderIterator gtsiter = gtsStore.fetch(rtoken, metadatas, (long) params.get(PARAM_END), timespan, fromArchive, writeTimestamp)) {
          while(gtsiter.hasNext()) {
            GTSDecoder decoder = gtsiter.next();

            // 'identical' is true when this decoder continues the same GTS
            // as the previous decoder (same Metadata).
            boolean identical = true;

            if (null == lastMetadata || !lastMetadata.equals(decoder.getMetadata())) {
              lastMetadata = decoder.getMetadata();
              identical = false;
              lastCount = 0;
              lastType = TYPE.UNDEFINED;
            }

            GeoTimeSerie gts;

            //
            // If we should ventilate per type, do so now
            //

            if (null != typelabel) {
              Map<String,String> labels = new HashMap<String,String>(decoder.getMetadata().getLabels());
              labels.remove(Constants.PRODUCER_LABEL);
              labels.remove(Constants.OWNER_LABEL);

              java.util.UUID uuid = null;

              if (showUUID) {
                uuid = new java.util.UUID(decoder.getClassId(), decoder.getLabelsId());
              }

              long count = 0;

              Metadata decoderMeta = decoder.getMetadata();

              while(decoder.next()) {
                // If we've read enough data, exit
                if (identical && timespan < 0 && lastCount + count >= -timespan) {
                  break;
                }

                count++;

                long ts = decoder.getTimestamp();
                long location = decoder.getLocation();
                long elevation = decoder.getElevation();
                Object value = decoder.getValue();

                // Index into 'bases' and type name for the current value.
                int gtsidx = 0;
                String typename = "DOUBLE";

                if (value instanceof Long) {
                  gtsidx = 1;
                  typename = "LONG";
                } else if (value instanceof Boolean) {
                  gtsidx = 2;
                  typename = "BOOLEAN";
                } else if (value instanceof String) {
                  gtsidx = 3;
                  typename = "STRING";
                }

                base = bases[gtsidx];

                if (null == base || !base.getMetadata().getName().equals(decoderMeta.getName()) || !base.getMetadata().getLabels().equals(decoderMeta.getLabels())) {
                  // Start a new per-type GTS for this (name, labels) pair.
                  bases[gtsidx] = new GeoTimeSerie();
                  base = bases[gtsidx];
                  series.add(base);
                  base.setLabels(decoder.getLabels());
                  base.getMetadata().putToAttributes(typelabel, typename);
                  base.setName(decoder.getName());
                  if (null != uuid) {
                    base.getMetadata().putToAttributes(Constants.UUID_ATTRIBUTE, uuid.toString());
                  }
                }

                GTSHelper.setValue(base, ts, location, elevation, value, false);
              }

              if (fetched.addAndGet(count) > fetchLimit) {
                Map<String,String> sensisionLabels = new HashMap<String, String>();
                sensisionLabels.put(SensisionConstants.SENSISION_LABEL_CONSUMERID, Tokens.getUUID(rtoken.getBilledId()));
                Sensision.update(SensisionConstants.SENSISION_CLASS_WARPSCRIPT_FETCHCOUNT_EXCEEDED, sensisionLabels, 1);
                throw new WarpScriptException(getName() + " exceeded limit of " + fetchLimit + " datapoints, current count is " + fetched.get());
              }

              lastCount += count;

              // Per-type ventilation handled this decoder entirely.
              continue;
            }

            if (null != type) {
              gts = decoder.decode(type);
            } else {
              //
              // We need to decode using the same type as the previous decoder for the same GTS
              // Otherwise, if it happens that the current decoder starts with a value of another
              // type then the merge will not take into account this decoder as the decoded GTS
              // will be of a different type.
              //

              if (identical && lastType != TYPE.UNDEFINED) {
                gts = decoder.decode(lastType);
              } else {
                gts = decoder.decode();
              }
              lastType = gts.getType();
            }

            if (identical && timespan < 0 && lastCount + GTSHelper.nvalues(gts) > -timespan) {
              // We would add too many datapoints, we will shrink the GTS.
              // As it is sorted in reverse order of the ticks (since the datapoints are organized
              // this way in HBase), we just need to shrink the GTS.
              gts = GTSHelper.shrinkTo(gts, (int) Math.max(-timespan - lastCount, 0));
            }

            lastCount += GTSHelper.nvalues(gts);

            //
            // Remove producer/owner labels
            //

            //
            // Add a .uuid attribute if instructed to do so
            //

            if (showUUID) {
              java.util.UUID uuid = new java.util.UUID(gts.getClassId(), gts.getLabelsId());
              gts.getMetadata().putToAttributes(Constants.UUID_ATTRIBUTE, uuid.toString());
            }

            Map<String,String> labels = new HashMap<String, String>();
            labels.putAll(gts.getMetadata().getLabels());
            labels.remove(Constants.PRODUCER_LABEL);
            labels.remove(Constants.OWNER_LABEL);
            gts.setLabels(labels);

            //
            // If it's the first GTS, take it as is.
            //

            if (null == base) {
              base = gts;
            } else {
              //
              // If name and labels are identical to the previous GTS, merge them
              // Otherwise add 'base' to the stack and set it to 'gts'.
              //

              if (!base.getMetadata().getName().equals(gts.getMetadata().getName()) || !base.getMetadata().getLabels().equals(gts.getMetadata().getLabels())) {
                series.add(base);
                base = gts;
              } else {
                base = GTSHelper.merge(base, gts);
              }
            }

            if (fetched.addAndGet(gts.size()) > fetchLimit) {
              Map<String,String> sensisionLabels = new HashMap<String, String>();
              sensisionLabels.put(SensisionConstants.SENSISION_LABEL_CONSUMERID, Tokens.getUUID(rtoken.getBilledId()));
              Sensision.update(SensisionConstants.SENSISION_CLASS_WARPSCRIPT_FETCHCOUNT_EXCEEDED, sensisionLabels, 1);
              throw new WarpScriptException(getName() + " exceeded limit of " + fetchLimit + " datapoints, current count is " + fetched.get());
              //break;
            }
          }
        } catch (WarpScriptException ee) {
          throw ee;
        } catch (Throwable t) {
          throw new WarpScriptException(t);
        }

        //
        // If there is one current GTS, push it onto the stack (only if not ventilating per type)
        //

        if (null != base && null == typelabel) {
          series.add(base);
        }

        //
        // Reset state
        //

        base = null;
        metadatas.clear();
      }
    } catch (Throwable t) {
      throw t;
    } finally {
      // Always release a streaming directory iterator.
      if (iter instanceof MetadataIterator) {
        try {
          ((MetadataIterator) iter).close();
        } catch (Exception e) {
        }
      }
    }

    stack.push(series);

    //
    // Apply a possible postfetch hook
    //

    if (rtoken.getHooksSize() > 0 && rtoken.getHooks().containsKey(POSTFETCH_HOOK)) {
      stack.execMulti(rtoken.getHooks().get(POSTFETCH_HOOK));
    }

    return stack;
  }

  /**
   * Convert the map passed on the stack into a normalized parameter map,
   * decoding/decrypting a possible MetaSet and enforcing its restrictions.
   *
   * @param stack The enclosing WarpScript stack.
   * @param map Raw parameter map provided by the caller.
   * @return A map with validated and normalized parameters.
   * @throws WarpScriptException if parameters are missing or invalid.
   */
  private Map<String,Object> paramsFromMap(WarpScriptStack stack, Map<String,Object> map) throws WarpScriptException {
    Map<String,Object> params = new HashMap<String, Object>();

    //
    // Handle the case where a MetaSet was passed as this will
    // modify some other parameters
    //

    MetaSet metaset = null;

    if (map.containsKey(PARAM_METASET)) {
      if (null == AES_METASET) {
        throw new WarpScriptException(getName() + " MetaSet support not available.");
      }

      Object ms = map.get(PARAM_METASET);

      // When not already raw bytes, the MetaSet is OPB64 encoded, AES
      // wrapped and gzip compressed Thrift.
      if (!(ms instanceof byte[])) {
        // Decode
        byte[] decoded = OrderPreservingBase64.decode(ms.toString().getBytes(Charsets.US_ASCII));

        // Decrypt
        byte[] decrypted = CryptoUtils.unwrap(AES_METASET, decoded);

        // Decompress
        try {
          ByteArrayOutputStream out = new ByteArrayOutputStream(decrypted.length);
          InputStream in = new GZIPInputStream(new ByteArrayInputStream(decrypted));
          byte[] buf = new byte[1024];

          while(true) {
            int len = in.read(buf);
            if (len < 0) {
              break;
            }
            out.write(buf, 0, len);
          }

          in.close();
          out.close();

          ms = out.toByteArray();
        } catch (IOException e) {
          throw new WarpScriptException(getName() + " encountered an invalid MetaSet.", e);
        }
      }

      metaset = new MetaSet();

      TDeserializer deser = new TDeserializer(new TCompactProtocol.Factory());

      try {
        deser.deserialize(metaset, (byte[]) ms);
      } catch (TException te) {
        throw new WarpScriptException(getName() + " was unable to decode the provided MetaSet.", te);
      }

      //
      // Check if MetaSet has expired
      //

      if (metaset.getExpiry() < System.currentTimeMillis()) {
        throw new WarpScriptException(getName() + " MetaSet has expired.");
      }

      // Attempt to extract token, this will raise an exception if token has expired or was revoked
      ReadToken rtoken = Tokens.extractReadToken(metaset.getToken());

      params.put(PARAM_METASET, metaset);
      params.put(PARAM_TOKEN, metaset.getToken());
    }

    if (!params.containsKey(PARAM_TOKEN)) {
      if (!map.containsKey(PARAM_TOKEN)) {
        throw new WarpScriptException(getName() + " Missing '" + PARAM_TOKEN + "' parameter");
      }

      params.put(PARAM_TOKEN, map.get(PARAM_TOKEN));
    }

    if (map.containsKey(PARAM_GTS)) {
      Object o = map.get(PARAM_GTS);

      if (!(o instanceof List)) {
        throw new WarpScriptException(getName() + " invalid '" + PARAM_GTS + "' parameter, expected a list of Geo Time Series.");
      }

      // Keep only copies of the Metadata, the GTS values are irrelevant here.
      List<Metadata> metadatas = new ArrayList<Metadata>();

      for (Object elt: (List<Object>) o) {
        if (!(elt instanceof GeoTimeSerie)) {
          throw new WarpScriptException(getName() + " invalid '" + PARAM_GTS + "' parameter, expected a list of Geo Time Series.");
        }
        metadatas.add((new Metadata(((GeoTimeSerie) elt).getMetadata())));
      }

      params.put(PARAM_GTS, metadatas);
    }

    if (map.containsKey(PARAM_SELECTORS)) {
      Object sels = map.get(PARAM_SELECTORS);
      if (!(sels instanceof List)) {
        throw new WarpScriptException(getName() + " Invalid parameter '" + PARAM_SELECTORS + "'");
      }
      List<Pair<Object, Object>> selectors = new ArrayList<Pair<Object,Object>>();
      for (Object sel: (List) sels) {
        Object[] clslbls = PARSESELECTOR.parse(sel.toString());
        selectors.add(Pair.of(clslbls[0], clslbls[1]));
      }
      params.put(PARAM_SELECTOR_PAIRS, selectors);
    } else if (map.containsKey(PARAM_SELECTOR)) {
      Object[] clslbls = PARSESELECTOR.parse(map.get(PARAM_SELECTOR).toString());
      params.put(PARAM_CLASS, clslbls[0]);
      params.put(PARAM_LABELS, clslbls[1]);
    } else if (map.containsKey(PARAM_CLASS) && map.containsKey(PARAM_LABELS)) {
      params.put(PARAM_CLASS, map.get(PARAM_CLASS));
      params.put(PARAM_LABELS, map.get(PARAM_LABELS));
    } else if (!params.containsKey(PARAM_METASET) && !params.containsKey(PARAM_GTS)) {
      throw new WarpScriptException(getName() + " Missing '" + PARAM_METASET + "', '" + PARAM_GTS + "', '" + PARAM_SELECTOR + "', '" + PARAM_SELECTORS + "' or '" + PARAM_CLASS + "' and '" + PARAM_LABELS + "' parameters.");
    }

    if (!map.containsKey(PARAM_END)) {
      throw new WarpScriptException(getName() + " Missing '" + PARAM_END + "' parameter.");
    }

    if (map.get(PARAM_END) instanceof Long) {
      params.put(PARAM_END, map.get(PARAM_END));
    } else if (map.get(PARAM_END) instanceof String) {
      if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_8)) {
        params.put(PARAM_END, io.warp10.script.unary.TOTIMESTAMP.parseTimestamp(map.get(PARAM_END).toString()));
      } else {
        params.put(PARAM_END, fmt.parseDateTime(map.get(PARAM_END).toString()).getMillis() * Constants.TIME_UNITS_PER_MS);
      }
    } else {
      throw new WarpScriptException(getName() + " Invalid format for parameter '" + PARAM_END + "'.");
    }

    // One of timespan / count / start must be provided (in that priority order).
    if (map.containsKey(PARAM_TIMESPAN)) {
      params.put(PARAM_TIMESPAN, (long) map.get(PARAM_TIMESPAN));
    } else if (map.containsKey(PARAM_COUNT)) {
      params.put(PARAM_COUNT, (long) map.get(PARAM_COUNT));
    } else if (map.containsKey(PARAM_START)) {
      long end = (long) params.get(PARAM_END);

      long start;

      if (map.get(PARAM_START) instanceof Long) {
        start = (long) map.get(PARAM_START);
      } else {
        if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_8)) {
          start = io.warp10.script.unary.TOTIMESTAMP.parseTimestamp(map.get(PARAM_START).toString());
        } else {
          start = fmt.parseDateTime(map.get(PARAM_START).toString()).getMillis() * Constants.TIME_UNITS_PER_MS;
        }
      }

      long timespan;

      // Normalize so 'end' is the most recent bound.
      if (start < end) {
        timespan = end - start;
      } else {
        timespan = start - end;
        end = start;
      }

      params.put(PARAM_END, end);
      params.put(PARAM_TIMESPAN, timespan);
    } else {
      throw new WarpScriptException(getName() + " Missing parameter '" + PARAM_TIMESPAN + "' or '" + PARAM_COUNT + "' or '" + PARAM_START + "'");
    }

    //
    // Check end/timespan against MetaSet, adjust limits accordingly
    //

    if (null != metaset) {
      long end = (long) params.get(PARAM_END);
      long timespan = params.containsKey(PARAM_TIMESPAN) ? (long) params.get(PARAM_TIMESPAN) : -1;
      long count = params.containsKey(PARAM_COUNT) ? (long) params.get(PARAM_COUNT) : -1;

      if (metaset.isSetMaxduration()) {
        // Force 'end' to 'now'
        params.put(PARAM_END, TimeSource.getTime());

        // A positive maxduration allows (and caps) duration based requests,
        // a negative one allows (and caps) count based requests.
        if (-1 != count && metaset.getMaxduration() >= 0) {
          throw new WarpScriptException(getName() + " MetaSet forbids count based requests.");
        }

        if (-1 != timespan && metaset.getMaxduration() <= 0) {
          throw new WarpScriptException(getName() + " MetaSet forbids duration based requests.");
        }

        if (-1 != count && count > -metaset.getMaxduration()) {
          count = -metaset.getMaxduration();
          params.put(PARAM_COUNT, count);
        }

        if (-1 != timespan && timespan > metaset.getMaxduration()) {
          timespan = metaset.getMaxduration();
          params.put(PARAM_TIMESPAN, timespan);
        }
      }

      if (metaset.isSetNotbefore()) {
        // forbid count based requests
        if (-1 != count) {
          throw new WarpScriptException(getName() + " MetaSet forbids count based requests.");
        }

        if (end < metaset.getNotbefore()) {
          throw new WarpScriptException(getName() + " MetaSet forbids time ranges before " + metaset.getNotbefore());
        }

        // Adjust timespan so maxDuration is respected
        // NOTE(review): getMaxduration() is read here without checking
        // isSetMaxduration() first - confirm a MetaSet carrying 'notbefore'
        // always carries 'maxduration', otherwise timespan may be zeroed.
        if (timespan > metaset.getMaxduration()) {
          timespan = metaset.getMaxduration();
          params.put(PARAM_TIMESPAN, timespan);
        }
      }

      if (metaset.isSetNotafter() && end >= metaset.getNotafter()) {
        end = metaset.getNotafter();
        params.put(PARAM_END, end);
      }
    }

    if (map.containsKey(PARAM_TYPE)) {
      String type = map.get(PARAM_TYPE).toString();

      if (TYPE.LONG.name().equalsIgnoreCase(type)) {
        params.put(PARAM_TYPE, TYPE.LONG);
      } else if (TYPE.DOUBLE.name().equalsIgnoreCase(type)) {
        params.put(PARAM_TYPE, TYPE.DOUBLE);
      } else if (TYPE.STRING.name().equalsIgnoreCase(type)) {
        params.put(PARAM_TYPE, TYPE.STRING);
      } else if (TYPE.BOOLEAN.name().equalsIgnoreCase(type)) {
        params.put(PARAM_TYPE, TYPE.BOOLEAN);
      } else {
        throw new WarpScriptException(getName() + " Invalid value for parameter '" + PARAM_TYPE + "'.");
      }
    }

    if (map.containsKey(PARAM_TYPEATTR)) {
      if (map.containsKey(PARAM_TYPE)) {
        throw new WarpScriptException(getName() + " Incompatible parameters '" + PARAM_TYPE + "' and '" + PARAM_TYPEATTR + "'.");
      }

      params.put(PARAM_TYPEATTR, map.get(PARAM_TYPEATTR).toString());
    }

    if (map.containsKey(PARAM_EXTRA)) {
      // Check that we are not using a MetaSet
      if (params.containsKey(PARAM_METASET)) {
        throw new WarpScriptException(getName() + " Cannot specify '" + PARAM_EXTRA + "' when '" + PARAM_METASET + "' is used.");
      }

      // Check that we are not using an explicit GTS list either
      if (params.containsKey(PARAM_GTS)) {
        throw new WarpScriptException(getName() + " Cannot specify '" + PARAM_EXTRA + "' when '" + PARAM_GTS + "' is used.");
      }

      if (!(map.get(PARAM_EXTRA) instanceof List)) {
        throw new WarpScriptException(getName() + " Invalid type for parameter '" + PARAM_EXTRA + "'.");
      }

      Set<String> extra = new HashSet<String>();

      for (Object o: (List) map.get(PARAM_EXTRA)) {
        if (!(o instanceof String)) {
          throw new WarpScriptException(getName() + " Invalid type for parameter '" + PARAM_EXTRA + "'.");
        }
        extra.add(o.toString());
      }

      params.put(PARAM_EXTRA, extra);
    }

    if (map.containsKey(PARAM_WRITE_TIMESTAMP)) {
      params.put(PARAM_WRITE_TIMESTAMP, Boolean.TRUE.equals(map.get(PARAM_WRITE_TIMESTAMP)));
    }

    if (map.containsKey(PARAM_ACTIVE_AFTER)) {
      if (!(map.get(PARAM_ACTIVE_AFTER) instanceof Long)) {
        throw new WarpScriptException(getName() + " Invalid type for parameter '" + PARAM_ACTIVE_AFTER + "'.");
      }
      // The stack provides platform time units, the directory expects ms.
      params.put(PARAM_ACTIVE_AFTER, ((long) map.get(PARAM_ACTIVE_AFTER)) / Constants.TIME_UNITS_PER_MS);
    }

    if (map.containsKey(PARAM_QUIET_AFTER)) {
      if (!(map.get(PARAM_QUIET_AFTER) instanceof Long)) {
        throw new WarpScriptException(getName() + " Invalid type for parameter '" + PARAM_QUIET_AFTER + "'.");
      }
      params.put(PARAM_QUIET_AFTER, ((long) map.get(PARAM_QUIET_AFTER)) / Constants.TIME_UNITS_PER_MS);
    }

    return params;
  }
}
| |
package developer.htaihm.minfirebaseapp;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import developer.htaihm.minfirebaseapp.models.Dao;
import developer.htaihm.minfirebaseapp.models.Employee;
import developer.htaihm.minfirebaseapp.models.Trip;
import developer.htaihm.minfirebaseapp.models.TripDao;
/**
 * Demo activity exercising Firebase auth and Realtime Database reads/writes.
 * Each public method below is wired to a button via android:onClick.
 */
public class MainActivity extends AppCompatActivity {

    private static final String TAG = "MainActivity";

    // Shows query results; bound in onCreate.
    private TextView displayTextView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        displayTextView = (TextView) findViewById(R.id.display_text_view);
    }

    /** Sanity-check button handler: toasts a greeting with the current time. */
    public void sayHello(View view) {
        Toast.makeText(this, "Hello, it's now " + new Date(), Toast.LENGTH_LONG).show();
    }

    /** Starts the Firebase login flow and toasts the resulting user on success. */
    public void login(View view) {
        FirebaseUtil.login(this, new FirebaseUtil.OnAuthCompleteListener() {
            @Override
            public void onCompleteSuccessfully(FirebaseUser user) {
                UiUtil.logAndToast(
                        MainActivity.this,
                        TAG,
                        "Login successfully: " + FirebaseUtil.userString(user));
            }
        });
    }

    /** Inserts two hard-coded sample trips, reporting success or failure per trip. */
    public void create(View view) {
        List<String> watchers1 = new ArrayList<>();
        watchers1.add("111");
        watchers1.add("222");
        Trip trip1 = new Trip();
        trip1.setCreator("1234567");
        trip1.setWatchers(watchers1);

        Trip trip2 = new Trip();
        trip2.setCreator("111");
        List<String> watchers2 = new ArrayList<>();
        watchers2.add("1234567");
        watchers2.add("222");
        trip2.setWatchers(watchers2);

        TripDao tripDao = TripDao.getTripDao();
        tripDao.insert(trip1, new Dao.CompletionListener() {
            @Override
            public void OnCompletion(@Nullable DatabaseError databaseError) {
                // FIX: previously the error was ignored and "saved!" was
                // toasted unconditionally, even when the write failed.
                if (databaseError != null) {
                    ErrorHandlingUtil.logErrorAndToast(
                            MainActivity.this,
                            TAG,
                            "Error saving trip 1: " + databaseError,
                            databaseError.toException()
                    );
                    return;
                }
                UiUtil.logAndToast(
                        MainActivity.this,
                        TAG,
                        "Trip 1 saved!"
                );
            }
        });
        tripDao.insert(trip2, new Dao.CompletionListener() {
            @Override
            public void OnCompletion(@Nullable DatabaseError databaseError) {
                if (databaseError != null) {
                    ErrorHandlingUtil.logErrorAndToast(
                            MainActivity.this,
                            TAG,
                            "Error saving trip 2: " + databaseError,
                            databaseError.toException()
                    );
                    return;
                }
                UiUtil.logAndToast(
                        MainActivity.this,
                        TAG,
                        "Trip 2 saved!"
                );
            }
        });
    }

    /** Queries trips created by a fixed user id and renders them in the display view. */
    public void findTripsByCreator(View view) {
        TripDao tripDao = TripDao.getTripDao();
        tripDao.findTripsByCreator("1234567", new Dao.QueryDataFetchedListener<Trip>() {
            @Override
            public void onDataFetched(List<Trip> trips) {
                UiUtil.logAndToast(
                        MainActivity.this,
                        TAG,
                        "Successfully fetched " + trips.size() + " trips"
                );
                StringBuilder sb = new StringBuilder("findTripsByCreator result");
                for (Trip trip : trips) {
                    sb.append(trip.toString());
                    sb.append("\n\n");
                }
                displayTextView.setText(sb);
            }

            @Override
            public void onError(DatabaseError error) {
                ErrorHandlingUtil.logErrorAndToast(
                        MainActivity.this,
                        TAG,
                        "findTripsByCreator error: " + error,
                        error.toException()
                );
            }
        });
    }

    /** Queries trips where a fixed user id is creator or watcher and renders them. */
    public void findTripsParticipatedBy(View view) {
        TripDao tripDao = TripDao.getTripDao();
        tripDao.findTripsParticipatedBy("1234567", new Dao.QueryDataFetchedListener<Trip>() {
            @Override
            public void onDataFetched(List<Trip> trips) {
                UiUtil.logAndToast(
                        MainActivity.this,
                        TAG,
                        "Successfully fetched " + trips.size() + " trips"
                );
                StringBuilder sb = new StringBuilder("findTripsParticipatedBy result");
                for (Trip trip : trips) {
                    sb.append(trip.toString());
                    sb.append("\n\n");
                }
                displayTextView.setText(sb);
            }

            @Override
            public void onError(DatabaseError error) {
                ErrorHandlingUtil.logErrorAndToast(
                        MainActivity.this,
                        TAG,
                        "findTripsParticipatedBy error: " + error,
                        error.toException()
                );
            }
        });
    }

    /** Writes a hard-coded Employee under /employees/<id> and reports the outcome. */
    public void createEmployee(final View view) {
        final Employee employee = new Employee();
        employee.setName("peter");
        employee.setId("123");
        Map<String, Object> modelMap = employee.toMap();
        DatabaseReference dbRef = FirebaseDatabase.getInstance().getReference();
        dbRef
                .child("employees")
                .child(employee.getId())
                .updateChildren(modelMap, new DatabaseReference.CompletionListener() {
                    @Override
                    public void onComplete(DatabaseError databaseError, DatabaseReference databaseReference) {
                        if (databaseError != null) {
                            Snackbar.make(
                                    view.getRootView(),
                                    "Error creating employee: " + databaseError,
                                    Snackbar.LENGTH_INDEFINITE).show();
                            return;
                        }
                        Snackbar.make(
                                view.getRootView(),
                                "Employee created: " + employee,
                                Snackbar.LENGTH_LONG).show();
                    }
                });
    }

    /** Reads /employees/123 once and shows the result in a snackbar. */
    public void showEmployee(final View view) {
        DatabaseReference dbRef = FirebaseDatabase.getInstance().getReference();
        dbRef.child("employees").child("123").addListenerForSingleValueEvent(new ValueEventListener() {
            @Override
            public void onDataChange(DataSnapshot dataSnapshot) {
                // FIX: the original fell through after the "No employees found"
                // snackbar and dereferenced the snapshot anyway (NPE when null).
                // Also check exists(): Firebase delivers a non-null snapshot
                // even when no data is present at the location.
                if (dataSnapshot == null || !dataSnapshot.exists()) {
                    Snackbar.make(
                            view.getRootView(),
                            "No employees found",
                            Snackbar.LENGTH_LONG).show();
                    return;
                }
                Employee e = dataSnapshot.getValue(Employee.class);
                Snackbar.make(
                        view.getRootView(),
                        "Employee found: " + e,
                        Snackbar.LENGTH_INDEFINITE).show();
            }

            @Override
            public void onCancelled(DatabaseError databaseError) {
                Snackbar.make(
                        view.getRootView(),
                        "Error getting employee: " + databaseError,
                        Snackbar.LENGTH_INDEFINITE).show();
            }
        });
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.mapper.NumberFieldMapper.Defaults;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
 * A {@link FieldMapper} for scaled floats. Values are internally multiplied
 * by a scaling factor and rounded to the closest long, so the field is stored
 * with Lucene's long encoding while exposing decimal semantics. Queries scale
 * their input the same way, and field data divides by the scaling factor to
 * present double values again.
 */
public class ScaledFloatFieldMapper extends FieldMapper {
    public static final String CONTENT_TYPE = "scaled_float";
    // use the same default as numbers
    private static final Setting<Boolean> COERCE_SETTING = NumberFieldMapper.COERCE_SETTING;
    /**
     * Builder for {@link ScaledFloatFieldMapper}. The [scaling_factor]
     * parameter is mandatory: {@link #build} throws if it was never set.
     */
    public static class Builder extends FieldMapper.Builder<Builder, ScaledFloatFieldMapper> {
        // Tracks whether scalingFactor(...) was called; build() fails otherwise.
        private boolean scalingFactorSet = false;
        // null means "not explicitly configured"; resolved against index
        // settings / defaults at build time.
        private Boolean ignoreMalformed;
        private Boolean coerce;
        public Builder(String name) {
            super(name, new ScaledFloatFieldType(), new ScaledFloatFieldType());
            builder = this;
        }
        public Builder ignoreMalformed(boolean ignoreMalformed) {
            this.ignoreMalformed = ignoreMalformed;
            return builder;
        }
        // Resolve ignore_malformed: an explicit mapping value wins, then the
        // index-level setting, then the shared numeric default.
        protected Explicit<Boolean> ignoreMalformed(BuilderContext context) {
            if (ignoreMalformed != null) {
                return new Explicit<>(ignoreMalformed, true);
            }
            if (context.indexSettings() != null) {
                return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false);
            }
            return Defaults.IGNORE_MALFORMED;
        }
        public Builder coerce(boolean coerce) {
            this.coerce = coerce;
            return builder;
        }
        // Stores the factor directly on the field type being built.
        public Builder scalingFactor(double scalingFactor) {
            ((ScaledFloatFieldType) fieldType).setScalingFactor(scalingFactor);
            scalingFactorSet = true;
            return this;
        }
        // Resolve coerce the same way as ignore_malformed above.
        protected Explicit<Boolean> coerce(BuilderContext context) {
            if (coerce != null) {
                return new Explicit<>(coerce, true);
            }
            if (context.indexSettings() != null) {
                return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false);
            }
            return Defaults.COERCE;
        }
        @Override
        public ScaledFloatFieldMapper build(BuilderContext context) {
            if (scalingFactorSet == false) {
                throw new IllegalArgumentException("Field [" + name + "] misses required parameter [scaling_factor]");
            }
            setupFieldType(context);
            return new ScaledFloatFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context),
                    coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
        }
    }
    /**
     * Parses the mapping definition for a scaled_float field. Consumed
     * properties are removed from the node map via iterator.remove() so the
     * generic mapping machinery can flag leftover, unknown properties.
     */
    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder<?,?> parse(String name, Map<String, Object> node,
                                         ParserContext parserContext) throws MapperParsingException {
            Builder builder = new Builder(name);
            TypeParsers.parseField(builder, name, node, parserContext);
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String propName = entry.getKey();
                Object propNode = entry.getValue();
                if (propName.equals("null_value")) {
                    if (propNode == null) {
                        throw new MapperParsingException("Property [null_value] cannot be null.");
                    }
                    builder.nullValue(ScaledFloatFieldMapper.parse(propNode));
                    iterator.remove();
                } else if (propName.equals("ignore_malformed")) {
                    builder.ignoreMalformed(TypeParsers.nodeBooleanValue(name, "ignore_malformed", propNode, parserContext));
                    iterator.remove();
                } else if (propName.equals("coerce")) {
                    builder.coerce(TypeParsers.nodeBooleanValue(name, "coerce", propNode, parserContext));
                    iterator.remove();
                } else if (propName.equals("scaling_factor")) {
                    builder.scalingFactor(ScaledFloatFieldMapper.parse(propNode));
                    iterator.remove();
                }
            }
            return builder;
        }
    }
    /**
     * Field type for scaled floats. Term/terms/range queries multiply the
     * query value by the scaling factor, round, and delegate to the LONG
     * number type; display values divide by the factor again.
     */
    public static final class ScaledFloatFieldType extends MappedFieldType {
        private double scalingFactor;
        public ScaledFloatFieldType() {
            super();
            setTokenized(false);
            setHasDocValues(true);
            setOmitNorms(true);
        }
        ScaledFloatFieldType(ScaledFloatFieldType other) {
            super(other);
            this.scalingFactor = other.scalingFactor;
        }
        public double getScalingFactor() {
            return scalingFactor;
        }
        public void setScalingFactor(double scalingFactor) {
            // Field types are immutable once frozen; fail loudly on late mutation.
            checkIfFrozen();
            this.scalingFactor = scalingFactor;
        }
        @Override
        public MappedFieldType clone() {
            return new ScaledFloatFieldType(this);
        }
        @Override
        public String typeName() {
            return CONTENT_TYPE;
        }
        @Override
        public void checkCompatibility(MappedFieldType other, List<String> conflicts, boolean strict) {
            super.checkCompatibility(other, conflicts, strict);
            // Changing the scaling factor would silently reinterpret every
            // already-indexed long, so it is always a mapping conflict.
            if (scalingFactor != ((ScaledFloatFieldType) other).getScalingFactor()) {
                conflicts.add("mapper [" + name() + "] has different [scaling_factor] values");
            }
        }
        @Override
        public Query termQuery(Object value, QueryShardContext context) {
            failIfNotIndexed();
            double queryValue = parse(value);
            // Same encoding as at index time: scale then round to the closest long.
            long scaledValue = Math.round(queryValue * scalingFactor);
            Query query = NumberFieldMapper.NumberType.LONG.termQuery(name(), scaledValue);
            if (boost() != 1f) {
                query = new BoostQuery(query, boost());
            }
            return query;
        }
        @Override
        public Query termsQuery(List<?> values, QueryShardContext context) {
            failIfNotIndexed();
            List<Long> scaledValues = new ArrayList<>(values.size());
            for (Object value : values) {
                double queryValue = parse(value);
                long scaledValue = Math.round(queryValue * scalingFactor);
                scaledValues.add(scaledValue);
            }
            Query query = NumberFieldMapper.NumberType.LONG.termsQuery(name(), Collections.unmodifiableList(scaledValues));
            if (boost() != 1f) {
                query = new BoostQuery(query, boost());
            }
            return query;
        }
        @Override
        public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
            failIfNotIndexed();
            Long lo = null;
            if (lowerTerm != null) {
                double dValue = parse(lowerTerm);
                // Exclusive bound: move to the next representable double so the
                // original value itself falls outside the range.
                if (includeLower == false) {
                    dValue = Math.nextUp(dValue);
                }
                // ceil: the smallest encoded long whose decoded value is >= dValue.
                lo = Math.round(Math.ceil(dValue * scalingFactor));
            }
            Long hi = null;
            if (upperTerm != null) {
                double dValue = parse(upperTerm);
                if (includeUpper == false) {
                    dValue = Math.nextDown(dValue);
                }
                // floor: the largest encoded long whose decoded value is <= dValue.
                hi = Math.round(Math.floor(dValue * scalingFactor));
            }
            // Bounds are already made inclusive above, hence true/true here.
            Query query = NumberFieldMapper.NumberType.LONG.rangeQuery(name(), lo, hi, true, true, hasDocValues());
            if (boost() != 1f) {
                query = new BoostQuery(query, boost());
            }
            return query;
        }
        @Override
        public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
            failIfNoDocValues();
            return new IndexFieldData.Builder() {
                @Override
                public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
                                               CircuitBreakerService breakerService, MapperService mapperService) {
                    // Load the raw longs, then wrap them so consumers see doubles
                    // (the wrapper divides by the scaling factor on access).
                    final IndexNumericFieldData scaledValues = (IndexNumericFieldData) new DocValuesIndexFieldData.Builder()
                            .numericType(IndexNumericFieldData.NumericType.LONG)
                            .build(indexSettings, fieldType, cache, breakerService, mapperService);
                    return new ScaledFloatIndexFieldData(scaledValues, scalingFactor);
                }
            };
        }
        @Override
        public Object valueForDisplay(Object value) {
            if (value == null) {
                return null;
            }
            // Decode: stored long back to the user-facing decimal value.
            return ((Number) value).longValue() / scalingFactor;
        }
        @Override
        public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) {
            if (timeZone != null) {
                throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName()
                    + "] does not support custom time zones");
            }
            if (format == null) {
                return DocValueFormat.RAW;
            } else {
                return new DocValueFormat.Decimal(format);
            }
        }
        @Override
        public boolean equals(Object o) {
            if (super.equals(o) == false) {
                return false;
            }
            return scalingFactor == ((ScaledFloatFieldType) o).scalingFactor;
        }
        @Override
        public int hashCode() {
            return 31 * super.hashCode() + Double.hashCode(scalingFactor);
        }
    }
    private Explicit<Boolean> ignoreMalformed;
    private Explicit<Boolean> coerce;
    private ScaledFloatFieldMapper(
            String simpleName,
            MappedFieldType fieldType,
            MappedFieldType defaultFieldType,
            Explicit<Boolean> ignoreMalformed,
            Explicit<Boolean> coerce,
            Settings indexSettings,
            MultiFields multiFields,
            CopyTo copyTo) {
        super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
        // NaN/Infinity or non-positive factors cannot round-trip through the
        // long encoding, so reject them up front.
        final double scalingFactor = fieldType().getScalingFactor();
        if (Double.isFinite(scalingFactor) == false || scalingFactor <= 0) {
            throw new IllegalArgumentException("[scaling_factor] must be a positive number, got [" + scalingFactor + "]");
        }
        this.ignoreMalformed = ignoreMalformed;
        this.coerce = coerce;
    }
    @Override
    public ScaledFloatFieldType fieldType() {
        return (ScaledFloatFieldType) super.fieldType();
    }
    @Override
    protected String contentType() {
        return fieldType.typeName();
    }
    @Override
    protected ScaledFloatFieldMapper clone() {
        return (ScaledFloatFieldMapper) super.clone();
    }
    /**
     * Parses one document value: resolves external/null/empty-string input,
     * applies null_value, honors ignore_malformed for unparseable or
     * non-finite input, then encodes value * scaling_factor as a long and
     * emits the index/doc-values/stored fields via the LONG number type.
     */
    @Override
    protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
        XContentParser parser = context.parser();
        Object value;
        Number numericValue = null;
        if (context.externalValueSet()) {
            value = context.externalValue();
        } else if (parser.currentToken() == Token.VALUE_NULL) {
            value = null;
        } else if (coerce.value()
                && parser.currentToken() == Token.VALUE_STRING
                && parser.textLength() == 0) {
            // coerce treats an empty string like an explicit null
            value = null;
        } else {
            try {
                numericValue = parse(parser, coerce.value());
            } catch (IllegalArgumentException e) {
                if (ignoreMalformed.value()) {
                    // silently drop unparseable values when configured to do so
                    return;
                } else {
                    throw e;
                }
            }
            value = numericValue;
        }
        if (value == null) {
            value = fieldType().nullValue();
        }
        if (value == null) {
            // no value and no null_value substitute: nothing to index
            return;
        }
        if (numericValue == null) {
            numericValue = parse(value);
        }
        double doubleValue = numericValue.doubleValue();
        if (Double.isFinite(doubleValue) == false) {
            if (ignoreMalformed.value()) {
                return;
            } else {
                // since we encode to a long, we have no way to carry NaNs and infinities
                throw new IllegalArgumentException("[scaled_float] only supports finite values, but got [" + doubleValue + "]");
            }
        }
        long scaledValue = Math.round(doubleValue * fieldType().getScalingFactor());
        boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
        boolean docValued = fieldType().hasDocValues();
        boolean stored = fieldType().stored();
        fields.addAll(NumberFieldMapper.NumberType.LONG.createFields(fieldType().name(), scaledValue, indexed, docValued, stored));
    }
    @Override
    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
        super.doMerge(mergeWith, updateAllTypes);
        ScaledFloatFieldMapper other = (ScaledFloatFieldMapper) mergeWith;
        // Only explicitly-set values override the current configuration.
        if (other.ignoreMalformed.explicit()) {
            this.ignoreMalformed = other.ignoreMalformed;
        }
        if (other.coerce.explicit()) {
            this.coerce = other.coerce;
        }
    }
    @Override
    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
        super.doXContentBody(builder, includeDefaults, params);
        // scaling_factor is mandatory, so it is always serialized.
        builder.field("scaling_factor", fieldType().getScalingFactor());
        if (includeDefaults || ignoreMalformed.explicit()) {
            builder.field("ignore_malformed", ignoreMalformed.value());
        }
        if (includeDefaults || coerce.explicit()) {
            builder.field("coerce", coerce.value());
        }
        if (includeDefaults || fieldType().nullValue() != null) {
            builder.field("null_value", fieldType().nullValue());
        }
    }
    // Parses a mapping/document value (Number, BytesRef, or String) to a Double.
    static Double parse(Object value) {
        return objectToDouble(value);
    }
    private static Double parse(XContentParser parser, boolean coerce) throws IOException {
        return parser.doubleValue(coerce);
    }
    /**
     * Converts an Object to a double by checking it against known types first
     */
    private static double objectToDouble(Object value) {
        double doubleValue;
        if (value instanceof Number) {
            doubleValue = ((Number) value).doubleValue();
        } else if (value instanceof BytesRef) {
            doubleValue = Double.parseDouble(((BytesRef) value).utf8ToString());
        } else {
            doubleValue = Double.parseDouble(value.toString());
        }
        return doubleValue;
    }
    /**
     * Index-level field data view that wraps the raw long doc values and
     * exposes them as doubles by un-scaling on access.
     */
    private static class ScaledFloatIndexFieldData implements IndexNumericFieldData {
        private final IndexNumericFieldData scaledFieldData;
        private final double scalingFactor;
        ScaledFloatIndexFieldData(IndexNumericFieldData scaledFieldData, double scalingFactor) {
            this.scaledFieldData = scaledFieldData;
            this.scalingFactor = scalingFactor;
        }
        @Override
        public String getFieldName() {
            return scaledFieldData.getFieldName();
        }
        @Override
        public AtomicNumericFieldData load(LeafReaderContext context) {
            return new ScaledFloatLeafFieldData(scaledFieldData.load(context), scalingFactor);
        }
        @Override
        public AtomicNumericFieldData loadDirect(LeafReaderContext context) throws Exception {
            return new ScaledFloatLeafFieldData(scaledFieldData.loadDirect(context), scalingFactor);
        }
        @Override
        public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) {
            // Sort on the decoded double values, not the raw longs.
            final XFieldComparatorSource source = new DoubleValuesComparatorSource(this, missingValue, sortMode, nested);
            return new SortField(getFieldName(), source, reverse);
        }
        @Override
        public void clear() {
            scaledFieldData.clear();
        }
        @Override
        public Index index() {
            return scaledFieldData.index();
        }
        @Override
        public NumericType getNumericType() {
            // ScaledFloatLeafFieldData#getDoubleValues() transforms the raw
            // long values into "scaled" floats, so consumers see doubles.
            return NumericType.DOUBLE;
        }
    }
    /**
     * Leaf-level field data: decodes each stored long by multiplying with the
     * inverse of the scaling factor (1 / scaling_factor).
     */
    private static class ScaledFloatLeafFieldData implements AtomicNumericFieldData {
        private final AtomicNumericFieldData scaledFieldData;
        // Precomputed 1/scalingFactor so decoding is a multiplication per value.
        private final double scalingFactorInverse;
        ScaledFloatLeafFieldData(AtomicNumericFieldData scaledFieldData, double scalingFactor) {
            this.scaledFieldData = scaledFieldData;
            this.scalingFactorInverse = 1d / scalingFactor;
        }
        @Override
        public ScriptDocValues.Doubles getScriptValues() {
            return new ScriptDocValues.Doubles(getDoubleValues());
        }
        @Override
        public SortedBinaryDocValues getBytesValues() {
            return FieldData.toString(getDoubleValues());
        }
        @Override
        public long ramBytesUsed() {
            return scaledFieldData.ramBytesUsed();
        }
        @Override
        public void close() {
            scaledFieldData.close();
        }
        @Override
        public SortedNumericDocValues getLongValues() {
            // Note: decodes to double first, then truncates back to long.
            return FieldData.castToLong(getDoubleValues());
        }
        @Override
        public SortedNumericDoubleValues getDoubleValues() {
            final SortedNumericDocValues values = scaledFieldData.getLongValues();
            // Fast path for single-valued fields: unwrap and avoid the
            // multi-valued iteration machinery.
            final NumericDocValues singleValues = DocValues.unwrapSingleton(values);
            if (singleValues != null) {
                return FieldData.singleton(new NumericDoubleValues() {
                    @Override
                    public boolean advanceExact(int doc) throws IOException {
                        return singleValues.advanceExact(doc);
                    }
                    @Override
                    public double doubleValue() throws IOException {
                        return singleValues.longValue() * scalingFactorInverse;
                    }
                });
            } else {
                return new SortedNumericDoubleValues() {
                    @Override
                    public boolean advanceExact(int target) throws IOException {
                        return values.advanceExact(target);
                    }
                    @Override
                    public double nextValue() throws IOException {
                        return values.nextValue() * scalingFactorInverse;
                    }
                    @Override
                    public int docValueCount() {
                        return values.docValueCount();
                    }
                };
            }
        }
    }
}
| |
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hp.myidea.obdproxy.base;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.UUID;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothSocket;
import android.content.Context;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
/**
 * This class does all the work for setting up and managing Bluetooth
 * connections with other devices. It has a thread for connecting with a
 * device ({@link ConnectThread}) and a thread that owns the connected
 * socket's streams and performs data transmissions ({@link ListenerThread}).
 * State changes and received data are reported back to the UI through the
 * {@link Handler} supplied at construction time.
 */
public class BluetoothConnector {
    // Debugging
    private static final String TAG = BluetoothConnector.class.getSimpleName();
    private static final boolean D = true;
    private Context appContext;
    // Unique UUID for this application (standard SerialPortServiceClass UUID
    // used for RFCOMM/SPP connections).
    private static final UUID MY_UUID = UUID.fromString("00001101-0000-1000-8000-00805F9B34FB");
    // Member fields
    private final BluetoothAdapter mAdapter;
    private final Handler mHandler;
    private ConnectThread mConnectThread;
    private ListenerThread mListenerThread;
    private int mState;
    // Message types sent to the UI handler
    public static final int MESSAGE_STATE_CHANGE = 1;
    public static final int MESSAGE_READ = 2;
    public static final int MESSAGE_WRITE = 3;
    public static final int MESSAGE_DEVICE_NAME = 4;
    public static final int MESSAGE_TOAST = 5;
    // Key names for the Bundles attached to handler messages.
    // NOTE: "DEVICE_ADRESS" is misspelled but kept as-is — it is a public
    // constant that external callers may reference by name.
    public static final String DEVICE_NAME = "device_name";
    public static final String DEVICE_ADRESS = "device_address";
    public static final String TOAST = "toast";
    // Constants that indicate the current connection state
    public static final int STATE_NONE = 0;       // we're doing nothing
    public static final int STATE_LISTEN = 1;     // now listening for incoming connections
    public static final int STATE_CONNECTING = 2; // now initiating an outgoing connection
    public static final int STATE_CONNECTED = 3;  // now connected to a remote device
    public static final int STATE_FAILED = 4;     // failed to connect
    public static final int STATE_LOST = 5;       // an existing connection was lost
    /**
     * Constructor. Prepares a new BluetoothReceiverActivity session.
     * @param context The UI Activity Context
     * @param handler A Handler to send messages back to the UI Activity
     */
    public BluetoothConnector(Context context, Handler handler) {
        appContext = context;
        mHandler = handler;
        mAdapter = BluetoothAdapter.getDefaultAdapter();
        mState = STATE_NONE;
    }
    /**
     * Set the current state of the chat connection and notify the UI.
     * @param state An integer defining the current connection state
     */
    private synchronized void setState(int state) {
        if (D) Log.d(TAG, "setState() " + mState + " -> " + state);
        mState = state;
        // Give the new state to the Handler so the UI Activity can update
        mHandler.obtainMessage(BluetoothConnector.MESSAGE_STATE_CHANGE, state, -1).sendToTarget();
    }
    /**
     * Return the current connection state.
     */
    public synchronized int getState() {
        return mState;
    }
    /**
     * Start the chat service: cancel any in-flight connection attempt or
     * running connection, then switch to listening mode. Called by the
     * Activity onResume().
     */
    public synchronized void start() {
        if (D) Log.d(TAG, "start");
        // Cancel any thread attempting to make a connection
        if (mConnectThread != null) {
            mConnectThread.cancel();
            mConnectThread = null;
        }
        // Cancel any thread currently running a connection
        if (mListenerThread != null) {
            mListenerThread.cancel();
            mListenerThread = null;
        }
        setState(STATE_LISTEN);
    }
    /**
     * Start the ConnectThread to initiate a connection to a remote device.
     * @param device The BluetoothDevice to connect
     */
    public synchronized void connect(BluetoothDevice device) {
        if (D) Log.d(TAG, "connect to: " + device);
        // Cancel any thread attempting to make a connection
        if (mState == STATE_CONNECTING) {
            if (mConnectThread != null) {
                mConnectThread.cancel();
                mConnectThread = null;
            }
        }
        // Cancel any thread currently running a connection
        if (mListenerThread != null) {
            mListenerThread.cancel();
            mListenerThread = null;
        }
        // Start the thread to connect with the given device
        mConnectThread = new ConnectThread(device);
        mConnectThread.start();
        setState(STATE_CONNECTING);
    }
    /**
     * Start the ListenerThread to begin managing a Bluetooth connection.
     * @param socket The BluetoothSocket on which the connection was made
     * @param device The BluetoothDevice that has been connected
     */
    public synchronized void connected(BluetoothSocket socket, BluetoothDevice device) {
        if (D) Log.d(TAG, "connected");
        // Cancel the thread that completed the connection
        if (mConnectThread != null) {
            mConnectThread.cancel();
            mConnectThread = null;
        }
        // Cancel any thread currently running a connection
        if (mListenerThread != null) {
            mListenerThread.cancel();
            mListenerThread = null;
        }
        // Start the thread that owns the connected socket's streams
        mListenerThread = new ListenerThread(socket);
        mListenerThread.start();
        // Send the name of the connected device back to the UI Activity
        Message msg = mHandler.obtainMessage(BluetoothConnector.MESSAGE_DEVICE_NAME);
        Bundle bundle = new Bundle();
        bundle.putString(BluetoothConnector.DEVICE_NAME, device.getName());
        bundle.putString(BluetoothConnector.DEVICE_ADRESS, device.getAddress());
        msg.setData(bundle);
        mHandler.sendMessage(msg);
        setState(STATE_CONNECTED);
    }
    /**
     * Stop all threads.
     */
    public synchronized void stop() {
        if (D) Log.d(TAG, "stop");
        if (mConnectThread != null) {
            mConnectThread.cancel();
            mConnectThread = null;
        }
        if (mListenerThread != null) {
            mListenerThread.cancel();
            mListenerThread = null;
        }
        setState(STATE_NONE);
    }
    /**
     * Write to the ListenerThread in an unsynchronized manner.
     * @param out The bytes to write
     * @see ListenerThread#write(byte[])
     */
    public void write(byte[] out) {
        // Create temporary object
        ListenerThread r;
        // Synchronize a copy of the ListenerThread
        synchronized (this) {
            if (mState != STATE_CONNECTED) return;
            r = mListenerThread;
        }
        // Perform the write unsynchronized
        r.write(out);
    }
    /**
     * Read from the ListenerThread in an unsynchronized manner.
     *
     * @return the bytes read, or {@code null} when not connected
     * @see ListenerThread#read()
     */
    public byte[] read() {
        // Create temporary object
        ListenerThread r;
        // Synchronize a copy of the ListenerThread
        synchronized (this) {
            if (mState != STATE_CONNECTED) return null;
            r = mListenerThread;
        }
        // Perform the read unsynchronized
        return r.read();
    }
    /**
     * Indicate that the connection attempt failed and notify the UI Activity.
     */
    private void sayConnectionFailed(BluetoothDevice device) {
        setState(STATE_FAILED);
        // Send a failure message back to the Activity
        Message msg = mHandler.obtainMessage(BluetoothConnector.MESSAGE_TOAST);
        Bundle bundle = new Bundle();
        bundle.putString(BluetoothConnector.TOAST, "Unable to connect device: " + device.getName());
        msg.setData(bundle);
        mHandler.sendMessage(msg);
    }
    /**
     * Indicate that the connection was lost and notify the UI Activity.
     */
    private void connectionLost() {
        setState(STATE_LOST);
        // Send a failure message back to the Activity
        Message msg = mHandler.obtainMessage(BluetoothConnector.MESSAGE_TOAST);
        Bundle bundle = new Bundle();
        bundle.putString(BluetoothConnector.TOAST, "Device connection was lost");
        msg.setData(bundle);
        mHandler.sendMessage(msg);
    }
    /**
     * This thread runs while attempting to make an outgoing connection
     * with a device. It runs straight through; the connection either
     * succeeds (up to 5 attempts, with a reflection-based fallback) or fails.
     */
    private class ConnectThread extends Thread {
        private BluetoothSocket mmSocket;
        private BluetoothDevice mmDevice;
        public ConnectThread(BluetoothDevice device) {
            mmDevice = device;
            BluetoothSocket tmp = null;
            // Get a BluetoothSocket for a connection with the
            // given BluetoothDevice
            try {
                tmp = device.createRfcommSocketToServiceRecord(MY_UUID);
            } catch (IOException e) {
                Log.e(TAG, "create() failed", e);
            }
            mmSocket = tmp;
        }
        public void run() {
            Log.i(TAG, "BEGIN mConnectThread");
            setName("ConnectThread");
            // Always cancel discovery because it will slow down a connection
            mAdapter.cancelDiscovery();
            // Make a connection to the BluetoothSocket, retrying a few times.
            int retries = 5;
            boolean succeed = false;
            while (!succeed && retries-- > 0) {
                try {
                    // This is a blocking call and will only return on a
                    // successful connection or an exception
                    mmSocket.connect();
                    succeed = true;
                } catch (IOException e) {
                    Log.e(TAG, "Connection failed: ", e);
                    // Workaround for "read failed, socket might closed" on some
                    // Android stacks: retry via the hidden createRfcommSocket(int)
                    // API on channel 1.
                    // http://stackoverflow.com/questions/18657427/ioexception-read-failed-socket-might-closed-bluetooth-on-android-4-3/18786701details
                    try {
                        Log.e(TAG,"trying fallback...");
                        mmSocket = (BluetoothSocket) mmDevice.getClass().getMethod("createRfcommSocket", new Class[] {int.class}).invoke(mmDevice, 1);
                        mmSocket.connect();
                        Log.e(TAG,"Connected");
                        succeed = true;
                    } catch (Exception e2) {
                        Log.e(TAG, "Couldn't establish Bluetooth connection!", e2);
                    }
                    // BUGFIX: only back off and retry when the fallback also
                    // failed. Previously a *successful* fallback still logged a
                    // retry and slept for one second before proceeding.
                    if (!succeed) {
                        Log.d(TAG, "Will retry " + retries + " times.");
                        try {
                            // BUGFIX: was ConnectThread.sleep(1000) — calling the
                            // static Thread.sleep through a subclass reference.
                            Thread.sleep(1000);
                        } catch (InterruptedException e1) {
                            // Restore the interrupt flag so callers can observe it.
                            Thread.currentThread().interrupt();
                        }
                    }
                }
            }
            if (!succeed) {
                sayConnectionFailed(mmDevice);
                try {
                    mmSocket.close();
                } catch (IOException e2) {
                    Log.e(TAG, "unable to close() socket during connection failure", e2);
                }
                // Start the service over to restart listening mode
                BluetoothConnector.this.start();
                return;
            }
            // Reset the ConnectThread because we're done
            synchronized (BluetoothConnector.this) {
                mConnectThread = null;
            }
            // Start the connected thread
            connected(mmSocket, mmDevice);
        }
        public void cancel() {
            try {
                mmSocket.close();
            } catch (IOException e) {
                Log.e(TAG, "close() of connect socket failed", e);
            }
        }
    }
    /**
     * This thread owns the socket streams during a connection with a remote
     * device and handles all incoming and outgoing transmissions. All reads
     * are performed synchronously through {@link #read()} by callers.
     */
    private class ListenerThread extends Thread {
        private final BluetoothSocket mmSocket;
        private final InputStream mmInStream;
        private final OutputStream mmOutStream;
        public ListenerThread(BluetoothSocket socket) {
            Log.d(TAG, "create ListenerThread");
            mmSocket = socket;
            InputStream tmpIn = null;
            OutputStream tmpOut = null;
            // Get the BluetoothSocket input and output streams
            try {
                tmpIn = socket.getInputStream();
                tmpOut = socket.getOutputStream();
            } catch (IOException e) {
                Log.e(TAG, "temp sockets not created", e);
            }
            mmInStream = tmpIn;
            mmOutStream = tmpOut;
        }
        public void run() {
            // BUGFIX: the previous implementation spun in an empty
            // "while (true)" loop (its body was entirely commented out),
            // pinning a CPU core for the lifetime of the connection. Reads are
            // performed synchronously via read(), so this thread has nothing
            // to do asynchronously and can exit immediately after starting.
            Log.i(TAG, "BEGIN mListenerThread");
        }
        /**
         * Read from the connected InputStream until the '>' prompt character
         * arrives, skipping spaces.
         *
         * @return the bytes read (possibly the literal "NODATA" marker)
         */
        public byte[] read() {
            StringBuilder res = new StringBuilder();
            int c = 0;
            try {
                // BUGFIX: check InputStream.read()'s int result for the EOF
                // sentinel (-1) BEFORE narrowing to char. The previous code
                // cast to byte first, so EOF was never detected and the loop
                // spun forever once the stream was closed.
                while ((c = mmInStream.read()) != -1 && (char) c != '>') {
                    if ((char) c != ' ') {
                        res.append((char) c);
                    }
                }
                if (c == -1) {
                    // End of stream: the remote side closed the connection.
                    connectionLost();
                }
            } catch (IOException e) {
                Log.e(TAG, "Exception during read", e);
                connectionLost();
            }
            String rawData = res.toString().trim();
            // NOTE(review): contains("DATA") also matches responses such as
            // "NO DATA" — presumably intentional for OBD adapter replies;
            // confirm before changing.
            if (rawData.contains("SEARCHING") || rawData.contains("DATA")) {
                rawData = "NODATA";
            }
            return rawData.getBytes();
        }
        /**
         * Write to the connected OutStream.
         * @param buffer The bytes to write
         */
        public void write(byte[] buffer) {
            try {
                mmOutStream.write(buffer);
                // Share the sent message back to the UI Activity
                mHandler.obtainMessage(BluetoothConnector.MESSAGE_WRITE, -1, -1, buffer).sendToTarget();
            } catch (IOException e) {
                Log.e(TAG, "Exception during write", e);
            }
        }
        public void cancel() {
            try {
                mmSocket.close();
            } catch (IOException e) {
                Log.e(TAG, "close() of connect socket failed", e);
            }
        }
    }
}
| |
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2014 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.zest;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.script.ScriptException;
import org.apache.commons.httpclient.URI;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.parosproxy.paros.network.HttpHeader;
import org.parosproxy.paros.network.HttpMessage;
import org.zaproxy.addon.network.ExtensionNetwork;
import org.zaproxy.addon.network.server.HttpMessageHandler;
import org.zaproxy.addon.network.server.HttpMessageHandlerContext;
import org.zaproxy.addon.network.server.Server;
import org.zaproxy.zap.authentication.AuthenticationHelper;
import org.zaproxy.zap.authentication.GenericAuthenticationCredentials;
import org.zaproxy.zap.authentication.ScriptBasedAuthenticationMethodType.AuthenticationScript;
import org.zaproxy.zest.core.v1.ZestClient;
import org.zaproxy.zest.core.v1.ZestRequest;
import org.zaproxy.zest.core.v1.ZestResponse;
import org.zaproxy.zest.core.v1.ZestStatement;
import org.zaproxy.zest.core.v1.ZestVariables;
import org.zaproxy.zest.impl.ZestBasicRunner;
/**
 * Runs a Zest script as a ZAP authentication script.
 *
 * <p>The script's {@code Username}/{@code Password} parameters are filled from the user's
 * credentials; every other script variable is exposed as a required or optional parameter
 * depending on whether it declares a default value. If the script contains enabled client
 * (browser) statements, a temporary local HTTP proxy is started for the duration of the run
 * so that browser traffic is captured into the runner's request/response variables.
 */
public class ZestAuthenticationRunner extends ZestZapRunner implements AuthenticationScript {

    private static final Logger LOGGER = LogManager.getLogger(ZestAuthenticationRunner.class);

    // Loopback address the temporary proxy (for client statements) is bound to.
    private static final String PROXY_ADDRESS = "127.0.0.1";

    // Names of the script parameters that carry the user's credentials.
    private static final String USERNAME = "Username";
    private static final String PASSWORD = "Password";

    private ZestScriptWrapper script = null;

    // Helper of the authentication currently in progress; set on each authenticate(...)
    // call and used by send(...) to issue requests as the requesting user.
    private AuthenticationHelper helper;

    private final ExtensionNetwork extensionNetwork;

    public ZestAuthenticationRunner(
            ExtensionZest extension, ExtensionNetwork extensionNetwork, ZestScriptWrapper script) {
        super(extension, script);
        this.script = script;
        this.extensionNetwork = extensionNetwork;
    }

    /**
     * Gets the names of the non-credential script variables that have no default value,
     * i.e. the parameters the user must provide.
     */
    @Override
    public String[] getRequiredParamsNames() {
        List<String> requiredParameters = new ArrayList<>();
        for (String[] vars : script.getZestScript().getParameters().getVariables()) {
            String variableName = vars[0];
            // vars[1] is the variable's value; empty means no default, so it is required.
            if (!isCredentialParameter(variableName) && vars[1].length() == 0) {
                requiredParameters.add(variableName);
            }
        }
        return requiredParameters.toArray(new String[requiredParameters.size()]);
    }

    // Tells whether the given variable name is one of the credential parameters.
    private static boolean isCredentialParameter(String variableName) {
        return USERNAME.equals(variableName) || PASSWORD.equals(variableName);
    }

    /**
     * Gets the names of the non-credential script variables that do have a default value,
     * i.e. the parameters the user may override but need not provide.
     */
    @Override
    public String[] getOptionalParamsNames() {
        List<String> optionalParameters = new ArrayList<>();
        for (String[] vars : script.getZestScript().getParameters().getVariables()) {
            String variableName = vars[0];
            if (!isCredentialParameter(variableName) && vars[1].length() != 0) {
                optionalParameters.add(variableName);
            }
        }
        return optionalParameters.toArray(new String[optionalParameters.size()]);
    }

    @Override
    public String[] getCredentialsParamsNames() {
        return new String[] {USERNAME, PASSWORD};
    }

    /**
     * Runs the wrapped Zest script with the given parameters and credentials and returns the
     * final HTTP message, rebuilt from the runner's RESPONSE_*/REQUEST_* variables.
     *
     * @param helper the helper used to send requests as the requesting user.
     * @param paramsValues the values of the script parameters; the credential entries are
     *     overwritten from {@code credentials} before the run.
     * @param credentials the credentials providing the Username/Password values.
     * @return the message of the last request/response pair recorded by the runner.
     * @throws ScriptException if the script run or the message reconstruction fails.
     */
    @Override
    public HttpMessage authenticate(
            AuthenticationHelper helper,
            Map<String, String> paramsValues,
            GenericAuthenticationCredentials credentials)
            throws ScriptException {
        this.helper = helper;
        Server proxyServer = null;
        try {
            // Client (browser) statements need a proxy so their traffic flows through ZAP
            // and is captured into the runner's variables (see ZestMessageHandler).
            if (hasClientStatements()) {
                proxyServer =
                        extensionNetwork.createHttpProxy(
                                helper.getHttpSender(), new ZestMessageHandler(this, helper));
                int port = proxyServer.start(PROXY_ADDRESS, Server.ANY_PORT);
                this.setProxy(PROXY_ADDRESS, port);
            }
            paramsValues.put(USERNAME, credentials.getParam(USERNAME));
            paramsValues.put(PASSWORD, credentials.getParam(PASSWORD));
            this.run(script.getZestScript(), paramsValues);
            // Rebuild the last request/response pair from the runner's variables.
            String respUrl = this.getVariable(ZestVariables.RESPONSE_URL);
            HttpMessage msg = new HttpMessage(new URI(respUrl, true));
            msg.setRequestHeader(
                    this.getVariable(ZestVariables.REQUEST_METHOD)
                            + " "
                            + this.getVariable(ZestVariables.REQUEST_URL)
                            + " "
                            + msg.getRequestHeader().getVersion()
                            + HttpHeader.CRLF
                            + this.getVariable(ZestVariables.REQUEST_HEADER));
            msg.setRequestBody(this.getVariable(ZestVariables.REQUEST_BODY));
            msg.getRequestHeader().setContentLength(msg.getRequestBody().length());
            msg.setResponseHeader(this.getVariable(ZestVariables.RESPONSE_HEADER));
            msg.setResponseBody(this.getVariable(ZestVariables.RESPONSE_BODY));
            // Make sure the proper requesting user is set on the returned message
            msg.setRequestingUser(helper.getRequestingUser());
            return msg;
        } catch (Exception e) {
            throw new ScriptException(e);
        } finally {
            // Always tear down the temporary proxy, even if the run failed.
            if (proxyServer != null) {
                try {
                    proxyServer.close();
                } catch (IOException e) {
                    LOGGER.debug("An error occurred while stopping the proxy.", e);
                }
            }
        }
    }

    // Tells whether the script contains any enabled client (browser) statement, which
    // would require the temporary proxy.
    private boolean hasClientStatements() {
        ZestStatement next = script.getZestScript().getNext();
        while (next != null) {
            if (next instanceof ZestClient && next.isEnabled()) {
                return true;
            }
            next = next.getNext();
        }
        return false;
    }

    /**
     * Sends the given Zest request through the authentication helper, as the requesting
     * user, and converts the reply back to a Zest response.
     */
    @Override
    public ZestResponse send(ZestRequest request) throws IOException {
        HttpMessage msg = ZestZapUtils.toHttpMessage(request, null);
        msg.setRequestingUser(helper.getRequestingUser());
        helper.sendAndReceive(msg, request.isFollowRedirects());
        return ZestZapUtils.toZestResponse(msg);
    }

    /**
     * Proxy handler that tags outgoing (client) requests with the requesting user and, on
     * the response side, copies the proxied message into the runner's request/response
     * variables so the script result reflects the browser traffic.
     */
    private static class ZestMessageHandler implements HttpMessageHandler {

        private final ZestBasicRunner runner;
        private final AuthenticationHelper helper;

        private ZestMessageHandler(ZestBasicRunner runner, AuthenticationHelper helper) {
            this.runner = runner;
            this.helper = helper;
        }

        @Override
        public void handleMessage(HttpMessageHandlerContext ctx, HttpMessage msg) {
            if (ctx.isFromClient()) {
                msg.setRequestingUser(helper.getRequestingUser());
                return;
            }
            // Response direction: record the full exchange in the runner's variables.
            runner.setVariable(
                    ZestVariables.REQUEST_URL, msg.getRequestHeader().getURI().toString());
            runner.setVariable(
                    ZestVariables.REQUEST_HEADER, msg.getRequestHeader().getHeadersAsString());
            runner.setVariable(ZestVariables.REQUEST_METHOD, msg.getRequestHeader().getMethod());
            runner.setVariable(ZestVariables.REQUEST_BODY, msg.getRequestBody().toString());
            runner.setVariable(
                    ZestVariables.RESPONSE_URL, msg.getRequestHeader().getURI().toString());
            runner.setVariable(ZestVariables.RESPONSE_HEADER, msg.getResponseHeader().toString());
            runner.setVariable(ZestVariables.RESPONSE_BODY, msg.getResponseBody().toString());
        }
    }
}
| |
package org.apache.mesos.hdfs;
import com.google.common.collect.Lists;
import org.apache.hadoop.conf.Configuration;
import org.apache.mesos.Protos;
import org.apache.mesos.SchedulerDriver;
import org.apache.mesos.hdfs.config.HdfsFrameworkConfig;
import org.apache.mesos.hdfs.scheduler.HdfsScheduler;
import org.apache.mesos.hdfs.state.AcquisitionPhase;
import org.apache.mesos.hdfs.state.LiveState;
import org.apache.mesos.hdfs.state.IPersistentStateStore;
import org.apache.mesos.hdfs.util.DnsResolver;
import org.apache.mesos.hdfs.util.HDFSConstants;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.HashMap;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@link HdfsScheduler}: verifies the acquisition-phase transitions driven
 * by task status updates, and the launch/decline decisions driven by resource offers,
 * against mocked driver and state objects.
 */
@SuppressWarnings("unchecked")
public class TestScheduler {

  private final HdfsFrameworkConfig hdfsFrameworkConfig =
      new HdfsFrameworkConfig(new Configuration());

  @Mock
  SchedulerDriver driver;

  @Mock
  IPersistentStateStore persistenceStore;

  @Mock
  LiveState liveState;

  // NOTE(review): mocked and stubbed below, but never passed to the scheduler under test;
  // confirm whether HdfsScheduler obtains its DnsResolver elsewhere or this mock is unused.
  @Mock
  DnsResolver dnsResolver;

  // Captures the task collections handed to SchedulerDriver.launchTasks.
  @Captor
  ArgumentCaptor<Collection<Protos.TaskInfo>> taskInfosCapture;

  HdfsScheduler scheduler;

  @Before
  public void setup() {
    MockitoAnnotations.initMocks(this);
    this.scheduler = new HdfsScheduler(hdfsFrameworkConfig, liveState, persistenceStore);
  }

  @Test
  public void statusUpdateWasStagingNowRunning() {
    when(liveState.getCurrentAcquisitionPhase()).thenReturn(AcquisitionPhase.JOURNAL_NODES);
    Protos.TaskID taskId = createTaskId("1");
    scheduler.statusUpdate(driver, createTaskStatus(taskId, Protos.TaskState.TASK_RUNNING));
    verify(liveState).removeStagingTask(taskId);
  }

  @Test
  public void statusUpdateTransitionFromAcquiringJournalNodesToStartingNameNodes() {
    Protos.TaskID taskId = createTaskId("1");
    when(liveState.getCurrentAcquisitionPhase()).thenReturn(AcquisitionPhase.JOURNAL_NODES);
    // All three journal nodes are up, so the phase should advance.
    when(liveState.getJournalNodeSize()).thenReturn(3);
    scheduler.statusUpdate(driver,
        createTaskStatus(taskId, Protos.TaskState.TASK_RUNNING));
    verify(liveState).transitionTo(AcquisitionPhase.START_NAME_NODES);
  }

  @Test
  public void statusUpdateAcquiringJournalNodesNotEnoughYet() {
    Protos.TaskID taskId = createTaskId("1");
    when(liveState.getCurrentAcquisitionPhase()).thenReturn(AcquisitionPhase.JOURNAL_NODES);
    // Only two of the required journal nodes are up: no phase transition yet.
    when(liveState.getJournalNodeSize()).thenReturn(2);
    scheduler.statusUpdate(driver,
        createTaskStatus(taskId, Protos.TaskState.TASK_RUNNING));
    verify(liveState, never()).transitionTo(AcquisitionPhase.START_NAME_NODES);
  }

  @Test
  public void statusUpdateTransitionFromStartingNameNodesToFormateNameNodes() {
    Protos.TaskID taskId = createTaskId(HDFSConstants.NAME_NODE_TASKID + "1");
    Protos.SlaveID slaveId = createSlaveId("1");
    when(liveState.getCurrentAcquisitionPhase()).thenReturn(AcquisitionPhase.START_NAME_NODES);
    when(liveState.getNameNodeSize()).thenReturn(2);
    when(liveState.getJournalNodeSize()).thenReturn(hdfsFrameworkConfig.getJournalNodeCount());
    when(liveState.getFirstNameNodeTaskId()).thenReturn(taskId);
    when(liveState.getFirstNameNodeSlaveId()).thenReturn(slaveId);
    scheduler.statusUpdate(driver,
        createTaskStatus(taskId, Protos.TaskState.TASK_RUNNING));
    verify(liveState).transitionTo(AcquisitionPhase.FORMAT_NAME_NODES);
  }

  @Test
  public void statusUpdateTransitionFromFormatNameNodesToDataNodes() {
    when(liveState.getCurrentAcquisitionPhase()).thenReturn(AcquisitionPhase.FORMAT_NAME_NODES);
    when(liveState.getJournalNodeSize()).thenReturn(hdfsFrameworkConfig.getJournalNodeCount());
    when(liveState.getNameNodeSize()).thenReturn(HDFSConstants.TOTAL_NAME_NODES);
    when(liveState.isNameNode1Initialized()).thenReturn(true);
    when(liveState.isNameNode2Initialized()).thenReturn(true);
    scheduler.statusUpdate(
        driver,
        createTaskStatus(createTaskId(HDFSConstants.NAME_NODE_TASKID),
            Protos.TaskState.TASK_RUNNING));
    verify(liveState).transitionTo(AcquisitionPhase.DATA_NODES);
  }

  @Test
  public void statusUpdateAquiringDataNodesJustStays() {
    Protos.TaskID taskId = createTaskId("1");
    when(liveState.getCurrentAcquisitionPhase()).thenReturn(AcquisitionPhase.DATA_NODES);
    scheduler.statusUpdate(driver,
        createTaskStatus(taskId, Protos.TaskState.TASK_RUNNING));
    // DATA_NODES is the terminal phase: no transition of any kind.
    verify(liveState, never()).transitionTo(any(AcquisitionPhase.class));
  }

  @Test
  public void startsAJournalNodeWhenGivenAnOffer() {
    when(liveState.getCurrentAcquisitionPhase()).thenReturn(AcquisitionPhase.JOURNAL_NODES);
    scheduler.resourceOffers(driver,
        Lists.newArrayList(createTestOfferWithResources(0, 2, 2048)));
    verify(driver, times(1)).launchTasks(anyList(), taskInfosCapture.capture());
    assertEquals(1, taskInfosCapture.getValue().size());
  }

  @Test
  public void launchesOnlyNeededNumberOfJournalNodes() {
    when(liveState.getCurrentAcquisitionPhase()).thenReturn(AcquisitionPhase.JOURNAL_NODES);
    // The persisted state already holds a full set of journal nodes.
    HashMap<String, String> journalNodes = new HashMap<String, String>();
    journalNodes.put("host1", "journalnode1");
    journalNodes.put("host2", "journalnode2");
    journalNodes.put("host3", "journalnode3");
    when(persistenceStore.getJournalNodes()).thenReturn(journalNodes);
    scheduler.resourceOffers(driver, Lists.newArrayList(createTestOffer(0)));
    verify(driver, never()).launchTasks(anyList(), anyList());
  }

  @Test
  public void launchesNamenodeWhenInNamenode1Phase() {
    when(liveState.getCurrentAcquisitionPhase()).thenReturn(AcquisitionPhase.START_NAME_NODES);
    when(persistenceStore.getNameNodeTaskNames()).thenReturn(new HashMap<String, String>());
    when(persistenceStore.journalNodeRunningOnSlave("host0")).thenReturn(true);
    when(dnsResolver.journalNodesResolvable()).thenReturn(true);
    scheduler.resourceOffers(driver, Lists.newArrayList(createTestOffer(0)));
    verify(driver, times(1)).launchTasks(anyList(), taskInfosCapture.capture());
    // A name node launch consists of the name node task plus its ZKFC companion.
    assertEquals(2, taskInfosCapture.getValue().size());
    Iterator<Protos.TaskInfo> taskInfoIterator = taskInfosCapture.getValue().iterator();
    String firstTask = taskInfoIterator.next().getName();
    assertTrue(firstTask.contains(HDFSConstants.NAME_NODE_ID)
        || firstTask.contains(HDFSConstants.ZKFC_NODE_ID));
    String secondTask = taskInfoIterator.next().getName();
    assertTrue(secondTask.contains(HDFSConstants.NAME_NODE_ID)
        || secondTask.contains(HDFSConstants.ZKFC_NODE_ID));
  }

  @Test
  public void declinesAnyOffersPastWhatItNeeds() {
    when(liveState.getCurrentAcquisitionPhase()).thenReturn(AcquisitionPhase.DATA_NODES);
    scheduler.resourceOffers(driver,
        Lists.newArrayList(
            createTestOffer(0),
            createTestOffer(1),
            createTestOffer(2),
            createTestOffer(3)
        ));
    verify(driver, times(3)).declineOffer(any(Protos.OfferID.class));
  }

  @Test
  public void launchesDataNodesWhenInDatanodesPhase() {
    when(liveState.getCurrentAcquisitionPhase()).thenReturn(AcquisitionPhase.DATA_NODES);
    scheduler.resourceOffers(driver,
        Lists.newArrayList(
            createTestOffer(0)
        )
    );
    verify(driver, times(1)).launchTasks(anyList(), taskInfosCapture.capture());
    Protos.TaskInfo taskInfo = taskInfosCapture.getValue().iterator().next();
    assertTrue(taskInfo.getName().contains(HDFSConstants.DATA_NODE_ID));
  }

  @Test
  public void removesTerminalTasksFromLiveState() {
    when(liveState.getCurrentAcquisitionPhase()).thenReturn(AcquisitionPhase.DATA_NODES);
    // Every terminal state must purge the task from both staging and running maps.
    scheduler.statusUpdate(driver, createTaskStatus(createTaskId("0"),
        Protos.TaskState.TASK_FAILED));
    scheduler.statusUpdate(driver, createTaskStatus(createTaskId("1"),
        Protos.TaskState.TASK_FINISHED));
    scheduler.statusUpdate(driver, createTaskStatus(createTaskId("2"),
        Protos.TaskState.TASK_KILLED));
    scheduler.statusUpdate(driver, createTaskStatus(createTaskId("3"),
        Protos.TaskState.TASK_LOST));
    verify(liveState, times(4)).removeStagingTask(any(Protos.TaskID.class));
    verify(liveState, times(4)).removeRunningTask(any(Protos.TaskID.class));
  }

  @Test
  public void declinesOffersWithNotEnoughResources() {
    when(liveState.getCurrentAcquisitionPhase()).thenReturn(AcquisitionPhase.DATA_NODES);
    // 0.1 cpus / 64MB is below any node's requirements.
    Protos.Offer offer = createTestOfferWithResources(0, 0.1, 64);
    scheduler.resourceOffers(driver, Lists.newArrayList(offer));
    verify(driver, times(1)).declineOffer(offer.getId());
  }

  // --- Test fixture builders -------------------------------------------------

  private Protos.TaskID createTaskId(String id) {
    return Protos.TaskID.newBuilder().setValue(id).build();
  }

  private Protos.OfferID createTestOfferId(int instanceNumber) {
    return Protos.OfferID.newBuilder().setValue("offer" + instanceNumber).build();
  }

  private Protos.SlaveID createSlaveId(String slaveId) {
    return Protos.SlaveID.newBuilder().setValue(slaveId).build();
  }

  private Protos.ExecutorID createExecutorId(String executorId) {
    return Protos.ExecutorID.newBuilder().setValue(executorId).build();
  }

  // Offer with no resources attached; slave/host names are derived from the instance number.
  private Protos.Offer createTestOffer(int instanceNumber) {
    return Protos.Offer.newBuilder()
        .setId(createTestOfferId(instanceNumber))
        .setFrameworkId(Protos.FrameworkID.newBuilder().setValue("framework1").build())
        .setSlaveId(Protos.SlaveID.newBuilder().setValue("slave" + instanceNumber).build())
        .setHostname("host" + instanceNumber)
        .build();
  }

  // Offer carrying explicit cpu/memory scalar resources on the default role.
  private Protos.Offer createTestOfferWithResources(int instanceNumber, double cpus, int mem) {
    return Protos.Offer.newBuilder()
        .setId(createTestOfferId(instanceNumber))
        .setFrameworkId(Protos.FrameworkID.newBuilder().setValue("framework1").build())
        .setSlaveId(Protos.SlaveID.newBuilder().setValue("slave" + instanceNumber).build())
        .setHostname("host" + instanceNumber)
        .addAllResources(Arrays.asList(
            Protos.Resource.newBuilder()
                .setName("cpus")
                .setType(Protos.Value.Type.SCALAR)
                .setScalar(Protos.Value.Scalar.newBuilder()
                    .setValue(cpus).build())
                .setRole("*")
                .build(),
            Protos.Resource.newBuilder()
                .setName("mem")
                .setType(Protos.Value.Type.SCALAR)
                .setScalar(Protos.Value.Scalar.newBuilder()
                    .setValue(mem).build())
                .setRole("*")
                .build()))
        .build();
  }

  private Protos.TaskStatus createTaskStatus(Protos.TaskID taskID, Protos.TaskState state) {
    return Protos.TaskStatus.newBuilder()
        .setTaskId(taskID)
        .setState(state)
        .setSlaveId(Protos.SlaveID.newBuilder().setValue("slave").build())
        .setMessage("From Test")
        .build();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.backend.hadoop.executionengine.tez;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.HDataType;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.POStatus;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.Result;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.ExpressionOperator;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
import org.apache.pig.data.BagFactory;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.builtin.PartitionSkewedKeys;
import org.apache.pig.impl.io.NullablePartitionWritable;
import org.apache.pig.impl.io.NullableTuple;
import org.apache.pig.impl.io.PigNullableWritable;
import org.apache.pig.impl.plan.OperatorKey;
import org.apache.pig.impl.util.Pair;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
/**
 * The partition rearrange operator is a part of the skewed join implementation.
 * It has an embedded physical plan that generates tuples of the form
 * (inpKey, reducerIndex, (indexed inp Tuple)).
 */
public class POPartitionRearrangeTez extends POLocalRearrangeTez {
    private static final long serialVersionUID = 1L;
    private static final Log LOG = LogFactory.getLog(POPartitionRearrangeTez.class);

    private static final TupleFactory tf = TupleFactory.getInstance();
    private static final BagFactory mBagFactory = BagFactory.getInstance();

    // ReducerMap will store the tuple, max reducer index & min reducer index.
    // Concretely: key -> (minReducerIndex, extraReducerCount); see init().
    private Map<Object, Pair<Integer, Integer>> reducerMap = Maps.newHashMap();
    private Integer totalReducers = -1;
    // Lazily flipped by init() on the first getNextTuple() call.
    private boolean inited = false;

    public POPartitionRearrangeTez(OperatorKey k) {
        this(k, -1);
    }

    public POPartitionRearrangeTez(OperatorKey k, int rp) {
        super(k, rp);
        index = -1;
        leafOps = Lists.newArrayList();
    }

    @Override
    public String name() {
        return getAliasString() + "Partition Rearrange" + "["
                + DataType.findTypeName(resultType) + "]" + "{"
                + DataType.findTypeName(keyType) + "}" + "(" + mIsDistinct
                + ") - " + mKey.toString() + "\t->\t " + outputKey;
    }

    /**
     * Calls getNext on the generate operator inside the nested physical plan.
     * Converts the generated tuple into the proper format, i.e,
     * (key,indexedTuple(value))
     */
    @Override
    public Result getNextTuple() throws ExecException {
        if (!inited) {
            init();
        }
        while (true) {
            inp = processInput();
            if (inp.returnStatus == POStatus.STATUS_EOP || inp.returnStatus == POStatus.STATUS_ERR) {
                break;
            }
            if (inp.returnStatus == POStatus.STATUS_NULL) {
                continue;
            }
            // Feed the input tuple through the embedded plans and collect leaf results.
            for (PhysicalPlan ep : plans) {
                ep.attachInput((Tuple)inp.result);
            }
            List<Result> resLst = new ArrayList<Result>();
            for (ExpressionOperator op : leafOps) {
                res = op.getNext(op.getResultType());
                if (res.returnStatus != POStatus.STATUS_OK) {
                    return res;
                }
                resLst.add(res);
            }
            res.result = constructPROutput(resLst, (Tuple)inp.result);
            if (writer == null) { // In the case of combiner
                return res;
            }
            // Write each (index, partition, key, value) tuple of the bag to Tez,
            // wrapping the key so it carries the chosen partition.
            Iterator<Tuple> its = ((DataBag)res.result).iterator();
            while (its.hasNext()) {
                Tuple result = its.next();
                Byte index = (Byte)result.get(0);
                PigNullableWritable key =
                        HDataType.getWritableComparableTypes(result.get(2), keyType);
                NullableTuple val = new NullableTuple((Tuple)result.get(3));
                NullablePartitionWritable wrappedKey = new NullablePartitionWritable(key);
                wrappedKey.setIndex(index);
                wrappedKey.setPartition((Integer)result.get(1));
                val.setIndex(index);
                try {
                    writer.write(wrappedKey, val);
                } catch (IOException ioe) {
                    int errCode = 2135;
                    String msg = "Received error from POPartitionRearrange function." + ioe.getMessage();
                    throw new ExecException(msg, errCode, ioe);
                }
            }
            res = RESULT_EMPTY;
        }
        return inp;
    }

    /**
     * Returns a bag of (index, partitionIndex, key, value) tuples, one per reducer
     * the key is spread over (skewed keys span several reducers; others get one
     * tuple with partition index -1).
     */
    protected DataBag constructPROutput(List<Result> resLst, Tuple value) throws ExecException{
        Tuple t = super.constructLROutput(resLst, null, value);
        //Construct key
        Object key = t.get(1);

        // Construct an output bag and feed in the tuples
        DataBag opBag = mBagFactory.newDefaultBag();

        // Put the index, key, and value in a tuple and return
        // first -> min, second -> max
        Pair <Integer, Integer> indexes = reducerMap.get(key);

        // For non skewed keys, we set the partition index to be -1
        if (indexes == null) {
            indexes = new Pair <Integer, Integer>(-1,0);
        }

        // Emit one tuple per reducer index, wrapping back to 0 past the last reducer.
        for (Integer reducerIdx = indexes.first, cnt = 0; cnt <= indexes.second; reducerIdx++, cnt++) {
            if (reducerIdx >= totalReducers) {
                reducerIdx = 0;
            }
            Tuple opTuple = mTupleFactory.newTuple(4);
            opTuple.set(0, t.get(0));
            // set the partition index
            opTuple.set(1, reducerIdx.intValue());
            opTuple.set(2, key);
            opTuple.set(3, t.get(2));
            opBag.add(opTuple);
        }
        return opBag;
    }

    /**
     * Loads totalReducers and the key -> reducer-range map, either from the Tez object
     * cache (if a previous task on this JVM computed them) or from the sample map
     * collected by PigProcessor, then caches the result for later tasks.
     */
    @SuppressWarnings("unchecked")
    private void init() throws RuntimeException {
        ObjectCache cache = ObjectCache.getInstance();
        String isCachedKey = "sample-" + PigProcessor.sampleVertex + ".cached";
        String totalReducersCacheKey = "sample-" + PigProcessor.sampleVertex + ".totalReducers";
        String reducerMapCacheKey = "sample-" + PigProcessor.sampleVertex + ".reducerMap";
        if (cache.retrieve(isCachedKey) == Boolean.TRUE) {
            totalReducers = (Integer) cache.retrieve(totalReducersCacheKey);
            reducerMap = (Map<Object, Pair<Integer, Integer>>) cache.retrieve(reducerMapCacheKey);
            LOG.info("Found totalReducers and reducerMap in Tez cache. cachekey="
                    + totalReducersCacheKey + "," + reducerMapCacheKey);
            inited = true;
            return;
        }

        Map<String, Object> distMap = null;
        if (PigProcessor.sampleMap != null) {
            // We've already collected sampleMap in PigProcessor
            distMap = PigProcessor.sampleMap;
        } else {
            LOG.info("Key distribution map is empty");
            inited = true;
            return;
        }

        long start = System.currentTimeMillis();
        try {
            // The distMap is structured as (key, min, max) where min, max
            // being the index of the reducers
            DataBag partitionList = (DataBag) distMap.get(PartitionSkewedKeys.PARTITION_LIST);
            totalReducers = Integer.valueOf("" + distMap.get(PartitionSkewedKeys.TOTAL_REDUCERS));
            Iterator<Tuple> it = partitionList.iterator();
            while (it.hasNext()) {
                Tuple idxTuple = it.next();
                Integer maxIndex = (Integer) idxTuple.get(idxTuple.size() - 1);
                Integer minIndex = (Integer) idxTuple.get(idxTuple.size() - 2);
                // Used to replace the maxIndex with the number of reducers
                if (maxIndex < minIndex) {
                    maxIndex = totalReducers + maxIndex;
                }

                Object keyT;
                // if the join is on more than 1 key
                if (idxTuple.size() > 3) {
                    // remove the last 2 fields of the tuple, i.e: minIndex
                    // and maxIndex and store it in the reducer map
                    Tuple keyTuple = tf.newTuple();
                    for (int i=0; i < idxTuple.size() - 2; i++) {
                        keyTuple.append(idxTuple.get(i));
                    }
                    keyT = keyTuple;
                } else {
                    keyT = idxTuple.get(0);
                }
                // number of reducers
                Integer cnt = maxIndex - minIndex;
                // The +1 needed to cover [min, max] inclusive is supplied by the
                // <= comparison in constructPROutput's loop.
                reducerMap.put(keyT, new Pair<Integer, Integer>(minIndex, cnt));
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        LOG.info("Initialized POPartitionRearrangeTez. Time taken: " + (System.currentTimeMillis() - start));
        cache.cache(isCachedKey, Boolean.TRUE);
        cache.cache(totalReducersCacheKey, totalReducers);
        cache.cache(reducerMapCacheKey, reducerMap);
        inited = true;
    }
}
| |
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.customtabs.content;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import androidx.browser.customtabs.CustomTabsSessionToken;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.chromium.base.UserDataHost;
import org.chromium.chrome.browser.ActivityTabProvider;
import org.chromium.chrome.browser.ActivityTabProvider.ActivityTabObserver;
import org.chromium.chrome.browser.ChromeActivity;
import org.chromium.chrome.browser.IntentHandler;
import org.chromium.chrome.browser.WarmupManager;
import org.chromium.chrome.browser.WebContentsFactory;
import org.chromium.chrome.browser.compositor.CompositorViewHolder;
import org.chromium.chrome.browser.customtabs.CloseButtonNavigator;
import org.chromium.chrome.browser.customtabs.CustomTabDelegateFactory;
import org.chromium.chrome.browser.customtabs.CustomTabIntentDataProvider;
import org.chromium.chrome.browser.customtabs.CustomTabNavigationEventObserver;
import org.chromium.chrome.browser.customtabs.CustomTabObserver;
import org.chromium.chrome.browser.customtabs.CustomTabTabPersistencePolicy;
import org.chromium.chrome.browser.customtabs.CustomTabsConnection;
import org.chromium.chrome.browser.customtabs.ReparentingTaskProvider;
import org.chromium.chrome.browser.customtabs.shadows.ShadowExternalNavigationDelegateImpl;
import org.chromium.chrome.browser.fullscreen.ChromeFullscreenManager;
import org.chromium.chrome.browser.init.ChromeBrowserInitializer;
import org.chromium.chrome.browser.init.StartupTabPreloader;
import org.chromium.chrome.browser.lifecycle.ActivityLifecycleDispatcher;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.chrome.browser.tab.TabImpl;
import org.chromium.chrome.browser.tab_activity_glue.ReparentingTask;
import org.chromium.chrome.browser.tabmodel.AsyncTabParamsManager;
import org.chromium.chrome.browser.tabmodel.TabModel;
import org.chromium.chrome.browser.tabmodel.TabModelSelectorImpl;
import org.chromium.chrome.browser.tabmodel.document.AsyncTabCreationParams;
import org.chromium.chrome.browser.toolbar.ToolbarManager;
import org.chromium.content_public.browser.LoadUrlParams;
import org.chromium.content_public.browser.NavigationController;
import org.chromium.content_public.browser.WebContents;
/**
* A TestRule that sets up the mocks and contains helper methods for JUnit/Robolectric tests scoped
* to the content layer of Custom Tabs code.
*/
public class CustomTabActivityContentTestEnvironment extends TestWatcher {
    public static final String INITIAL_URL = "https://initial.com";
    public static final String SPECULATED_URL = "https://speculated.com";
    public static final String OTHER_URL = "https://other.com";

    // Intent returned by intentDataProvider.getIntent(); tests may add extras to it.
    public final Intent intent = new Intent();

    // clang-format off
    @Mock public CustomTabDelegateFactory customTabDelegateFactory;
    @Mock public ChromeActivity activity;
    @Mock public CustomTabsConnection connection;
    @Mock public CustomTabIntentDataProvider intentDataProvider;
    @Mock public TabObserverRegistrar tabObserverRegistrar;
    @Mock public CompositorViewHolder compositorViewHolder;
    @Mock public WarmupManager warmupManager;
    @Mock public CustomTabTabPersistencePolicy tabPersistencePolicy;
    @Mock public CustomTabActivityTabFactory tabFactory;
    @Mock public CustomTabObserver customTabObserver;
    @Mock public WebContentsFactory webContentsFactory;
    @Mock public ActivityTabProvider activityTabProvider;
    @Mock public ActivityLifecycleDispatcher lifecycleDispatcher;
    @Mock public CustomTabsSessionToken session;
    @Mock public TabModelSelectorImpl tabModelSelector;
    @Mock public TabModel tabModel;
    @Mock public ReparentingTaskProvider reparentingTaskProvider;
    @Mock public ReparentingTask reparentingTask;
    @Mock public CustomTabNavigationEventObserver navigationEventObserver;
    @Mock public CloseButtonNavigator closeButtonNavigator;
    @Mock public ToolbarManager toolbarManager;
    @Mock public ChromeBrowserInitializer browserInitializer;
    @Mock public ChromeFullscreenManager fullscreenManager;
    @Mock public StartupTabPreloader startupTabPreloader;
    // clang-format on

    public final CustomTabActivityTabProvider tabProvider = new CustomTabActivityTabProvider();

    // Captures observers registered on activityTabProvider so changeTab() can notify them.
    @Captor
    public ArgumentCaptor<ActivityTabObserver> activityTabObserverCaptor;

    // Captures the WebContents with which tabFromFactory is initialized
    @Captor public ArgumentCaptor<WebContents> webContentsCaptor;

    // The mock tab the mocked tabFactory returns for any createTab(...) call.
    public Tab tabFromFactory;

    /**
     * Initializes the mocks and wires the default stubbing before each test runs.
     */
    @Override
    protected void starting(Description description) {
        MockitoAnnotations.initMocks(this);

        tabFromFactory = prepareTab();

        when(intentDataProvider.getIntent()).thenReturn(intent);
        when(intentDataProvider.getSession()).thenReturn(session);
        when(intentDataProvider.getUrlToLoad()).thenReturn(INITIAL_URL);
        when(tabFactory.createTab(webContentsCaptor.capture(), any(), any()))
                .thenReturn(tabFromFactory);
        when(tabFactory.getTabModelSelector()).thenReturn(tabModelSelector);
        when(tabModelSelector.getModel(anyBoolean())).thenReturn(tabModel);
        when(connection.getSpeculatedUrl(any())).thenReturn(SPECULATED_URL);
        when(browserInitializer.isFullBrowserInitialized()).thenReturn(true);
        // Default setup is toolbarManager doesn't consume back press event.
        when(toolbarManager.back()).thenReturn(false);
        when(startupTabPreloader.takeTabIfMatchingOrDestroy(any(), anyInt())).thenReturn(null);
        when(reparentingTaskProvider.get(any())).thenReturn(reparentingTask);
        doNothing()
                .when(activityTabProvider)
                .addObserverAndTrigger(activityTabObserverCaptor.capture());
    }

    /**
     * Cleans up global state mutated by the tests (async tab params, shadow flag).
     */
    @Override
    protected void finished(Description description) {
        AsyncTabParamsManager.getAsyncTabParams().clear();
        ShadowExternalNavigationDelegateImpl.setWillChromeHandleIntent(false);
    }

    // Builds the tab controller under test from the environment's mocks.
    // clang-format off
    public CustomTabActivityTabController createTabController() {
        return new CustomTabActivityTabController(activity, () -> customTabDelegateFactory,
                connection, intentDataProvider, activityTabProvider, tabObserverRegistrar,
                () -> compositorViewHolder, lifecycleDispatcher, warmupManager,
                tabPersistencePolicy, tabFactory, () -> customTabObserver, webContentsFactory,
                navigationEventObserver, tabProvider, startupTabPreloader, reparentingTaskProvider);
    }
    // clang-format on

    // Builds a navigation controller wired to the given tab controller, with the
    // toolbar already initialized.
    public CustomTabActivityNavigationController createNavigationController(
            CustomTabActivityTabController tabController) {
        CustomTabActivityNavigationController controller =
                new CustomTabActivityNavigationController(tabController, tabProvider,
                        intentDataProvider, connection,
                        ()
                                -> customTabObserver,
                        closeButtonNavigator, browserInitializer, activity, lifecycleDispatcher,
                        () -> fullscreenManager);
        controller.onToolbarInitialized(toolbarManager);
        return controller;
    }

    // Builds an intent handler that uses the default handling strategy and never
    // delegates intents elsewhere.
    public CustomTabIntentHandler createIntentHandler(
            CustomTabActivityNavigationController navigationController) {
        CustomTabIntentHandlingStrategy strategy = new DefaultCustomTabIntentHandlingStrategy(
                tabProvider, navigationController, navigationEventObserver,
                () -> customTabObserver);
        return new CustomTabIntentHandler(tabProvider,
                intentDataProvider, strategy, (intent) -> false, activity);
    }

    // Marks warm-up as finished on the connection mock.
    public void warmUp() {
        when(connection.hasWarmUpBeenFinished()).thenReturn(true);
    }

    // Makes activityTabProvider report newTab and notifies every captured observer.
    public void changeTab(Tab newTab) {
        when(activityTabProvider.get()).thenReturn(newTab);
        for (ActivityTabObserver observer : activityTabObserverCaptor.getAllValues()) {
            observer.onActivityTabChanged(newTab, false);
        }
    }

    // Simulates a saved-instance-state restart with the given tab as current.
    public void saveTab(Tab tab) {
        when(activity.getSavedInstanceState()).thenReturn(new Bundle());
        when(tabModelSelector.getCurrentTab()).thenReturn(tab);
    }

    // Dispatches lifecycle events up to native init.
    public void reachNativeInit(CustomTabActivityTabController tabController) {
        tabController.onPreInflationStartup();
        tabController.onPostInflationStartup();
        tabController.onFinishNativeInitialization();
    }

    // Registers a mock WebContents as transferred via async tab params and tags the
    // intent with the matching tab id.
    public WebContents prepareTransferredWebcontents() {
        int tabId = 1;
        WebContents webContents = mock(WebContents.class);
        AsyncTabParamsManager.add(tabId, new AsyncTabCreationParams(mock(LoadUrlParams.class),
                webContents));
        intent.putExtra(IntentHandler.EXTRA_TAB_ID, tabId);
        return webContents;
    }

    // Makes the warmup manager hand out a mock spare WebContents for CCT.
    public WebContents prepareSpareWebcontents() {
        WebContents webContents = mock(WebContents.class);
        when(warmupManager.takeSpareWebContents(
                anyBoolean(), anyBoolean(), eq(WarmupManager.FOR_CCT)))
                .thenReturn(webContents);
        return webContents;
    }

    // Creates a mock hidden (speculation) tab and makes the connection return it;
    // implies a finished warm-up.
    public TabImpl prepareHiddenTab() {
        warmUp();
        TabImpl hiddenTab = prepareTab();
        when(connection.takeHiddenTab(any(), any(), any())).thenReturn(hiddenTab);
        return hiddenTab;
    }

    // Creates a mock tab with a view, user-data host, and navigable WebContents.
    public TabImpl prepareTab() {
        TabImpl tab = mock(TabImpl.class);
        when(tab.getView()).thenReturn(mock(View.class));
        when(tab.getUserDataHost()).thenReturn(new UserDataHost());
        WebContents webContents = mock(WebContents.class);
        when(tab.getWebContents()).thenReturn(webContents);
        NavigationController navigationController = mock(NavigationController.class);
        when(webContents.getNavigationController()).thenReturn(navigationController);
        return tab;
    }
}
| |
package com.aventura.model.world.shape;
import java.awt.Color;
import com.aventura.math.vector.Vector4;
import com.aventura.model.texture.Texture;
import com.aventura.model.world.Vertex;
import com.aventura.model.world.triangle.FanMesh;
import com.aventura.model.world.triangle.RectangleMesh;
/**
* ------------------------------------------------------------------------------
* MIT License
*
* Copyright (c) 2016-2021 Olivier BARRY
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
* ------------------------------------------------------------------------------
*
* @author Olivier BARRY
* @since March 2017
*/
public class Pyramid extends Element {

	protected static final String PYRAMID_DEFAULT_NAME = "pyramid";

	// Dimensions of the pyramid's bounding box along each axis.
	protected float x_dim, y_dim, z_dim;

	// The 4 base vertices, indexed [x][y] where 0 = negative half-dim
	// and 1 = positive half-dim. Populated by createGeometry().
	protected Vertex[][] vertices;

	protected RectangleMesh bottom;
	protected FanMesh left, right, front, back;
	protected Vertex summit;

	// Optional per-face textures and colors; Java initializes all of these
	// to null (the original "= null" applied to the last field only).
	protected Texture bottom_tex, left_tex, right_tex, front_tex, back_tex;
	protected Color bottom_col, left_col, right_col, front_col, back_col;

	/**
	 * Create a Pyramid aligned on axis. Need to be rotated for a different orientation.
	 *
	 * @param x_dim dimension of the pyramid on x axis
	 * @param y_dim dimension of the pyramid on y axis
	 * @param z_dim dimension of the pyramid on z axis
	 */
	public Pyramid(float x_dim, float y_dim, float z_dim) {
		super(PYRAMID_DEFAULT_NAME, true); // A Pyramid is a closed Element
		subelements = null;
		this.x_dim = x_dim;
		this.y_dim = y_dim;
		this.z_dim = z_dim;
	}

	/**
	 * Create a Pyramid aligned on axis with a single Texture applied to all
	 * five faces. Need to be rotated for a different orientation.
	 *
	 * @param x_dim dimension of the pyramid on x axis
	 * @param y_dim dimension of the pyramid on y axis
	 * @param z_dim dimension of the pyramid on z axis
	 * @param tex   the Texture used for the bottom and the four side faces
	 */
	public Pyramid(float x_dim, float y_dim, float z_dim, Texture tex) {
		super(PYRAMID_DEFAULT_NAME, true); // A Pyramid is a closed Element
		subelements = null;
		this.bottom_tex = tex;
		this.left_tex = tex;
		this.right_tex = tex;
		this.front_tex = tex;
		this.back_tex = tex;
		this.x_dim = x_dim;
		this.y_dim = y_dim;
		this.z_dim = z_dim;
	}

	/**
	 * Create a Pyramid aligned on axis with a distinct Texture per face.
	 * Need to be rotated for a different orientation.
	 *
	 * @param x_dim     dimension of the pyramid on x axis
	 * @param y_dim     dimension of the pyramid on y axis
	 * @param z_dim     dimension of the pyramid on z axis
	 * @param base_tex  Texture of the bottom face
	 * @param left_tex  Texture of the left face
	 * @param right_tex Texture of the right face
	 * @param front_tex Texture of the front face
	 * @param back_tex  Texture of the back face
	 */
	public Pyramid(float x_dim, float y_dim, float z_dim, Texture base_tex, Texture left_tex, Texture right_tex, Texture front_tex, Texture back_tex) {
		super(PYRAMID_DEFAULT_NAME, true); // A Pyramid is a closed Element
		subelements = null;
		this.bottom_tex = base_tex;
		this.left_tex = left_tex;
		this.right_tex = right_tex;
		this.front_tex = front_tex;
		this.back_tex = back_tex;
		this.x_dim = x_dim;
		this.y_dim = y_dim;
		this.z_dim = z_dim;
	}

	/**
	 * Build the geometry: 4 base vertices at z = -z_dim/2, one summit at
	 * z = +z_dim/2, then one FanMesh per side face and a RectangleMesh for
	 * the bottom, each carrying its own optional Texture and Color.
	 */
	public void createGeometry() {

		vertices = new Vertex[2][2];

		// Half-dimensions: the pyramid is centered on the origin.
		float xh = x_dim/2;
		float yh = y_dim/2;
		float zh = z_dim/2;

		// Create the 4 vertices of the pyramid's base (z = -zh plane).
		vertices[0][0] = createVertex(new Vector4(-xh, -yh, -zh, 1));
		vertices[0][1] = createVertex(new Vector4(-xh, yh, -zh, 1));
		vertices[1][1] = createVertex(new Vector4(xh, yh, -zh, 1));
		vertices[1][0] = createVertex(new Vector4(xh, -yh, -zh, 1));

		// Create summit (apex), on the z axis above the base center.
		summit = createVertex(new Vector4(0, 0, zh, 1));

		// Create 5 temporary Vertex arrays pointing at the base vertices of
		// each face, so each face can be wrapped into its own mesh/Texture.
		// Side arrays are ordered to keep a consistent winding per face.
		Vertex [] left_array = new Vertex [] {vertices[0][0],vertices[1][0]};
		Vertex [] right_array = new Vertex [] {vertices[1][0],vertices[1][1]};
		Vertex [] front_array = new Vertex [] {vertices[1][1],vertices[0][1]};
		Vertex [] back_array = new Vertex [] {vertices[0][1],vertices[0][0]};
		Vertex [][] base_array = new Vertex [][] {{vertices[0][0],vertices[1][0]},{vertices[0][1],vertices[1][1]}};

		// Then create the Meshes: each side fans from its base edge to the summit.
		left = new FanMesh(this, left_array, summit, left_tex);
		right = new FanMesh(this, right_array, summit, right_tex);
		front = new FanMesh(this, front_array, summit, front_tex);
		back = new FanMesh(this, back_array, summit, back_tex);
		bottom = new RectangleMesh(this, base_array, bottom_tex);

		// Set color to each face (null color means "unset").
		bottom.setCol(this.bottom_col);
		left.setCol(this.left_col);
		right.setCol(this.right_col);
		front.setCol(this.front_col);
		back.setCol(this.back_col);

		// At last create Triangles of all meshes.
		left.createTriangles(FanMesh.MESH_ORIENTED_TRIANGLES);
		right.createTriangles(FanMesh.MESH_ORIENTED_TRIANGLES);
		front.createTriangles(FanMesh.MESH_ORIENTED_TRIANGLES);
		back.createTriangles(FanMesh.MESH_ORIENTED_TRIANGLES);
		bottom.createTriangles(RectangleMesh.MESH_ORIENTED_TRIANGLES);
	}

	/** Sets the Texture of the bottom face. @return this, for chaining. */
	@Override
	public Element setBottomTexture(Texture tex) {
		this.bottom_tex = tex;
		return this;
	}

	/** Sets the Texture of the left face. @return this, for chaining. */
	@Override
	public Element setLeftTexture(Texture tex) {
		this.left_tex = tex;
		return this;
	}

	/** Sets the Texture of the right face. @return this, for chaining. */
	@Override
	public Element setRightTexture(Texture tex) {
		this.right_tex = tex;
		return this;
	}

	/** Sets the Texture of the front face. @return this, for chaining. */
	@Override
	public Element setFrontTexture(Texture tex) {
		this.front_tex = tex;
		return this;
	}

	/** Sets the Texture of the back face. @return this, for chaining. */
	@Override
	public Element setBackTexture(Texture tex) {
		this.back_tex = tex;
		return this;
	}

	/** Sets the Color of the bottom face. @return this, for chaining. */
	@Override
	public Element setBottomColor(Color c) {
		this.bottom_col = c;
		return this;
	}

	/** Sets the Color of the left face. @return this, for chaining. */
	@Override
	public Element setLeftColor(Color c) {
		this.left_col = c;
		return this;
	}

	/** Sets the Color of the right face. @return this, for chaining. */
	@Override
	public Element setRightColor(Color c) {
		this.right_col = c;
		return this;
	}

	/** Sets the Color of the front face. @return this, for chaining. */
	@Override
	public Element setFrontColor(Color c) {
		this.front_col = c;
		return this;
	}

	/** Sets the Color of the back face. @return this, for chaining. */
	@Override
	public Element setBackColor(Color c) {
		this.back_col = c;
		return this;
	}
}
| |
package org.archboy.clobaframe.resource.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Stack;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import javax.inject.Named;
import org.archboy.clobaframe.io.NamedResourceInfo;
import org.archboy.clobaframe.resource.NotificationCacheableResourceInfo;
import org.archboy.clobaframe.resource.ResourceUpdateListener;
import org.archboy.clobaframe.resource.ResourceProviderSet;
import org.archboy.clobaframe.resource.LocationStrategy;
import org.archboy.clobaframe.resource.ManageResourceCache;
import org.archboy.clobaframe.resource.ContentHashResourceInfo;
import org.archboy.clobaframe.resource.ResourceManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
/**
*
* @author yang
*
*/
@Named
public class ResourceManagerImpl implements ResourceManager {
//, InitializingBean {
public static final String DEFAULT_LOCATION_STRATEGY = "default";
public static final boolean DEFAULT_CAN_MINIFY = false;
public static final boolean DEFAULT_CAN_COMPRESS = true;
public static final boolean DEFAULT_CAN_SERVER_CACHE = true;
public static final int DEFAULT_SERVER_CACHE_SECONDS = 10 * 60;
public static final String SETTING_KEY_LOCATION_STRATEGY = "clobaframe.resource.locationStrategy";
public static final String SETTING_KEY_CAN_MINIFY = "clobaframe.resource.minify";
public static final String SETTING_KEY_CAN_COMPRESS = "clobaframe.resource.compress";
public static final String SETTING_KEY_CAN_SERVER_CACHE = "clobaframe.resource.serverCache";
public static final String SETTING_KEY_SERVER_CACHE_SECONDS = "clobaframe.resource.serverCacheSeconds";
private ManageResourceCache manageResourceCache;
@Value("${" + SETTING_KEY_LOCATION_STRATEGY + ":" + DEFAULT_LOCATION_STRATEGY + "}")
private String locationStrategyName;
@Inject
private List<LocationStrategy> locationStrategys;
private LocationStrategy locationStrategy;
@Inject
private ResourceProviderSet resourceProviderSet;
private List<String> compressibleResourceMimeTypes;
private List<String> minifyResourceMimeTypes;
@Value("${" + SETTING_KEY_CAN_MINIFY + ":" + DEFAULT_CAN_MINIFY + "}")
private boolean canMinify = DEFAULT_CAN_MINIFY;
@Value("${" + SETTING_KEY_CAN_COMPRESS + ":" + DEFAULT_CAN_COMPRESS + "}")
private boolean canCompress = DEFAULT_CAN_COMPRESS;
@Value("${" + SETTING_KEY_CAN_SERVER_CACHE + ":" + DEFAULT_CAN_SERVER_CACHE + "}")
private boolean canServerCache = DEFAULT_CAN_SERVER_CACHE;
@Value("${" + SETTING_KEY_SERVER_CACHE_SECONDS + ":" + DEFAULT_SERVER_CACHE_SECONDS + "}")
private int cacheSeconds = DEFAULT_SERVER_CACHE_SECONDS;
private final Logger logger = LoggerFactory.getLogger(ResourceManagerImpl.class);
// keep the current post-handling resources name.
// to prevent infinite loop
private Stack<String> buildingResourceNames = new Stack<String>();
public void setLocationStrategys(List<LocationStrategy> locationStrategys) {
this.locationStrategys = locationStrategys;
}
public void setLocationStrategyName(String locationStrategyName) {
this.locationStrategyName = locationStrategyName;
}
public void setResourceProviderSet(ResourceProviderSet resourceProviderSet) {
this.resourceProviderSet = resourceProviderSet;
}
public void setCanMinify(boolean canMinify) {
this.canMinify = canMinify;
}
public void setCanCompress(boolean canCompress) {
this.canCompress = canCompress;
}
public void setCanServerCache(boolean canServerCache) {
this.canServerCache = canServerCache;
}
public void setCacheSeconds(int cacheSeconds) {
this.cacheSeconds = cacheSeconds;
}
@PostConstruct
//@Override
public void init() throws Exception {
for(LocationStrategy strategy : locationStrategys) {
if (strategy.getName().equals(locationStrategyName)) {
this.locationStrategy = strategy;
break;
}
}
if (locationStrategy == null) {
throw new IllegalArgumentException(String.format(
"Can not find the location strategy [%s]", locationStrategyName));
}
logger.info("Using [{}] web resource location strategy.", locationStrategyName);
manageResourceCache = new InMemoryManageResourceCache();
compressibleResourceMimeTypes = new ArrayList<String>();
compressibleResourceMimeTypes.add(MIME_TYPE_STYLE_SHEET);
compressibleResourceMimeTypes.addAll(MIME_TYPE_JAVA_SCRIPT);
compressibleResourceMimeTypes.addAll(MIME_TYPE_TEXT);
// Some types of font file are compressed,
// excepted the ttf and svg.
compressibleResourceMimeTypes.add("application/x-font-ttf"); // ttf
compressibleResourceMimeTypes.add("image/svg+xml"); // svg
minifyResourceMimeTypes = new ArrayList<String>();
minifyResourceMimeTypes.add(MIME_TYPE_STYLE_SHEET);
minifyResourceMimeTypes.addAll(MIME_TYPE_JAVA_SCRIPT);
}
/**
* Cache, compress, minify etc. the web resource.
* @param name
* @return NULL if the specify resource not found.
*/
protected NamedResourceInfo serveResource(String name) {
// load from in-momery cache first
NamedResourceInfo resourceInfo = manageResourceCache.get(name);
if (resourceInfo != null) {
return resourceInfo;
}
// then load from repository set
resourceInfo = resourceProviderSet.getByName(name);
if (resourceInfo == null) {
return null;
}
// post-handle resource
// to prevent infinite loop
if (!buildingResourceNames.empty() && buildingResourceNames.contains(name)) {
return null;
}
buildingResourceNames.push(name);
Collection<String> childResourceNames = null;
// transform url location
if (resourceInfo.getMimeType().equals(MIME_TYPE_STYLE_SHEET)) {
resourceInfo = new DefaultLocationTransformResourceInfo(this, resourceInfo);
childResourceNames = ((DefaultLocationTransformResourceInfo)resourceInfo).listChildResourceNames();
}
// minify
if (canMinify && minifyResourceMimeTypes.contains(resourceInfo.getMimeType())) {
resourceInfo = new DefaultMinifyResourceInfo(resourceInfo);
}
// compress
if (canCompress && compressibleResourceMimeTypes.contains(resourceInfo.getMimeType())) {
resourceInfo = new DefaultCompressibleResourceInfo(resourceInfo);
}
// server cache
if (canServerCache) {
resourceInfo = new DefaultCacheableResourceInfo(resourceInfo, cacheSeconds);
// insert the update listener into the child resources
if (childResourceNames != null){
for(String n : childResourceNames) {
NamedResourceInfo r = serveResource(n);
if (r != null && r instanceof NotificationCacheableResourceInfo) {
((NotificationCacheableResourceInfo)r).addUpdateListener((ResourceUpdateListener)resourceInfo);
}
}
}
}
// store into in-momery cache
manageResourceCache.set(resourceInfo);
buildingResourceNames.pop();
return resourceInfo;
}
@Override
public NamedResourceInfo getServedResource(String name) {
return serveResource(name);
}
@Override
public NamedResourceInfo get(String name) {
return resourceProviderSet.getByName(name);
}
@Override
public String getLocation(NamedResourceInfo resourceInfo) {
return locationStrategy.getLocation(resourceInfo);
}
@Override
public String getLocation(String name) {
NamedResourceInfo resource = getServedResource(name);
return resource == null ? null : getLocation(resource);
}
@Override
public void refresh(String name) {
NamedResourceInfo resource = serveResource(name);
if (resource != null) {
if (resource instanceof NotificationCacheableResourceInfo) {
((NotificationCacheableResourceInfo)resource).refresh();
}
}
}
@Override
public Collection<NamedResourceInfo> list() {
return resourceProviderSet.list();
}
@Override
public NamedResourceInfo getServedResourceByVersionName(String versionName) {
String name = locationStrategy.fromVersionName(versionName);
return (name == null ? null : getServedResource(name));
}
}
| |
// Copyright (C) 2014 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.git;
import static com.google.gerrit.common.data.Permission.forLabel;
import static com.google.gerrit.server.group.SystemGroupBackend.REGISTERED_USERS;
import static com.google.gerrit.server.project.Util.allow;
import static com.google.gerrit.server.project.Util.category;
import static com.google.gerrit.server.project.Util.value;
import static org.junit.Assert.assertEquals;
import com.google.common.collect.ImmutableList;
import com.google.gerrit.common.TimeUtil;
import com.google.gerrit.common.data.AccessSection;
import com.google.gerrit.common.data.LabelType;
import com.google.gerrit.lifecycle.LifecycleManager;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.Branch;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.LabelId;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
import com.google.gerrit.reviewdb.client.PatchSetInfo;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.account.AccountManager;
import com.google.gerrit.server.account.AuthRequest;
import com.google.gerrit.server.config.AllProjectsName;
import com.google.gerrit.server.git.LabelNormalizer.Result;
import com.google.gerrit.server.project.ProjectCache;
import com.google.gerrit.server.schema.SchemaCreator;
import com.google.gerrit.testutil.InMemoryDatabase;
import com.google.gerrit.testutil.InMemoryModule;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.util.Providers;
import org.eclipse.jgit.lib.Repository;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.List;
/** Unit tests for {@link LabelNormalizer}. */
public class LabelNormalizerTest {
  // Collaborators injected from the in-memory Gerrit test module.
  @Inject private AccountManager accountManager;
  @Inject private AllProjectsName allProjects;
  @Inject private GitRepositoryManager repoManager;
  @Inject private IdentifiedUser.GenericFactory userFactory;
  @Inject private InMemoryDatabase schemaFactory;
  @Inject private LabelNormalizer norm;
  @Inject private MetaDataUpdate.User metaDataUpdateFactory;
  @Inject private ProjectCache projectCache;
  @Inject private SchemaCreator schemaCreator;

  private LifecycleManager lifecycle;
  private ReviewDb db;
  private Account.Id userId;
  private IdentifiedUser user;
  private Change change;

  // Boots an in-memory injector, creates the schema, authenticates a test
  // account, then prepares the project and change fixtures shared by all tests.
  @Before
  public void setUpInjector() throws Exception {
    Injector injector = Guice.createInjector(new InMemoryModule());
    injector.injectMembers(this);
    lifecycle = new LifecycleManager();
    lifecycle.add(injector);
    lifecycle.start();
    db = schemaFactory.open();
    schemaCreator.create(db);
    userId = accountManager.authenticate(AuthRequest.forUser("user"))
        .getAccountId();
    user = userFactory.create(Providers.of(db), userId);
    configureProject();
    setUpChange();
  }

  // Strips every label permission from All-Projects and registers a single
  // "Verified" label (-1..+1); tests grant label permissions explicitly.
  private void configureProject() throws Exception {
    ProjectConfig pc = loadAllProjects();
    for (AccessSection sec : pc.getAccessSections()) {
      for (String label : pc.getLabelSections().keySet()) {
        sec.removePermission(forLabel(label));
      }
    }
    LabelType lt = category("Verified",
        value(1, "Verified"),
        value(0, "No score"),
        value(-1, "Fails"));
    pc.getLabelSections().put(lt.getName(), lt);
    save(pc);
  }

  // Inserts a minimal change with one patch set, owned by the test user.
  private void setUpChange() throws Exception {
    change = new Change(
        new Change.Key("Iabcd1234abcd1234abcd1234abcd1234abcd1234"),
        new Change.Id(1), userId,
        new Branch.NameKey(allProjects, "refs/heads/master"),
        TimeUtil.nowTs());
    PatchSetInfo ps = new PatchSetInfo(new PatchSet.Id(change.getId(), 1));
    ps.setSubject("Test change");
    change.setCurrentPatchSet(ps);
    db.changes().insert(ImmutableList.of(change));
  }

  // Null checks guard against partially-initialized state if setUp failed.
  @After
  public void tearDown() {
    if (lifecycle != null) {
      lifecycle.stop();
    }
    if (db != null) {
      db.close();
    }
    InMemoryDatabase.drop(schemaFactory);
  }

  // A vote exceeding the user's granted -1..+1 range is clamped to the range
  // (Code-Review 2 -> 1, reported as "updated"); an in-range vote is unchanged.
  @Test
  public void normalizeByPermission() throws Exception {
    ProjectConfig pc = loadAllProjects();
    allow(pc, forLabel("Code-Review"), -1, 1, REGISTERED_USERS, "refs/heads/*");
    allow(pc, forLabel("Verified"), -1, 1, REGISTERED_USERS, "refs/heads/*");
    save(pc);
    PatchSetApproval cr = psa(userId, "Code-Review", 2);
    PatchSetApproval v = psa(userId, "Verified", 1);
    assertEquals(Result.create(
        list(v),
        list(copy(cr, 1)),
        list()),
        norm.normalize(change, list(cr, v)));
  }

  // With a wide permission range (-5..5), votes are still clamped to the
  // label type's defined values (Code-Review 5 -> 2, Verified 5 -> 1).
  @Test
  public void normalizeByType() throws Exception {
    ProjectConfig pc = loadAllProjects();
    allow(pc, forLabel("Code-Review"), -5, 5, REGISTERED_USERS, "refs/heads/*");
    allow(pc, forLabel("Verified"), -5, 5, REGISTERED_USERS, "refs/heads/*");
    save(pc);
    PatchSetApproval cr = psa(userId, "Code-Review", 5);
    PatchSetApproval v = psa(userId, "Verified", 5);
    assertEquals(Result.create(
        list(),
        list(copy(cr, 2), copy(v, 1)),
        list()),
        norm.normalize(change, list(cr, v)));
  }

  // With no label permissions granted at all, both approvals are deleted.
  @Test
  public void emptyPermissionRangeOmitsResult() throws Exception {
    PatchSetApproval cr = psa(userId, "Code-Review", 1);
    PatchSetApproval v = psa(userId, "Verified", 1);
    assertEquals(Result.create(
        list(),
        list(),
        list(cr, v)),
        norm.normalize(change, list(cr, v)));
  }

  // A zero vote survives when the user has a non-empty range for that label
  // (Code-Review kept), but is deleted where no range exists (Verified).
  @Test
  public void explicitZeroVoteOnNonEmptyRangeIsPresent() throws Exception {
    ProjectConfig pc = loadAllProjects();
    allow(pc, forLabel("Code-Review"), -1, 1, REGISTERED_USERS, "refs/heads/*");
    save(pc);
    PatchSetApproval cr = psa(userId, "Code-Review", 0);
    PatchSetApproval v = psa(userId, "Verified", 0);
    assertEquals(Result.create(
        list(cr),
        list(),
        list(v)),
        norm.normalize(change, list(cr, v)));
  }

  // Loads the current All-Projects configuration from its git repository.
  private ProjectConfig loadAllProjects() throws Exception {
    try (Repository repo = repoManager.openRepository(allProjects)) {
      ProjectConfig pc = new ProjectConfig(allProjects);
      pc.load(repo);
      return pc;
    }
  }

  // Commits the project config and evicts the stale cache entry.
  private void save(ProjectConfig pc) throws Exception {
    MetaDataUpdate md =
        metaDataUpdateFactory.create(pc.getProject().getNameKey(), user);
    pc.commit(md);
    projectCache.evict(pc.getProject().getNameKey());
  }

  // Builds an approval of the given label/value on the current patch set.
  private PatchSetApproval psa(Account.Id accountId, String label, int value) {
    return new PatchSetApproval(
        new PatchSetApproval.Key(
            change.currentPatchSetId(), accountId, new LabelId(label)),
        (short) value, TimeUtil.nowTs());
  }

  // Copies an approval, overriding only its value.
  private PatchSetApproval copy(PatchSetApproval src, int newValue) {
    PatchSetApproval result =
        new PatchSetApproval(src.getKey().getParentKey(), src);
    result.setValue((short) newValue);
    return result;
  }

  private static List<PatchSetApproval> list(PatchSetApproval... psas) {
    return ImmutableList.<PatchSetApproval> copyOf(psas);
  }
}
| |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
******************************************************************************/
package org.apache.olingo.odata2.jpa.processor.core.access.data;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.net.URISyntaxException;
import org.apache.olingo.odata2.api.edm.EdmEntitySet;
import org.apache.olingo.odata2.api.edm.EdmEntityType;
import org.apache.olingo.odata2.api.edm.EdmException;
import org.apache.olingo.odata2.api.exception.ODataException;
import org.apache.olingo.odata2.api.processor.ODataContext;
import org.apache.olingo.odata2.jpa.processor.api.ODataJPAContext;
import org.apache.olingo.odata2.jpa.processor.api.exception.ODataJPARuntimeException;
import org.apache.olingo.odata2.jpa.processor.core.common.ODataJPATestConstants;
import org.apache.olingo.odata2.jpa.processor.core.mock.ODataContextMock;
import org.apache.olingo.odata2.jpa.processor.core.mock.ODataJPAContextMock;
import org.apache.olingo.odata2.jpa.processor.core.mock.PathInfoMock;
import org.apache.olingo.odata2.jpa.processor.core.mock.data.EdmMockUtilV2;
import org.apache.olingo.odata2.jpa.processor.core.mock.data.JPATypeMock;
import org.apache.olingo.odata2.jpa.processor.core.mock.data.JPATypeMock.JPARelatedTypeMock;
import org.apache.olingo.odata2.jpa.processor.core.mock.data.JPATypeMock.JPATypeEmbeddableMock;
import org.apache.olingo.odata2.jpa.processor.core.mock.data.JPATypeMock.JPATypeEmbeddableMock2;
import org.apache.olingo.odata2.jpa.processor.core.mock.data.ODataEntryMockUtil;
import org.junit.Test;
public class JPAEntityTest {
private JPAEntity jpaEntity = null;
// Verifies create() maps an OData entry containing a complex type onto the
// JPA entity, including the nested embeddable and its inner embeddable.
// Each checked exception type is caught separately and converted to fail()
// so the test reports the message instead of erroring out.
@Test
public void testCreateODataEntryWithComplexType() {
  try {
    EdmEntitySet edmEntitySet = EdmMockUtilV2.mockEdmEntitySet(JPATypeMock.ENTITY_NAME, true);
    EdmEntityType edmEntityType = edmEntitySet.getEntityType();
    jpaEntity = new JPAEntity(edmEntityType, edmEntitySet, mockODataJPAContext());
    jpaEntity.create(ODataEntryMockUtil.mockODataEntryWithComplexType(JPATypeMock.ENTITY_NAME));
  } catch (ODataJPARuntimeException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  } catch (EdmException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  } catch (ODataException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  }

  // Simple properties copied onto the entity.
  JPATypeMock jpaTypeMock = (JPATypeMock) jpaEntity.getJPAEntity();
  assertEquals(jpaTypeMock.getMInt(), ODataEntryMockUtil.VALUE_MINT);
  assertEquals(jpaTypeMock.getMString(), ODataEntryMockUtil.VALUE_MSTRING);
  assertTrue(jpaTypeMock.getMDateTime().equals(ODataEntryMockUtil.VALUE_DATE_TIME));

  // First-level embeddable.
  JPATypeEmbeddableMock jpaEmbeddableMock = jpaTypeMock.getComplexType();
  assertNotNull(jpaEmbeddableMock);
  assertEquals(jpaEmbeddableMock.getMShort(), ODataEntryMockUtil.VALUE_SHORT);

  // Embeddable nested inside the embeddable (float compared with delta 1).
  JPATypeEmbeddableMock2 jpaEmbeddableMock2 = jpaEmbeddableMock.getMEmbeddable();
  assertNotNull(jpaEmbeddableMock2);
  assertEquals(jpaEmbeddableMock2.getMFloat(), ODataEntryMockUtil.VALUE_MFLOAT, 1);
  assertEquals(jpaEmbeddableMock2.getMUUID(), ODataEntryMockUtil.VALUE_UUID);
}
// Verifies create() copies every simple property (int, String, char, char
// arrays, enum, date-time) from a plain OData entry onto the JPA entity.
@Test
public void testCreateODataEntry() {
  try {
    EdmEntitySet entitySet = EdmMockUtilV2.mockEdmEntitySet(JPATypeMock.ENTITY_NAME, false);
    EdmEntityType entityType = entitySet.getEntityType();
    jpaEntity = new JPAEntity(entityType, entitySet, mockODataJPAContext());
    jpaEntity.create(ODataEntryMockUtil.mockODataEntry(JPATypeMock.ENTITY_NAME));
  } catch (ODataJPARuntimeException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  } catch (EdmException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  } catch (ODataException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  }

  JPATypeMock created = (JPATypeMock) jpaEntity.getJPAEntity();
  assertEquals(created.getMInt(), ODataEntryMockUtil.VALUE_MINT);
  assertEquals(created.getMString(), ODataEntryMockUtil.VALUE_MSTRING);
  // Character-family properties round-trip through their string forms.
  assertEquals(ODataEntryMockUtil.VALUE_C.charAt(0), created.getMC());
  assertEquals(ODataEntryMockUtil.VALUE_CARRAY, new String(created.getMCArray()));
  assertEquals(ODataEntryMockUtil.VALUE_CHAR, created.getMChar().toString());
  assertEquals(ODataEntryMockUtil.VALUE_ENUM, created.getMSomeEnum());
  assertEquals(ODataEntryMockUtil.VALUE_CHARARRAY, JPAEntityParser.toString(created.getMCharArray()));
  assertTrue(created.getMDateTime().equals(ODataEntryMockUtil.VALUE_DATE_TIME));
}
// Verifies create() handles an OData entry with an inline (expanded)
// navigation: the related JPA entity is built and populated too.
@Test
public void testCreateODataEntryWithInline() {
  try {
    EdmEntitySet edmEntitySet = EdmMockUtilV2.mockEdmEntitySet(JPATypeMock.ENTITY_NAME, false);
    EdmEntityType edmEntityType = edmEntitySet.getEntityType();
    jpaEntity = new JPAEntity(edmEntityType, edmEntitySet, mockODataJPAContext());
    jpaEntity.create(ODataEntryMockUtil.mockODataEntryWithInline(JPATypeMock.ENTITY_NAME));
  } catch (ODataJPARuntimeException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  } catch (EdmException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  } catch (ODataException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  }

  // Simple properties on the root entity.
  JPATypeMock jpaTypeMock = (JPATypeMock) jpaEntity.getJPAEntity();
  assertEquals(jpaTypeMock.getMInt(), ODataEntryMockUtil.VALUE_MINT);
  assertEquals(jpaTypeMock.getMString(), ODataEntryMockUtil.VALUE_MSTRING);
  assertTrue(jpaTypeMock.getMDateTime().equals(ODataEntryMockUtil.VALUE_DATE_TIME));

  // Properties of the related entity created from the inline feed
  // (double compared with delta 0.0, i.e. exact).
  JPARelatedTypeMock relatedType = jpaTypeMock.getMRelatedEntity();
  assertEquals(relatedType.getMByte(), ODataEntryMockUtil.VALUE_MBYTE);
  assertEquals(relatedType.getMByteArray(), ODataEntryMockUtil.VALUE_MBYTEARRAY);
  assertEquals(relatedType.getMDouble(), ODataEntryMockUtil.VALUE_MDOUBLE, 0.0);
  assertEquals(relatedType.getMLong(), ODataEntryMockUtil.VALUE_MLONG);
}
// Verifies create() succeeds for an entry built from raw properties
// (mockODataEntryProperties) when the JPA context has its callback wired.
@Test
public void testCreateODataEntryProperty() {
  try {
    EdmEntitySet entitySet = EdmMockUtilV2.mockEdmEntitySet(JPATypeMock.ENTITY_NAME, false);
    EdmEntityType entityType = entitySet.getEntityType();
    jpaEntity = new JPAEntity(entityType, entitySet, mockODataJPAContext());
    jpaEntity.create(ODataEntryMockUtil.mockODataEntryProperties(JPATypeMock.ENTITY_NAME));
  } catch (ODataJPARuntimeException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  } catch (EdmException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  } catch (ODataException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  }

  JPATypeMock created = (JPATypeMock) jpaEntity.getJPAEntity();
  assertEquals(created.getMInt(), ODataEntryMockUtil.VALUE_MINT);
  assertEquals(created.getMString(), ODataEntryMockUtil.VALUE_MSTRING);
  assertTrue(created.getMDateTime().equals(ODataEntryMockUtil.VALUE_DATE_TIME));
}
// Negative test: without the OnJPAWriteContent callback, create() must fail
// with ERROR_JPA_BLOB_NULL. The expected exception's catch block returns
// (success path); falling through to the trailing fail() means no exception
// was raised and the test fails.
@Test
public void testCreateODataEntryPropertyWithOutCallBack() {
  try {
    EdmEntitySet edmEntitySet = EdmMockUtilV2.mockEdmEntitySet(JPATypeMock.ENTITY_NAME, false);
    EdmEntityType edmEntityType = edmEntitySet.getEntityType();
    jpaEntity = new JPAEntity(edmEntityType, edmEntitySet, mockODataJPAContextWithoutCallBack());
    jpaEntity.create(ODataEntryMockUtil.mockODataEntryProperties(JPATypeMock.ENTITY_NAME));
  } catch (ODataJPARuntimeException e) {
    // Expected failure: verify it is the BLOB-null error, then exit.
    assertEquals(ODataJPARuntimeException.ERROR_JPA_BLOB_NULL.getKey(), e.getMessageReference().getKey());
    return;
  } catch (EdmException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  } catch (ODataException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  }
  // Reached only if create() did not throw at all.
  fail(ODataJPATestConstants.EXCEPTION_EXPECTED);
}
// Verifies update() on an existing JPA instance: non-key properties are
// overwritten while the key (MInt) keeps its original value.
@Test
public void testUpdateODataEntry() {
  try {
    EdmEntitySet edmEntitySet = EdmMockUtilV2.mockEdmEntitySet(JPATypeMock.ENTITY_NAME, false);
    EdmEntityType edmEntityType = edmEntitySet.getEntityType();
    jpaEntity = new JPAEntity(edmEntityType, edmEntitySet, mockODataJPAContext());
    // Seed an existing (default-constructed) instance to be updated in place.
    JPATypeMock jpaTypeMock = new JPATypeMock();
    jpaEntity.setJPAEntity(jpaTypeMock);
    jpaEntity.update(ODataEntryMockUtil.mockODataEntry(JPATypeMock.ENTITY_NAME));
  } catch (ODataJPARuntimeException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  } catch (EdmException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  } catch (ODataException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  }
  JPATypeMock jpaTypeMock = (JPATypeMock) jpaEntity.getJPAEntity();
  assertEquals(jpaTypeMock.getMInt(), 0);// Key should not be changed
  assertEquals(jpaTypeMock.getMString(), ODataEntryMockUtil.VALUE_MSTRING);
  assertTrue(jpaTypeMock.getMDateTime().equals(ODataEntryMockUtil.VALUE_DATE_TIME));
}
// Verifies update() with a properties-based entry: non-key fields (String,
// enum, date-time) are overwritten, while the key (MInt) stays untouched.
@Test
public void testUpdateODataEntryProperty() {
  try {
    EdmEntitySet entitySet = EdmMockUtilV2.mockEdmEntitySet(JPATypeMock.ENTITY_NAME, false);
    EdmEntityType entityType = entitySet.getEntityType();
    jpaEntity = new JPAEntity(entityType, entitySet, mockODataJPAContext());
    // Seed an existing instance so update() modifies it in place.
    JPATypeMock existing = new JPATypeMock();
    jpaEntity.setJPAEntity(existing);
    jpaEntity.update(ODataEntryMockUtil.mockODataEntryProperties(JPATypeMock.ENTITY_NAME));
  } catch (ODataJPARuntimeException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  } catch (EdmException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  } catch (ODataException e) {
    fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
        + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
  }

  JPATypeMock updated = (JPATypeMock) jpaEntity.getJPAEntity();
  assertEquals(updated.getMInt(), 0);// Key should not be changed
  assertEquals(updated.getMString(), ODataEntryMockUtil.VALUE_MSTRING);
  assertEquals(ODataEntryMockUtil.VALUE_ENUM, updated.getMSomeEnum());
  assertTrue(updated.getMDateTime().equals(ODataEntryMockUtil.VALUE_DATE_TIME));
}
private ODataJPAContext mockODataJPAContext() throws ODataException {
PathInfoMock pathInfoMock = new PathInfoMock();
try {
pathInfoMock.setServiceRootURI("http://olingo.apache.org/service.svc");
} catch (URISyntaxException e) {
fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
+ ODataJPATestConstants.EXCEPTION_MSG_PART_2);
}
ODataContextMock contextMock = new ODataContextMock();
contextMock.setPathInfo(pathInfoMock.mock());
ODataContext context = contextMock.mock();
ODataJPAContext jpaContext = ODataJPAContextMock.mockODataJPAContext(context);
return jpaContext;
}
private ODataJPAContext mockODataJPAContextWithoutCallBack() throws ODataException {
ODataContext context = new ODataContextMock().mockWithoutOnJPAWriteContent();
ODataJPAContext jpaContext = ODataJPAContextMock.mockODataJPAContext(context);
return jpaContext;
}
}
| |
/*
* RED5 Open Source Media Server - https://github.com/Red5/
*
* Copyright 2006-2016 by respective authors (see below). All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.red5.io.amf;
import java.beans.PropertyDescriptor;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Type;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.beanutils.BeanUtilsBean;
import org.apache.commons.beanutils.PropertyUtilsBean;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.mina.core.buffer.IoBuffer;
import org.red5.io.amf3.ByteArray;
import org.red5.io.object.BaseInput;
import org.red5.io.object.DataTypes;
import org.red5.io.object.Deserializer;
import org.red5.io.object.RecordSet;
import org.red5.io.object.RecordSetPage;
import org.red5.io.utils.ArrayUtils;
import org.red5.io.utils.ObjectMap;
import org.red5.io.utils.XMLUtils;
import org.red5.io.utils.ConversionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
/**
* Input for Red5 data types
*
* @author The Red5 Project
* @author Luke Hubbard, Codegent Ltd (luke@codegent.com)
* @author Paul Gregoire (mondain@gmail.com)
*/
@SuppressWarnings("serial")
public class Input extends BaseInput implements org.red5.io.object.Input {

    protected Logger log = LoggerFactory.getLogger(this.getClass());

    /** Aliases mapping the short "DS*" Flex messaging markers to their Red5 compatibility classes. */
    protected static Map<String, String> classAliases = new HashMap<String, String>(3) {
        {
            put("DSA", "org.red5.compatibility.flex.messaging.messages.AsyncMessageExt");
            put("DSC", "org.red5.compatibility.flex.messaging.messages.CommandMessageExt");
            put("DSK", "org.red5.compatibility.flex.messaging.messages.AcknowledgeMessageExt");
        }
    };

    /** Buffer currently being deserialized. */
    protected IoBuffer buf;

    /** AMF type marker byte of the value currently being read. */
    protected byte currentDataType;

    /**
     * Creates Input object from byte buffer
     *
     * @param buf
     *            Byte buffer
     */
    public Input(IoBuffer buf) {
        super();
        this.buf = buf;
        if (log.isTraceEnabled()) {
            log.trace("Input: {}", Hex.encodeHexString(Arrays.copyOfRange(buf.array(), buf.position(), buf.limit())));
        }
    }

    /**
     * Reads the data type.
     *
     * @return One of AMF class constants with type
     * @see org.red5.io.amf.AMF
     */
    @Override
    public byte readDataType() {
        do {
            // get the data type
            currentDataType = buf.get();
            log.trace("Data type: {}", currentDataType);
            switch (currentDataType) {
                case AMF.TYPE_NULL:
                case AMF.TYPE_UNDEFINED:
                    return DataTypes.CORE_NULL;
                case AMF.TYPE_NUMBER:
                    return DataTypes.CORE_NUMBER;
                case AMF.TYPE_BOOLEAN:
                    return DataTypes.CORE_BOOLEAN;
                case AMF.TYPE_STRING:
                case AMF.TYPE_LONG_STRING:
                    return DataTypes.CORE_STRING;
                case AMF.TYPE_CLASS_OBJECT:
                case AMF.TYPE_OBJECT:
                    return DataTypes.CORE_OBJECT;
                case AMF.TYPE_MIXED_ARRAY:
                    return DataTypes.CORE_MAP;
                case AMF.TYPE_ARRAY:
                    return DataTypes.CORE_ARRAY;
                case AMF.TYPE_DATE:
                    return DataTypes.CORE_DATE;
                case AMF.TYPE_XML:
                    return DataTypes.CORE_XML;
                case AMF.TYPE_REFERENCE:
                    return DataTypes.OPT_REFERENCE;
                case AMF.TYPE_UNSUPPORTED:
                case AMF.TYPE_MOVIECLIP:
                case AMF.TYPE_RECORDSET:
                    // These types are not handled by core datatypes
                    // So add the amf mask to them, this way the deserializer
                    // will call back to readCustom, we can then handle or return null
                    return (byte) (currentDataType + DataTypes.CUSTOM_AMF_MASK);
                case AMF.TYPE_AMF3_OBJECT:
                    log.debug("Switch to AMF3");
                    return DataTypes.CORE_SWITCH;
            }
        } while (hasMoreProperties());
        log.trace("No more data types available");
        return DataTypes.CORE_END_OBJECT;
    }

    /**
     * Reads a null.
     *
     * @return Object
     */
    @Override
    public Object readNull() {
        return null;
    }

    /**
     * Reads a boolean.
     *
     * @return boolean
     */
    @Override
    public Boolean readBoolean() {
        return (buf.get() == AMF.VALUE_TRUE) ? Boolean.TRUE : Boolean.FALSE;
    }

    /**
     * Reads a Number. In ActionScript 1 and 2 Number type represents all numbers, both floats and integers.
     *
     * @return Number
     */
    @Override
    public Number readNumber() {
        int remaining = buf.remaining();
        log.debug("readNumber from {} bytes", remaining);
        // look to see if big enough for double
        if (remaining >= 8) {
            double d = buf.getDouble();
            log.debug("Number: {}", d);
            return d;
        }
        if (log.isDebugEnabled()) {
            log.debug("Remaining not big enough for number - offset: {} limit: {} {}", buf.position(), buf.limit(), Hex.encodeHexString(buf.array()));
        }
        // NOTE(review): this returns an Integer 0 while the normal path returns a Double;
        // callers treating the result as Double would get a ClassCastException here - confirm intent.
        return 0;
    }

    /**
     * Reads string from buffer
     *
     * @return String
     */
    @Override
    public String getString() {
        log.trace("getString - currentDataType: {}", currentDataType);
        byte lastDataType = currentDataType;
        // temporarily set to string for reading
        if (currentDataType != AMF.TYPE_STRING) {
            currentDataType = AMF.TYPE_STRING;
        }
        String result = readString();
        // set data type back to what it was
        currentDataType = lastDataType;
        return result;
    }

    /**
     * Reads a string
     *
     * @return String
     */
    @Override
    public String readString() {
        int limit = buf.limit();
        int len = 0;
        switch (currentDataType) {
            case AMF.TYPE_LONG_STRING:
                log.trace("Long string type");
                len = buf.getInt();
                // clamp a bogus length to the buffer limit so we never over-allocate
                if (len > limit) {
                    len = limit;
                }
                break;
            case AMF.TYPE_STRING:
                log.trace("Std string type");
                len = buf.getUnsignedShort();
                break;
            default:
                log.debug("Unknown AMF type: {}", currentDataType);
        }
        log.debug("Length: {} limit: {}", len, limit);
        byte[] str = new byte[len];
        buf.get(str);
        String string = bufferToString(str);
        return string;
    }

    /**
     * Converts the bytes into a string.
     *
     * @param str
     *            string bytes
     * @return decoded String
     */
    private final String bufferToString(byte[] str) {
        String string = null;
        if (str != null) {
            // decode using the AMF charset (UTF-8)
            string = AMF.CHARSET.decode(ByteBuffer.wrap(str)).toString();
            log.debug("String: {}", string);
        } else {
            log.warn("ByteBuffer was null attempting to read String");
        }
        return string;
    }

    /**
     * Returns a date
     *
     * @return Date Decoded string object
     */
    @Override
    public Date readDate() {
        /*
         * Date: 0x0B T7 T6 .. T0 Z1 Z2 T7 to T0 form a 64 bit Big Endian number
         * that specifies the number of nanoseconds that have passed since
         * 1/1/1970 0:00 to the specified time. This format is UTC 1970. Z1 an
         * Z0 for a 16 bit Big Endian number indicating the indicated time's
         * timezone in minutes.
         */
        long ms = (long) buf.getDouble();
        // The timezone can be ignored as the date always is encoded in UTC
        @SuppressWarnings("unused")
        short timeZoneMins = buf.getShort();
        Date date = new Date(ms);
        storeReference(date);
        return date;
    }

    /**
     * Reads a strict AMF array into either a typed array or a List, depending on the target type.
     *
     * @param target
     *            requested result type; when it is an array class, a typed array is produced
     * @return array or List of deserialized elements
     */
    @Override
    public Object readArray(Type target) {
        log.debug("readArray - target: {}", target);
        Object result = null;
        int count = buf.getInt();
        log.debug("Count: {}", count);
        // To conform to the Input API, we should convert the output into an Array if the Type asks us to.
        Class<?> collection = Collection.class;
        if (target instanceof Class<?>) {
            collection = (Class<?>) target;
        }
        List<Object> resultCollection = new ArrayList<>(count);
        if (collection.isArray()) {
            result = ArrayUtils.getArray(collection.getComponentType(), count);
        } else {
            result = resultCollection;
        }
        storeReference(result); //reference should be stored before reading of objects to get correct refIds
        for (int i = 0; i < count; i++) {
            resultCollection.add(Deserializer.deserialize(this, Object.class));
        }
        if (collection.isArray()) {
            ArrayUtils.fillArray(collection.getComponentType(), result, resultCollection);
        }
        return result;
    }

    /**
     * Read key - value pairs. This is required for the RecordSet deserializer.
     */
    @Override
    public Map<String, Object> readKeyValues() {
        Map<String, Object> result = new HashMap<String, Object>();
        readKeyValues(result);
        return result;
    }

    /**
     * Read key - value pairs into Map object
     *
     * @param result
     *            Map to put resulting pair to
     */
    protected void readKeyValues(Map<String, Object> result) {
        do {
            String name = readPropertyName();
            log.debug("property: {}", name);
            Object property = Deserializer.deserialize(this, Object.class);
            log.debug("val: {}", property);
            result.put(name, property);
            if (hasMoreProperties()) {
                skipPropertySeparator();
            } else {
                // end-of-object marker consumed by hasMoreProperties
                break;
            }
        } while (hasMoreProperties());
    }

    /**
     * Reads an AMF mixed array. If all keys are numeric the result collapses to a regular List,
     * otherwise a Map keyed by the original property names (with numeric keys converted to
     * Integer) is returned.
     *
     * @return List or Map representation of the mixed array
     */
    @Override
    public Object readMap() {
        // the maximum number used in this mixed array
        int maxNumber = buf.getInt();
        log.debug("Read start mixed array: {}", maxNumber);
        Object result;
        final Map<Object, Object> mixedResult = new LinkedHashMap<Object, Object>(maxNumber);
        // we must store the reference before we deserialize any items in it to ensure
        // that reference IDs are correct
        int reference = storeReference(mixedResult);
        Boolean normalArray = true;
        while (hasMoreProperties()) {
            String key = getString();
            log.debug("key: {}", key);
            if (!NumberUtils.isParsable(key)) {
                log.debug("key {} is causing non normal array", key);
                normalArray = false;
            }
            Object item = Deserializer.deserialize(this, Object.class);
            log.debug("item: {}", item);
            mixedResult.put(key, item);
        }
        // "+ 1" accounts for the optional "length" entry some encoders include
        if (mixedResult.size() <= maxNumber + 1 && normalArray) {
            // MixedArray actually is a regular array
            log.debug("mixed array is a regular array");
            final List<Object> listResult = new ArrayList<Object>(maxNumber);
            for (int i = 0; i < maxNumber; i++) {
                listResult.add(i, mixedResult.get(String.valueOf(i)));
            }
            result = listResult;
        } else {
            // convert initial indexes
            mixedResult.remove("length");
            for (int i = 0; i < maxNumber; i++) {
                final Object value = mixedResult.remove(String.valueOf(i));
                mixedResult.put(i, value);
            }
            result = mixedResult;
        }
        // replace the original reference with the final result
        storeReference(reference, result);
        return result;
    }

    /**
     * Creates a new instance of the className parameter and returns as an Object
     *
     * @param className
     *            Class name as String
     * @return Object New object instance (for given class)
     */
    @SuppressWarnings("all")
    protected Object newInstance(String className) {
        log.debug("Loading class: {}", className);
        Object instance = null;
        Class<?> clazz = null;
        if (className == null || "".equals(className)) {
            return instance;
        }
        try {
            // check for special DS class aliases; an unknown 3-letter name is kept as-is
            // (previously an unmatched alias lookup nulled className and caused an NPE below)
            if (className.length() == 3) {
                String alias = classAliases.get(className);
                if (alias != null) {
                    className = alias;
                }
            }
            if (className.startsWith("flex.")) {
                // Use Red5 compatibility class instead
                className = "org.red5.compatibility." + className;
                log.debug("Modified classname: {}", className);
            }
            if (!classAllowed(className)) {
                log.error("Class creation is not allowed {}", className);
            } else {
                clazz = Thread.currentThread().getContextClassLoader().loadClass(className);
                instance = clazz.newInstance();
            }
        } catch (InstantiationException iex) {
            try {
                // check for default ctor; getConstructor() avoids the ambiguous
                // varargs call getConstructor(null) which the compiler warns about
                clazz.getConstructor();
                log.error("Error loading class: {}", className);
            } catch (NoSuchMethodException nse) {
                log.error("Error loading class: {}; this can be resolved by adding a default constructor to your class", className);
            }
            log.debug("Exception was", iex);
        } catch (Exception ex) {
            log.error("Error loading class: {}", className);
            log.debug("Exception was", ex);
        }
        return instance;
    }

    /**
     * Reads the input as a bean and returns an object
     *
     * @param bean
     *            Input as bean
     * @return Decoded object
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    protected Object readBean(Object bean) {
        log.debug("readBean: {}", bean);
        storeReference(bean);
        Class theClass = bean.getClass();
        while (hasMoreProperties()) {
            String name = readPropertyName();
            Type type = getPropertyType(bean, name);
            log.debug("property: {} type: {}", name, type);
            Object property = Deserializer.deserialize(this, type);
            log.debug("val: {}", property);
            if (property != null) {
                try {
                    // convert when the deserialized value doesn't match the declared field type
                    if (type instanceof Class) {
                        Class t = (Class) type;
                        if (!t.isAssignableFrom(property.getClass())) {
                            property = ConversionUtils.convert(property, t);
                        }
                    }
                    final Field field = theClass.getField(name);
                    field.set(bean, property);
                } catch (Exception ex2) {
                    // no public field of that name; fall back to a bean setter
                    try {
                        BeanUtils.setProperty(bean, name, property);
                    } catch (Exception ex) {
                        log.error("Error mapping property: {} ({})", name, property);
                    }
                }
            } else {
                log.debug("Skipping null property: {}", name);
            }
            if (hasMoreProperties()) {
                skipPropertySeparator();
            } else {
                break; //hasMoreProperties == false, position moved to +3
            }
        }
        return bean;
    }

    /**
     * Reads the input as a map and returns a Map
     *
     * @return Read map
     */
    protected Map<String, Object> readSimpleObject() {
        log.debug("readSimpleObject");
        Map<String, Object> result = new ObjectMap<>();
        readKeyValues(result);
        storeReference(result);
        return result;
    }

    /**
     * Reads start object
     *
     * @return Read object
     */
    @Override
    public Object readObject() {
        String className;
        if (currentDataType == AMF.TYPE_CLASS_OBJECT) {
            className = getString();
            log.debug("readObject: {}", className);
            if (className != null) {
                log.debug("read class object");
                Object result = null;
                Object instance;
                if (className.equals("RecordSet")) {
                    result = new RecordSet(this);
                    storeReference(result);
                } else if (className.equals("RecordSetPage")) {
                    result = new RecordSetPage(this);
                    storeReference(result);
                } else if (!classAllowed(className)) {
                    // disallowed classes are still read, but only as a generic map
                    log.debug("Class creation is not allowed {}", className);
                    result = readSimpleObject();
                } else {
                    instance = newInstance(className);
                    if (instance != null) {
                        result = readBean(instance);
                    } else {
                        log.debug("Forced to use simple object for class {}", className);
                        result = readSimpleObject();
                    }
                }
                return result;
            }
        }
        return readSimpleObject();
    }

    /**
     * Returns a boolean stating whether there are more properties
     *
     * @return boolean <code>true</code> if there are more properties to read, <code>false</code> otherwise
     */
    public boolean hasMoreProperties() {
        if (buf.remaining() >= 3) {
            byte[] threeBytes = new byte[3];
            int pos = buf.position();
            buf.get(threeBytes);
            if (Arrays.equals(AMF.END_OF_OBJECT_SEQUENCE, threeBytes)) {
                // note: the 3-byte end marker is consumed here and the position is NOT reset
                log.trace("End of object");
                return false;
            }
            // not the end marker; rewind so the next read sees the same bytes
            buf.position(pos);
            return true;
        }
        // an end-of-object marker can't occupy less than 3 bytes so return true
        return true;
    }

    /**
     * Reads property name
     *
     * @return String Object property name
     */
    public String readPropertyName() {
        return getString();
    }

    /**
     * Skips property separator
     */
    public void skipPropertySeparator() {
        // AMF0 has no separator between properties; nothing to skip
    }

    /**
     * Reads XML
     *
     * @return String XML as string
     */
    @Override
    public Document readXML() {
        final String xmlString = readString();
        Document doc = null;
        try {
            doc = XMLUtils.stringToDoc(xmlString);
        } catch (IOException ioex) {
            log.error("IOException converting xml to dom", ioex);
        }
        storeReference(doc);
        return doc;
    }

    /**
     * Reads Custom
     *
     * @return Object Custom type object
     */
    @Override
    public Object readCustom() {
        // return null for now
        return null;
    }

    /**
     * Read ByteArray object. This is not supported by the AMF0 deserializer.
     *
     * @return ByteArray object
     */
    @Override
    public ByteArray readByteArray() {
        throw new RuntimeException("ByteArray objects not supported with AMF0");
    }

    /**
     * Read Vector&lt;int&gt; object. This is not supported by the AMF0 deserializer.
     *
     * @return Vector&lt;Integer&gt; object
     */
    @Override
    public Vector<Integer> readVectorInt() {
        throw new RuntimeException("Vector objects not supported with AMF0");
    }

    /**
     * Read Vector&lt;Long&gt; object. This is not supported by the AMF0 deserializer.
     *
     * @return Vector&lt;Long&gt; object
     */
    @Override
    public Vector<Long> readVectorUInt() {
        throw new RuntimeException("Vector objects not supported with AMF0");
    }

    /**
     * Read Vector&lt;Number&gt; object. This is not supported by the AMF0 deserializer.
     *
     * @return Vector&lt;Double&gt; object
     */
    @Override
    public Vector<Double> readVectorNumber() {
        throw new RuntimeException("Vector objects not supported with AMF0");
    }

    /**
     * Read Vector&lt;Object&gt; object. This is not supported by the AMF0 deserializer.
     *
     * @return Vector&lt;Object&gt; object
     */
    @Override
    public Vector<Object> readVectorObject() {
        throw new RuntimeException("Vector objects not supported with AMF0");
    }

    /**
     * Reads Reference
     *
     * @return Object Read reference to object
     */
    @Override
    public Object readReference() {
        return getReference(buf.getUnsignedShort());
    }

    /** {@inheritDoc} */
    @Override
    public void reset() {
        clearReferences();
    }

    /**
     * Determines the declared type of a bean property, trying a public field first
     * and then the bean's property descriptor.
     *
     * @param instance
     *            bean instance (may be null for anonymous objects)
     * @param propertyName
     *            property to look up
     * @return declared generic type, or {@code Object.class} when it cannot be determined
     */
    protected Type getPropertyType(Object instance, String propertyName) {
        try {
            if (instance != null) {
                Field field = instance.getClass().getField(propertyName);
                return field.getGenericType();
            } else {
                // instance is null for anonymous class, use default type
            }
        } catch (NoSuchFieldException e1) {
            try {
                BeanUtilsBean beanUtilsBean = BeanUtilsBean.getInstance();
                PropertyUtilsBean propertyUtils = beanUtilsBean.getPropertyUtils();
                PropertyDescriptor propertyDescriptor = propertyUtils.getPropertyDescriptor(instance, propertyName);
                return propertyDescriptor.getReadMethod().getGenericReturnType();
            } catch (Exception e2) {
                // nothing
            }
        } catch (Exception e) {
            // ignore other exceptions
        }
        // return Object class type by default
        return Object.class;
    }
}
| |
import java.io.File;
import java.io.FileInputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.swing.JOptionPane;
import javax.swing.SwingUtilities;
import javax.swing.text.BadLocationException;
import javax.swing.text.DefaultStyledDocument;
/*
* LineNumApplet.java
*
* Created on May 14, 2008, 11:42 PM
*/
/**
*
* @author Dale Cox
*/
/**
 * Applet that loads a remote text file into a text pane, optionally prefixing
 * every line with its line number.
 *
 * @author Dale Cox
 */
public class LineNumApplet extends javax.swing.JApplet {

    /** Initializes the applet LineNumApplet */
    public void init() {
        try {
            // build the UI on the EDT as required by Swing
            SwingUtilities.invokeAndWait(new Runnable() {
                public void run() {
                    initComponents();
                }
            });
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    /** This method is called from within the init() method to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        jScrollPaneText = new javax.swing.JScrollPane();
        jTextPaneSource = new javax.swing.JTextPane();
        jSeparator1 = new javax.swing.JSeparator();
        jToolBar1 = new javax.swing.JToolBar();
        loadFile = new javax.swing.JButton();
        jSeparator3 = new javax.swing.JToolBar.Separator();
        loadFileNum = new javax.swing.JButton();
        jSeparator2 = new javax.swing.JToolBar.Separator();
        clear = new javax.swing.JButton();
        jTextPaneSource.setEditable(false);
        jScrollPaneText.setViewportView(jTextPaneSource);
        jToolBar1.setRollover(true);
        jToolBar1.setMargin(new java.awt.Insets(1, 1, 1, 1));
        loadFile.setText("Load File w/o Line Numbers");
        loadFile.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                loadFileActionPerformed(evt);
            }
        });
        jToolBar1.add(loadFile);
        jToolBar1.add(jSeparator3);
        loadFileNum.setText("Load File w/ Line Numbers");
        loadFileNum.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                loadFileNumActionPerformed(evt);
            }
        });
        jToolBar1.add(loadFileNum);
        jToolBar1.add(jSeparator2);
        clear.setText("Clear");
        clear.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                clearActionPerformed(evt);
            }
        });
        jToolBar1.add(clear);
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                    .addComponent(jScrollPaneText, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, 664, Short.MAX_VALUE)
                    .addComponent(jSeparator1, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, 664, Short.MAX_VALUE)
                    .addComponent(jToolBar1, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, 664, Short.MAX_VALUE))
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(jToolBar1, javax.swing.GroupLayout.PREFERRED_SIZE, 25, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jSeparator1, javax.swing.GroupLayout.PREFERRED_SIZE, 10, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jScrollPaneText, javax.swing.GroupLayout.DEFAULT_SIZE, 459, Short.MAX_VALUE)
                .addContainerGap())
        );
    }// </editor-fold>//GEN-END:initComponents

    /**
     * Loads the remote text file into the text pane without line numbers.
     */
    private void loadFileActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_loadFileActionPerformed
        try {
            //Clear text area
            jTextPaneSource.setText("");
            //set text directly from the remote URL
            URL fileLoc = new URL("http://dalecox99.com/Programming%20Examples/Java/content.txt");
            jTextPaneSource.setPage(fileLoc);
            //set caret position to top of doc
            jTextPaneSource.setCaretPosition(0);
        } catch (java.io.IOException e) {
            JOptionPane.showMessageDialog(jScrollPaneText, "Error: " + e, "Applet Error", JOptionPane.ERROR_MESSAGE);
        }
    }//GEN-LAST:event_loadFileActionPerformed

    /**
     * Re-renders the currently loaded text with a line number prefixed to each line.
     * Requires that the file was loaded first via the other button.
     */
    private void loadFileNumActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_loadFileNumActionPerformed
        try {
            // regex matching a literal newline character
            String newLine = "\\n";
            String sourceContents = jTextPaneSource.getText();
            if (sourceContents.isEmpty()) {
                JOptionPane.showMessageDialog(rootPane, "Please Load the File with out Line Numbers First");
                return;
            }
            jTextPaneSource.setText("");
            DefaultStyledDocument sourceDoc = (DefaultStyledDocument) jTextPaneSource.getDocument();
            // the first line gets its number up front; the rest are inserted after each newline
            sourceContents = "1 " + sourceContents;
            StringBuffer sourceBuffer = new StringBuffer(sourceContents);
            // find the offset just past every newline
            Pattern pCPName = Pattern.compile(newLine, Pattern.MULTILINE);
            Matcher cpMatcher = pCPName.matcher(sourceBuffer);
            // typed list avoids the raw-type warning and the parseInt(toString()) round-trip below
            ArrayList<Integer> offEnd = new ArrayList<Integer>();
            while (cpMatcher.find()) {
                offEnd.add(cpMatcher.end());
            }
            // insert line numbers back-to-front so earlier offsets stay valid
            int count = offEnd.size() + 1;
            for (int i = offEnd.size() - 1; i >= 0; --i) {
                sourceBuffer.insert(offEnd.get(i).intValue(), count + " ");
                --count;
            }
            //Display text read from file
            jTextPaneSource.setText("");
            sourceDoc.insertString(0, sourceBuffer.toString(), null);
            jTextPaneSource.setCaretPosition(0);
        } catch (BadLocationException ex) {
            jTextPaneSource.setText("");
            JOptionPane.showMessageDialog(jScrollPaneText, "Error: " + ex, "Applet Error", JOptionPane.ERROR_MESSAGE);
        }
    }//GEN-LAST:event_loadFileNumActionPerformed

    /** Clears the text pane. */
    private void clearActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_clearActionPerformed
        jTextPaneSource.setText("");
    }//GEN-LAST:event_clearActionPerformed

    /**
     * Reads the content file from a host+path derived from a fixed URL.
     * NOTE(review): this treats the URL's host+path as a local filesystem path,
     * which only works if the file happens to exist locally - confirm intent.
     *
     * @return file contents, or an empty string on failure
     */
    private String readFromFile() {
        String sourceContents = "";
        try {
            URL fileLoc = new URL("http://www.digitalworlddevelopment.com/Java/content.txt");
            String host = fileLoc.getHost();
            URI fileName = fileLoc.toURI();
            String Path = fileName.getPath();
            System.out.println("host: " + host);
            System.out.println("Path: " + Path);
            File inputFile = new File(host + Path);
            FileInputStream in = new FileInputStream(inputFile);
            byte bt[] = new byte[(int) inputFile.length()];
            // InputStream.read may return fewer bytes than requested; loop until full or EOF
            int off = 0;
            while (off < bt.length) {
                int n = in.read(bt, off, bt.length - off);
                if (n < 0) {
                    break; // unexpected EOF - use what was read so far
                }
                off += n;
            }
            sourceContents = new String(bt, 0, off);
            in.close();
        } catch (java.io.IOException e) {
            JOptionPane.showMessageDialog(jScrollPaneText, "Error: " + e, "Applet Error", JOptionPane.ERROR_MESSAGE);
        } catch (URISyntaxException ex) {
            JOptionPane.showMessageDialog(jScrollPaneText, "Error: " + ex, "Applet Error", JOptionPane.ERROR_MESSAGE);
        }
        return sourceContents;
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton clear;
    private javax.swing.JScrollPane jScrollPaneText;
    private javax.swing.JSeparator jSeparator1;
    private javax.swing.JToolBar.Separator jSeparator2;
    private javax.swing.JToolBar.Separator jSeparator3;
    private javax.swing.JTextPane jTextPaneSource;
    private javax.swing.JToolBar jToolBar1;
    private javax.swing.JButton loadFile;
    private javax.swing.JButton loadFileNum;
    // End of variables declaration//GEN-END:variables
}
| |
/**
* Copyright (C) 2015 Orange
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.francetelecom.clara.cloud.paas.it.services.helper;
import com.francetelecom.clara.cloud.commons.BusinessException;
import com.francetelecom.clara.cloud.commons.TechnicalException;
import com.francetelecom.clara.cloud.core.service.ManageEnvironment;
import com.francetelecom.clara.cloud.core.service.exception.*;
import com.francetelecom.clara.cloud.coremodel.Application;
import com.francetelecom.clara.cloud.coremodel.ApplicationRelease;
import com.francetelecom.clara.cloud.coremodel.MiddlewareProfile;
import com.francetelecom.clara.cloud.coremodel.PaasUser;
import com.francetelecom.clara.cloud.logicalmodel.LogicalDeployment;
import com.francetelecom.clara.cloud.logicalmodel.LogicalWebGUIService;
import com.francetelecom.clara.cloud.logicalmodel.samplecatalog.SampleAppFactory;
import com.francetelecom.clara.cloud.services.dto.EnvironmentDetailsDto;
import com.francetelecom.clara.cloud.services.dto.EnvironmentDto;
import com.francetelecom.clara.cloud.services.dto.EnvironmentOpsDetailsDto;
import com.francetelecom.clara.cloud.services.dto.LinkDto;
import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.internal.AssumptionViolatedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.web.client.RestTemplate;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.util.*;
public class PaasServicesEnvITHelper {
protected static Logger logger = LoggerFactory.getLogger(PaasServicesEnvITHelper.class.getName());
/**
* configuration of the ServicesEnvIT Test
*/
protected PaasServicesEnvITConfiguration itConfiguration;
protected String environmentUID = null;
protected Application application;
// logical deployment ID is kept so that we can get access on logical model for assertions
protected int logicalDeploymentID;
protected int maxRequests = 10;
protected int maxSessions = 10;
protected boolean isStateful = false;
protected EnvironmentDto.EnvironmentTypeEnum envType = EnvironmentDto.EnvironmentTypeEnum.DEVELOPMENT;
/**
* environment creation timeout in minutes (default is 90 (1h30);
* can be overridden by each test) default is 1h30 in order to
* include image generation if any example : when 2 appliance are
* generated (20min) + published (7min) + started (workaround
* included : 30min) then 60min is not enough
*/
private int environmentCreationTimeoutMin = 90;
/**
* environment stop timeout in minutes (default is 10; can be
* overridden by each test)
*/
private int environmentStopTimeoutMin = 10;
/**
* environment start timeout in minutes (default is 15; can be
* overridden by each test)
*/
private int environmentStartTimeoutMin = 10;
/**
* environment delete timeout in minutes (default is 10; can be
* overridden by each test)
*/
private int environmentDeleteTimeout = 10;
/**
* Test url attempt count
*/
private int webAppTestAttempts = 2;
/**
* test url wait time in second
*/
private int webAppTestWaitTime = 5;
private Boolean skipDeleteEnvironmentAtTheEnd;
private PaasUser currentUser;
private EnvironmentDto.EnvironmentStatusEnum environmentExpectedStatus;
/**
* constructor :
*
* @param itConfiguration test configuration
*/
    public PaasServicesEnvITHelper(PaasServicesEnvITConfiguration itConfiguration) {
        this.itConfiguration = itConfiguration;
        // By default a freshly created environment is expected to end up RUNNING.
        environmentExpectedStatus = EnvironmentDto.EnvironmentStatusEnum.RUNNING;
    }
public void setEnvironmentExpectedStatus(EnvironmentDto.EnvironmentStatusEnum environmentExpectedStatus) {
this.environmentExpectedStatus = environmentExpectedStatus;
}
public Set<String> getAllDistinctHostFromEnvironment() {
List<EnvironmentOpsDetailsDto.VMAccessDto> vmAccessDtos = getEnvironmentVmAccessDtos();
Set<String> envHosts = new HashSet<String>();
for (EnvironmentOpsDetailsDto.VMAccessDto vmAccessDto : vmAccessDtos) {
envHosts.add(vmAccessDto.getIp());
}
return envHosts;
}
public Set<String> getAllDistinctFqdnFromEnvironment() {
List<EnvironmentOpsDetailsDto.VMAccessDto> vmAccessDtos = getEnvironmentVmAccessDtos();
Set<String> envFqdns = new HashSet<String>();
for (EnvironmentOpsDetailsDto.VMAccessDto vmAccessDto : vmAccessDtos) {
envFqdns.add(vmAccessDto.getHostname());
}
return envFqdns;
}
public Map<String, String> getAllFqdnAndHostFromEnvironment() {
List<EnvironmentOpsDetailsDto.VMAccessDto> vmAccessDtos = getEnvironmentVmAccessDtos();
Map<String, String> envFQdnIps = new HashMap<String, String>();
for (EnvironmentOpsDetailsDto.VMAccessDto vmAccessDto : vmAccessDtos) {
envFQdnIps.put(vmAccessDto.getHostname(), vmAccessDto.getIp());
}
return envFQdnIps;
}
private List<EnvironmentOpsDetailsDto.VMAccessDto> getEnvironmentVmAccessDtos() {
EnvironmentOpsDetailsDto environmentOpsDetails;
try {
environmentOpsDetails = getEnvironmentOpsDetails();
} catch (ObjectNotFoundException e) {
throw new TechnicalException("Unable to find test environment", e);
}
return environmentOpsDetails.listVMAccesses();
}
/**
* get all web gui access link from the env
*
* @return access links list
*/
private List<LinkDto> getWebGuiAccessLinks() {
EnvironmentDetailsDto envDto;
LogicalDeployment ld;
try {
envDto = getEnvironmentDetails();
ld = getLogicalDeployment();
} catch (ObjectNotFoundException e) {
throw new TechnicalException("Unable to find test environment", e);
}
Set<LogicalWebGUIService> webGuiServices = ld.listLogicalServices(LogicalWebGUIService.class);
if (webGuiServices.isEmpty()) {
logger.warn("No web gui to be tested");
}
Map<String, List<LinkDto>> linkDtosMap = envDto.getLinkDtoMap();
List<LinkDto> guiAccessLinks = new ArrayList<LinkDto>();
for (LogicalWebGUIService webGuiService : webGuiServices) {
List<LinkDto> linkDtos = linkDtosMap.get(webGuiService.getName());
for (LinkDto link : linkDtos) {
if (link.getLinkType() == LinkDto.LinkTypeEnum.ACCESS_LINK) {
guiAccessLinks.add(link);
}
}
}
return guiAccessLinks;
}
/**
* Ensure that the EAR is deployed and accessible
*/
public void application_should_be_accessible(boolean accessibleExpected) {
List<LinkDto> guiAccessLinks = getWebGuiAccessLinks();
Assert.assertFalse("No access url corresponding to a webGui service have been found",
guiAccessLinks.isEmpty());
getApplicationAccessHelper()
.checkWebGuiServicesAccess(guiAccessLinks, accessibleExpected, itConfiguration);
}
public LogicalDeployment getLogicalDeployment() throws ObjectNotFoundException {
return itConfiguration.getManageLogicalDeployment().findLogicalDeployment(logicalDeploymentID);
}
protected PaasServicesEnvApplicationAccessHelper getApplicationAccessHelper() {
return new PaasServicesEnvApplicationAccessHelper(
itConfiguration.getLogicalModelCatalog(),
webAppTestAttempts,
webAppTestWaitTime);
}
public String setUp() {
checkThatAutowiredFieldIsNotNull(itConfiguration);
checkThatAutowiredFieldIsNotNull(itConfiguration.getManageApplication());
currentUser = AuthenticationHelper.loginAsAdmin();
if (environmentUID == null) {
environmentUID = createTestEnvironment(environmentExpectedStatus);
}
// else : environmentUID != null
assumeEnvironmentNotFailed(environmentUID);
return environmentUID;
}
public static void checkThatAutowiredFieldIsNotNull(Object autowiredField) {
if (autowiredField == null) {
throw new TechnicalException("declared autowired fields null : \n"
+ "\t- either you start an @Ignore test\n"
+ "\t- or check the context..");
}
}
/**
* Global tear-down consists in deleting XaaS resources created during the
* test
*/
public void tearDown() {
logger.info("/////////////////////////////////////////////////////");
logger.info("//////////////// TEAR DOWN - start //////////////////");
logger.info("/////////////////////////////////////////////////////");
if (application == null) {
// nothing to remove
return;
}
boolean skipDelete = skipDelete();
String firstError = null;
try {
for (ApplicationRelease release : itConfiguration.getManageApplicationRelease().findApplicationReleasesByAppUID(application.getUID())) {
firstError = removeReleaseAndRelatedEnvironments(skipDelete, firstError, release);
}
if (!skipDelete) {
itConfiguration.getManageApplication().deleteApplication(application.getUID());
application = null;
}
if (firstError != null) {
throw new TechnicalException("Error during tearDown, check for not deleted resources: " + firstError);
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
AuthenticationHelper.logout();
logger.info("//////////////// TEAR DOWN - end //////////////////");
}
private boolean skipDelete() {
if (skipDeleteEnvironmentAtTheEnd == null) {
setSkipDeleteEnvironmentAtTheEnd(Boolean.getBoolean("skipDeleteEnvironmentAtTheEnd"));
}
return skipDeleteEnvironmentAtTheEnd;
}
private String removeReleaseAndRelatedEnvironments(boolean skipDelete, String firstError, ApplicationRelease release) throws ApplicationReleaseNotFoundException {
for (EnvironmentDto env : itConfiguration.getManageEnvironment().findEnvironmentsByAppRelease(release.getUID())) {
try {
EnvironmentOpsDetailsDto environmentDetails = itConfiguration.getManageEnvironment().findEnvironmentOpsDetailsByUID(env.getUid());
logger.info("Splunk logs available at " + environmentDetails.getLinkDtoMap());
if (skipDelete) {
logger.info("skipping clean up of resources for environment " + env.getUid()
+ ", please perform this clean up manually. Details follow.");
displayEnvironmentDetailsInLogs(environmentDetails);
break;
} else {
itConfiguration.getManageEnvironment().deleteEnvironment(env.getUid());
waitForStatus(env.getUid(), EnvironmentDto.EnvironmentStatusEnum.REMOVED, environmentDeleteTimeout);
for (EnvironmentOpsDetailsDto.VMAccessDto access : environmentDetails.listVMAccesses()) {
String curVMLogInfo = "VM " + access.getHostname() + " for environment " + env.getUid();
if (access.getIaasId() != null) {
logger.warn("Cannot retrieve infos for iaasId {}: {}", access.getIaasId(), curVMLogInfo);
}
}
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
if (firstError == null) {
firstError = "Error : " + e.getMessage();
}
}
}
if (!skipDelete) {
itConfiguration.getManageApplicationRelease().deleteApplicationRelease(release.getUID());
}
return firstError;
}
public void assumeEnvironmentNotFailed(String environmentUID) {
if (environmentExpectedStatus == EnvironmentDto.EnvironmentStatusEnum.FAILED){
return;
}
EnvironmentDto envDto;
try {
envDto = itConfiguration.getManageEnvironment().findEnvironmentByUID(environmentUID);
} catch (ObjectNotFoundException e) {
throw new TechnicalException("Unable to create test environment", e);
}
boolean envStatusFailed = (EnvironmentDto.EnvironmentStatusEnum.FAILED == envDto.getStatus());
if (envStatusFailed) {
logger.error("Skipping test because environment creation has failed in previous test.");
}
Assume.assumeTrue(!envStatusFailed);
}
public String createTestEnvironment(EnvironmentDto.EnvironmentStatusEnum environmentExpectedStatus) {
PaasUser paasUser = createTestUser();
application = createApplication(paasUser);
ApplicationRelease applicationRelease = createApplicationRelease(getMiddlewareProfileVersion(), paasUser);
logicalDeploymentID = createLogicalDeployment(applicationRelease);
// *************** createEnvironment *************************
String environmentLabel = itConfiguration.getName() + "-" + envType.toString();
try {
environmentUID = itConfiguration.getManageEnvironment().createEnvironment(
applicationRelease.getUID(),
envType,
paasUser.getSsoId().getValue(),
environmentLabel);
} catch (BusinessException e) {
throw new TechnicalException("Unable to create test environment", e);
}
try {
waitForStatus(environmentUID, environmentExpectedStatus, environmentCreationTimeoutMin);
} catch (ObjectNotFoundException e) {
throw new AssumptionViolatedException(e, new TestComponentExpected(EnvironmentDetailsDto.class, environmentUID, "test environment"));
}
displayEnvironmentDetailsInLogs();
return environmentUID;
}
public void displayEnvironmentDetailsInLogs() {
if (environmentUID == null) {
return;
}
EnvironmentDetailsDto environmentDetails;
try {
environmentDetails = getEnvironmentDetails();
} catch (ObjectNotFoundException e) {
throw new AssumptionViolatedException(e, new TestComponentExpected(EnvironmentDetailsDto.class, environmentUID, "test environment"));
}
displayEnvironmentDetailsInLogs(environmentDetails);
}
private int createLogicalDeployment(ApplicationRelease applicationRelease) {
// Refetch to eagerly fetch all fields.
LogicalDeployment logicalDeployment;
int ldId = applicationRelease.getLogicalDeployment().getId();
try {
logicalDeployment = itConfiguration.getManageLogicalDeployment().findLogicalDeployment(ldId);
} catch (ObjectNotFoundException e) {
throw new AssumptionViolatedException(e, new TestComponentExpected(LogicalDeployment.class, String.valueOf(ldId), "test logical deployment"));
}
itConfiguration.getLogicalModelCatalog().populateLogicalDeployment(logicalDeployment);
// Update maxSession and maxRequest of logical web gui services
for (LogicalWebGUIService s : logicalDeployment.listLogicalServices(LogicalWebGUIService.class)) {
s.setMaxNumberSessions(maxSessions);
s.setMaxReqPerSeconds(maxRequests);
s.setStateful(isStateful);
}
try {
logicalDeployment = itConfiguration.getManageLogicalDeployment().updateLogicalDeployment(logicalDeployment);
} catch (Exception e) {
throw new TechnicalException("Unable to update test logical deployment", e);
}
// we need to call
// checkOverallConsistencyAndUpdateLogicaldeployment() to trigger
// maven references resolution
try {
itConfiguration.getManageLogicalDeployment().checkOverallConsistencyAndUpdateLogicalDeployment(logicalDeployment);
} catch (BusinessException e) {
throw new TechnicalException("Unable to check overall consistency and update logical deployment: " + e.getMessage(), e);
}
return ldId;
}
/**
* Creates and persist application release
*
* @param middlewareProfileVersion middleware profile version
* @param paasUser application release owner
* @return created applicationRelease
*/
private ApplicationRelease createApplicationRelease(String middlewareProfileVersion, PaasUser paasUser) {
String applicationReleaseUID;
try {
applicationReleaseUID = itConfiguration.getManageApplicationRelease().createApplicationRelease(application.getUID(), paasUser.getSsoId().getValue(), "1");
} catch (Exception e) {
throw new TechnicalException("Unable to create test application release", e);
}
// Fetch applicationRelease from DB
ApplicationRelease applicationRelease;
try {
applicationRelease = itConfiguration.getManageApplicationRelease().findApplicationReleaseByUID(applicationReleaseUID);
} catch (ApplicationReleaseNotFoundException e) {
throw new AssumptionViolatedException(e, new TestComponentExpected(ApplicationRelease.class, applicationReleaseUID, "application release"));
}
// set middleware profile version (as overriden by each sample)
applicationRelease.setMiddlewareProfileVersion(middlewareProfileVersion);
try {
applicationRelease = itConfiguration.getManageApplicationRelease().updateApplicationRelease(applicationRelease);
} catch (ApplicationReleaseNotFoundException e) {
throw new AssumptionViolatedException(e, new TestComponentExpected(ApplicationRelease.class, applicationReleaseUID, "application release update"));
}
return applicationRelease;
}
private Application createApplication(PaasUser paasUser) {
String applicationUID;
try {
SampleAppFactory logicalModelCatalog = itConfiguration.getLogicalModelCatalog();
applicationUID = itConfiguration.getManageApplication().createPublicApplication(
logicalModelCatalog.getAppCode(),
logicalModelCatalog.getAppLabel(),
logicalModelCatalog.getAppDescription(),
null,
paasUser.getSsoId());
} catch (DuplicateApplicationException | PaasUserNotFoundException e) {
throw new TechnicalException("Unable to create test application", e);
}
// fetch application from DB
try {
return itConfiguration.getManageApplication().findApplicationByUID(applicationUID);
} catch (ApplicationNotFoundException e) {
throw new AssumptionViolatedException(e, new TestComponentExpected(Application.class, applicationUID, "test application"));
}
}
private PaasUser createTestUser() {
PaasUser paasUser = currentUser;
itConfiguration.getManagePaasUser().checkBeforeCreatePaasUser(paasUser);
try {
paasUser = itConfiguration.getManagePaasUser().findPaasUser(paasUser.getSsoId().getValue());
} catch (ObjectNotFoundException e) {
throw new AssumptionViolatedException(e, new TestComponentExpected(PaasUser.class, paasUser.getSsoId().getValue(), "test user"));
}
return paasUser;
}
/**
* MiddlewareProfile is owned by logical model instance
* if null; the DEFAULT_VCD_MIDDLEWARE_PROFILE is used
*
* @return
*/
public String getMiddlewareProfileVersion() {
String logicalModelMiddlewareProfile = itConfiguration.getLogicalModelCatalog() != null ?
itConfiguration.getLogicalModelCatalog().getAppReleaseMiddlewareProfile()
: null;
if (logicalModelMiddlewareProfile != null) {
return logicalModelMiddlewareProfile;
}
String middlewareProfileToUse = MiddlewareProfile.getDefault().getVersion();
logger.info("no AppReleaseMiddlewareProfile specified into the LogicalModel, so use the default one : {}", middlewareProfileToUse);
return middlewareProfileToUse;
}
public static void shutdown(PaasServicesEnvITHelper sPaasServicesEnvITHelper) {
if (sPaasServicesEnvITHelper == null) {
logger.warn("tear down aborted : no helper found");
return;
}
sPaasServicesEnvITHelper.tearDown();
}
public EnvironmentDetailsDto getAndAssumeEnvironmentDetails() {
EnvironmentDetailsDto environmentDetails = null;
try {
environmentDetails = getEnvironmentDetails();
if (environmentDetails != null) {
return environmentDetails;
}
} catch (ObjectNotFoundException e) {
}
throw new AssumptionViolatedException(null, new TestComponentExpected(EnvironmentDetailsDto.class, environmentUID, "test environment"));
}
protected class TestComponentExpected extends BaseMatcher {
private Class testComponentClass;
private String testComponentId;
private String details;
public TestComponentExpected(Class componentClass, String testComponentId, String details) {
this.testComponentClass = componentClass;
this.testComponentId = testComponentId;
this.details = details;
}
@Override
public boolean matches(Object o) {
return false;
}
@Override
public void describeTo(Description description) {
description.appendText("test component (type:" + testComponentClass.getSimpleName()
+ ")#" + testComponentId + " should exists : " + details);
}
}
private void waitForStatus(String environmentId, EnvironmentDto.EnvironmentStatusEnum expectedStatus, int timeoutInMinutes) throws ObjectNotFoundException {
long timeout = System.currentTimeMillis() + timeoutInMinutes * 60 * 1000;
EnvironmentDto envDto = itConfiguration.getManageEnvironment().findEnvironmentByUID(environmentId);
while (expectedStatus != envDto.getStatus()) {
logDebugCurrentWaitingStatus(expectedStatus, envDto);
if (System.currentTimeMillis() > timeout) {
Assert.fail("Timeout: environment not " + expectedStatus + " after " + timeoutInMinutes + " minutes");
}
if (!envDto.getStatus().toString().endsWith("ING") && envDto.getStatus() != EnvironmentDto.EnvironmentStatusEnum.RUNNING) {
// In a final step, will not change until an action is requested
Assert.assertEquals("Activation process failed : " + envDto.getStatusMessage(), expectedStatus, envDto.getStatus());
}
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
// ignore
}
envDto = itConfiguration.getManageEnvironment().findEnvironmentByUID(environmentId);
}
}
/**
* log the expected state and the current waiting state status message is
* appended if not null (only)
*
* @param expectedStatus status expected
* @param envDto env state
*/
private void logDebugCurrentWaitingStatus(EnvironmentDto.EnvironmentStatusEnum expectedStatus, EnvironmentDto envDto) {
if (!logger.isDebugEnabled())
return;
StringBuilder sb = new StringBuilder();
sb.append("Waiting for ").append(expectedStatus).append(" environment ").append("; current status : ").append(envDto.getStatus());
if (envDto.getStatusPercent() > 0) {
sb.append(" ").append(envDto.getStatusPercent()).append("%");
}
String curMessage = envDto.getStatusMessage();
if (curMessage != null) {
sb.append(" / ").append(envDto.getStatusMessage());
}
logger.debug(sb.toString());
}
private void displayEnvironmentDetailsInLogs(EnvironmentDetailsDto environmentDetails) {
// show splunk logs links (if any)
List<LinkDto> logsLinkDtosMap = environmentDetails.getSpecificLinkDto(LinkDto.LinkTypeEnum.LOGS_LINK);
if (logsLinkDtosMap != null && logsLinkDtosMap.size() > 0) {
logger.info("Environment splunk logs available at: ");
for (LinkDto link : logsLinkDtosMap) {
logger.info(" - <a href=\"" + link.getUrl().toExternalForm() + "\">Link type : " + link.getLinkType() + " - Url : " + link.getUrl().toString()
+ "</a>");
}
}
// show access point (if any)
List<LinkDto> accessLinkDtosMap = environmentDetails.getSpecificLinkDto(LinkDto.LinkTypeEnum.ACCESS_LINK);
if (accessLinkDtosMap != null && accessLinkDtosMap.size() > 0) {
logger.info("Environment access points are: ");
for (LinkDto link : accessLinkDtosMap) {
logger.info(" - <a href=\"" + link.getUrl().toExternalForm() + "\">Link type : " + link.getLinkType() + " - Url : " + link.getUrl().toString()
+ "</a>");
}
}
}
public void setDefaultConfigurationItName() {
String finalName = "NoName";
if (itConfiguration != null && itConfiguration.getLogicalModelCatalog() != null) {
finalName = itConfiguration.getLogicalModelCatalog().getAppCode();
}
logger.info("Renaming itConfiguration to {} (old name: {})",finalName,itConfiguration.getName());
itConfiguration.setName(finalName);
}
/**
* Ensure that the EARs are deployed and accessible after a stop and a start
* (old name : testStopStart())
*/
public void application_should_be_accessible_after_environment_restart() {
EnvironmentDetailsDto envDetailsDto;
try {
EnvironmentDto envDto = getEnvironment();
Assert.assertEquals("Env should be running at this step", EnvironmentDto.EnvironmentStatusEnum.RUNNING, envDto.getStatus());
environment_restart();
envDetailsDto = getEnvironmentDetails();
} catch (ObjectNotFoundException e) {
throw new TechnicalException("Unable to find test environment", e);
}
Assert.assertEquals("Env should be running at this step", EnvironmentDto.EnvironmentStatusEnum.RUNNING, envDetailsDto.getStatus());
application_should_be_accessible(true);
}
public void environment_restart() throws ObjectNotFoundException {
ManageEnvironment manageEnvironment = itConfiguration.getManageEnvironment();
EnvironmentDto envDto;
EnvironmentDetailsDto envDetailsDto;
manageEnvironment.stopEnvironment(environmentUID);
waitForStatus(environmentUID, EnvironmentDto.EnvironmentStatusEnum.STOPPED, environmentStopTimeoutMin);
envDto = getEnvironment();
Assert.assertEquals("Env should be stopped at this step", EnvironmentDto.EnvironmentStatusEnum.STOPPED, envDto.getStatus());
envDetailsDto = getEnvironmentDetails();
Assert.assertEquals("Env should be stopped at this step", EnvironmentDto.EnvironmentStatusEnum.STOPPED, envDetailsDto.getStatus());
manageEnvironment.startEnvironment(environmentUID);
waitForStatus(environmentUID, EnvironmentDto.EnvironmentStatusEnum.RUNNING, environmentStartTimeoutMin);
}
public EnvironmentDetailsDto getEnvironmentDetails() throws ObjectNotFoundException {
if (environmentUID == null) {
return null;
}
ManageEnvironment manageEnvironment = itConfiguration.getManageEnvironment();
return manageEnvironment.findEnvironmentDetails(environmentUID);
}
public EnvironmentOpsDetailsDto getEnvironmentOpsDetails() throws ObjectNotFoundException {
if (environmentUID == null) {
return null;
}
ManageEnvironment manageEnvironment = itConfiguration.getManageEnvironment();
return manageEnvironment.findEnvironmentOpsDetailsByUID(environmentUID);
}
public EnvironmentDto getEnvironment() throws ObjectNotFoundException {
if (environmentUID == null) {
return null;
}
ManageEnvironment manageEnvironment = itConfiguration.getManageEnvironment();
return manageEnvironment.findEnvironmentByUID(environmentUID);
}
public <T> T executeRestRequest(LinkDto link, String path, Class<T> clazz) {
SimpleClientHttpRequestFactory clientHttpRequestFactory = new SimpleClientHttpRequestFactory();
if (this.getItConfiguration().isUseHttpIgeProxy()) {
final String httpProxyHost = this.getItConfiguration().getHttpProxyHost();
final int httpProxyPort = this.getItConfiguration().getHttpProxyPort();
logger.info("Use proxy {}:{} to access Simple Probe", httpProxyHost, httpProxyPort);
Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(httpProxyHost, httpProxyPort));
clientHttpRequestFactory.setProxy(proxy);
}
RestTemplate restTemplate = new RestTemplate(clientHttpRequestFactory);
T result = restTemplate.getForEntity(link.getUrl().toString()+path, clazz).getBody();
return result;
}
//~getters && setters
public PaasServicesEnvITConfiguration getItConfiguration() {
return itConfiguration;
}
public String getEnvironmentUID() {
return environmentUID;
}
public int getLogicalDeploymentID() {
return logicalDeploymentID;
}
public void setMaxRequests(int maxRequests) {
this.maxRequests = maxRequests;
}
public void setMaxSessions(int maxSessions) {
this.maxSessions = maxSessions;
}
public void setStateful(boolean stateful) {
isStateful = stateful;
}
public void setEnvType(EnvironmentDto.EnvironmentTypeEnum envType) {
this.envType = envType;
}
public void setEnvironmentCreationTimeoutMin(int environmentCreationTimeoutMin) {
this.environmentCreationTimeoutMin = environmentCreationTimeoutMin;
}
public void setWebAppTestAttempts(int webAppTestAttempts) {
this.webAppTestAttempts = webAppTestAttempts;
}
public void setWebAppTestWaitTime(int webAppTestWaitTime) {
this.webAppTestWaitTime = webAppTestWaitTime;
}
public void setEnvironmentStartTimeoutMin(int environmentStartTimeoutMin) {
this.environmentStartTimeoutMin = environmentStartTimeoutMin;
}
public void setEnvironmentStopTimeoutMin(int environmentStopTimeoutMin) {
this.environmentStopTimeoutMin = environmentStopTimeoutMin;
}
public void setEnvironmentDeleteTimeout(int environmentDeleteTimeout) {
this.environmentDeleteTimeout = environmentDeleteTimeout;
}
public void setSkipDeleteEnvironmentAtTheEnd(Boolean skipDeleteEnvironmentAtTheEnd) {
this.skipDeleteEnvironmentAtTheEnd = skipDeleteEnvironmentAtTheEnd;
}
}
| |
package com.cisco.axl.api._8;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
/**
* <p>Java class for XConferenceBridge complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="XConferenceBridge">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence minOccurs="0">
* <element name="name" type="{http://www.cisco.com/AXL/API/8.0}UniqueName128"/>
* <element name="description" type="{http://www.cisco.com/AXL/API/8.0}String128" minOccurs="0"/>
* <element name="product" type="{http://www.cisco.com/AXL/API/8.0}XProduct"/>
* <element name="devicePoolName" type="{http://www.cisco.com/AXL/API/8.0}XFkType"/>
* <element name="commonDeviceConfigName" type="{http://www.cisco.com/AXL/API/8.0}XFkType" minOccurs="0"/>
* <element name="locationName" type="{http://www.cisco.com/AXL/API/8.0}XFkType"/>
* <element name="subUnit" type="{http://www.cisco.com/AXL/API/8.0}XInteger" minOccurs="0"/>
* <element name="loadInformation" type="{http://www.cisco.com/AXL/API/8.0}XLoadInformation" minOccurs="0"/>
* <element name="vendorConfig" type="{http://www.cisco.com/AXL/API/8.0}XVendorConfig" minOccurs="0"/>
* <element name="maximumCapacity" type="{http://www.cisco.com/AXL/API/8.0}XInteger" minOccurs="0"/>
* <element name="useTrustedRelayPoint" type="{http://www.cisco.com/AXL/API/8.0}XStatus" minOccurs="0"/>
* <element name="securityProfileName" type="{http://www.cisco.com/AXL/API/8.0}XFkType" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "XConferenceBridge", propOrder = {
    "name",
    "description",
    "product",
    "devicePoolName",
    "commonDeviceConfigName",
    "locationName",
    "subUnit",
    "loadInformation",
    "vendorConfig",
    "maximumCapacity",
    "useTrustedRelayPoint",
    "securityProfileName"
})
public class XConferenceBridge {

    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String name;
    protected String description;
    protected String product;
    @XmlElementRef(name = "devicePoolName", type = JAXBElement.class)
    protected JAXBElement<XFkType> devicePoolName;
    @XmlElementRef(name = "commonDeviceConfigName", type = JAXBElement.class)
    protected JAXBElement<XFkType> commonDeviceConfigName;
    @XmlElement(defaultValue = "Hub_None")
    protected XFkType locationName;
    @XmlElement(defaultValue = "0")
    protected String subUnit;
    @XmlElementRef(name = "loadInformation", type = JAXBElement.class)
    protected JAXBElement<XLoadInformation> loadInformation;
    protected XVendorConfig vendorConfig;
    protected String maximumCapacity;
    @XmlElement(defaultValue = "Default")
    protected String useTrustedRelayPoint;
    @XmlElementRef(name = "securityProfileName", type = JAXBElement.class)
    protected JAXBElement<XFkType> securityProfileName;

    /**
     * Gets the value of the name property.
     *
     * @return possible object is {@link String }
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the value of the name property.
     *
     * @param value allowed object is {@link String }
     */
    public void setName(String value) {
        this.name = value;
    }

    /**
     * Gets the value of the description property.
     *
     * @return possible object is {@link String }
     */
    public String getDescription() {
        return description;
    }

    /**
     * Sets the value of the description property.
     *
     * @param value allowed object is {@link String }
     */
    public void setDescription(String value) {
        this.description = value;
    }

    /**
     * Gets the value of the product property.
     *
     * @return possible object is {@link String }
     */
    public String getProduct() {
        return product;
    }

    /**
     * Sets the value of the product property.
     *
     * @param value allowed object is {@link String }
     */
    public void setProduct(String value) {
        this.product = value;
    }

    /**
     * Gets the value of the devicePoolName property.
     *
     * @return possible object is {@link JAXBElement }{@code <}{@link XFkType }{@code >}
     */
    public JAXBElement<XFkType> getDevicePoolName() {
        return devicePoolName;
    }

    /**
     * Sets the value of the devicePoolName property.
     *
     * @param value allowed object is {@link JAXBElement }{@code <}{@link XFkType }{@code >}
     */
    public void setDevicePoolName(JAXBElement<XFkType> value) {
        // redundant cast removed: the parameter already has the field's exact type
        this.devicePoolName = value;
    }

    /**
     * Gets the value of the commonDeviceConfigName property.
     *
     * @return possible object is {@link JAXBElement }{@code <}{@link XFkType }{@code >}
     */
    public JAXBElement<XFkType> getCommonDeviceConfigName() {
        return commonDeviceConfigName;
    }

    /**
     * Sets the value of the commonDeviceConfigName property.
     *
     * @param value allowed object is {@link JAXBElement }{@code <}{@link XFkType }{@code >}
     */
    public void setCommonDeviceConfigName(JAXBElement<XFkType> value) {
        this.commonDeviceConfigName = value;
    }

    /**
     * Gets the value of the locationName property.
     *
     * @return possible object is {@link XFkType }
     */
    public XFkType getLocationName() {
        return locationName;
    }

    /**
     * Sets the value of the locationName property.
     *
     * @param value allowed object is {@link XFkType }
     */
    public void setLocationName(XFkType value) {
        this.locationName = value;
    }

    /**
     * Gets the value of the subUnit property.
     *
     * @return possible object is {@link String }
     */
    public String getSubUnit() {
        return subUnit;
    }

    /**
     * Sets the value of the subUnit property.
     *
     * @param value allowed object is {@link String }
     */
    public void setSubUnit(String value) {
        this.subUnit = value;
    }

    /**
     * Gets the value of the loadInformation property.
     *
     * @return possible object is {@link JAXBElement }{@code <}{@link XLoadInformation }{@code >}
     */
    public JAXBElement<XLoadInformation> getLoadInformation() {
        return loadInformation;
    }

    /**
     * Sets the value of the loadInformation property.
     *
     * @param value allowed object is {@link JAXBElement }{@code <}{@link XLoadInformation }{@code >}
     */
    public void setLoadInformation(JAXBElement<XLoadInformation> value) {
        this.loadInformation = value;
    }

    /**
     * Gets the value of the vendorConfig property.
     *
     * @return possible object is {@link XVendorConfig }
     */
    public XVendorConfig getVendorConfig() {
        return vendorConfig;
    }

    /**
     * Sets the value of the vendorConfig property.
     *
     * @param value allowed object is {@link XVendorConfig }
     */
    public void setVendorConfig(XVendorConfig value) {
        this.vendorConfig = value;
    }

    /**
     * Gets the value of the maximumCapacity property.
     *
     * @return possible object is {@link String }
     */
    public String getMaximumCapacity() {
        return maximumCapacity;
    }

    /**
     * Sets the value of the maximumCapacity property.
     *
     * @param value allowed object is {@link String }
     */
    public void setMaximumCapacity(String value) {
        this.maximumCapacity = value;
    }

    /**
     * Gets the value of the useTrustedRelayPoint property.
     *
     * @return possible object is {@link String }
     */
    public String getUseTrustedRelayPoint() {
        return useTrustedRelayPoint;
    }

    /**
     * Sets the value of the useTrustedRelayPoint property.
     *
     * @param value allowed object is {@link String }
     */
    public void setUseTrustedRelayPoint(String value) {
        this.useTrustedRelayPoint = value;
    }

    /**
     * Gets the value of the securityProfileName property.
     *
     * @return possible object is {@link JAXBElement }{@code <}{@link XFkType }{@code >}
     */
    public JAXBElement<XFkType> getSecurityProfileName() {
        return securityProfileName;
    }

    /**
     * Sets the value of the securityProfileName property.
     *
     * @param value allowed object is {@link JAXBElement }{@code <}{@link XFkType }{@code >}
     */
    public void setSecurityProfileName(JAXBElement<XFkType> value) {
        this.securityProfileName = value;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.oodt.cas.resource.system;
import org.apache.commons.io.FileUtils;
import org.apache.oodt.cas.resource.structs.ResourceNode;
import org.apache.oodt.cas.resource.structs.exceptions.JobQueueException;
import org.apache.oodt.cas.resource.structs.exceptions.JobRepositoryException;
import org.apache.oodt.cas.resource.structs.exceptions.MonitorException;
import org.apache.oodt.cas.resource.structs.exceptions.QueueManagerException;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URL;
import java.util.Hashtable;
import java.util.List;
import java.util.Properties;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.core.IsNot.not;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
/**
* Tests for the XmlRpcResourceManagerClient to ensure communications between client and server operate correctly.
*/
/**
 * Tests for the XmlRpcResourceManagerClient to ensure communications between client and server
 * operate correctly.
 *
 * <p>{@link #setUp()} starts a real XML-RPC resource manager on {@link #RM_PORT} and all tests
 * talk to it through one shared client, so the port must be free on the build machine.
 */
public class TestXmlRpcResourceManagerClient {

    /** Port the test resource manager listens on. */
    private static final int RM_PORT = 50001;

    /** Shared client under test; created once for the whole class. */
    private static XmlRpcResourceManagerClient rmc;

    /**
     * Stages the test configuration, starts the resource manager and connects the client.
     */
    @BeforeClass
    public static void setUp() throws Exception {
        generateTestConfiguration();
        // Constructing the manager starts the XML-RPC server as a side effect; the
        // reference itself is never used afterwards, so no local variable is kept.
        new XmlRpcResourceManager(RM_PORT);
        rmc = new XmlRpcResourceManagerClient(new URL("http://localhost:" + RM_PORT));
    }

    /**
     * Loads the test properties and stages the XML policy files into a temporary directory,
     * then points the resource manager's node/queue configuration at that directory.
     */
    private static void generateTestConfiguration() throws IOException {
        Properties config = new Properties();
        String propertiesFile = "." + File.separator + "src" + File.separator +
            "test" + File.separator + "resources" + File.separator + "test.resource.properties";
        System.getProperties().load(new FileInputStream(new File(propertiesFile)));

        // stage policy
        File tmpPolicyDir = null;
        try {
            tmpPolicyDir = File.createTempFile("test", "ignore").getParentFile();
        } catch (Exception e) {
            fail(e.getMessage());
        }

        // File.listFiles() returns null when the directory is missing or unreadable;
        // fail fast with a clear message instead of a NullPointerException in the loop.
        File[] policyFiles = new File("./src/test/resources/policy")
            .listFiles(new FileFilter() {
                @Override
                public boolean accept(File pathname) {
                    return pathname.isFile() && pathname.getName().endsWith(".xml");
                }
            });
        if (policyFiles == null) {
            fail("Policy directory ./src/test/resources/policy is missing or not readable");
            return;
        }
        for (File policyFile : policyFiles) {
            try {
                FileUtils.copyFileToDirectory(policyFile, tmpPolicyDir);
            } catch (Exception e) {
                fail(e.getMessage());
            }
        }

        config.setProperty("org.apache.oodt.cas.resource.nodes.dirs", tmpPolicyDir
            .toURI().toString());
        config.setProperty("org.apache.oodt.cas.resource.nodetoqueues.dirs",
            tmpPolicyDir.toURI().toString());
        System.getProperties().putAll(config);
    }

    /** The monitor should report exactly the one node staged by the test policy. */
    @Test
    public void testGetNodes() throws MonitorException {
        // NOTE(review): declared as List<Hashtable> here but List<ResourceNode> in
        // testGetNodeById/testGetNodeLoad — getNodes() presumably returns a raw List;
        // confirm against the client API and unify the element type.
        List<Hashtable> nodes = rmc.getNodes();
        assertThat(nodes, is(not(nullValue())));
        assertThat(nodes, hasSize(1));
    }

    /** An execution report must be returned (currently an empty string). */
    @Test
    public void testGetExecutionReport() throws JobRepositoryException {
        String execreport = rmc.getExecReport();
        assertThat(execreport, is(not(nullValue())));
        //TODO make it return more than an empty string;
    }

    /** The default job-queue capacity from the test configuration is 1000. */
    @Test
    public void testJobQueueCapacity() throws JobRepositoryException {
        int capacity = rmc.getJobQueueCapacity();
        assertThat(capacity, equalTo(1000));
    }

    /** No jobs have been queued, so the queue size must be zero. */
    @Test
    public void testGetJobQueueSize() throws JobRepositoryException {
        int size = rmc.getJobQueueSize();
        assertThat(size, equalTo(0));
        //TODO Make it change queue size
    }

    /** Looking up the first node by its id returns the "localhost" node. */
    @Test
    public void testGetNodeById() throws MonitorException {
        List<ResourceNode> nodelist = rmc.getNodes();
        ResourceNode node = rmc.getNodeById(nodelist.get(0).getNodeId());
        assertThat(node, is(not(nullValue())));
        assertThat(node.getNodeId(), equalTo("localhost"));
    }

    /** With no jobs running, the node load is "0/8" (used/capacity). */
    @Test
    public void testGetNodeLoad() throws MonitorException {
        List<ResourceNode> nodelist = rmc.getNodes();
        String node = rmc.getNodeLoad(nodelist.get(0).getNodeId());
        assertNotNull(node);
        assertThat(node, equalTo("0/8"));
    }

    /** A node report must always be produced. */
    @Test
    public void testNodeReport() throws MonitorException {
        String report = rmc.getNodeReport();
        assertThat(report, is(not(nullValue())));
    }

    /** The "long" queue from the test policy contains the single staged node. */
    @Test
    public void testGetNodesInQueue() throws QueueManagerException {
        List<String> nodes = rmc.getNodesInQueue("long");
        assertThat(nodes, is(not(nullValue())));
        assertThat(nodes, hasSize(1));
    }

    /** The queued-jobs list is non-null even when empty. */
    @Test
    public void testQueuedJobs() throws JobQueueException {
        List jobs = rmc.getQueuedJobs();
        assertThat(jobs, is(not(nullValue())));
        //TODO queue a job
    }

    /** The staged node belongs to the three queues declared in the test policy. */
    @Test
    public void testQueuesWithNode() throws MonitorException, QueueManagerException {
        List<ResourceNode> nodelist = rmc.getNodes();
        List<String> queues = rmc.getQueuesWithNode(nodelist.get(0).getNodeId());
        assertThat(queues, hasSize(3));
        assertThat(queues, containsInAnyOrder("high", "quick", "long"));
    }

    /** Exactly the three queues from the test policy exist. */
    @Test
    public void testQueues() throws QueueManagerException {
        List<String> queues = rmc.getQueues();
        assertThat(queues, hasSize(3));
        assertThat(queues, containsInAnyOrder("high", "quick", "long"));
    }
}
| |
/**************************************************************************
Exchange Web Services Java API
Copyright (c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the ""Software""), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
**************************************************************************/
package microsoft.exchange.webservices.data;
import org.apache.http.HttpException;
import java.io.*;
import java.net.URL;
import java.util.Map;
/**
* The Class HttpWebRequest.
*/
abstract class HttpWebRequest {
    /**
     * The url.
     */
    private URL url;
    /**
     * The pre authenticate.
     */
    private boolean preAuthenticate;
    /**
     * The timeout.
     */
    private int timeout;
    /**
     * The content type. Defaults to the SOAP-over-HTTP content type used by EWS.
     */
    private String contentType = "text/xml; charset=utf-8";
    /**
     * The accept. Defaults to "text/xml".
     */
    private String accept = "text/xml";
    /**
     * The user agent.
     */
    private String userAgent = "EWS SDK";
    /**
     * The allow auto redirect.
     */
    private boolean allowAutoRedirect;
    /**
     * The keep alive. Enabled by default.
     */
    private boolean keepAlive = true;
    /**
     * The accept gzip encoding.
     */
    private boolean acceptGzipEncoding;
    /**
     * The use default credentials.
     */
    private boolean useDefaultCredentials;
    /**
     * Whether credentials may be sent with this request. Defaults to {@code true};
     * see {@link #setAllowAuthentication(boolean)}.
     */
    private boolean allowAuthentication = true;
    /**
     * The user name.
     */
    private String username;
    /**
     * The password.
     */
    private String password;
    /**
     * The domain.
     */
    private String domain;
    /**
     * The request Method. Defaults to "POST", which EWS SOAP calls use.
     */
    private String requestMethod = "POST";
    /**
     * The request headers.
     */
    private Map<String, String> headers;
    /**
     * The Web Proxy.
     */
    private WebProxy proxy;
    /**
     * Gets the Web Proxy.
     *
     * @return the proxy
     */
    public WebProxy getProxy() {
        return proxy;
    }
    /**
     * Sets the Web Proxy.
     *
     * @param proxy The Web Proxy
     */
    public void setProxy(WebProxy proxy) {
        this.proxy = proxy;
    }
    /**
     * Checks if is http scheme.
     *
     * @return true, if is http scheme
     */
    public boolean isHttpScheme() {
        return getUrl().getProtocol().equalsIgnoreCase(EWSConstants.HTTP_SCHEME);
    }
    /**
     * Checks if is https scheme.
     *
     * @return true, if is https scheme
     */
    public boolean isHttpsScheme() {
        return getUrl().getProtocol().equalsIgnoreCase(EWSConstants.HTTPS_SCHEME);
    }
    /**
     * Gets the user name.
     *
     * @return the user name
     */
    public String getUsername() {
        return username;
    }
    /**
     * Sets the user name.
     *
     * @param username the new user name
     */
    public void setUsername(String username) {
        this.username = username;
    }
    /**
     * Gets the password.
     *
     * @return the password
     */
    public String getPassword() {
        return password;
    }
    /**
     * Sets the password.
     *
     * @param password the new password
     */
    public void setPassword(String password) {
        this.password = password;
    }
    /**
     * Gets the domain.
     *
     * @return the domain
     */
    public String getDomain() {
        return domain;
    }
    /**
     * Sets the domain.
     *
     * @param domain the new domain
     */
    public void setDomain(String domain) {
        this.domain = domain;
    }
    /**
     * Gets the url.
     *
     * @return the url
     */
    public URL getUrl() {
        return url;
    }
    /**
     * Sets the url.
     *
     * @param url the new url
     */
    public void setUrl(URL url) {
        this.url = url;
    }
    /**
     * Whether to use preemptive authentication. Currently not implemented, though.
     */
    public boolean isPreAuthenticate() {
        return preAuthenticate;
    }
    /**
     * Whether to use preemptive authentication. Currently not implemented, though.
     */
    public void setPreAuthenticate(boolean preAuthenticate) {
        this.preAuthenticate = preAuthenticate;
    }
    /**
     * Gets the timeout.
     *
     * @return the timeout
     */
    public int getTimeout() {
        return timeout;
    }
    /**
     * Sets the timeout.
     *
     * @param timeout the new timeout
     */
    public void setTimeout(int timeout) {
        this.timeout = timeout;
    }
    /**
     * Gets the content type.
     *
     * @return the content type
     */
    public String getContentType() {
        return contentType;
    }
    /**
     * Sets the content type.
     *
     * @param contentType the new content type
     */
    public void setContentType(String contentType) {
        this.contentType = contentType;
    }
    /**
     * Gets the accept.
     *
     * @return the accept
     */
    public String getAccept() {
        return accept;
    }
    /**
     * Sets the accept.
     *
     * @param accept the new accept
     */
    public void setAccept(String accept) {
        this.accept = accept;
    }
    /**
     * Gets the user agent.
     *
     * @return the user agent
     */
    public String getUserAgent() {
        return userAgent;
    }
    /**
     * Sets the user agent.
     *
     * @param userAgent the new user agent
     */
    public void setUserAgent(String userAgent) {
        this.userAgent = userAgent;
    }
    /**
     * Checks if is allow auto redirect.
     *
     * @return true, if is allow auto redirect
     */
    public boolean isAllowAutoRedirect() {
        return allowAutoRedirect;
    }
    /**
     * Sets the allow auto redirect.
     *
     * @param allowAutoRedirect the new allow auto redirect
     */
    public void setAllowAutoRedirect(boolean allowAutoRedirect) {
        this.allowAutoRedirect = allowAutoRedirect;
    }
    /**
     * Checks if is keep alive.
     *
     * @return true, if is keep alive
     */
    public boolean isKeepAlive() {
        return keepAlive;
    }
    /**
     * Sets the keep alive.
     *
     * @param keepAlive the new keep alive
     */
    public void setKeepAlive(boolean keepAlive) {
        this.keepAlive = keepAlive;
    }
    /**
     * Checks if is accept gzip encoding.
     *
     * @return true, if is accept gzip encoding
     */
    public boolean isAcceptGzipEncoding() {
        return acceptGzipEncoding;
    }
    /**
     * Sets the accept gzip encoding.
     *
     * @param acceptGzipEncoding the new accept gzip encoding
     */
    public void setAcceptGzipEncoding(boolean acceptGzipEncoding) {
        this.acceptGzipEncoding = acceptGzipEncoding;
    }
    /**
     * Checks if is use default credentials.
     *
     * @return true, if is use default credentials
     */
    public boolean isUseDefaultCredentials() {
        return useDefaultCredentials;
    }
    /**
     * Sets the use default credentials.
     *
     * @param useDefaultCredentials the new use default credentials
     */
    public void setUseDefaultCredentials(boolean useDefaultCredentials) {
        this.useDefaultCredentials = useDefaultCredentials;
    }
    /**
     * Whether web service authentication is allowed.
     * This can be set to {@code false} to disallow sending credentials with this request.
     *
     * This is useful for the autodiscover request to the legacy HTTP url, because this single request doesn't
     * require authentication and we don't want to send credentials over HTTP.
     *
     * @return {@code true} if authentication is allowed.
     */
    public boolean isAllowAuthentication() {
        return allowAuthentication;
    }
    /**
     * Whether web service authentication is allowed.
     * This can be set to {@code false} to disallow sending credentials with this request.
     *
     * This is useful for the autodiscover request to the legacy HTTP url, because this single request doesn't
     * require authentication and we don't want to send credentials over HTTP.
     *
     * Default is {@code true}.
     *
     * @param allowAuthentication {@code true} if authentication is allowed.
     */
    public void setAllowAuthentication(boolean allowAuthentication) {
        this.allowAuthentication = allowAuthentication;
    }
    /**
     * Gets the request method type.
     *
     * @return the request method type.
     */
    public String getRequestMethod() {
        return requestMethod;
    }
    /**
     * Sets the request method type.
     *
     * @param requestMethod the request method type.
     */
    public void setRequestMethod(String requestMethod) {
        this.requestMethod = requestMethod;
    }
    /**
     * Gets the Headers.
     *
     * @return the request headers
     */
    public Map<String, String> getHeaders() {
        return headers;
    }
    /**
     * Sets the Headers.
     *
     * @param headers The headers
     */
    public void setHeaders(Map<String, String> headers) {
        this.headers = headers;
    }
    /**
     * Sets the credentials.
     * Convenience for setting domain, user name and password in one call;
     * assigns the fields directly (does not go through the individual setters).
     *
     * @param domain user domain
     * @param user user name
     * @param pwd password
     */
    public void setCredentials(String domain, String user, String pwd) {
        this.domain = domain;
        this.username = user;
        this.password = pwd;
    }
    /**
     * Gets the input stream.
     *
     * @return the input stream
     * @throws EWSHttpException the eWS http exception
     * @throws java.io.IOException signals that an I/O error occurred
     */
    public abstract InputStream getInputStream() throws EWSHttpException, IOException;
    /**
     * Gets the error stream.
     *
     * @return the error stream
     * @throws EWSHttpException the eWS http exception
     */
    public abstract InputStream getErrorStream() throws EWSHttpException;
    /**
     * Gets the output stream.
     *
     * @return the output stream
     * @throws EWSHttpException the eWS http exception
     */
    public abstract OutputStream getOutputStream() throws EWSHttpException;
    /**
     * Close.
     */
    public abstract void close();
    /**
     * Prepare connection.
     */
    public abstract void prepareConnection();
    /**
     * Gets the response headers.
     *
     * @return the response headers
     * @throws EWSHttpException the eWS http exception
     */
    public abstract Map<String, String> getResponseHeaders()
        throws EWSHttpException;
    /**
     * Gets the content encoding.
     *
     * @return the content encoding
     * @throws EWSHttpException the eWS http exception
     */
    public abstract String getContentEncoding() throws EWSHttpException;
    /**
     * Gets the response content type.
     *
     * @return the response content type
     * @throws EWSHttpException the eWS http exception
     */
    public abstract String getResponseContentType() throws EWSHttpException;
    /**
     * Gets the response code.
     *
     * @return the response code
     * @throws EWSHttpException the eWS http exception
     */
    public abstract int getResponseCode() throws EWSHttpException;
    /**
     * Gets the response message.
     *
     * @return the response message
     * @throws EWSHttpException the eWS http exception
     */
    public abstract String getResponseText() throws EWSHttpException;
    /**
     * Gets the response header field.
     *
     * @param headerName the header name
     * @return the response header field
     * @throws EWSHttpException the eWS http exception
     */
    public abstract String getResponseHeaderField(String headerName)
        throws EWSHttpException;
    /**
     * Gets the request properties.
     *
     * @return the request properties
     * @throws EWSHttpException the eWS http exception
     */
    public abstract Map<String, String> getRequestProperty()
        throws EWSHttpException;
    /**
     * Executes Request by sending request xml data to server.
     *
     * @return the response code of the executed request
     * @throws EWSHttpException the eWS http exception
     * @throws java.io.IOException the IO Exception
     */
    public abstract int executeRequest() throws EWSHttpException, IOException;
    /**
     * Begins an asynchronous request for a response.
     *
     * <p>NOTE(review): unimplemented stub — always returns {@code null};
     * callers must not rely on it until implemented.
     */
    public IAsyncResult beginGetResponse(Object webRequestAsyncCallback,
        WebAsyncCallStateAnchor wrappedState) {
        // TODO Auto-generated method stub
        return null;
    }
    /**
     * Ends an asynchronous request for the request stream.
     *
     * <p>NOTE(review): unimplemented stub — always returns a fresh, empty stream.
     */
    public ByteArrayOutputStream endGetRequestStream(
        IAsyncResult result) {
        // TODO Auto-generated method stub
        return new ByteArrayOutputStream();
    }
}
| |
package pl.edu.agh.multiagent;
import jade.android.RuntimeCallback;
import jade.wrapper.AgentController;
import jade.wrapper.StaleProxyException;
import java.util.ArrayList;
import java.util.List;
import pl.edu.agh.multiagent.api.GameState;
import pl.edu.agh.multiagent.api.MoveResoultionStrategy;
import pl.edu.agh.multiagent.api.State;
import pl.edu.agh.multiagent.jade.GameAgentInterface;
import pl.edu.agh.multiagent.jade.JadeController;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.widget.ArrayAdapter;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.Spinner;
import android.widget.TableLayout;
import android.widget.TableRow;
import android.widget.TextView;
public class MainActivity extends Activity implements OnClickListener {
    private static final String TAG = "MainActivity";
    // Boots the JADE runtime service and the game agent on Android.
    private JadeController jadeController;
    // O2A interface to our game agent; assigned asynchronously once the agent starts.
    private GameAgentInterface agent;
    // The four top-level screens; switchLayout() keeps exactly one VISIBLE at a time.
    private LinearLayout menuLayout;
    private LinearLayout gameLayout;
    private LinearLayout gameFinderLayout;
    private LinearLayout myGamesLayout;
    // Board of the game currently shown in gameLayout.
    private Board currentBoard;
    /**
     * Wires up the layouts, then starts the JADE runtime service and the game agent.
     * JADE host/port come from the launching Intent's "host"/"port" extras.
     * The agent title is filled in on the UI thread once the agent's O2A interface
     * becomes available.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        menuLayout = (LinearLayout) findViewById(R.id.mainFormLayout);
        gameLayout = (LinearLayout) findViewById(R.id.gameLayout);
        gameFinderLayout = (LinearLayout) findViewById(R.id.gameFinder);
        myGamesLayout = (LinearLayout) findViewById(R.id.myGames);
        // Start on the main menu; all other screens hidden.
        menuLayout.setVisibility(View.VISIBLE);
        gameLayout.setVisibility(View.GONE);
        gameFinderLayout.setVisibility(View.GONE);
        myGamesLayout.setVisibility(View.GONE);
        Intent i = getIntent();
        jadeController = new JadeController(i.getStringExtra("host"),
                i.getStringExtra("port"), getApplicationContext());
        final TextView agentName = (TextView) findViewById(R.id.agentTitle);
        // Two-stage async startup: runtime service first, then the agent itself.
        jadeController.startJadeRuntimeService(new RuntimeCallback<Void>() {
            @Override
            public void onSuccess(Void arg0) {
                Log.w(TAG, "Success 1");
                jadeController
                        .startGameAgent(new RuntimeCallback<AgentController>() {
                            @Override
                            public void onSuccess(AgentController arg0) {
                                Log.w(TAG, "Started agent");
                                try {
                                    GameAgentInterface agent = arg0
                                            .getO2AInterface(GameAgentInterface.class);
                                    if (agent != null) {
                                        MainActivity.this.agent = agent;
                                        // View updates must happen on the UI thread.
                                        runOnUiThread(new Runnable() {
                                            @Override
                                            public void run() {
                                                agentName.setText(MainActivity.this.agent.getAgentInfo().getName());
                                            }
                                        });
                                    } else {
                                        Log.i(TAG, "Agent O2A is null");
                                    }
                                } catch (StaleProxyException e) {
                                    throw new RuntimeException(e);
                                }
                            }
                            @Override
                            public void onFailure(Throwable arg0) {
                                Log.w(TAG, "Failed to start agent");
                            }
                        });
            }
            @Override
            public void onFailure(Throwable arg0) {
                Log.w(TAG, "Failed to start Jade");
            }
        });
    }
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }
    /**
     * "Start game" button handler: shows a dialog asking for a game name and move
     * resolution strategy, and on OK creates the game on the agent and opens its board.
     */
    public void startGame(View view) {
        Log.w(TAG, "Start Game click");
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        ViewGroup dialog = (ViewGroup) getLayoutInflater().inflate(
                R.layout.dialog_start_game, null);
        final EditText gameNameView = (EditText) dialog.findViewById(R.id.enterGameName);
        final Spinner choiceSpinner = (Spinner) dialog
                .findViewById(R.id.moveResolutionSpinner);
        // Offer every MoveResoultionStrategy enum value in the spinner.
        List<String> list = new ArrayList<String>();
        for(MoveResoultionStrategy s : MoveResoultionStrategy.values()){
            list.add(s.toString());
        }
        ArrayAdapter<String> dataAdapter = new ArrayAdapter<String>(this,
                android.R.layout.simple_spinner_item, list);
        dataAdapter
                .setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        choiceSpinner.setAdapter(dataAdapter);
        builder.setView(dialog)
                .setPositiveButton("Ok", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int id) {
                        GameState state = GameState.newGameState(gameNameView.getText().toString(), MoveResoultionStrategy.valueOf(choiceSpinner.getSelectedItem().toString()), agent.getAgentInfo());
                        agent.createGame(state);
                        switchLayout(gameLayout);
                        initGameFromState(state);
                    }
                })
                .setNegativeButton("Cancel",
                        new DialogInterface.OnClickListener() {
                            public void onClick(DialogInterface dialog, int id) {
                                dialog.dismiss();
                            }
                        });
        builder.create().show();
    }
    /**
     * Shows the game-finder screen listing every OPEN game known to the agent.
     */
    public void browseGames(View view) {
        switchLayout(gameFinderLayout);
        TableLayout table = (TableLayout) findViewById(R.id.otherGamesTable);
        table.removeAllViews();
        addHeader(table);
        for(GameState s :agent.getAllActiveGames()){
            if(s.getState().equals(State.OPEN)){
                table.addView(new MyTableRow(this, s, this));
            }
        }
    }
    /**
     * Shows the current agent's own OPEN games.
     *
     * NOTE(review): this switches to gameFinderLayout and fills otherGamesTable,
     * exactly like browseGames() — only the data source (getMyGames()) differs.
     * If myGamesLayout (and its own table) was meant to be shown here, this is a
     * copy-paste slip; confirm against the layout resources.
     */
    public void myGames(View view) {
        switchLayout(gameFinderLayout);
        TableLayout table = (TableLayout) findViewById(R.id.otherGamesTable);
        table.removeAllViews();
        addHeader(table);
        for(GameState s :agent.getMyGames()){
            if(s.getState().equals(State.OPEN)){
                table.addView(new MyTableRow(this, s, this));
            }
        }
    }
    /**
     * Adds the column-title row and a black separator line to a games table.
     */
    @SuppressWarnings("deprecation")
    private void addHeader(TableLayout table) {
        TableRow row = new TableRow(this);
        String owner = "Game owner";
        TextView ownerView = new TextView(this);
        ownerView.setText(owner);
        row.addView(ownerView);
        String gameName = "Game name";
        TextView nameView = new TextView(this);
        nameView.setText(gameName);
        row.addView(nameView);
        String whose = "Move";
        TextView whoseView = new TextView(this);
        whoseView.setText(whose);
        row.addView(whoseView);
        String moveNumber = "Move number";
        TextView moveView = new TextView(this);
        moveView.setText(moveNumber);
        row.addView(moveView);
        table.addView(row);
        // 1px full-width separator; FILL_PARENT is deprecated (hence the class-level
        // @SuppressWarnings) but kept for compatibility with the project's min SDK.
        View line = new View(this);
        line.setBackgroundColor(Color.BLACK);
        line.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT, 1));
        table.addView(line);
    }
    // Hides every top-level screen and shows only l.
    private void switchLayout(LinearLayout l){
        menuLayout.setVisibility(View.GONE);
        gameLayout.setVisibility(View.GONE);
        gameFinderLayout.setVisibility(View.GONE);
        myGamesLayout.setVisibility(View.GONE);
        l.setVisibility(View.VISIBLE);
    }
    /**
     * Back navigates to the main menu from any sub-screen; from the menu itself it
     * behaves as the normal system back (finishing the activity).
     */
    @Override
    public void onBackPressed() {
        if(menuLayout.getVisibility() == View.VISIBLE){
            super.onBackPressed();
        }
        else{
            switchLayout(menuLayout);
        }
    }
    @Override
    public void onDestroy(){
        //TODO clean agent? stop container? stop service? nothing? dunno
        super.onDestroy();
    }
    // Builds a Board for the given state and registers it as the agent's listener.
    private void initGameFromState(GameState state){
        Board board = new Board(findViewById(R.id.board), agent, state, this, this);
        agent.setGameAgentListener(board);
        currentBoard = board;
    }
    // Forwards clicks on board cells to the current board.
    public void boardClick(View v){
        currentBoard.click(v);
    }
    /**
     * use only for choosing game row
     */
    @Override
    public void onClick(View v) {
        switchLayout(gameLayout);
        initGameFromState(((MyTableRow) v).getState());
    }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.ex;
import com.intellij.application.options.colors.ColorAndFontDescriptionPanel;
import com.intellij.application.options.colors.InspectionColorSettingsPage;
import com.intellij.application.options.colors.TextAttributesDescription;
import com.intellij.codeInsight.daemon.impl.HighlightInfoType;
import com.intellij.codeInsight.daemon.impl.SeverityRegistrar;
import com.intellij.codeInsight.daemon.impl.SeverityUtil;
import com.intellij.codeInspection.InspectionsBundle;
import com.intellij.ide.DataManager;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.colors.CodeInsightColors;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.InputValidator;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.profile.codeInspection.ui.SingleInspectionProfilePanel;
import com.intellij.ui.*;
import com.intellij.ui.components.JBList;
import com.intellij.util.Consumer;
import com.intellij.util.ui.JBUI;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.util.List;
import java.util.*;
import static com.intellij.application.options.colors.ColorAndFontOptions.selectOrEditColor;
import static com.intellij.codeInsight.daemon.impl.SeverityRegistrar.SeverityBasedTextAttributes;
public class SeverityEditorDialog extends DialogWrapper {
private static final Logger LOG = Logger.getInstance(SeverityEditorDialog.class);
private final JPanel myPanel;
private final JList<SeverityBasedTextAttributes> myOptionsList = new JBList<>();
private final ColorAndFontDescriptionPanel myOptionsPanel = new ColorAndFontDescriptionPanel();
private SeverityBasedTextAttributes myCurrentSelection;
private final SeverityRegistrar mySeverityRegistrar;
private final boolean myCloseDialogWhenSettingsShown;
private final CardLayout myCard;
private final JPanel myRightPanel;
@NonNls private static final String DEFAULT = "DEFAULT";
@NonNls private static final String EDITABLE = "EDITABLE";
public static void show(@NotNull Project project,
@Nullable HighlightSeverity selectedSeverity,
@NotNull SeverityRegistrar severityRegistrar,
boolean closeDialogWhenSettingsShown,
@Nullable Consumer<? super HighlightSeverity> chosenSeverityCallback) {
final SeverityEditorDialog dialog = new SeverityEditorDialog(project, selectedSeverity, severityRegistrar, closeDialogWhenSettingsShown);
if (dialog.showAndGet()) {
final HighlightInfoType type = dialog.getSelectedType();
if (type != null) {
final HighlightSeverity severity = type.getSeverity(null);
if (chosenSeverityCallback != null) {
chosenSeverityCallback.consume(severity);
}
}
}
}
  /**
   * Builds the dialog UI: a severity list with add/edit/move actions on the left and a
   * card panel on the right that shows either the color/font editor (custom severities)
   * or a "configure in settings" button (default severities).
   */
  private SeverityEditorDialog(@NotNull Project project,
                               @Nullable HighlightSeverity selectedSeverity,
                               @NotNull SeverityRegistrar severityRegistrar,
                               boolean closeDialogWhenSettingsShown) {
    super(project, true);
    mySeverityRegistrar = severityRegistrar;
    myCloseDialogWhenSettingsShown = closeDialogWhenSettingsShown;
    // Render each list entry by its severity name only.
    myOptionsList.setCellRenderer(new DefaultListCellRenderer() {
      @Override
      public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
        final Component rendererComponent = super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
        setText(SingleInspectionProfilePanel.renderSeverity(((SeverityBasedTextAttributes)value).getSeverity()));
        return rendererComponent;
      }
    });
    // On selection change: commit pending edits to the previous entry, load the new one,
    // and flip the right-hand card (default severities are not editable here).
    myOptionsList.addListSelectionListener(new ListSelectionListener() {
      @Override
      public void valueChanged(ListSelectionEvent e) {
        if (myCurrentSelection != null) {
          apply(myCurrentSelection);
        }
        myCurrentSelection = myOptionsList.getSelectedValue();
        if (myCurrentSelection != null) {
          reset(myCurrentSelection);
          myCard.show(myRightPanel, SeverityRegistrar.isDefaultSeverity(myCurrentSelection.getSeverity()) ? DEFAULT : EDITABLE);
        }
      }
    });
    TreeUIHelper.getInstance().installListSpeedSearch(myOptionsList, attrs -> attrs.getSeverity().getName());
    myOptionsList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    // Left side: the list decorated with add / move up / move down / rename actions.
    JPanel leftPanel = ToolbarDecorator.createDecorator(myOptionsList)
      .setAddAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton button) {
          final String name = Messages.showInputDialog(myPanel, InspectionsBundle.message("highlight.severity.create.dialog.name.label"),
                                                       InspectionsBundle.message("highlight.severity.create.dialog.title"),
                                                       Messages.getQuestionIcon(),
                                                       "", new InputValidator() {
              @Override
              public boolean checkInput(final String inputString) {
                return checkNameExist(inputString);
              }
              @Override
              public boolean canClose(final String inputString) {
                return checkInput(inputString);
              }
            });
          if (name == null) return;
          // New severities start with the default "warning" text attributes.
          SeverityBasedTextAttributes newSeverityBasedTextAttributes = createSeverity(name,
                                                                                     CodeInsightColors.WARNINGS_ATTRIBUTES.getDefaultAttributes());
          ((DefaultListModel<SeverityBasedTextAttributes>)myOptionsList.getModel()).addElement(newSeverityBasedTextAttributes);
          select(newSeverityBasedTextAttributes);
        }
      }).setMoveUpAction(button -> {
        // Commit pending edits before reordering so they are not lost.
        apply(myCurrentSelection);
        ListUtil.moveSelectedItemsUp(myOptionsList);
      }).setMoveDownAction(button -> {
        apply(myCurrentSelection);
        ListUtil.moveSelectedItemsDown(myOptionsList);
      }).setEditAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton button) {
          String oldName = myCurrentSelection.getSeverity().getName();
          String newName = Messages.showInputDialog(myPanel, InspectionsBundle.message("highlight.severity.create.dialog.name.label"), "Edit Severity Name", null, oldName, new InputValidator() {
            @Override
            public boolean checkInput(String inputString) {
              return checkNameExist(inputString);
            }
            @Override
            public boolean canClose(String inputString) {
              return checkInput(inputString);
            }
          });
          if (newName != null && !oldName.equals(newName)) {
            // Renaming creates a fresh entry with the same attributes and replaces in place.
            SeverityBasedTextAttributes newSeverityBasedTextAttributes = createSeverity(newName, myCurrentSelection.getAttributes());
            int index = myOptionsList.getSelectedIndex();
            ((DefaultListModel<SeverityBasedTextAttributes>)myOptionsList.getModel()).set(index, newSeverityBasedTextAttributes);
            select(newSeverityBasedTextAttributes);
          }
        }
      }).setEditActionUpdater(e -> myCurrentSelection != null && !SeverityRegistrar.isDefaultSeverity(myCurrentSelection.getSeverity())).setEditActionName("Rename").createPanel();
    // Default severities cannot be removed.
    ToolbarDecorator.findRemoveButton(leftPanel).addCustomUpdater(
      e -> !SeverityRegistrar.isDefaultSeverity(myOptionsList.getSelectedValue().getSeverity()));
    // A default severity may only move up/down if its neighbour is non-default,
    // so the relative order of default severities is preserved.
    ToolbarDecorator.findUpButton(leftPanel).addCustomUpdater(e -> {
      boolean canMove = ListUtil.canMoveSelectedItemsUp(myOptionsList);
      if (canMove) {
        SeverityBasedTextAttributes pair =
          myOptionsList.getSelectedValue();
        if (pair != null && SeverityRegistrar.isDefaultSeverity(pair.getSeverity())) {
          final int newPosition = myOptionsList.getSelectedIndex() - 1;
          pair = myOptionsList.getModel().getElementAt(newPosition);
          if (SeverityRegistrar.isDefaultSeverity(pair.getSeverity())) {
            canMove = false;
          }
        }
      }
      return canMove;
    });
    ToolbarDecorator.findDownButton(leftPanel).addCustomUpdater(e -> {
      boolean canMove = ListUtil.canMoveSelectedItemsDown(myOptionsList);
      if (canMove) {
        SeverityBasedTextAttributes pair =
          myOptionsList.getSelectedValue();
        if (pair != null && SeverityRegistrar.isDefaultSeverity(pair.getSeverity())) {
          final int newPosition = myOptionsList.getSelectedIndex() + 1;
          pair = myOptionsList.getModel().getElementAt(newPosition);
          if (SeverityRegistrar.isDefaultSeverity(pair.getSeverity())) {
            canMove = false;
          }
        }
      }
      return canMove;
    });
    // Right side: card layout switching between the read-only hint and the editor panel.
    myPanel = new JPanel(new BorderLayout());
    myPanel.add(leftPanel, BorderLayout.CENTER);
    myCard = new CardLayout();
    myRightPanel = new JPanel(myCard);
    final JPanel disabled = new JPanel(new GridBagLayout());
    final JButton button = new JButton(InspectionsBundle.message("severities.default.settings.message"));
    button.addActionListener(e -> editColorsAndFonts());
    disabled.add(button,
                 new GridBagConstraints(0, 0, 1, 1, 0, 0, GridBagConstraints.CENTER, GridBagConstraints.NONE, JBUI.emptyInsets(), 0,
                                        0));
    myRightPanel.add(DEFAULT, disabled);
    myRightPanel.add(EDITABLE, myOptionsPanel);
    myCard.show(myRightPanel, EDITABLE);
    myPanel.add(myRightPanel, BorderLayout.EAST);
    fillList(selectedSeverity);
    init();
    setTitle(InspectionsBundle.message("severities.editor.dialog.title"));
    reset(myOptionsList.getSelectedValue());
  }
@NotNull
public SeverityBasedTextAttributes createSeverity(@NotNull String name, @NotNull TextAttributes parent) {
HighlightInfoType.HighlightInfoTypeImpl info = new HighlightInfoType.HighlightInfoTypeImpl(new HighlightSeverity(name, 50),
TextAttributesKey
.createTextAttributesKey(name));
return new SeverityBasedTextAttributes(parent.clone(), info);
}
  /** Clears the current selection, then scrolls to and selects the given entry. */
  public void select(SeverityBasedTextAttributes newSeverityBasedTextAttributes) {
    myOptionsList.clearSelection();
    ScrollingUtil.selectItem(myOptionsList, newSeverityBasedTextAttributes);
  }
/**
 * Checks whether {@code newName} is usable for a new severity: it must be non-empty
 * and must not match (case-insensitively) any severity already in the list.
 * NOTE: despite the name, this returns {@code true} when the name does NOT exist yet.
 *
 * @param newName candidate severity name
 * @return true when the name is free to use; false when empty or already taken
 */
private boolean checkNameExist(@NotNull String newName) {
  if (StringUtil.isEmpty(newName)) {
    return false;
  }
  final ListModel listModel = myOptionsList.getModel();
  final int size = listModel.getSize();
  for (int index = 0; index < size; index++) {
    final SeverityBasedTextAttributes entry = (SeverityBasedTextAttributes)listModel.getElementAt(index);
    if (Comparing.strEqual(entry.getSeverity().myName, newName, false)) {
      return false;
    }
  }
  return true;
}
/**
 * Jumps to the Colors &amp; Fonts settings page for the currently selected severity so
 * the user can edit its attributes there. Requires a non-null selection.
 */
private void editColorsAndFonts() {
  final String toConfigure = Objects.requireNonNull(getSelectedType()).getSeverity(null).myName;
  // Optionally accept and close this dialog before the settings page is shown.
  if (myCloseDialogWhenSettingsShown) {
    doOKAction();
  }
  myOptionsList.clearSelection();
  final DataContext dataContext = DataManager.getInstance().getDataContext(myPanel);
  selectOrEditColor(dataContext, toConfigure, InspectionColorSettingsPage.class);
}
/**
 * Rebuilds the list model from all severities registered in {@code mySeverityRegistrar}
 * and selects the entry matching {@code severity}, or the first entry when no match
 * is found (and the list is non-empty).
 *
 * @param severity severity to preselect; may be null
 */
private void fillList(final @Nullable HighlightSeverity severity) {
  final List<SeverityBasedTextAttributes> registered =
    new ArrayList<>(SeverityUtil.getRegisteredHighlightingInfoTypes(mySeverityRegistrar));
  final DefaultListModel<SeverityBasedTextAttributes> model = new DefaultListModel<>();
  SeverityBasedTextAttributes toSelect = null;
  for (final SeverityBasedTextAttributes candidate : registered) {
    model.addElement(candidate);
    if (candidate.getSeverity().equals(severity)) {
      toSelect = candidate;
    }
  }
  if (toSelect == null && !registered.isEmpty()) {
    toSelect = registered.get(0);
  }
  myOptionsList.setModel(model);
  myOptionsList.setSelectedValue(toSelect, true);
}
/**
 * Pushes the edits currently held by the color options panel back into {@code info}.
 * The panel state is first applied to a scratch description, then copied into the
 * entry's attributes via an XML externalize/read round-trip.
 *
 * @param info list entry to update; ignored when null
 */
private void apply(SeverityBasedTextAttributes info) {
  if (info == null) {
    return;
  }
  // Scratch description backed by fresh TextAttributes; the options panel writes into it.
  MyTextAttributesDescription description = new MyTextAttributesDescription(info.getType().toString(), null, new TextAttributes(), info.getType().getAttributesKey());
  myOptionsPanel.apply(description, null);
  // Element name is irrelevant — it only carries the serialized attribute values.
  Element textAttributes = new Element("temp");
  try {
    description.getTextAttributes().writeExternal(textAttributes);
    info.getAttributes().readExternal(textAttributes);
  }
  catch (Exception e) {
    LOG.error(e);
  }
}
/**
 * Loads {@code info} into the color options panel (the inverse of {@link #apply}):
 * the entry's attributes are copied into a description via an XML externalize/read
 * round-trip, then the panel is reset to show that description.
 *
 * @param info list entry to display; ignored when null
 */
private void reset(SeverityBasedTextAttributes info) {
  if (info == null) {
    return;
  }
  final MyTextAttributesDescription description =
    new MyTextAttributesDescription(info.getType().toString(), null, info.getAttributes(), info.getType().getAttributesKey());
  // Element name is irrelevant — it only carries the serialized attribute values.
  @NonNls Element textAttributes = new Element("temp");
  try {
    info.getAttributes().writeExternal(textAttributes);
    description.getTextAttributes().readExternal(textAttributes);
  }
  catch (Exception e) {
    LOG.error(e);
  }
  myOptionsPanel.reset(description);
}
/**
 * Commits the dialog: applies pending edits to the selected entry, registers every
 * non-default severity from the list (creating or recoloring as needed), unregisters
 * severities that were removed from the list, and stores the new severity order.
 */
@Override
protected void doOKAction() {
  apply(myOptionsList.getSelectedValue());
  // Starts as the full set of registered severities; entries still present in the list
  // are removed below, so whatever remains afterwards must be unregistered.
  final Collection<SeverityBasedTextAttributes> infoTypes =
    new HashSet<>(SeverityUtil.getRegisteredHighlightingInfoTypes(mySeverityRegistrar));
  final ListModel listModel = myOptionsList.getModel();
  final List<HighlightSeverity> order = new ArrayList<>();
  // Walk bottom-to-top, so `order` lists severities from the last list row to the first.
  // NOTE(review): presumably the registrar expects ascending priority — confirm.
  for (int i = listModel.getSize() - 1; i >= 0; i--) {
    SeverityBasedTextAttributes info = (SeverityBasedTextAttributes)listModel.getElementAt(i);
    order.add(info.getSeverity());
    if (!SeverityRegistrar.isDefaultSeverity(info.getSeverity())) {
      infoTypes.remove(info);
      final Color stripeColor = info.getAttributes().getErrorStripeColor();
      final boolean exists = mySeverityRegistrar.getSeverity(info.getSeverity().getName()) != null;
      if (exists) {
        // Already registered: just update the stripe color on the existing key.
        info.getType().getAttributesKey().getDefaultAttributes().setErrorStripeColor(stripeColor);
      } else {
        // New severity: rebuild the key with default attributes carrying the stripe color.
        HighlightInfoType.HighlightInfoTypeImpl type = info.getType();
        TextAttributesKey key = type.getAttributesKey();
        final TextAttributes defaultAttributes = key.getDefaultAttributes().clone();
        defaultAttributes.setErrorStripeColor(stripeColor);
        key = TextAttributesKey.createTextAttributesKey(key.getExternalName(), defaultAttributes);
        type = new HighlightInfoType.HighlightInfoTypeImpl(type.getSeverity(null), key);
        info = new SeverityBasedTextAttributes(info.getAttributes(), type);
      }
      mySeverityRegistrar.registerSeverity(info, stripeColor != null ? stripeColor : LightColors.YELLOW);
    }
  }
  // Anything left over was deleted from the list by the user.
  for (SeverityBasedTextAttributes info : infoTypes) {
    mySeverityRegistrar.unregisterSeverity(info.getSeverity());
  }
  mySeverityRegistrar.setOrder(order);
  super.doOKAction();
}
/** @return the dialog's main panel (severities list on the left, editor card on the right). */
@Override
@Nullable
protected JComponent createCenterPanel() {
  return myPanel;
}
/**
 * @return the highlight info type of the currently selected list entry, or null when
 *         nothing is selected
 */
@Nullable
public HighlightInfoType getSelectedType() {
  final SeverityBasedTextAttributes selected = myOptionsList.getSelectedValue();
  if (selected == null) {
    return null;
  }
  return selected.getType();
}
/**
 * TextAttributesDescription used to shuttle attribute values between the color options
 * panel and a SeverityBasedTextAttributes entry. Error stripe editing is always enabled.
 */
private static class MyTextAttributesDescription extends TextAttributesDescription {
  MyTextAttributesDescription(final String name,
                              final String group,
                              final TextAttributes attributes,
                              final TextAttributesKey type) {
    super(name, group, attributes, type, null, null, null);
  }
  @Override
  public boolean isErrorStripeEnabled() {
    return true;
  }
  // NOTE(review): this override only delegates to super and appears redundant — verify
  // the superclass method's visibility/behavior before removing it.
  @Override
  public TextAttributes getTextAttributes() {
    return super.getTextAttributes();
  }
}
}
| |
/**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.lipstick.model.operators;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.persistence.CascadeType;
import javax.persistence.CollectionTable;
import javax.persistence.ElementCollection;
import javax.persistence.Embeddable;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.Lob;
import javax.persistence.OneToMany;
import javax.persistence.Transient;
import org.apache.pig.parser.ParserException;
import org.codehaus.jackson.annotate.JsonTypeInfo;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.netflix.lipstick.model.Utils;
import com.netflix.lipstick.model.operators.elements.JoinExpression;
import com.netflix.lipstick.model.operators.elements.SchemaElement;
/**
* Base Lipstick model object for logical operators.
*
* @author jmagnusson
*
*/
@Entity
@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
// Jackson embeds the concrete class name as an "@class" property so operator subtypes
// round-trip through JSON.
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.PROPERTY, property = "@class")
public class P2jLogicalRelationalOperator {

    /** Embeddable join descriptor: per-input join expressions plus strategy and type. */
    @Embeddable
    public static class Join {
        // Map values wrap the raw List<String> expressions; key semantics (presumably
        // the join input's alias) are not visible here — confirm against callers.
        private Map<String, JoinExpression> expression;
        private String strategy;
        private String type;

        /**
         * Creates a default Join object.
         */
        public Join() {
        }

        /**
         * Creates a Join object with the given strategy, type, and expression map.
         *
         * @param strategy join strategy name
         * @param type join type name
         * @param expression raw expression lists, each wrapped into a JoinExpression
         */
        public Join(String strategy, String type, Map<String, List<String>> expression) {
            this.strategy = strategy;
            this.type = type;
            this.expression = Maps.newHashMap();
            for (Entry<String, List<String>> e : expression.entrySet()) {
                this.expression.put(e.getKey(), new JoinExpression(e.getValue()));
            }
        }

        @OneToMany(cascade = CascadeType.ALL)
        public Map<String, JoinExpression> getExpression() {
            return expression;
        }

        public void setExpression(Map<String, JoinExpression> expression) {
            this.expression = expression;
        }

        public String getStrategy() {
            return strategy;
        }

        public void setStrategy(String strategy) {
            this.strategy = strategy;
        }

        public String getType() {
            return type;
        }

        public void setType(String type) {
            this.type = type;
        }
    }

    /** Embeddable script location: source file, line number, and macro expansion stack. */
    @Embeddable
    public static class Location {
        private String filename = null;
        private Integer line = null;
        private List<String> macro = null;

        /**
         * Creates a default Location object.
         */
        public Location() {
        }

        /**
         * Creates a Location object with the given line, filename, and macro information.
         *
         * @param line line number within the script
         * @param filename script file name
         * @param macro macro names enclosing this location
         */
        public Location(Integer line, String filename, List<String> macro) {
            this.line = line;
            this.filename = filename;
            this.macro = macro;
        }

        public String getFilename() {
            return filename;
        }

        public void setFilename(String filename) {
            this.filename = filename;
        }

        public Integer getLine() {
            return line;
        }

        public void setLine(Integer line) {
            this.line = line;
        }

        @ElementCollection
        @CollectionTable(name = "StringCollection")
        public List<String> getMacro() {
            return macro;
        }

        public void setMacro(List<String> macro) {
            this.macro = macro;
        }
    }

    /** Embeddable map-reduce stage info: the Hadoop job id and the step type. */
    @Embeddable
    public static class MRStage {
        private String jobId = null;
        private String stepType = null;

        /**
         * Creates a default MRStage object.
         */
        public MRStage() {
        }

        /**
         * Creates a MRStage object with the given jobId and stepType.
         *
         * @param jobId Hadoop job identifier
         * @param stepType step type label (semantics defined by callers)
         */
        public MRStage(String jobId, String stepType) {
            this.jobId = jobId;
            this.stepType = stepType;
        }

        public String getJobId() {
            return jobId;
        }

        public void setJobId(String jobId) {
            this.jobId = jobId;
        }

        public String getStepType() {
            return stepType;
        }

        public void setStepType(String stepType) {
            this.stepType = stepType;
        }
    }

    private String alias;
    private long id;
    private Location location;
    private MRStage mapReduce;
    private String operator;
    // uids of predecessor/successor operators in the plan graph.
    private List<String> predecessors;
    // Structured schema is transient; only schemaString is persisted (see getters below).
    private List<SchemaElement> schema;
    private String schemaString;
    private List<String> successors;
    private String uid;

    public String getAlias() {
        return alias;
    }

    @Id
    @GeneratedValue
    public long getId() {
        return id;
    }

    public Location getLocation() {
        return location;
    }

    public MRStage getMapReduce() {
        return mapReduce;
    }

    public String getOperator() {
        return operator;
    }

    @ElementCollection
    public List<String> getPredecessors() {
        return predecessors;
    }

    // Not persisted — derived from schemaString on set (see setSchemaString).
    @Transient
    public List<SchemaElement> getSchema() {
        return schema;
    }

    // Stored as a LOB because schema strings can be arbitrarily long.
    @Lob
    public String getSchemaString() {
        return schemaString;
    }

    @ElementCollection
    public List<String> getSuccessors() {
        return successors;
    }

    public String getUid() {
        return uid;
    }

    public void setAlias(String alias) {
        this.alias = alias;
    }

    public void setId(long id) {
        this.id = id;
    }

    /**
     * Creates a Location object from the line, filename, and macro. Assigns it as the
     * P2jLogicalRelationalOperator's location field.
     *
     * @param line line number within the script
     * @param filename script file name
     * @param macro macro names enclosing this location
     */
    public void setLocation(int line, String filename, List<String> macro) {
        setLocation(new Location(line, filename, macro));
    }

    public void setLocation(Location location) {
        this.location = location;
    }

    public void setMapReduce(MRStage mapReduce) {
        this.mapReduce = mapReduce;
    }

    /**
     * Creates a MRStage object from the jobId and stepType. Assigns it as the
     * P2jLogicalRelationalOperator's mapReduce field.
     *
     * @param jobId Hadoop job identifier
     * @param stepType step type label
     */
    public void setMapReduce(String jobId, String stepType) {
        setMapReduce(new MRStage(jobId, stepType));
    }

    public void setOperator(String operator) {
        this.operator = operator;
    }

    public void setPredecessors(List<String> predecessors) {
        this.predecessors = predecessors;
    }

    /**
     * Sets the P2jLogicalRelationalOperator's schema to the passed in schema,
     * or to any empty list if the passed in schema is null.
     *
     * @param schema schema elements, or null for "no schema"
     */
    public void setSchema(List<SchemaElement> schema) {
        if (schema == null) {
            schema = Lists.newArrayList();
        }
        this.schema = schema;
    }

    /**
     * Sets the P2jLogicalRelationalOperator's schemaString and derives the structured
     * (transient) schema from it.
     *
     * @param schemaString Pig schema string; null clears the schema to an empty list
     */
    public void setSchemaString(String schemaString) {
        if (schemaString != null) {
            try {
                setSchema(Utils.processSchema(schemaString));
            } catch (ParserException e) {
                // NOTE(review): parse failures are swallowed (schema left unchanged) and
                // only printed to stderr — consider proper logging.
                e.printStackTrace();
            }
            // NOTE(review): dots are rewritten to underscores before storing; presumably
            // for downstream consumers that treat '.' specially — confirm the motivation.
            this.schemaString = schemaString.replace(".", "_");
        } else {
            setSchema(null);
        }
    }

    public void setSuccessors(List<String> successors) {
        this.successors = successors;
    }

    public void setUid(String uid) {
        this.uid = uid;
    }
}
| |
package cz.metacentrum.perun.webgui.tabs.groupstabs;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.user.cellview.client.CellTable;
import com.google.gwt.user.client.ui.*;
import cz.metacentrum.perun.webgui.client.PerunWebSession;
import cz.metacentrum.perun.webgui.client.UiElements;
import cz.metacentrum.perun.webgui.client.localization.ButtonTranslation;
import cz.metacentrum.perun.webgui.client.mainmenu.MainMenu;
import cz.metacentrum.perun.webgui.client.resources.*;
import cz.metacentrum.perun.webgui.json.GetEntityById;
import cz.metacentrum.perun.webgui.json.JsonCallbackEvents;
import cz.metacentrum.perun.webgui.json.JsonUtils;
import cz.metacentrum.perun.webgui.json.extSourcesManager.GetGroupExtSources;
import cz.metacentrum.perun.webgui.json.extSourcesManager.GetVoExtSources;
import cz.metacentrum.perun.webgui.json.extSourcesManager.RemoveExtSource;
import cz.metacentrum.perun.webgui.model.ExtSource;
import cz.metacentrum.perun.webgui.model.Group;
import cz.metacentrum.perun.webgui.model.VirtualOrganization;
import cz.metacentrum.perun.webgui.tabs.*;
import cz.metacentrum.perun.webgui.tabs.vostabs.AddVoExtSourceTabItem;
import cz.metacentrum.perun.webgui.widgets.CustomButton;
import cz.metacentrum.perun.webgui.widgets.ExtendedSuggestBox;
import cz.metacentrum.perun.webgui.widgets.TabMenu;
import java.util.ArrayList;
import java.util.Map;
/**
* Group ext. sources management page
*
* @author Pavel Zlamal <256627@mail.muni.cz>
* @author Vaclav Mach <374430@mail.muni.cz>
*/
/**
 * Group ext. sources management page: lists the external sources assigned to a group
 * and lets VO admins add or remove them.
 */
public class GroupExtSourcesTabItem implements TabItem, TabItemWithUrl {

	/**
	 * Perun web session
	 */
	private PerunWebSession session = PerunWebSession.getInstance();

	/**
	 * Content widget - should be simple panel
	 */
	private SimplePanel contentWidget = new SimplePanel();

	/**
	 * Title widget
	 */
	private Label titleWidget = new Label("Loading group ext sources");

	// data
	private Group group;
	private int groupId;
	private int voId;

	/**
	 * Creates a tab instance
	 *
	 * @param group group to manage ext sources for
	 */
	public GroupExtSourcesTabItem(Group group){
		this.group = group;
		this.groupId = group.getId();
		this.voId = group.getVoId();
	}

	/**
	 * Creates a tab instance; the Group object is loaded asynchronously by id,
	 * so callers must wait for {@link #isPrepared()} before drawing.
	 *
	 * @param groupId id of the group to manage ext sources for
	 */
	public GroupExtSourcesTabItem(int groupId){
		this.groupId = groupId;
		JsonCallbackEvents events = new JsonCallbackEvents(){
			public void onFinished(JavaScriptObject jso) {
				group = jso.cast();
			}
		};
		new GetEntityById(PerunEntity.GROUP, groupId, events).retrieveData();
	}

	/** @return true once the Group object has been loaded. */
	public boolean isPrepared(){
		return group != null;
	}

	@Override
	public boolean isRefreshParentOnClose() {
		return false;
	}

	@Override
	public void onClose() {
	}

	/**
	 * Builds the tab UI: a menu with refresh/add/remove buttons and a filter box,
	 * plus the table of the group's external sources. Add/remove are only enabled
	 * for VO admins.
	 *
	 * @return the tab's content widget
	 */
	public Widget draw() {

		this.titleWidget.setText(Utils.getStrippedStringWithEllipsis(group.getName())+": "+"ext sources");

		// main panel
		VerticalPanel vp = new VerticalPanel();
		vp.setSize("100%", "100%");

		// HORIZONTAL MENU
		TabMenu menu = new TabMenu();
		menu.addWidget(UiElements.getRefreshButton(this));

		// callback that fills the table with the group's ext sources
		final GetGroupExtSources extSources = new GetGroupExtSources(groupId);

		// refresh table event
		final JsonCallbackEvents events = JsonCallbackEvents.refreshTableEvents(extSources);

		// create ext source button
		CustomButton addButton = TabMenu.getPredefinedButton(ButtonType.ADD, true, ButtonTranslation.INSTANCE.addExtSource(), new ClickHandler() {
			public void onClick(ClickEvent event) {
				session.getTabManager().addTabToCurrentTab(new AddGroupExtSourceTabItem(groupId), true);
			}
		});
		if (session.isVoAdmin(voId)) {
			menu.addWidget(addButton);
		}

		final CustomButton removeButton = TabMenu.getPredefinedButton(ButtonType.REMOVE, ButtonTranslation.INSTANCE.removeExtSource());
		removeButton.addClickHandler(new ClickHandler() {
			public void onClick(ClickEvent event) {
				final ArrayList<ExtSource> extSourcesToRemove = extSources.getTableSelectedList();
				String text = "Following external sources will be removed from Group. You won't be able to import members from them anymore.";
				UiElements.showDeleteConfirm(extSourcesToRemove, text, new ClickHandler() {
					@Override
					public void onClick(ClickEvent clickEvent) {
						// TODO - SHOULD HAVE ONLY ONE CALLBACK TO CORE !!
						// Only the last request refreshes the table; the others just
						// disable the button while running.
						for (int i=0; i<extSourcesToRemove.size(); i++) {
							RemoveExtSource request;
							if (i == extSourcesToRemove.size()-1) {
								request = new RemoveExtSource(JsonCallbackEvents.disableButtonEvents(removeButton, events));
							} else {
								request = new RemoveExtSource(JsonCallbackEvents.disableButtonEvents(removeButton));
							}
							request.removeGroupExtSource(groupId, extSourcesToRemove.get(i).getId());
						}
					}
				});
			}
		});
		if (session.isVoAdmin(voId)) {
			menu.addWidget(removeButton);
		}

		// authorization - enable buttons for vo admin only.
		if (!session.isVoAdmin(voId)) {
			addButton.setEnabled(false);
			removeButton.setEnabled(false);
			extSources.setCheckable(false);
		}

		menu.addFilterWidget(new ExtendedSuggestBox(extSources.getOracle()), new PerunSearchEvent() {
			@Override
			public void searchFor(String text) {
				extSources.filterTable(text);
			}
		}, "Filter external sources by name or type");

		// add menu to the main panel
		vp.add(menu);
		vp.setCellHeight(menu, "30px");

		CellTable<ExtSource> table = extSources.getTable();

		if (session.isVoAdmin(voId)) {
			// remove button is enabled only while rows are selected
			removeButton.setEnabled(false);
			JsonUtils.addTableManagedButton(extSources, table, removeButton);
		}

		table.addStyleName("perun-table");
		table.setWidth("100%");
		ScrollPanel sp = new ScrollPanel(table);
		sp.addStyleName("perun-tableScrollPanel");
		vp.add(sp);

		session.getUiElements().resizePerunTable(sp, 350, this);

		this.contentWidget.setWidget(vp);
		return getWidget();
	}

	public Widget getWidget() {
		return this.contentWidget;
	}

	public Widget getTitle() {
		return this.titleWidget;
	}

	public ImageResource getIcon() {
		return SmallIcons.INSTANCE.worldIcon();
	}

	@Override
	public int hashCode() {
		final int prime = 1601;
		int result = 1;
		result = prime * result + groupId;
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (obj == null || getClass() != obj.getClass()) {
			return false;
		}
		return groupId == ((GroupExtSourcesTabItem) obj).groupId;
	}

	public boolean multipleInstancesEnabled() {
		return false;
	}

	public void open() {
		session.getUiElements().getMenu().openMenu(MainMenu.GROUP_ADMIN);
		session.getUiElements().getBreadcrumbs().setLocation(group, "External sources", getUrlWithParameters());
		if(group != null){
			session.setActiveGroup(group);
			return;
		}
		session.setActiveGroupId(groupId);
	}

	public boolean isAuthorized() {
		// visible to VO admins/observers of the group's VO and to group admins
		return session.isVoAdmin(voId) || session.isVoObserver(voId) || session.isGroupAdmin(groupId);
	}

	public final static String URL = "ext-sources";

	public String getUrl()
	{
		return URL;
	}

	public String getUrlWithParameters() {
		return GroupsTabs.URL + UrlMapper.TAB_NAME_SEPARATOR + getUrl() + "?id=" + groupId;
	}

	/**
	 * Restores a tab instance from URL parameters.
	 *
	 * @param parameters must contain "id" — the GROUP id (was misleadingly named voId before)
	 */
	static public GroupExtSourcesTabItem load(Map<String, String> parameters) {
		int groupId = Integer.parseInt(parameters.get("id"));
		return new GroupExtSourcesTabItem(groupId);
	}
}
| |
//========================================================================
//
//File: ModelMergeTests.java
//
//Copyright 2005-2014 Mentor Graphics Corporation. All rights reserved.
//
//========================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//========================================================================
package com.mentor.nucleus.bp.model.compare.test;
import java.io.InputStream;
import java.util.List;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFileState;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Platform;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.widgets.TreeItem;
import org.eclipse.ui.PlatformUI;
import com.mentor.nucleus.bp.core.ActionNode_c;
import com.mentor.nucleus.bp.core.ActivityDiagramAction_c;
import com.mentor.nucleus.bp.core.ActivityEdge_c;
import com.mentor.nucleus.bp.core.ActivityNode_c;
import com.mentor.nucleus.bp.core.Association_c;
import com.mentor.nucleus.bp.core.Attribute_c;
import com.mentor.nucleus.bp.core.ClassAsAssociatedOneSide_c;
import com.mentor.nucleus.bp.core.ClassAsAssociatedOtherSide_c;
import com.mentor.nucleus.bp.core.ClassStateMachine_c;
import com.mentor.nucleus.bp.core.CorePlugin;
import com.mentor.nucleus.bp.core.ExternalEntity_c;
import com.mentor.nucleus.bp.core.InstanceStateMachine_c;
import com.mentor.nucleus.bp.core.LinkedAssociation_c;
import com.mentor.nucleus.bp.core.ModelClass_c;
import com.mentor.nucleus.bp.core.NewStateTransition_c;
import com.mentor.nucleus.bp.core.NoEventTransition_c;
import com.mentor.nucleus.bp.core.Ooaofooa;
import com.mentor.nucleus.bp.core.Operation_c;
import com.mentor.nucleus.bp.core.Package_c;
import com.mentor.nucleus.bp.core.PackageableElement_c;
import com.mentor.nucleus.bp.core.SemEvent_c;
import com.mentor.nucleus.bp.core.StateEventMatrixEntry_c;
import com.mentor.nucleus.bp.core.StateMachineEvent_c;
import com.mentor.nucleus.bp.core.StateMachineState_c;
import com.mentor.nucleus.bp.core.StateMachine_c;
import com.mentor.nucleus.bp.core.SystemModel_c;
import com.mentor.nucleus.bp.core.Transition_c;
import com.mentor.nucleus.bp.core.common.BridgePointPreferencesStore;
import com.mentor.nucleus.bp.core.common.ClassQueryInterface_c;
import com.mentor.nucleus.bp.core.common.NonRootModelElement;
import com.mentor.nucleus.bp.core.common.Transaction;
import com.mentor.nucleus.bp.core.inspector.ObjectElement;
import com.mentor.nucleus.bp.model.compare.ComparableTreeObject;
import com.mentor.nucleus.bp.model.compare.ITreeDifferencerProvider;
import com.mentor.nucleus.bp.model.compare.TreeDifference;
import com.mentor.nucleus.bp.model.compare.TreeDifferencer;
import com.mentor.nucleus.bp.model.compare.contentmergeviewer.ModelContentMergeViewer;
import com.mentor.nucleus.bp.model.compare.contentmergeviewer.SynchronizedTreeViewer;
import com.mentor.nucleus.bp.model.compare.contentmergeviewer.TextualAttributeCompareElementType;
import com.mentor.nucleus.bp.model.compare.providers.ModelCompareContentProvider;
import com.mentor.nucleus.bp.model.compare.providers.ObjectElementComparable;
import com.mentor.nucleus.bp.model.compare.providers.custom.AssociationComparable;
import com.mentor.nucleus.bp.test.TestUtil;
import com.mentor.nucleus.bp.test.common.BaseTest;
import com.mentor.nucleus.bp.test.common.CanvasTestUtils;
import com.mentor.nucleus.bp.test.common.CompareTestUtilities;
import com.mentor.nucleus.bp.test.common.GitUtil;
import com.mentor.nucleus.bp.test.common.TestingUtilities;
import com.mentor.nucleus.bp.test.common.ZipUtil;
import com.mentor.nucleus.bp.ui.canvas.Connector_c;
import com.mentor.nucleus.bp.ui.canvas.GraphicalElement_c;
import com.mentor.nucleus.bp.ui.canvas.Model_c;
import com.mentor.nucleus.bp.ui.canvas.Ooaofgraphics;
import com.mentor.nucleus.bp.ui.canvas.Shape_c;
public class ModelMergeTests extends BaseTest {
	// Root folder (inside the workspace instance location) into which the zipped test
	// git repositories are extracted by initialSetup().
	private String test_repositories = Platform.getInstanceLocation().getURL()
			.getFile()
			+ "/" + "test_repositories";
/*
* (non-Javadoc)
*
* @see com.mentor.nucleus.bp.test.common.BaseTest#initialSetup()
*/
	/*
	 * (non-Javadoc)
	 *
	 * @see com.mentor.nucleus.bp.test.common.BaseTest#initialSetup()
	 */
	// One-time setup: disables the empty-synchronous-message error preference and
	// extracts the zipped test repositories needed by the merge tests.
	@Override
	protected void initialSetup() throws Exception {
		CorePlugin
				.getDefault()
				.getPreferenceStore()
				.setValue(
						BridgePointPreferencesStore.ENABLE_ERROR_FOR_EMPTY_SYNCHRONOUS_MESSAGE,
						false);
		// Model repository location comes from the environment, falling back to the
		// build's default checkout location.
		String test_repository_location = System
				.getenv("XTUML_TEST_MODEL_REPOSITORY");
		if (test_repository_location == null
				|| test_repository_location.equals("")) {
			// use the default location
			test_repository_location = BaseTest.DEFAULT_XTUML_TEST_MODEL_REPOSITORY;
		}
		// Strip the last path segment to get the parent of the models repository.
		test_repository_location = new Path(test_repository_location)
				.removeLastSegments(1).toString();
		ZipUtil.unzipFileContents(
				test_repository_location + "/"
						+ "test_repositories"
						+ "/" + "204.zip",
				test_repositories);
		ZipUtil.unzipFileContents(
				test_repository_location + "/"
						+ "test_repositories"
						+ "/" + "dts0101054289.zip",
				test_repositories);
	}
	// Per-test setup: loads the shared HierarchyTestModel project into the workspace.
	@Override
	public void setUp() throws Exception {
		super.setUp();
		loadProject("HierarchyTestModel");
		project = getProjectHandle("HierarchyTestModel");
	}
	// Per-test teardown: closes any editors left open (without saving) so the next
	// test starts from a clean workbench.
	@Override
	public void tearDown() throws Exception {
		super.tearDown();
		PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage()
				.closeAllEditors(false);
	}
	/**
	 * Merges branch "slave" in repository dts0101042909, accepts all changes from
	 * right to left, and then verifies that no transition in any model root was left
	 * without a destination state or an event (unless it is a no-event transition).
	 */
	public void testMergeOfTransitionNoOrphanedTransitions() throws Exception {
		String projectName = "dts0101042909";
		// import git repository from models repo
		GitUtil.loadRepository(test_repositories
				+ "/" + projectName);
		// import test project
		GitUtil.loadProject(projectName, projectName);
		// merge the test branch
		GitUtil.mergeBranch("slave", projectName);
		// start the merge tool
		GitUtil.startMergeTool(projectName);
		// perform test
		CompareTestUtilities.copyAllNonConflictingChangesFromRightToLeft();
		CompareTestUtilities.copyConflictingChangesFromRightToLeft();
		// validate: no conflicting or incoming changes may remain after the copy
		assertTrue("Found conflicting changes remaining.", CompareTestUtilities
				.getConflictingChanges().size() == 0);
		assertTrue("Found incoming changes remaining.", CompareTestUtilities
				.getIncomingChanges().size() == 0);
		// save and close
		CompareTestUtilities.flushMergeEditor();
		PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage()
				.closeAllEditors(false);
		// validate
		// Look for orphaned transitions
		Ooaofooa[] roots = Ooaofooa.getInstancesUnderSystem(projectName);
		for (Ooaofooa root : roots) {
			Transition_c[] transitions = Transition_c.TransitionInstances(root);
			for (Transition_c transition : transitions) {
				// make sure there is a state and event associated
				// unless it is a no event transition
				NoEventTransition_c net = NoEventTransition_c
						.getOneSM_NETXNOnR507(transition);
				if (net == null) {
					NewStateTransition_c nst = NewStateTransition_c
							.getOneSM_NSTXNOnR507(transition);
					if (nst != null) {
						StateMachineState_c state = StateMachineState_c
								.getOneSM_STATEOnR503(StateEventMatrixEntry_c
										.getOneSM_SEMEOnR504(nst));
						assertNotNull(
								"Transition after merge did not have a destination state.",
								state);
						StateMachineEvent_c event = StateMachineEvent_c
								.getOneSM_EVTOnR525(SemEvent_c
										.getOneSM_SEVTOnR503(StateEventMatrixEntry_c
												.getOneSM_SEMEOnR504(nst)));
						assertNotNull(
								"Transition after merge did not have an event assigned.",
								event);
					}
				}
			}
		}
		// delete test project if no failures/errors
		// and reset the repository
		TestUtil.deleteProject(getProjectHandle(projectName));
	}
	/**
	 * Merges branch "slave" in repository dts0101042915, copies only the
	 * non-conflicting changes (one conflict is expected to remain), and verifies
	 * that no graphical element was copied without its semantic counterpart.
	 */
	public void testNoGraphicalElementCopiedWithoutSemanticCopy()
			throws Exception {
		String projectName = "dts0101042915";
		// import git repository from models repo
		GitUtil.loadRepository(test_repositories
				+ "/" + projectName);
		// import test project
		GitUtil.loadProject(projectName, projectName);
		// merge the test branch
		GitUtil.mergeBranch("slave", projectName);
		// start the merge tool
		GitUtil.startMergeTool(projectName);
		// perform test
		CompareTestUtilities.copyAllNonConflictingChangesFromRightToLeft();
		// validate: exactly one conflicting change must remain untouched
		assertTrue("Found conflicting changes remaining.", CompareTestUtilities
				.getConflictingChanges().size() == 1);
		assertTrue("Found incoming changes remaining.", CompareTestUtilities
				.getIncomingChanges().size() == 0);
		// save and close
		CompareTestUtilities.flushMergeEditor();
		PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage()
				.closeAllEditors(false);
		// validate
		BaseTest.dispatchEvents(0);
		// Look for orphaned graphics
		Ooaofooa[] roots = Ooaofooa.getInstancesUnderSystem(projectName);
		for (Ooaofooa root : roots) {
			Ooaofgraphics graphicsRoot = Ooaofgraphics
					.getInstance(root.getId());
			GraphicalElement_c[] elements = GraphicalElement_c
					.GraphicalElementInstances(graphicsRoot);
			for (GraphicalElement_c element : elements) {
				// make sure no graphical elements exist that have no
				// represented semantic
				assertTrue(
						"Found an orphaned graphical element after merging.",
						element.getRepresents() != null);
			}
		}
		// delete test project if no failures/errors
		// and reset the repository
		TestUtil.deleteProject(getProjectHandle(projectName));
	}
	/**
	 * Verifies that a merge containing only graphical differences still marks the
	 * merge editor's left side dirty as soon as it opens (repository dts0101054289).
	 */
	public void testGraphicalElementDifferencesOnlyCausesDirtyEditor() {
		String projectName = "dts0101054289";
		// import git repository from models repo
		GitUtil.loadRepository(test_repositories
				+ "/" + projectName);
		// import test project
		GitUtil.loadProject(projectName, projectName);
		// merge the test branch
		GitUtil.mergeBranch("slave", projectName);
		// start the merge tool
		GitUtil.startMergeTool(projectName);
		// drain all pending UI events so the editor finishes opening
		while(PlatformUI.getWorkbench().getDisplay().readAndDispatch());
		// check that the merge tool is dirty
		ModelContentMergeViewer viewer = ModelContentMergeViewer
				.getInstance(null);
		assertTrue("Graphical changes only did not dirty the editor on open.",
				viewer.internalIsLeftDirty());
		PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage()
				.closeAllEditors(false);
		TestUtil.deleteProject(getProjectHandle(projectName));
	}
	/**
	 * Merges repository dts0101009925 where an instance and a class state machine
	 * were each added in separate branches, copies all changes across both affected
	 * files, and verifies the merged model contains exactly one ISM and one CSM,
	 * each with four states and four transitions.
	 */
	public void testMergeWithStateMachineAddedInSeparateBranches()
			throws Exception {
		String projectName = "dts0101009925";
		// import git repository from models repo
		GitUtil.loadRepository(test_repositories
				+ "/" + projectName);
		// import test project
		GitUtil.loadProject(projectName, projectName);
		// merge the test branch
		GitUtil.mergeBranch("slave", projectName);
		// start the merge tool
		GitUtil.startMergeTool(projectName);
		// perform test
		CompareTestUtilities.copyAllNonConflictingChangesFromRightToLeft();
		// validate
		assertTrue("Found conflicting changes remaining.", CompareTestUtilities
				.getConflictingChanges().size() == 0);
		assertTrue("Found incoming changes remaining.", CompareTestUtilities
				.getIncomingChanges().size() == 0);
		// save but keep the merge session open (false) — a second file follows
		CompareTestUtilities.flushMergeEditor(false);
		// switch to next editor and copy all changes
		GitUtil.switchToFile("InstanceStateMachine.xtuml");
		CompareTestUtilities.copyAllNonConflictingChangesFromRightToLeft();
		// validate
		assertTrue("Found conflicting changes remaining.", CompareTestUtilities
				.getConflictingChanges().size() == 0);
		assertTrue("Found incoming changes remaining.", CompareTestUtilities
				.getIncomingChanges().size() == 0);
		// save and close
		CompareTestUtilities.flushMergeEditor();
		PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage()
				.closeAllEditors(false);
		// validate
		BaseTest.dispatchEvents(0);
		// Verify that four states exist in each state machine
		SystemModel_c system = getSystemModel(projectName);
		// just need to account for 8 states and 4 transitions
		// and as an extra check make sure there are only one ISM
		// and ASM in the test package
		Package_c pkg = Package_c.getOneEP_PKGOnR1401(system);
		assertNotNull(pkg);
		InstanceStateMachine_c[] isms = InstanceStateMachine_c
				.getManySM_ISMsOnR518(ModelClass_c
						.getManyO_OBJsOnR8001(PackageableElement_c
								.getManyPE_PEsOnR8000(pkg)));
		ClassStateMachine_c[] csms = ClassStateMachine_c
				.getManySM_ASMsOnR519(ModelClass_c
						.getManyO_OBJsOnR8001(PackageableElement_c
								.getManyPE_PEsOnR8000(pkg)));
		assertTrue(
				"Test data is not valid, should only be one instance state machine.",
				isms.length == 1);
		assertTrue(
				"Test data is not valid, should only be one class state machine.",
				csms.length == 1);
		StateMachineState_c[] states = StateMachineState_c
				.getManySM_STATEsOnR501(StateMachine_c
						.getManySM_SMsOnR517(isms));
		assertTrue(
				"Did not find a valid number of states in the instance state machine.",
				states.length == 4);
		states = StateMachineState_c.getManySM_STATEsOnR501(StateMachine_c
				.getManySM_SMsOnR517(csms));
		assertTrue(
				"Did not find a valid number of states in the class state machine.",
				states.length == 4);
		Transition_c[] transitions = Transition_c
				.getManySM_TXNsOnR505(StateMachine_c.getManySM_SMsOnR517(isms));
		assertTrue(
				"Did not find a valid number of transitions in the instance state machine.",
				transitions.length == 4);
		transitions = Transition_c.getManySM_TXNsOnR505(StateMachine_c
				.getManySM_SMsOnR517(csms));
		assertTrue(
				"Did not find a valid number of transitions in the class state machine.",
				transitions.length == 4);
		// delete test project if no failures/errors
		// and reset the repository
		TestUtil.deleteProject(getProjectHandle(projectName));
	}
/*
 * Verifies that a purely graphical edit (dragging a shape on the canvas)
 * produces only graphical differences in the model compare editor when the
 * element is compared with its local-history copy.
 */
public void testNoGraphicalDataInCompareEditor() throws CoreException {
// build a project containing a package with one external entity
TestingUtilities.createProject("testNoGraphics");
m_sys = getSystemModel("testNoGraphics");
TestUtil.executeInTransaction(m_sys, "Newpackage", new Object[0]);
Package_c testPackage = Package_c.getOneEP_PKGOnR1401(m_sys);
TestUtil.executeInTransaction(testPackage, "setName",
new Object[] { "testNoGraphics" });
modelRoot = (Ooaofooa) testPackage.getModelRoot();
TestUtil.executeInTransaction(testPackage, "Newexternalentity",
new Object[0]);
BaseTest.dispatchEvents(0);
final ExternalEntity_c ee = ExternalEntity_c
.ExternalEntityInstance(modelRoot);
// locate the graphical element that represents the new external entity
GraphicalElement_c gElem = GraphicalElement_c.getOneGD_GEOnR1(Model_c
.ModelInstances(Ooaofgraphics.getInstance(modelRoot.getId())),
new ClassQueryInterface_c() {
@Override
public boolean evaluate(Object candidate) {
return ((GraphicalElement_c) candidate).getRepresents() == ee;
}
});
// drag the shape 100x100 pixels - a graphics-only modification
CanvasTestUtils.openCanvasEditor(testPackage);
Point shapeCenter = CanvasTestUtils.getShapeCenter(Shape_c
.getOneGD_SHPOnR2(gElem));
CanvasTestUtils.doMousePress(shapeCenter.x, shapeCenter.y);
CanvasTestUtils.doMouseMove(shapeCenter.x + 100, shapeCenter.y + 100);
CanvasTestUtils
.doMouseRelease(shapeCenter.x + 100, shapeCenter.y + 100);
// materialize the most recent local-history state as a sibling file
IFile file = ee.getFile();
IFileState[] history = file.getHistory(new NullProgressMonitor());
IFileState state = history[0];
InputStream contents = state.getContents();
// NOTE(review): the history stream is handed to IFile.create() and never
// explicitly closed here - confirm the platform closes it on completion
IFile copy = file.getProject().getFile(file.getName());
copy.create(contents, true, new NullProgressMonitor());
// compare the current contents against the history copy
CompareTestUtilities.compareElementWithLocalHistory(file, copy);
ModelContentMergeViewer viewer = ModelContentMergeViewer
.getInstance(null);
List<TreeDifference> leftDifferences = viewer.getDifferencer()
.getLeftDifferences();
// every reported difference must be graphical; anything else is a failure
for (TreeDifference difference : leftDifferences) {
if (!SynchronizedTreeViewer.differenceIsGraphical(difference)) {
fail("Difference found in editor when only graphical differences should be found.");
}
}
}
/*
 * Verifies that the graphical part of an element deletion is merged
 * automatically: the graphical connector must survive while only a rename
 * is merged, and must be removed once the semantic edge removal is merged.
 */
public void testAutomaticGraphicalMergeElementDeletion() throws Exception {
String projectName = "AutomaticGraphicalMerge";
// import git repository from models repo
GitUtil.loadRepository(test_repositories
+ "/" + projectName);
// import test project
GitUtil.loadProject(projectName, projectName);
// merge the test branch
GitUtil.mergeBranch("slave", projectName);
// start the merge tool
GitUtil.startMergeTool(projectName);
// copy name change
m_sys = getSystemModel(projectName);
Package_c pkg = Package_c.getOneEP_PKGOnR1401(m_sys);
// locate the renamed action node on the incoming side of the merge
ActivityDiagramAction_c an = ActivityDiagramAction_c.getOneA_GAOnR1107(
ActionNode_c.getManyA_ACTsOnR1105(ActivityNode_c
.getManyA_NsOnR8001(PackageableElement_c
.getManyPE_PEsOnR8000(pkg))),
new ClassQueryInterface_c() {
@Override
public boolean evaluate(Object candidate) {
return ((ActivityDiagramAction_c) candidate).getName()
.equals("ActionOneRenamed");
}
});
CompareTestUtilities.selectElementInTree(false, an);
CompareTestUtilities.mergeSelection();
CompareTestUtilities.flushMergeEditor();
// make sure that the connection was not removed
// as the change to remove the edge was not merged
m_sys = getSystemModel(projectName);
pkg = Package_c.getOneEP_PKGOnR1401(m_sys);
an = ActivityDiagramAction_c.getOneA_GAOnR1107(ActionNode_c
.getManyA_ACTsOnR1105(ActivityNode_c
.getManyA_NsOnR8001(PackageableElement_c
.getManyPE_PEsOnR8000(pkg))),
new ClassQueryInterface_c() {
@Override
public boolean evaluate(Object candidate) {
return ((ActivityDiagramAction_c) candidate).getName()
.equals("ActionOne");
}
});
assertNotNull(an);
ActivityEdge_c edge = ActivityEdge_c.getOneA_EOnR1104(ActivityNode_c
.getOneA_NOnR1105(ActionNode_c.getOneA_ACTOnR1107(an)));
assertNotNull(
"The edge was removed during merge, the test data is not valid.",
edge);
Ooaofgraphics graphicsRoot = Ooaofgraphics.getInstance(an
.getModelRoot().getId());
Connector_c connector = Connector_c.ConnectorInstance(graphicsRoot);
assertNotNull(
"The graphical connector was removed even though the semantic elements was not.",
connector);
GitUtil.startMergeTool(projectName);
// now copy the semantic removal
CompareTestUtilities.selectElementInTree(true, edge);
CompareTestUtilities.mergeSelection();
CompareTestUtilities.flushMergeEditor();
m_sys = getSystemModel(projectName);
pkg = Package_c.getOneEP_PKGOnR1401(m_sys);
an = ActivityDiagramAction_c.getOneA_GAOnR1107(ActionNode_c
.getManyA_ACTsOnR1105(ActivityNode_c
.getManyA_NsOnR8001(PackageableElement_c
.getManyPE_PEsOnR8000(pkg))),
new ClassQueryInterface_c() {
@Override
public boolean evaluate(Object candidate) {
return ((ActivityDiagramAction_c) candidate).getName()
.equals("ActionOneRenamed");
}
});
// the semantic edge must now be gone ...
edge = ActivityEdge_c.getOneA_EOnR1104(ActivityNode_c
.getOneA_NOnR1105(ActionNode_c.getOneA_ACTOnR1107(an)));
assertNull(
"The edge was not removed during merge, the test data is not valid.",
edge);
// ... and the graphical connector must have been removed with it
connector = Connector_c.ConnectorInstance(graphicsRoot);
assertNull(
"The graphical connector was not removed even though the semantic elements was.",
connector);
// delete test project if no failures/errors
// and reset the repository
TestUtil.deleteProject(getProjectHandle(projectName));
}
/*
 * Verifies that the graphical part of an element addition is merged
 * automatically: while only a rename is merged no connector may appear,
 * and once the semantic edge addition is merged the graphical connector
 * must appear with it.
 */
public void testAutomaticGraphicalMergeElementAdded() throws Exception {
String projectName = "AutomaticGraphicalMergeAddition";
// import git repository from models repo
GitUtil.loadRepository(test_repositories
+ "/" + projectName);
// import test project
GitUtil.loadProject(projectName, projectName);
// merge the test branch
GitUtil.mergeBranch("slave", projectName);
// start the merge tool
GitUtil.startMergeTool(projectName);
// copy name change
m_sys = getSystemModel(projectName);
Package_c pkg = Package_c.getOneEP_PKGOnR1401(m_sys);
ActivityEdge_c edge = ActivityEdge_c.ActivityEdgeInstance(pkg
.getModelRoot());
CompareTestUtilities.selectElementInTree(true, edge);
CompareTestUtilities.mergeSelection();
CompareTestUtilities.flushMergeEditor();
// make sure that the connection was not added
// as the change to add the edge was not merged
m_sys = getSystemModel(projectName);
pkg = Package_c.getOneEP_PKGOnR1401(m_sys);
edge = ActivityEdge_c.ActivityEdgeInstance(pkg.getModelRoot());
assertNull(
"The edge was added during merge, the test data is not valid.",
edge);
Ooaofgraphics graphicsRoot = Ooaofgraphics.getInstance(pkg
.getModelRoot().getId());
Connector_c connector = Connector_c.ConnectorInstance(graphicsRoot);
assertNull(
"The graphical connector was added even though the semantic elements was not.",
connector);
GitUtil.startMergeTool(projectName);
// now copy the semantic addition
// locate the incoming edge on the right-hand compare root
ModelContentMergeViewer viewer = ModelContentMergeViewer
.getInstance(null);
edge = ActivityEdge_c
.ActivityEdgeInstance(viewer.getRightCompareRoot());
CompareTestUtilities.selectElementInTree(false, edge);
CompareTestUtilities.mergeSelection();
CompareTestUtilities.flushMergeEditor();
m_sys = getSystemModel(projectName);
pkg = Package_c.getOneEP_PKGOnR1401(m_sys);
// both the semantic edge and its graphical connector must now exist
ActivityEdge_c[] edges = ActivityEdge_c.ActivityEdgeInstances(pkg
.getModelRoot());
assertTrue(
"The edge was not added during merge, the test data is not valid.",
edges.length == 2);
Connector_c[] connectors = Connector_c.ConnectorInstances(graphicsRoot);
assertTrue(
"The graphical connector was not added even though the semantic elements was.",
connectors.length == 2);
// delete test project if no failures/errors
// and reset the repository
TestUtil.deleteProject(getProjectHandle(projectName));
}
/*
 * Regression test for issue dts0101009924: merging a branch that adds
 * states and transitions, then saving and closing the merge editor, must
 * complete without raising any exception.
 */
public void testAddStatesAndTransitionsNoExceptions() throws Exception {
    final String project = "dts0101009924";
    // fetch the test repository and bring the project into the workspace
    GitUtil.loadRepository(test_repositories + "/" + project);
    GitUtil.loadProject(project, project);
    // merge the test branch and launch the merge tool on the result
    GitUtil.mergeBranch("slave", project);
    GitUtil.startMergeTool(project);
    // saving/closing the editor is the whole test; an exception fails it
    CompareTestUtilities.flushMergeEditor();
    // clean up the workspace and reset the repository state
    TestUtil.deleteProject(getProjectHandle(project));
}
/*
 * Regression test for issue dts0101039702: connector text must not
 * disappear when switching between branches.  On both the "slave" and
 * "master" branches every graphical connector must still represent a
 * non-orphaned semantic element.
 */
public void testConnectorTextDoesNotDisappear() throws Exception {
    String projectName = "dts0101039702";
    // import git repository from models repo
    GitUtil.loadRepository(test_repositories
            + "/" + projectName);
    // import test project
    GitUtil.loadProject(projectName, projectName);
    // switch to slave and make sure the provision and
    // requirement have a valid represents
    GitUtil.switchToBranch("slave", projectName);
    String modelRootId = "/" + projectName + "/" + Ooaofooa.MODELS_DIRNAME
            + "/" + projectName + "/" + "Components" + "/" + "Components"
            + "." + Ooaofooa.MODELS_EXT;
    assertConnectorsNotOrphaned(modelRootId);
    // switch to master and make sure the text is not
    // missing
    GitUtil.switchToBranch("master", projectName);
    assertConnectorsNotOrphaned(modelRootId);
    // delete test project if no failures/errors
    // and reset the repository
    TestUtil.deleteProject(getProjectHandle(projectName));
}

/*
 * Asserts that the graphics root identified by modelRootId contains at
 * least one connector and that every connector's graphical element
 * represents a non-orphaned model element.  (Extracted from the two
 * identical verification loops that testConnectorTextDoesNotDisappear
 * previously duplicated for each branch.)
 */
private void assertConnectorsNotOrphaned(String modelRootId) {
    Connector_c[] connectors = Connector_c.ConnectorInstances(Ooaofgraphics
            .getInstance(modelRootId));
    assertTrue("Could not locate connectors in the model.",
            connectors.length > 0);
    for (Connector_c connector : connectors) {
        GraphicalElement_c ge = GraphicalElement_c
                .getOneGD_GEOnR2(connector);
        NonRootModelElement nrme = (NonRootModelElement) ge.getRepresents();
        assertFalse("Found an orphaned connector represents value.",
                nrme.isOrphaned());
    }
}
/*
 * Verifies that editing a class description through the textual compare
 * dialog writes the new value back to the compared model element.
 */
public void testValueModificationOfDescriptionThroughCompareDialog()
throws CoreException {
modelRoot = Ooaofooa.getInstance(Ooaofooa.createModelRootId(
getProject(), "Classes", true));
ModelClass_c clazz = ModelClass_c.ModelClassInstance(modelRoot);
// give the class a description so a difference exists to edit
Transaction transaction = startTransaction();
clazz.setDescrip("Test Description");
endTransaction(transaction);
CompareTestUtilities.openCompareEditor(clazz.getFile());
ModelContentMergeViewer viewer = ModelContentMergeViewer
.getInstance(null);
SynchronizedTreeViewer leftViewer = viewer.getLeftViewer();
ModelCompareContentProvider provider = new ModelCompareContentProvider();
Object[] children = provider.getChildren(clazz);
// NOTE(review): children[3] is assumed to be the description attribute
// of the class - confirm against ModelCompareContentProvider ordering
TreeItem item = SynchronizedTreeViewer.getMatchingItem(
((ITreeDifferencerProvider) leftViewer.getContentProvider())
.getComparableTreeObject(children[3]), leftViewer);
clazz = ModelClass_c.ModelClassInstance(viewer.getLeftCompareRoot());
ObjectElement objEle = (ObjectElement) ((ObjectElementComparable) item
.getData()).getRealElement();
// simulate the textual compare dialog clearing the description
TextualAttributeCompareElementType type = new TextualAttributeCompareElementType(
objEle, leftViewer, true, null, null);
type.setContent("".getBytes());
assertTrue(
"Textual compare dialog did not properly set the value for a class description.",
clazz.getDescrip().equals(""));
}
/*
 * Verifies that AssociationComparable distinguishes the one-side
 * participant from the other-side participant of the same (unformalized)
 * linked association.
 */
public void testAssociationComparable() {
    modelRoot = Ooaofooa.getInstance(Ooaofooa.createModelRootId(
            getProject(), "Classes", true));
    // locate association number 4 in the Classes model root
    Association_c assoc = Association_c.AssociationInstance(modelRoot,
            new ClassQueryInterface_c() {
                @Override
                public boolean evaluate(Object candidate) {
                    return ((Association_c) candidate).getNumb() == 4;
                }
            });
    assoc.Unformalize();
    // fetch both participants of the linked association
    LinkedAssociation_c[] linked = LinkedAssociation_c
            .getManyR_ASSOCsOnR206(assoc);
    ClassAsAssociatedOneSide_c one = ClassAsAssociatedOneSide_c
            .getOneR_AONEOnR209(linked);
    ClassAsAssociatedOtherSide_c other = ClassAsAssociatedOtherSide_c
            .getOneR_AOTHOnR210(linked);
    // comparables built from different sides must not compare equal
    AssociationComparable first = new AssociationComparable(one);
    AssociationComparable second = new AssociationComparable(other);
    assertFalse(
            "A class as associated one side and a class as associated other side were considered identical.",
            first.equals(second));
}
/*
 * Verifies that the tree differencer computes the correct location for
 * removed and reordered class members, and that "copy all" clears every
 * difference.  The expected locations are checked by validateDifference().
 */
public void testLocationDetection() throws CoreException {
modelRoot = Ooaofooa.getInstance(Ooaofooa.createModelRootId(
getProject(), "Classes", true));
ModelClass_c clazz = ModelClass_c.ModelClassInstance(modelRoot,
new ClassQueryInterface_c() {
@Override
public boolean evaluate(Object candidate) {
return ((ModelClass_c) candidate).getName().equals(
"Other");
}
});
// create three attributes and three operations named one/two/three
Transaction transaction = startTransaction();
clazz.Newattribute();
Attribute_c one = getLastCreatedAttribute(clazz);
one.setRoot_nam("one");
clazz.Newattribute();
Attribute_c two = getLastCreatedAttribute(clazz);
two.setRoot_nam("two");
clazz.Newattribute();
Attribute_c three = getLastCreatedAttribute(clazz);
three.setRoot_nam("three");
clazz.Newoperation();
Operation_c oneOp = getLastCreatedOperation(clazz);
oneOp.setName("one");
clazz.Newoperation();
Operation_c twoOp = getLastCreatedOperation(clazz);
twoOp.setName("two");
clazz.Newoperation();
Operation_c threeOp = getLastCreatedOperation(clazz);
threeOp.setName("three");
endTransaction(transaction);
// in a second transaction remove "two" (attribute and operation) and
// reorder "three" so that removals and a move must be located
transaction = startTransaction();
two.Dispose();
twoOp.Dispose();
threeOp.Moveup();
endTransaction(transaction);
// verify all of the difference locations
CompareTestUtilities.openCompareEditor(clazz.getFile(), 0);
ModelContentMergeViewer viewer = ModelContentMergeViewer
.getInstance(null);
TreeDifferencer differencer = viewer.getDifferencer();
List<TreeDifference> leftDifferences = differencer.getLeftDifferences();
assertTrue(
"Expected four differences and found " + leftDifferences.size(),
leftDifferences.size() == 4);
validateDifference(leftDifferences.get(0));
validateDifference(leftDifferences.get(1));
validateDifference(leftDifferences.get(2));
validateDifference(leftDifferences.get(3));
// copy everything right-to-left and drain pending UI events
viewer.setCopySelection(false);
viewer.copy(false);
while (PlatformUI.getWorkbench().getDisplay().readAndDispatch())
;
assertTrue("Not all differences were removed by the copy all button.",
viewer.getDifferencer().getLeftDifferences().size() == 0);
}
/*
 * Verifies merging of an integer-valued model attribute: changing an
 * association number must produce exactly one difference, copying must
 * clear it, and the left compare root must revert to the pre-change value.
 */
public void testIntegerValueMerge() throws Exception {
    Package_c pkg = Package_c.getOneEP_PKGOnR1401(m_sys,
            new ClassQueryInterface_c() {
                @Override
                public boolean evaluate(Object candidate) {
                    return ((Package_c) candidate).getName().equals(
                            "Classes");
                }
            });
    final Association_c assoc = Association_c
            .getOneR_RELOnR8001(PackageableElement_c
                    .getManyPE_PEsOnR8000(pkg));
    // change the association number so the compare editor sees a diff
    Transaction transaction = startTransaction();
    assoc.setNumb(22);
    endTransaction(transaction);
    CompareTestUtilities.openCompareEditor(pkg.getFile());
    ModelContentMergeViewer viewer = ModelContentMergeViewer
            .getInstance(null);
    TreeDifferencer differencer = viewer.getDifferencer();
    // BUGFIX: assertEquals(message, expected, actual) - the expected value
    // (1) and the actual size were previously passed in swapped order
    assertEquals("Incorrect number of differences found", 1, differencer
            .getLeftDifferences().size());
    // copy the difference and drain pending UI events
    viewer.setCopySelection(false);
    viewer.copy(false);
    while (PlatformUI.getWorkbench().getDisplay().readAndDispatch())
        ;
    assertTrue("Not all differences were removed by the copy all button",
            viewer.getDifferencer().getLeftDifferences().size() == 0);
    Association_c compareAssoc = Association_c.AssociationInstance(
            viewer.getLeftCompareRoot(), new ClassQueryInterface_c() {
                @Override
                public boolean evaluate(Object candidate) {
                    return ((Association_c) candidate).getRel_id().equals(
                            assoc.getRel_id());
                }
            });
    // the copy must have replaced the locally-set value 22
    assertTrue(
            "Association number was not updated by the copy difference button.",
            compareAssoc.getNumb() != 22);
}
/*
 * Verifies that merging a newly added state also creates the state/event
 * matrix entries (the "can't happen" cells) for the existing events.
 */
public void testCantHappenCreationOnNewState() throws Exception {
    modelRoot = Ooaofooa.getInstance(Ooaofooa.createModelRootId(
            getProject(), "Classes", true));
    ModelClass_c clazz = ModelClass_c.ModelClassInstance(modelRoot,
            new ClassQueryInterface_c() {
                @Override
                public boolean evaluate(Object candidate) {
                    return ((ModelClass_c) candidate).getName().equals(
                            "ModelClass");
                }
            });
    InstanceStateMachine_c ism = InstanceStateMachine_c
            .getOneSM_ISMOnR518(clazz);
    // create a new state, then dispose it again, so the compare editor
    // sees the addition on one side only
    Transaction transaction = startTransaction();
    ism.Newstate();
    StateMachineState_c[] states = StateMachineState_c
            .getManySM_STATEsOnR501(StateMachine_c.getManySM_SMsOnR517(ism));
    StateMachineState_c state = states[states.length - 1];
    endTransaction(transaction);
    transaction = startTransaction();
    state.Dispose();
    endTransaction(transaction);
    CompareTestUtilities.openCompareEditor(ism.getFile());
    ModelContentMergeViewer viewer = ModelContentMergeViewer
            .getInstance(null);
    TreeDifferencer differencer = viewer.getDifferencer();
    assertTrue("No differences created for state addition.", differencer
            .getLeftDifferences().size() != 0);
    ism = InstanceStateMachine_c.InstanceStateMachineInstance(viewer
            .getLeftCompareRoot());
    StateMachineEvent_c[] events = StateMachineEvent_c
            .getManySM_EVTsOnR502(StateMachine_c.getManySM_SMsOnR517(ism));
    int existingSemeCount = countMatrixEntriesForEvents(events);
    // merge the state addition into the left side
    viewer.setCopySelection(false);
    viewer.copy(false);
    int newSemeCount = countMatrixEntriesForEvents(events);
    assertTrue(
            "Event matrix entries were not added on merge of a new state.",
            newSemeCount > existingSemeCount);
}

/*
 * Sums the state/event matrix entries reachable from the given events.
 * (Extracted from the two identical counting loops previously duplicated
 * in testCantHappenCreationOnNewState.)
 */
private int countMatrixEntriesForEvents(StateMachineEvent_c[] events) {
    int count = 0;
    for (StateMachineEvent_c event : events) {
        StateEventMatrixEntry_c[] semes = StateEventMatrixEntry_c
                .getManySM_SEMEsOnR503(SemEvent_c
                        .getManySM_SEVTsOnR525(event));
        count += semes.length;
    }
    return count;
}
/*
 * Verifies that merging a newly added event also creates the state/event
 * matrix entries (the "can't happen" cells) for the existing states.
 */
public void testCantHappenCreationOnNewEvent() throws Exception {
    modelRoot = Ooaofooa.getInstance(Ooaofooa.createModelRootId(
            getProject(), "Classes", true));
    ModelClass_c clazz = ModelClass_c.ModelClassInstance(modelRoot,
            new ClassQueryInterface_c() {
                @Override
                public boolean evaluate(Object candidate) {
                    return ((ModelClass_c) candidate).getName().equals(
                            "ModelClass");
                }
            });
    InstanceStateMachine_c ism = InstanceStateMachine_c
            .getOneSM_ISMOnR518(clazz);
    // create a new event, then dispose it again, so the compare editor
    // sees the addition on one side only
    Transaction transaction = startTransaction();
    ism.Newevent();
    StateMachineEvent_c[] events = StateMachineEvent_c
            .getManySM_EVTsOnR502(StateMachine_c.getManySM_SMsOnR517(ism));
    StateMachineEvent_c event = events[events.length - 1];
    endTransaction(transaction);
    transaction = startTransaction();
    event.Dispose();
    endTransaction(transaction);
    CompareTestUtilities.openCompareEditor(ism.getFile());
    ModelContentMergeViewer viewer = ModelContentMergeViewer
            .getInstance(null);
    TreeDifferencer differencer = viewer.getDifferencer();
    assertTrue("No differences created for state addition.", differencer
            .getLeftDifferences().size() != 0);
    ism = InstanceStateMachine_c.InstanceStateMachineInstance(viewer
            .getLeftCompareRoot());
    StateMachineState_c[] states = StateMachineState_c
            .getManySM_STATEsOnR501(StateMachine_c.getManySM_SMsOnR517(ism));
    int existingSemeCount = countMatrixEntriesForStates(states);
    // merge the event addition into the left side
    viewer.setCopySelection(false);
    viewer.copy(false);
    int newSemeCount = countMatrixEntriesForStates(states);
    assertTrue(
            "Event matrix entries were not added on merge of a new event.",
            newSemeCount > existingSemeCount);
}

/*
 * Sums the state/event matrix entries attached to the given states.
 * (Extracted from the two identical counting loops previously duplicated
 * in testCantHappenCreationOnNewEvent.)
 */
private int countMatrixEntriesForStates(StateMachineState_c[] states) {
    int count = 0;
    for (StateMachineState_c state : states) {
        StateEventMatrixEntry_c[] semes = StateEventMatrixEntry_c
                .getManySM_SEMEsOnR503(state);
        count += semes.length;
    }
    return count;
}
/*
 * Verifies that unassigning an event from a transition and then
 * re-assigning one produces differences that the copy-all action can
 * fully resolve in the compare editor.
 */
public void testUnassignEventFromTransitionMerge() throws Exception {
modelRoot = Ooaofooa.getInstance(Ooaofooa.createModelRootId(
getProject(), "Classes", true));
ModelClass_c clazz = ModelClass_c.ModelClassInstance(modelRoot,
new ClassQueryInterface_c() {
@Override
public boolean evaluate(Object candidate) {
return ((ModelClass_c) candidate).getName().equals(
"ModelClass");
}
});
InstanceStateMachine_c ism = InstanceStateMachine_c
.getOneSM_ISMOnR518(clazz);
// pick a transition that currently has an event assigned (it owns a
// state/event matrix entry through its new-state transition)
Transition_c transition = Transition_c.getOneSM_TXNOnR505(
StateMachine_c.getOneSM_SMOnR517(ism),
new ClassQueryInterface_c() {
@Override
public boolean evaluate(Object candidate) {
Transition_c transition = (Transition_c) candidate;
StateEventMatrixEntry_c seme = StateEventMatrixEntry_c
.getOneSM_SEMEOnR504(NewStateTransition_c
.getOneSM_NSTXNOnR507(transition));
return seme != null;
}
});
// unassign the event, then assign one back in a second transaction
Transaction transaction = startTransaction();
transition.Removeevent();
endTransaction(transaction);
transaction = startTransaction();
transition.Addevent(
StateMachineEvent_c.getOneSM_EVTOnR502(
StateMachine_c.getOneSM_SMOnR517(ism)).getSmevt_id(),
ism.getSm_id());
endTransaction(transaction);
CompareTestUtilities.openCompareEditor(ism.getFile());
ModelContentMergeViewer viewer = ModelContentMergeViewer
.getInstance(null);
TreeDifferencer differencer = viewer.getDifferencer();
assertTrue("No differences created for event addition.", differencer
.getLeftDifferences().size() != 0);
// copy everything and drain pending UI events
viewer.setCopySelection(false);
viewer.copy(false);
while (PlatformUI.getWorkbench().getDisplay().readAndDispatch())
;
assertTrue(
"Differences were not removed on copy for unassigning an event from a transition",
viewer.getDifferencer().getLeftDifferences().size() == 0);
}
/*
 * Verifies removal of git conflict annotation markers (">>>") by the merge
 * editor: a saved file loses its markers, a file that was never viewed
 * keeps them, and closing without saving merges nothing.
 */
public void testGitConflictAnnotationRemoval() throws Exception {
    String projectName = "GitAnnotationRemovalTest";
    // import git repository from models repo
    GitUtil.loadRepository(test_repositories
            + "/" + projectName);
    // import test project
    GitUtil.loadProject(projectName, projectName);
    // merge the test branch
    GitUtil.mergeBranch("slave", projectName);
    // start the merge tool
    GitUtil.startMergeTool(projectName);
    // perform test
    GitUtil.switchToFile("ClassStateMachine.xtuml");
    CompareTestUtilities.copyAllNonConflictingChangesFromRightToLeft();
    CompareTestUtilities.flushMergeEditor(true);
    m_sys = getSystemModel(projectName);
    Package_c pkg = Package_c.getOneEP_PKGOnR1401(m_sys);
    InstanceStateMachine_c ism = InstanceStateMachine_c
            .InstanceStateMachineInstance(pkg.getModelRoot());
    ClassStateMachine_c csm = ClassStateMachine_c
            .ClassStateMachineInstance(pkg.getModelRoot());
    ModelClass_c clazz = ModelClass_c
            .ModelClassInstance(pkg.getModelRoot());
    String ismContents = TestUtil.getTextFileContents(ism.getFile()
            .getLocation().toFile());
    String csmContents = TestUtil.getTextFileContents(csm.getFile()
            .getLocation().toFile());
    String classContents = TestUtil.getTextFileContents(clazz.getFile()
            .getLocation().toFile());
    // the ISM file was never opened in the merge tool, so its markers stay
    assertTrue(
            "Did not find the git annotation markers in an unviewed file.",
            ismContents.contains(">>>"));
    assertFalse("Found git annotation markers in a viewed and saved file.",
            csmContents.contains(">>>"));
    assertFalse("Found git annotation markers in a viewed file.",
            classContents.contains(">>>"));
    GitUtil.startMergeTool(projectName);
    GitUtil.switchToFile("InstanceStateMachine.xtuml");
    CompareTestUtilities.copyAllNonConflictingChangesFromRightToLeft();
    CompareTestUtilities.closeMergeEditor(false);
    ism = InstanceStateMachine_c.InstanceStateMachineInstance(pkg
            .getModelRoot());
    ismContents = TestUtil.getTextFileContents(ism.getFile().getLocation()
            .toFile());
    // BUGFIX: corrected the failure message grammar ("an viewed file")
    assertFalse("Found git annotation markers in a viewed file.",
            ismContents.contains(">>>"));
    // closing without saving must not have merged the copied changes
    StateMachineState_c[] states = StateMachineState_c
            .getManySM_STATEsOnR501(StateMachine_c.getOneSM_SMOnR517(ism));
    assertTrue(
            "Changes were merged even when the merge editor was not saved.",
            states.length == 2);
    // delete test project if no failures/errors
    // and reset the repository
    TestUtil.deleteProject(getProjectHandle(projectName));
}
// TODO(review): empty placeholder - this test passes vacuously.  Either
// implement coverage for the git core.autocrlf option or remove it.
public void testAutocrlfOption() throws Exception {
}
/*
 * Validates the computed location of a single difference produced by
 * testLocationDetection().  Only removals (local element is null) of the
 * member named "two" carry a location expectation in that test's data set;
 * every other difference is accepted without checks.
 */
private void validateDifference(TreeDifference difference) {
    NonRootModelElement realElement = getRealElement((ComparableTreeObject) difference
            .getMatchingDifference().getElement());
    if (difference.getElement() == null
            && realElement.getName().equals("two")) {
        if (realElement instanceof Attribute_c) {
            // NOTE(review): 6 is the expected tree location of the removed
            // attribute in the test model - confirm against test data
            assertTrue(
                    "Location was not correctly determined for attribute removal.",
                    difference.getLocation() == 6);
        } else {
            // BUGFIX: corrected the typo "deterined" in the failure message
            assertTrue(
                    "Location was not correctly determined for operation removal",
                    difference.getLocation() == 8);
        }
    }
}
/*
 * Unwraps a comparable tree object to the underlying model element.
 * (Removed a redundant cast: the parameter is already declared as
 * ComparableTreeObject.)
 */
private NonRootModelElement getRealElement(ComparableTreeObject element) {
    return (NonRootModelElement) element.getRealElement();
}
/*
 * Returns the most recently created operation of the given class.  New
 * operations are appended, so the last array entry is the newest one.
 */
private Operation_c getLastCreatedOperation(ModelClass_c clazz) {
    final Operation_c[] operations = Operation_c.getManyO_TFRsOnR115(clazz);
    return operations[operations.length - 1];
}
/*
 * Returns the most recently created attribute of the given class.  New
 * attributes are appended, so the last array entry is the newest one.
 */
private Attribute_c getLastCreatedAttribute(ModelClass_c clazz) {
    final Attribute_c[] attributes = Attribute_c.getManyO_ATTRsOnR102(clazz);
    return attributes[attributes.length - 1];
}
}
| |
/*
* Copyright (c) 2014 Haixing Hu
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.github.haixing_hu.util.junit;
import java.lang.reflect.Array;
import java.util.Collection;
import java.util.Iterator;
import org.junit.internal.ArrayComparisonFailure;
import com.github.haixing_hu.lang.ArrayUtils;
/**
 * Provides assertion functions that the current version of JUnit does not
 * provide.
 *
 * @author Haixing Hu
 */
public class Assert {
/**
 * Asserts that two boolean arrays are element-wise equal. If they are not,
 * an {@link AssertionError} is thrown with the given message and the index
 * of the first mismatching element.
 *
 * @param message
 *     the identifying message for the {@link AssertionError}
 *     ({@code null} okay)
 * @param expecteds
 *     boolean array with expected values.
 * @param actuals
 *     boolean array with actual values
 * @throws ArrayComparisonFailure
 *     if the two arrays are not equal.
 */
public static void assertArrayEquals(final String message,
    final boolean[] expecteds, final boolean[] actuals)
    throws ArrayComparisonFailure {
  // identical references (including both null) are trivially equal
  if (expecteds == actuals) {
    return;
  }
  final String header = (message == null ? "" : message + ": ");
  assertBothNonNull(header, expecteds, actuals);
  assertArrayAreSameLength(header, expecteds.length, actuals.length);
  for (int index = 0; index < expecteds.length; ++index) {
    try {
      org.junit.Assert.assertEquals(expecteds[index], actuals[index]);
    } catch (final AssertionError failure) {
      // rethrow with the index of the mismatching element attached
      throw new CollectionComparisonFailure(header, failure, index);
    }
  }
}
/**
 * Asserts that two boolean arrays are equal. If they are not, an
 * {@link AssertionError} is thrown.
 *
 * @param expecteds
 * boolean array with expected values.
 * @param actuals
 * boolean array with actual values
 * @throws ArrayComparisonFailure
 * if the two arrays are not equal.
 */
public static void assertArrayEquals(final boolean[] expecteds,
final boolean[] actuals) {
// delegate to the message-taking overload with a null message
assertArrayEquals(null, expecteds, actuals);
}
/**
 * Asserts that two byte arrays are element-wise equal. If they are not,
 * an {@link AssertionError} is thrown with the given message and the index
 * of the first mismatching element.
 *
 * @param message
 *     the identifying message for the {@link AssertionError}
 *     ({@code null} okay)
 * @param expecteds
 *     byte array with expected values.
 * @param actuals
 *     byte array with actual values
 * @throws ArrayComparisonFailure
 *     if the two arrays are not equal.
 */
public static void assertArrayEquals(final String message,
    final byte[] expecteds, final byte[] actuals)
    throws ArrayComparisonFailure {
  // identical references (including both null) are trivially equal
  if (expecteds == actuals) {
    return;
  }
  final String header = (message == null ? "" : message + ": ");
  assertBothNonNull(header, expecteds, actuals);
  assertArrayAreSameLength(header, expecteds.length, actuals.length);
  for (int index = 0; index < expecteds.length; ++index) {
    try {
      org.junit.Assert.assertEquals(expecteds[index], actuals[index]);
    } catch (final AssertionError failure) {
      // rethrow with the index of the mismatching element attached
      throw new CollectionComparisonFailure(header, failure, index);
    }
  }
}
/**
 * Asserts that two byte arrays are equal. If they are not, an
 * {@link AssertionError} is thrown.
 *
 * @param expecteds
 * byte array with expected values.
 * @param actuals
 * byte array with actual values
 * @throws ArrayComparisonFailure
 * if the two arrays are not equal.
 */
public static void assertArrayEquals(final byte[] expecteds,
final byte[] actuals) {
// delegate to the message-taking overload with a null message
assertArrayEquals(null, expecteds, actuals);
}
/**
 * Asserts that two char arrays are element-wise equal. If they are not,
 * an {@link AssertionError} is thrown with the given message and the index
 * of the first mismatching element.
 *
 * @param message
 *     the identifying message for the {@link AssertionError}
 *     ({@code null} okay)
 * @param expecteds
 *     char array with expected values.
 * @param actuals
 *     char array with actual values
 * @throws ArrayComparisonFailure
 *     if the two arrays are not equal.
 */
public static void assertArrayEquals(final String message,
    final char[] expecteds, final char[] actuals)
    throws ArrayComparisonFailure {
  // identical references (including both null) are trivially equal
  if (expecteds == actuals) {
    return;
  }
  final String header = (message == null ? "" : message + ": ");
  assertBothNonNull(header, expecteds, actuals);
  assertArrayAreSameLength(header, expecteds.length, actuals.length);
  for (int index = 0; index < expecteds.length; ++index) {
    try {
      org.junit.Assert.assertEquals(expecteds[index], actuals[index]);
    } catch (final AssertionError failure) {
      // rethrow with the index of the mismatching element attached
      throw new CollectionComparisonFailure(header, failure, index);
    }
  }
}
/**
 * Asserts that two char arrays are equal. If they are not, an
 * {@link AssertionError} is thrown.
 *
 * @param expecteds
 * char array with expected values.
 * @param actuals
 * char array with actual values
 * @throws ArrayComparisonFailure
 * if the two arrays are not equal.
 */
public static void assertArrayEquals(final char[] expecteds,
final char[] actuals) {
// delegate to the message-taking overload with a null message
assertArrayEquals(null, expecteds, actuals);
}
/**
 * Asserts that two short arrays are element-wise equal. If they are not,
 * an {@link AssertionError} is thrown with the given message and the index
 * of the first mismatching element.
 *
 * @param message
 *     the identifying message for the {@link AssertionError}
 *     ({@code null} okay)
 * @param expecteds
 *     short array with expected values.
 * @param actuals
 *     short array with actual values
 * @throws ArrayComparisonFailure
 *     if the two arrays are not equal.
 */
public static void assertArrayEquals(final String message,
    final short[] expecteds, final short[] actuals)
    throws ArrayComparisonFailure {
  // identical references (including both null) are trivially equal
  if (expecteds == actuals) {
    return;
  }
  final String header = (message == null ? "" : message + ": ");
  assertBothNonNull(header, expecteds, actuals);
  assertArrayAreSameLength(header, expecteds.length, actuals.length);
  for (int index = 0; index < expecteds.length; ++index) {
    try {
      org.junit.Assert.assertEquals(expecteds[index], actuals[index]);
    } catch (final AssertionError failure) {
      // rethrow with the index of the mismatching element attached
      throw new CollectionComparisonFailure(header, failure, index);
    }
  }
}
/**
 * Asserts that two short arrays are equal. If they are not, an
 * {@link AssertionError} is thrown.
 *
 * @param expecteds
 * short array with expected values.
 * @param actuals
 * short array with actual values
 * @throws ArrayComparisonFailure
 * if the two arrays are not equal.
 */
public static void assertArrayEquals(final short[] expecteds,
final short[] actuals) {
// delegate to the message-taking overload with a null message
assertArrayEquals(null, expecteds, actuals);
}
/**
 * Asserts that two int arrays are element-wise equal. If they are not,
 * an {@link AssertionError} is thrown with the given message and the index
 * of the first mismatching element.
 *
 * @param message
 *     the identifying message for the {@link AssertionError}
 *     ({@code null} okay)
 * @param expecteds
 *     int array with expected values.
 * @param actuals
 *     int array with actual values
 * @throws ArrayComparisonFailure
 *     if the two arrays are not equal.
 */
public static void assertArrayEquals(final String message,
    final int[] expecteds, final int[] actuals) throws ArrayComparisonFailure {
  // identical references (including both null) are trivially equal
  if (expecteds == actuals) {
    return;
  }
  final String header = (message == null ? "" : message + ": ");
  assertBothNonNull(header, expecteds, actuals);
  assertArrayAreSameLength(header, expecteds.length, actuals.length);
  for (int index = 0; index < expecteds.length; ++index) {
    try {
      org.junit.Assert.assertEquals(expecteds[index], actuals[index]);
    } catch (final AssertionError failure) {
      // rethrow with the index of the mismatching element attached
      throw new CollectionComparisonFailure(header, failure, index);
    }
  }
}
/**
 * Asserts that two int arrays are equal. If they are not, an
 * {@link AssertionError} is thrown.
 *
 * @param expecteds
 * int array with expected values.
 * @param actuals
 * int array with actual values
 * @throws ArrayComparisonFailure
 * if the two arrays are not equal.
 */
public static void assertArrayEquals(final int[] expecteds,
final int[] actuals) {
// delegate to the message-taking overload with a null message
assertArrayEquals(null, expecteds, actuals);
}
/**
 * Asserts that two long arrays are equal. If they are not, an
 * {@link AssertionError} is thrown with the given message.
 *
 * @param message
 *            the identifying message for the {@link AssertionError} (
 *            {@code null} okay)
 * @param expecteds
 *            long array with expected values.
 * @param actuals
 *            long array with actual values
 * @throws ArrayComparisonFailure if the arrays differ in length or content
 */
public static void assertArrayEquals(final String message,
final long[] expecteds, final long[] actuals)
throws ArrayComparisonFailure {
// same reference (including both null) is trivially equal
if (expecteds == actuals) {
return;
}
final String header = (message == null ? "" : message + ": ");
assertBothNonNull(header, expecteds, actuals);
assertArrayAreSameLength(header, expecteds.length, actuals.length);
// compare element-wise, reporting the first differing index
for (int i = 0; i < expecteds.length; ++i) {
final long expected = expecteds[i];
final long actual = actuals[i];
try {
org.junit.Assert.assertEquals(expected, actual);
} catch (final AssertionError e) {
// NOTE(review): CollectionComparisonFailure thrown for an array
// comparison (internalAssertArrayEquals uses ArrayComparisonFailure);
// confirm this is intentional.
throw new CollectionComparisonFailure(header, e, i);
}
}
}
/**
 * Checks that the supplied {@code long} arrays contain equal elements in
 * the same order, raising an {@link AssertionError} when they do not.
 * Delegates to the message-taking overload with no message.
 *
 * @param expecteds the expected long values
 * @param actuals the long values produced by the code under test
 */
public static void assertArrayEquals(final long[] expecteds,
        final long[] actuals) {
    assertArrayEquals(null, expecteds, actuals);
}
/**
 * Asserts that two double arrays are equal. If they are not, an
 * {@link AssertionError} is thrown with the given message.
 *
 * @param message
 *            the identifying message for the {@link AssertionError} (
 *            {@code null} okay)
 * @param expecteds
 *            double array with expected values.
 * @param actuals
 *            double array with actual values
 * @param delta
 *            the maximum difference between {@code expecteds[i]} and
 *            {@code actuals[i]} for which the two values are still
 *            considered equal
 * @throws ArrayComparisonFailure if the arrays differ in length or content
 */
public static void assertArrayEquals(final String message,
final double[] expecteds, final double[] actuals, final double delta)
throws ArrayComparisonFailure {
// same reference (including both null) is trivially equal
if (expecteds == actuals) {
return;
}
final String header = (message == null ? "" : message + ": ");
assertBothNonNull(header, expecteds, actuals);
assertArrayAreSameLength(header, expecteds.length, actuals.length);
// compare element-wise within the tolerance, reporting the first differing index
for (int i = 0; i < expecteds.length; ++i) {
final double expected = expecteds[i];
final double actual = actuals[i];
try {
org.junit.Assert.assertEquals(expected, actual, delta);
} catch (final AssertionError e) {
// NOTE(review): CollectionComparisonFailure thrown for an array
// comparison — confirm this is intentional.
throw new CollectionComparisonFailure(header, e, i);
}
}
}
/**
 * Asserts that two double arrays are equal. If they are not, an
 * {@link AssertionError} is thrown.
 *
 * @param expecteds
 *            double array with expected values.
 * @param actuals
 *            double array with actual values
 * @param delta
 *            the maximum difference between {@code expecteds[i]} and
 *            {@code actuals[i]} for which the two values are still
 *            considered equal
 */
public static void assertArrayEquals(final double[] expecteds,
final double[] actuals, final double delta) {
assertArrayEquals(null, expecteds, actuals, delta);
}
/**
 * Asserts that two float arrays are equal. If they are not, an
 * {@link AssertionError} is thrown with the given message.
 *
 * @param message
 *            the identifying message for the {@link AssertionError} (
 *            {@code null} okay)
 * @param expecteds
 *            float array with expected values.
 * @param actuals
 *            float array with actual values
 * @param delta
 *            the maximum difference between {@code expecteds[i]} and
 *            {@code actuals[i]} for which the two values are still
 *            considered equal
 * @throws ArrayComparisonFailure if the arrays differ in length or content
 */
public static void assertArrayEquals(final String message,
final float[] expecteds, final float[] actuals, final float delta)
throws ArrayComparisonFailure {
// same reference (including both null) is trivially equal
if (expecteds == actuals) {
return;
}
final String header = (message == null ? "" : message + ": ");
assertBothNonNull(header, expecteds, actuals);
assertArrayAreSameLength(header, expecteds.length, actuals.length);
// compare element-wise within the tolerance, reporting the first differing index
for (int i = 0; i < expecteds.length; ++i) {
final float expected = expecteds[i];
final float actual = actuals[i];
try {
org.junit.Assert.assertEquals(expected, actual, delta);
} catch (final AssertionError e) {
// NOTE(review): CollectionComparisonFailure thrown for an array
// comparison — confirm this is intentional.
throw new CollectionComparisonFailure(header, e, i);
}
}
}
/**
 * Asserts that two float arrays are equal. If they are not, an
 * {@link AssertionError} is thrown.
 *
 * @param expecteds
 *            float array with expected values.
 * @param actuals
 *            float array with actual values
 * @param delta
 *            the maximum difference between {@code expecteds[i]} and
 *            {@code actuals[i]} for which the two values are still
 *            considered equal
 */
public static void assertArrayEquals(final float[] expecteds,
final float[] actuals, final float delta) {
assertArrayEquals(null, expecteds, actuals, delta);
}
/**
 * Checks that two object arrays contain equal elements in the same order,
 * raising an {@link AssertionError} carrying the supplied message when they
 * do not. Two {@code null} array references are considered equal. Nested
 * arrays are compared recursively.
 *
 * @param message text prefixed to the failure description ({@code null} allowed)
 * @param expecteds the array of expected values
 * @param actuals the array of values produced by the code under test
 */
public static <T> void assertArrayEquals(final String message,
        final T[] expecteds, final T[] actuals) throws ArrayComparisonFailure {
    internalAssertArrayEquals(message, expecteds, actuals);
}
/**
 * Asserts that two object arrays are equal. If they are not, an
 * {@link AssertionError} is thrown. If {@code expecteds} and
 * {@code actuals} are {@code null}, they are considered equal.
 *
 * @param expecteds
 *            object array with expected values
 * @param actuals
 *            object array with actual values
 */
public static <T> void assertArrayEquals(final T[] expecteds,
final T[] actuals) {
assertArrayEquals(null, expecteds, actuals);
}
/**
 * Reflection-based element-wise comparison of two arrays of any component
 * type (object or primitive). Nested arrays are compared recursively; on a
 * mismatch inside a nested array the failing index of each level is
 * recorded via {@link ArrayComparisonFailure#addDimension}.
 *
 * @param message identifying message for the failure ({@code null} okay)
 * @param expecteds array with expected values
 * @param actuals array with actual values
 * @throws ArrayComparisonFailure if the arrays differ in length or content
 */
private static void internalAssertArrayEquals(final String message,
final Object expecteds, final Object actuals)
throws ArrayComparisonFailure {
// same reference (including both null) is trivially equal
if (expecteds == actuals) {
return;
}
final String header = message == null ? "" : message + ": ";
assertBothNonNull(header, expecteds, actuals);
// java.lang.reflect.Array handles both Object[] and primitive arrays
final int expectedsLength = Array.getLength(expecteds);
final int actualsLength = Array.getLength(actuals);
assertArrayAreSameLength(header, expectedsLength, actualsLength);
for (int i = 0; i < expectedsLength; i++) {
final Object expected = Array.get(expecteds, i);
final Object actual = Array.get(actuals, i);
if (ArrayUtils.isArray(expected) && ArrayUtils.isArray(actual)) {
// both elements are arrays themselves: recurse and record this dimension
try {
internalAssertArrayEquals(message, expected, actual);
} catch (final ArrayComparisonFailure e) {
e.addDimension(i);
throw e;
}
} else {
try {
org.junit.Assert.assertEquals(expected, actual);
} catch (final AssertionError e) {
throw new ArrayComparisonFailure(header, e, i);
}
}
}
}
/**
 * Checks that two collections contain equal elements in iteration order,
 * raising an {@link AssertionError} when they do not. Delegates to the
 * message-taking overload with no message.
 *
 * @param expecteds the collection of expected values
 * @param actuals the collection of values produced by the code under test
 */
public static void assertCollectionEquals(final Collection<?> expecteds,
        final Collection<?> actuals) {
    assertCollectionEquals(null, expecteds, actuals);
}
/**
 * Asserts that two collections are equal: same size and pairwise-equal
 * elements in iteration order. Elements that are themselves arrays are
 * compared element-wise. If they are not equal, an {@link AssertionError}
 * is thrown with the given message.
 *
 * @param message
 *            the identifying message for the {@link AssertionError} (
 *            {@code null} okay)
 * @param expecteds
 *            collection with expected values
 * @param actuals
 *            collection with actual values
 */
public static void assertCollectionEquals(final String message,
final Collection<?> expecteds, final Collection<?> actuals) {
// same reference (including both null) is trivially equal
if (expecteds == actuals) {
return;
}
final String header= (message == null ? "" : message + ": ");
assertBothNonNull(header, expecteds, actuals);
assertCollectionAreSameSize(header, expecteds.size(), actuals.size());
// walk both collections in lock-step; sizes are already known to match
final Iterator<?> expectedIter = expecteds.iterator();
final Iterator<?> actualIter = actuals.iterator();
int index = 0;
while (expectedIter.hasNext()) {
final Object expected = expectedIter.next();
final Object actual = actualIter.next();
if (ArrayUtils.isArray(expected) && ArrayUtils.isArray(actual)) {
// array elements are compared recursively, failure reported at this index
try {
internalAssertArrayEquals(message, expected, actual);
} catch (final ArrayComparisonFailure e) {
throw new CollectionComparisonFailure(header, e, index);
}
} else {
try {
org.junit.Assert.assertEquals(expected, actual);
} catch (final AssertionError e) {
throw new CollectionComparisonFailure(header, e, index);
}
}
++index;
}
}
/**
 * Fails (via {@link org.junit.Assert#fail}) when either argument is
 * {@code null}, prefixing the failure text with the given header. The
 * expected side is reported first when both are {@code null}.
 */
private static void assertBothNonNull(final String header,
        final Object expecteds, final Object actuals) {
    if (expecteds == null || actuals == null) {
        final String which = (expecteds == null) ? "expected" : "actual";
        org.junit.Assert.fail(header + which + " was null");
    }
}
/**
 * Fails when the two collection sizes differ, reporting both sizes in the
 * failure message.
 */
private static void assertCollectionAreSameSize(final String header,
        final int expectedSize, final int actualSize) {
    if (expectedSize == actualSize) {
        return;
    }
    org.junit.Assert.fail(header
            + "collection sizes differed, expected.size() =" + expectedSize
            + " actual.size() =" + actualSize);
}
/**
 * Fails when the two array lengths differ, reporting both lengths in the
 * failure message.
 */
private static void assertArrayAreSameLength(final String header,
        final int expectedLength, final int actualLength) {
    if (expectedLength == actualLength) {
        return;
    }
    org.junit.Assert.fail(header
            + "array lengths differed, expected.length =" + expectedLength
            + " actual.length =" + actualLength);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.http;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.Serializable;
import java.net.URLDecoder;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.Map;
import javax.activation.DataHandler;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.InvalidPayloadException;
import org.apache.camel.Message;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.StreamCache;
import org.apache.camel.component.http.helper.CamelFileDataSource;
import org.apache.camel.component.http.helper.HttpHelper;
import org.apache.camel.converter.stream.CachedOutputStream;
import org.apache.camel.spi.HeaderFilterStrategy;
import org.apache.camel.util.GZIPHelper;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.MessageHelper;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Binding between {@link HttpMessage} and {@link HttpServletResponse}.
 * <p/>
 * Uses by default the {@link org.apache.camel.component.http.HttpHeaderFilterStrategy}
 *
 * @version
 */
public class DefaultHttpBinding implements HttpBinding {

    private static final Logger LOG = LoggerFactory.getLogger(DefaultHttpBinding.class);

    private boolean useReaderForPayload;
    private boolean transferException;
    private HeaderFilterStrategy headerFilterStrategy = new HttpHeaderFilterStrategy();

    public DefaultHttpBinding() {
    }

    @Deprecated
    public DefaultHttpBinding(HeaderFilterStrategy headerFilterStrategy) {
        this.headerFilterStrategy = headerFilterStrategy;
    }

    @Deprecated
    public DefaultHttpBinding(HttpEndpoint endpoint) {
        this.headerFilterStrategy = endpoint.getHeaderFilterStrategy();
        this.transferException = endpoint.isTransferException();
    }

    /**
     * Populates the Camel {@link HttpMessage} from the servlet request:
     * headers (filtered through the {@link HeaderFilterStrategy}), request
     * parameters, the well-known {@code Exchange.HTTP_*} headers, an optional
     * de-serialized Java object body, and any multipart attachments.
     */
    public void readRequest(HttpServletRequest request, HttpMessage message) {
        LOG.trace("readRequest {}", request);

        // lets force a parse of the body and headers
        message.getBody();

        // populate the headers from the request
        Map<String, Object> headers = message.getHeaders();

        // apply the headerFilterStrategy
        Enumeration<?> names = request.getHeaderNames();
        while (names.hasMoreElements()) {
            String name = (String) names.nextElement();
            String value = request.getHeader(name);
            // use http helper to extract parameter value as it may contain multiple values
            Object extracted = HttpHelper.extractHttpParameterValue(value);
            // map the content-type header to the Camel constant; equalsIgnoreCase is
            // locale-independent (toLowerCase() is locale-sensitive, e.g. Turkish 'I')
            if (name.equalsIgnoreCase("content-type")) {
                name = Exchange.CONTENT_TYPE;
            }
            if (headerFilterStrategy != null
                    && !headerFilterStrategy.applyFilterToExternalHeaders(name, extracted, message.getExchange())) {
                HttpHelper.appendHeader(headers, name, extracted);
            }
        }

        if (request.getCharacterEncoding() != null) {
            headers.put(Exchange.HTTP_CHARACTER_ENCODING, request.getCharacterEncoding());
            message.getExchange().setProperty(Exchange.CHARSET_NAME, request.getCharacterEncoding());
        }

        try {
            populateRequestParameters(request, message);
        } catch (Exception e) {
            throw new RuntimeCamelException("Cannot read request parameters due " + e.getMessage(), e);
        }

        Object body = message.getBody();
        // reset the stream cache if the body is the instance of StreamCache
        if (body instanceof StreamCache) {
            ((StreamCache) body).reset();
        }

        // store the method and query and other info in headers as String types
        headers.put(Exchange.HTTP_METHOD, request.getMethod());
        headers.put(Exchange.HTTP_QUERY, request.getQueryString());
        headers.put(Exchange.HTTP_URL, request.getRequestURL().toString());
        headers.put(Exchange.HTTP_URI, request.getRequestURI());
        headers.put(Exchange.HTTP_PATH, request.getPathInfo());
        headers.put(Exchange.CONTENT_TYPE, request.getContentType());

        if (LOG.isTraceEnabled()) {
            LOG.trace("HTTP method {}", request.getMethod());
            LOG.trace("HTTP query {}", request.getQueryString());
            LOG.trace("HTTP url {}", request.getRequestURL());
            LOG.trace("HTTP uri {}", request.getRequestURI());
            LOG.trace("HTTP path {}", request.getPathInfo());
            LOG.trace("HTTP content-type {}", request.getContentType());
        }

        // if content type is serialized java object, then de-serialize it to a Java object
        if (request.getContentType() != null && HttpConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT.equals(request.getContentType())) {
            try {
                InputStream is = message.getExchange().getContext().getTypeConverter().mandatoryConvertTo(InputStream.class, body);
                Object object = HttpHelper.deserializeJavaObjectFromStream(is, message.getExchange().getContext());
                if (object != null) {
                    message.setBody(object);
                }
            } catch (Exception e) {
                throw new RuntimeCamelException("Cannot deserialize body to Java object", e);
            }
        }

        populateAttachments(request, message);
    }

    /**
     * Copies HTTP request parameters (query string and, for POSTed
     * www-form-urlencoded content, the decoded form body) into the message
     * headers, subject to the header filter strategy.
     *
     * @throws Exception if the form body cannot be decoded
     */
    protected void populateRequestParameters(HttpServletRequest request, HttpMessage message) throws Exception {
        // we populate the http request parameters without checking the request method
        Map<String, Object> headers = message.getHeaders();
        Enumeration<?> names = request.getParameterNames();
        while (names.hasMoreElements()) {
            String name = (String) names.nextElement();
            // there may be multiple values for the same name
            String[] values = request.getParameterValues(name);
            LOG.trace("HTTP parameter {} = {}", name, values);

            if (values != null) {
                for (String value : values) {
                    if (headerFilterStrategy != null
                            && !headerFilterStrategy.applyFilterToExternalHeaders(name, value, message.getExchange())) {
                        HttpHelper.appendHeader(headers, name, value);
                    }
                }
            }
        }

        LOG.trace("HTTP method {} with Content-Type {}", request.getMethod(), request.getContentType());

        Boolean flag = message.getHeader(Exchange.SKIP_WWW_FORM_URLENCODED, Boolean.class);
        boolean skipWwwFormUrlEncoding = flag != null ? flag : false;
        if (request.getMethod().equals("POST") && request.getContentType() != null
                && request.getContentType().startsWith(HttpConstants.CONTENT_TYPE_WWW_FORM_URLENCODED)
                && !skipWwwFormUrlEncoding) {
            String charset = request.getCharacterEncoding();
            if (charset == null) {
                charset = "UTF-8";
            }
            // Push POST form params into the headers to retain compatibility with DefaultHttpBinding
            String body = message.getBody(String.class);
            if (ObjectHelper.isNotEmpty(body)) {
                for (String param : body.split("&")) {
                    String[] pair = param.split("=", 2);
                    if (pair.length == 2) {
                        String name = URLDecoder.decode(pair[0], charset);
                        String value = URLDecoder.decode(pair[1], charset);
                        if (headerFilterStrategy != null
                                && !headerFilterStrategy.applyFilterToExternalHeaders(name, value, message.getExchange())) {
                            HttpHelper.appendHeader(headers, name, value);
                        }
                    } else {
                        throw new IllegalArgumentException("Invalid parameter, expected to be a pair but was " + param);
                    }
                }
            }
        }
    }

    /**
     * Adds multipart file uploads (exposed as {@link File} request
     * attributes) to the message as {@link DataHandler} attachments.
     */
    protected void populateAttachments(HttpServletRequest request, HttpMessage message) {
        // check if there is multipart files, if so will put it into DataHandler
        Enumeration<?> names = request.getAttributeNames();
        while (names.hasMoreElements()) {
            String name = (String) names.nextElement();
            Object object = request.getAttribute(name);
            LOG.trace("HTTP attachment {} = {}", name, object);
            if (object instanceof File) {
                String fileName = request.getParameter(name);
                message.addAttachment(fileName, new DataHandler(new CamelFileDataSource((File) object, fileName)));
            }
        }
    }

    /**
     * Writes the exchange result to the servlet response, dispatching to the
     * exception, fault or normal response writer as appropriate.
     */
    public void writeResponse(Exchange exchange, HttpServletResponse response) throws IOException {
        Message target = exchange.hasOut() ? exchange.getOut() : exchange.getIn();
        if (exchange.isFailed()) {
            if (exchange.getException() != null) {
                doWriteExceptionResponse(exchange.getException(), response);
            } else {
                // it must be a fault, no need to check for the fault flag on the message
                doWriteFaultResponse(target, response, exchange);
            }
        } else {
            if (exchange.hasOut()) {
                // just copy the protocol relates header if we do not have them
                copyProtocolHeaders(exchange.getIn(), exchange.getOut());
            }
            doWriteResponse(target, response, exchange);
        }
    }

    private void copyProtocolHeaders(Message request, Message response) {
        if (request.getHeader(Exchange.CONTENT_ENCODING) != null) {
            String contentEncoding = request.getHeader(Exchange.CONTENT_ENCODING, String.class);
            response.setHeader(Exchange.CONTENT_ENCODING, contentEncoding);
        }
        if (checkChunked(response, response.getExchange())) {
            response.setHeader(Exchange.TRANSFER_ENCODING, "chunked");
        }
    }

    /**
     * Writes the exception either as a serialized Java object (when
     * transferException is enabled) or as a plain-text stacktrace, with a
     * 500 status code.
     */
    public void doWriteExceptionResponse(Throwable exception, HttpServletResponse response) throws IOException {
        // 500 for internal server error
        response.setStatus(500);

        if (isTransferException()) {
            // transfer the exception as a serialized java object
            HttpHelper.writeObjectToServletResponse(response, exception);
        } else {
            // write stacktrace as plain text
            response.setContentType("text/plain");
            PrintWriter pw = response.getWriter();
            exception.printStackTrace(pw);
            pw.flush();
        }
    }

    public void doWriteFaultResponse(Message message, HttpServletResponse response, Exchange exchange) throws IOException {
        message.setHeader(Exchange.HTTP_RESPONSE_CODE, 500);
        doWriteResponse(message, response, exchange);
    }

    /**
     * Writes status code, content type, filtered headers and the body of the
     * given message to the servlet response.
     */
    public void doWriteResponse(Message message, HttpServletResponse response, Exchange exchange) throws IOException {
        // set the status code in the response. Default is 200.
        if (message.getHeader(Exchange.HTTP_RESPONSE_CODE) != null) {
            int code = message.getHeader(Exchange.HTTP_RESPONSE_CODE, Integer.class);
            response.setStatus(code);
        }
        // set the content type in the response.
        String contentType = MessageHelper.getContentType(message);
        if (contentType != null) {
            response.setContentType(contentType);
        }

        // append headers
        // must use entrySet to ensure case of keys is preserved
        for (Map.Entry<String, Object> entry : message.getHeaders().entrySet()) {
            String key = entry.getKey();
            Object value = entry.getValue();
            // use an iterator as there can be multiple values. (must not use a delimiter)
            final Iterator<?> it = ObjectHelper.createIterator(value, null);
            while (it.hasNext()) {
                String headerValue = exchange.getContext().getTypeConverter().convertTo(String.class, it.next());
                if (headerValue != null && headerFilterStrategy != null
                        && !headerFilterStrategy.applyFilterToCamelHeaders(key, headerValue, exchange)) {
                    response.addHeader(key, headerValue);
                }
            }
        }

        // write the body.
        if (message.getBody() != null) {
            if (GZIPHelper.isGzip(message)) {
                doWriteGZIPResponse(message, response, exchange);
            } else {
                doWriteDirectResponse(message, response, exchange);
            }
        }
    }

    /**
     * Returns true when the content type contains "text" or "html".
     */
    protected boolean isText(String contentType) {
        if (contentType != null) {
            String temp = contentType.toLowerCase();
            if (temp.indexOf("text") >= 0 || temp.indexOf("html") >= 0) {
                return true;
            }
        }
        return false;
    }

    /**
     * Copies and flushes the input stream to the output stream, closing both
     * when done.
     *
     * @return the number of bytes copied
     */
    protected int copyStream(InputStream is, OutputStream os, int bufferSize) throws IOException {
        try {
            // copy stream, and must flush on each write as etc Jetty has better performance when
            // flushing after writing to its servlet output stream
            return IOHelper.copy(is, os, bufferSize, true);
        } finally {
            IOHelper.close(os, is);
        }
    }

    /**
     * Writes the message body directly: as a serialized Java object, a
     * streamed body (chunked or with a computed content length), or finally
     * as plain text.
     */
    protected void doWriteDirectResponse(Message message, HttpServletResponse response, Exchange exchange) throws IOException {
        // if content type is serialized Java object, then serialize and write it to the response
        String contentType = message.getHeader(Exchange.CONTENT_TYPE, String.class);
        if (contentType != null && HttpConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT.equals(contentType)) {
            try {
                Object object = message.getMandatoryBody(Serializable.class);
                HttpHelper.writeObjectToServletResponse(response, object);
                // object is written so return
                return;
            } catch (InvalidPayloadException e) {
                throw new IOException(e);
            }
        }

        // prefer streaming
        InputStream is = null;
        if (checkChunked(message, exchange)) {
            is = message.getBody(InputStream.class);
        } else {
            // try to use input stream first, so we can copy directly
            if (!isText(contentType)) {
                is = exchange.getContext().getTypeConverter().tryConvertTo(InputStream.class, message.getBody());
            }
        }

        if (is != null) {
            ServletOutputStream os = response.getOutputStream();
            if (!checkChunked(message, exchange)) {
                CachedOutputStream stream = new CachedOutputStream(exchange);
                try {
                    // copy directly from input stream to the cached output stream to get the content length
                    int len = copyStream(is, stream, response.getBufferSize());
                    // we need to setup the length if message is not chunked
                    response.setContentLength(len);
                    OutputStream current = stream.getCurrentStream();
                    if (current instanceof ByteArrayOutputStream) {
                        if (LOG.isDebugEnabled()) {
                            // fixed: the {} placeholder previously had no argument
                            LOG.debug("Streaming (direct) response in non-chunked mode with content-length {}", len);
                        }
                        ByteArrayOutputStream bos = (ByteArrayOutputStream) current;
                        bos.writeTo(os);
                    } else {
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("Streaming response in non-chunked mode with content-length {} and buffer size: {}", len, len);
                        }
                        copyStream(stream.getInputStream(), os, len);
                    }
                } finally {
                    IOHelper.close(is, os);
                }
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Streaming response in chunked mode with buffer size {}", response.getBufferSize());
                }
                copyStream(is, os, response.getBufferSize());
            }
        } else {
            // not convertable as a stream so fallback as a String
            String data = message.getBody(String.class);
            if (data != null) {
                // set content length and encoding before we write data
                String charset = IOHelper.getCharsetName(exchange, true);
                final int dataByteLength = data.getBytes(charset).length;
                response.setCharacterEncoding(charset);
                response.setContentLength(dataByteLength);
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Writing response in non-chunked mode as plain text with content-length {} and buffer size: {}", dataByteLength, response.getBufferSize());
                }
                try {
                    response.getWriter().print(data);
                } finally {
                    response.getWriter().flush();
                }
            }
        }
    }

    /**
     * Determines whether the response should be written in chunked mode: the
     * HTTP_CHUNKED header wins; otherwise the endpoint's chunked option is
     * used, defaulting to true.
     */
    protected boolean checkChunked(Message message, Exchange exchange) {
        boolean answer = true;
        if (message.getHeader(Exchange.HTTP_CHUNKED) == null) {
            // check the endpoint option
            Endpoint endpoint = exchange.getFromEndpoint();
            if (endpoint instanceof HttpEndpoint) {
                answer = ((HttpEndpoint) endpoint).isChunked();
            }
        } else {
            answer = message.getHeader(Exchange.HTTP_CHUNKED, boolean.class);
        }
        return answer;
    }

    /**
     * GZIP-compresses the mandatory byte[] body and writes it with an exact
     * content length (never chunked).
     */
    protected void doWriteGZIPResponse(Message message, HttpServletResponse response, Exchange exchange) throws IOException {
        byte[] bytes;
        try {
            bytes = message.getMandatoryBody(byte[].class);
        } catch (InvalidPayloadException e) {
            throw ObjectHelper.wrapRuntimeCamelException(e);
        }

        byte[] data = GZIPHelper.compressGZIP(bytes);
        ServletOutputStream os = response.getOutputStream();
        try {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Streaming response as GZIP in non-chunked mode with content-length {} and buffer size: {}", data.length, response.getBufferSize());
            }
            response.setContentLength(data.length);
            os.write(data);
            os.flush();
        } finally {
            IOHelper.close(os);
        }
    }

    /**
     * Extracts the request body: null for empty requests, a Reader when
     * useReaderForPayload is enabled, otherwise the stream read via
     * {@link HttpHelper#readRequestBodyFromServletRequest}.
     */
    public Object parseBody(HttpMessage httpMessage) throws IOException {
        // lets assume the body is a reader
        HttpServletRequest request = httpMessage.getRequest();
        // there is only a body if we have a content length, or its -1 to indicate unknown length
        int len = request.getContentLength();
        LOG.trace("HttpServletRequest content-length: {}", len);
        if (len == 0) {
            return null;
        }
        if (isUseReaderForPayload()) {
            // use reader to read the response body
            return request.getReader();
        } else {
            // if we do not know if there is any data at all, then make sure to check the stream first
            if (len < 0) {
                InputStream is = request.getInputStream();
                if (is.available() == 0) {
                    // no data so return null
                    return null;
                }
            }
            // read the response body from servlet request
            return HttpHelper.readRequestBodyFromServletRequest(request, httpMessage.getExchange());
        }
    }

    public boolean isUseReaderForPayload() {
        return useReaderForPayload;
    }

    public void setUseReaderForPayload(boolean useReaderForPayload) {
        this.useReaderForPayload = useReaderForPayload;
    }

    public boolean isTransferException() {
        return transferException;
    }

    public void setTransferException(boolean transferException) {
        this.transferException = transferException;
    }

    public HeaderFilterStrategy getHeaderFilterStrategy() {
        return headerFilterStrategy;
    }

    public void setHeaderFilterStrategy(HeaderFilterStrategy headerFilterStrategy) {
        this.headerFilterStrategy = headerFilterStrategy;
    }
}
| |
package ca.uhn.fhir.jpa.dao.dstu3;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsStringIgnoringCase;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.AllergyIntolerance.AllergyIntoleranceCategory;
import org.hl7.fhir.dstu3.model.AllergyIntolerance.AllergyIntoleranceClinicalStatus;
import org.hl7.fhir.dstu3.model.CodeSystem.CodeSystemContentMode;
import org.hl7.fhir.dstu3.model.CodeSystem.ConceptDefinitionComponent;
import org.hl7.fhir.dstu3.model.ValueSet.ConceptReferenceComponent;
import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent;
import org.hl7.fhir.dstu3.model.ValueSet.FilterOperator;
import org.hl7.fhir.dstu3.model.ValueSet.ValueSetComposeComponent;
import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionContainsComponent;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem.LookupCodeResult;
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
import ca.uhn.fhir.jpa.term.BaseHapiTerminologySvc;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.TestUtil;
import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.ValidationResult;
public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu3TerminologyTest.class);
public static final String URL_MY_CODE_SYSTEM = "http://example.com/my_code_system";
public static final String URL_MY_VALUE_SET = "http://example.com/my_value_set";
@Autowired
private IHapiTerminologySvc myHapiTerminologySvc;
@After
public void after() {
// restore the defer-indexing threshold to its default so later tests are unaffected
myDaoConfig.setDeferIndexingForCodesystemsOfSize(new DaoConfig().getDeferIndexingForCodesystemsOfSize());
BaseHapiTerminologySvc.setForceSaveDeferredAlwaysForUnitTest(false);
}
@Before
public void before() {
// cap value-set expansion size so runaway expansions fail fast in these tests
myDaoConfig.setMaximumExpansionSize(5000);
}
/**
 * Stores an external code system (content mode NOTPRESENT, codes held by
 * the terminology service rather than inline) with this ISA hierarchy:
 *   ParentA -> childAA -> {childAAA, childAAB}; ParentA -> childAB
 *   ParentB -> childBA -> childAAB  (childAAB deliberately has two parents)
 *   ParentC -> childCA
 *
 * @return the created (empty-content) CodeSystem resource
 */
private CodeSystem createExternalCs() {
CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl(URL_MY_CODE_SYSTEM);
codeSystem.setContent(CodeSystemContentMode.NOTPRESENT);
IIdType id = myCodeSystemDao.create(codeSystem, mySrd).getId().toUnqualified();
// link the terminology code-system version to the stored resource
ResourceTable table = myResourceTableDao.findOne(id.getIdPartAsLong());
TermCodeSystemVersion cs = new TermCodeSystemVersion();
cs.setResource(table);
cs.setResourceVersionId(table.getVersion());
TermConcept parentA = new TermConcept(cs, "ParentA").setDisplay("Parent A");
cs.getConcepts().add(parentA);
TermConcept childAA = new TermConcept(cs, "childAA").setDisplay("Child AA");
parentA.addChild(childAA, RelationshipTypeEnum.ISA);
TermConcept childAAA = new TermConcept(cs, "childAAA").setDisplay("Child AAA");
childAA.addChild(childAAA, RelationshipTypeEnum.ISA);
TermConcept childAAB = new TermConcept(cs, "childAAB").setDisplay("Child AAB");
childAA.addChild(childAAB, RelationshipTypeEnum.ISA);
TermConcept childAB = new TermConcept(cs, "childAB").setDisplay("Child AB");
parentA.addChild(childAB, RelationshipTypeEnum.ISA);
TermConcept parentB = new TermConcept(cs, "ParentB").setDisplay("Parent B");
cs.getConcepts().add(parentB);
TermConcept childBA = new TermConcept(cs, "childBA").setDisplay("Child BA");
// childAAB gets a second parent here (multi-parent concept on purpose)
childBA.addChild(childAAB, RelationshipTypeEnum.ISA);
parentB.addChild(childBA, RelationshipTypeEnum.ISA);
TermConcept parentC = new TermConcept(cs, "ParentC").setDisplay("Parent C");
cs.getConcepts().add(parentC);
TermConcept childCA = new TermConcept(cs, "childCA").setDisplay("Child CA");
parentC.addChild(childCA, RelationshipTypeEnum.ISA);
myTermSvc.storeNewCodeSystemVersion(table.getId(), URL_MY_CODE_SYSTEM, cs);
return codeSystem;
}
/** Stores the external code system and a local ValueSet that includes all of it. */
private void createExternalCsAndLocalVs() {
    createLocalVs(createExternalCs());
}
/**
 * Stores an external code system (content mode NOTPRESENT) with two root
 * leaf codes (hello, goodbye) and one hierarchy:
 *   dogs -> {labrador, beagle}
 *
 * @return the created (empty-content) CodeSystem resource
 */
private CodeSystem createExternalCsDogs() {
CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl(URL_MY_CODE_SYSTEM);
codeSystem.setContent(CodeSystemContentMode.NOTPRESENT);
IIdType id = myCodeSystemDao.create(codeSystem, mySrd).getId().toUnqualified();
// link the terminology code-system version to the stored resource
ResourceTable table = myResourceTableDao.findOne(id.getIdPartAsLong());
TermCodeSystemVersion cs = new TermCodeSystemVersion();
cs.setResource(table);
cs.setResourceVersionId(table.getVersion());
TermConcept hello = new TermConcept(cs, "hello").setDisplay("Hello");
cs.getConcepts().add(hello);
TermConcept goodbye = new TermConcept(cs, "goodbye").setDisplay("Goodbye");
cs.getConcepts().add(goodbye);
TermConcept dogs = new TermConcept(cs, "dogs").setDisplay("Dogs");
cs.getConcepts().add(dogs);
TermConcept labrador = new TermConcept(cs, "labrador").setDisplay("Labrador");
dogs.addChild(labrador, RelationshipTypeEnum.ISA);
TermConcept beagle = new TermConcept(cs, "beagle").setDisplay("Beagle");
dogs.addChild(beagle, RelationshipTypeEnum.ISA);
myTermSvc.storeNewCodeSystemVersion(table.getId(), URL_MY_CODE_SYSTEM, cs);
return codeSystem;
}
/**
 * Stores a local (content mode COMPLETE, codes inline) code system with the
 * hierarchy A -> {AA -> AAA, AB} and B -> {BA, BB}, plus a ValueSet that
 * includes the whole system.
 */
private void createLocalCsAndVs() {
//@formatter:off
CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl(URL_MY_CODE_SYSTEM);
codeSystem.setContent(CodeSystemContentMode.COMPLETE);
codeSystem
.addConcept().setCode("A").setDisplay("Code A")
.addConcept(new ConceptDefinitionComponent().setCode("AA").setDisplay("Code AA")
.addConcept(new ConceptDefinitionComponent().setCode("AAA").setDisplay("Code AAA"))
)
.addConcept(new ConceptDefinitionComponent().setCode("AB").setDisplay("Code AB"));
codeSystem
.addConcept().setCode("B").setDisplay("Code B")
.addConcept(new ConceptDefinitionComponent().setCode("BA").setDisplay("Code BA"))
.addConcept(new ConceptDefinitionComponent().setCode("BB").setDisplay("Code BB"));
//@formatter:on
myCodeSystemDao.create(codeSystem, mySrd);
createLocalVs(codeSystem);
}
/**
 * Creates a ValueSet at URL_MY_VALUE_SET whose compose includes every code from
 * the given CodeSystem's URL.
 */
private void createLocalVs(CodeSystem theCodeSystem) {
    ValueSet vs = new ValueSet();
    vs.setUrl(URL_MY_VALUE_SET);
    vs.getCompose().addInclude().setSystem(theCodeSystem.getUrl());
    myValueSetDao.create(vs, mySrd);
}
/**
 * Logs the given ValueSet as pretty-printed XML and validates it against the standard
 * FHIR schema and schematron, failing the test with the encoded OperationOutcome if
 * validation does not succeed.
 */
private void logAndValidateValueSet(ValueSet theResult) {
    IParser xmlParser = myFhirCtx.newXmlParser().setPrettyPrint(true);
    ourLog.info(xmlParser.encodeResourceToString(theResult));

    FhirValidator validator = myFhirCtx.newValidator();
    validator.setValidateAgainstStandardSchema(true);
    validator.setValidateAgainstStandardSchematron(true);

    ValidationResult outcome = validator.validateWithResult(theResult);
    if (!outcome.isSuccessful()) {
        String encodedOutcome = xmlParser.encodeResourceToString(outcome.toOperationOutcome());
        ourLog.info(encodedOutcome);
        fail(encodedOutcome);
    }
}
/**
 * Creating a second CodeSystem with the same canonical URI must be rejected
 * with an UnprocessableEntityException naming the existing resource.
 */
@Test
public void testCodeSystemCreateDuplicateFails() {
    // First creation succeeds
    CodeSystem firstCs = new CodeSystem();
    firstCs.setUrl(URL_MY_CODE_SYSTEM);
    firstCs.setContent(CodeSystemContentMode.COMPLETE);
    IIdType firstId = myCodeSystemDao.create(firstCs, mySrd).getId().toUnqualified();

    // A second CodeSystem with the identical URI must fail
    CodeSystem duplicateCs = new CodeSystem();
    duplicateCs.setUrl(URL_MY_CODE_SYSTEM);
    duplicateCs.setContent(CodeSystemContentMode.COMPLETE);
    try {
        myCodeSystemDao.create(duplicateCs, mySrd);
        fail();
    } catch (UnprocessableEntityException e) {
        assertEquals("Can not create multiple code systems with URI \"http://example.com/my_code_system\", already have one with resource ID: CodeSystem/" + firstId.getIdPart(), e.getMessage());
    }
}
/**
 * Verifies that inline (COMPLETE) CodeSystem concepts are indexed by the terminology
 * service: findCodesBelow("A") must return A plus its direct children AA and AB.
 */
@Test
public void testCodeSystemWithDefinedCodes() {
    //@formatter:off
    CodeSystem codeSystem = new CodeSystem();
    codeSystem.setUrl(URL_MY_CODE_SYSTEM);
    codeSystem.setContent(CodeSystemContentMode.COMPLETE);
    codeSystem
        .addConcept().setCode("A").setDisplay("Code A")
        .addConcept(new ConceptDefinitionComponent().setCode("AA").setDisplay("Code AA"))
        .addConcept(new ConceptDefinitionComponent().setCode("AB").setDisplay("Code AB"));
    // FIX: displays previously said "Code A"/"Code AA"/"Code AB" here (copy-paste
    // from the "A" tree); corrected to match the "B" codes. Assertions below only
    // inspect codes under "A", so test outcome is unchanged.
    codeSystem
        .addConcept().setCode("B").setDisplay("Code B")
        .addConcept(new ConceptDefinitionComponent().setCode("BA").setDisplay("Code BA"))
        .addConcept(new ConceptDefinitionComponent().setCode("BB").setDisplay("Code BB"));
    //@formatter:on
    IIdType id = myCodeSystemDao.create(codeSystem, mySrd).getId().toUnqualified();

    Set<TermConcept> codes = myTermSvc.findCodesBelow(id.getIdPartAsLong(), id.getVersionIdPartAsLong(), "A");
    assertThat(toCodes(codes), containsInAnyOrder("A", "AA", "AB"));
}
/**
 * An expansion filter with no property/op/value populated must be rejected
 * with an InvalidRequestException.
 */
@Test
public void testExpandInvalid() {
    createExternalCsAndLocalVs();

    ValueSet valueSet = new ValueSet();
    ConceptSetComponent includeComponent = valueSet.getCompose().addInclude();
    includeComponent.setSystem(URL_MY_CODE_SYSTEM);
    includeComponent.addFilter(); // deliberately left empty — this is the invalid filter
    includeComponent.addFilter().setOp(FilterOperator.ISA).setValue("childAA");

    try {
        myValueSetDao.expand(valueSet, null);
        fail();
    } catch (InvalidRequestException e) {
        assertEquals("Invalid filter, must have fields populated: property op value", e.getMessage());
    }
}
/**
 * Expansion of a ValueSet mixing explicit concept includes ("hello", "goodbye") with an
 * ISA concept filter ("dogs"), using a blank display filter: all four leaf codes are
 * expected ("dogs" itself is excluded by the ISA filter, which returns descendants only).
 */
@Test
public void testExpandWithCodesAndDisplayFilterBlank() {
CodeSystem codeSystem = createExternalCsDogs();
ValueSet valueSet = new ValueSet();
valueSet.setUrl(URL_MY_VALUE_SET);
// Include two explicitly-enumerated concepts
valueSet.getCompose()
.addInclude()
.setSystem(codeSystem.getUrl())
.addConcept(new ConceptReferenceComponent().setCode("hello"))
.addConcept(new ConceptReferenceComponent().setCode("goodbye"));
// Plus everything below "dogs" via an ISA filter
valueSet.getCompose()
.addInclude()
.setSystem(codeSystem.getUrl())
.addFilter()
.setProperty("concept")
.setOp(FilterOperator.ISA)
.setValue("dogs");
myValueSetDao.create(valueSet, mySrd);
// Blank display filter — should not restrict the expansion
ValueSet result = myValueSetDao.expand(valueSet, "");
logAndValidateValueSet(result);
assertEquals(4, result.getExpansion().getTotal());
ArrayList<String> codes = toCodesContains(result.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("hello", "goodbye", "labrador", "beagle"));
}
/**
 * Same ValueSet as the blank-filter test, but expanded with the display prefix "lab":
 * the text filter must narrow the result to the single code whose display matches
 * ("labrador"), even though that code comes from the ISA-filter include.
 */
@Test
public void testExpandWithCodesAndDisplayFilterPartialOnFilter() {
CodeSystem codeSystem = createExternalCsDogs();
ValueSet valueSet = new ValueSet();
valueSet.setUrl(URL_MY_VALUE_SET);
valueSet.getCompose()
.addInclude()
.setSystem(codeSystem.getUrl())
.addConcept(new ConceptReferenceComponent().setCode("hello"))
.addConcept(new ConceptReferenceComponent().setCode("goodbye"));
valueSet.getCompose()
.addInclude()
.setSystem(codeSystem.getUrl())
.addFilter()
.setProperty("concept")
.setOp(FilterOperator.ISA)
.setValue("dogs");
myValueSetDao.create(valueSet, mySrd);
// "lab" matches only the display "Labrador"
ValueSet result = myValueSetDao.expand(valueSet, "lab");
logAndValidateValueSet(result);
assertEquals(1, result.getExpansion().getTotal());
ArrayList<String> codes = toCodesContains(result.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("labrador"));
}
/**
 * Same ValueSet again, expanded with the display prefix "hel": the text filter must
 * narrow the result to "hello", this time matching a code from the explicit
 * concept-include rather than the ISA filter.
 */
@Test
public void testExpandWithCodesAndDisplayFilterPartialOnCodes() {
CodeSystem codeSystem = createExternalCsDogs();
ValueSet valueSet = new ValueSet();
valueSet.setUrl(URL_MY_VALUE_SET);
valueSet.getCompose()
.addInclude()
.setSystem(codeSystem.getUrl())
.addConcept(new ConceptReferenceComponent().setCode("hello"))
.addConcept(new ConceptReferenceComponent().setCode("goodbye"));
valueSet.getCompose()
.addInclude()
.setSystem(codeSystem.getUrl())
.addFilter()
.setProperty("concept")
.setOp(FilterOperator.ISA)
.setValue("dogs");
myValueSetDao.create(valueSet, mySrd);
// "hel" matches only the display "Hello"
ValueSet result = myValueSetDao.expand(valueSet, "hel");
logAndValidateValueSet(result);
assertEquals(1, result.getExpansion().getTotal());
ArrayList<String> codes = toCodesContains(result.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("hello"));
}
/**
 * A whole-system include expanded with the display prefix "lab" must return only
 * the code whose display matches ("labrador").
 */
@Test
public void testExpandWithCodesAndDisplayFilterPartialOnExpansion() {
    CodeSystem cs = createExternalCsDogs();

    ValueSet vs = new ValueSet();
    vs.setUrl(URL_MY_VALUE_SET);
    vs.getCompose().addInclude().setSystem(cs.getUrl());
    myValueSetDao.create(vs, mySrd);

    ValueSet expanded = myValueSetDao.expand(vs, "lab");
    logAndValidateValueSet(expanded);

    assertEquals(1, expanded.getExpansion().getTotal());
    ArrayList<String> actualCodes = toCodesContains(expanded.getExpansion().getContains());
    assertThat(actualCodes, containsInAnyOrder("labrador"));
}
/**
 * Display-property filters on an external code system: a plain "display" EQUAL filter
 * performs fuzzy/prefix matching ("parent a" matches ParentA; "pare" matches all three
 * parents), while "display:exact" requires an exact match and returns nothing for "pare".
 */
@Test
public void testExpandWithDisplayInExternalValueSetFuzzyMatching() {
createExternalCsAndLocalVs();
// Full-phrase match: "parent a" -> ParentA only
ValueSet vs = new ValueSet();
ConceptSetComponent include = vs.getCompose().addInclude();
include.setSystem(URL_MY_CODE_SYSTEM);
include.addFilter().setProperty("display").setOp(FilterOperator.EQUAL).setValue("parent a");
ValueSet result = myValueSetDao.expand(vs, null);
logAndValidateValueSet(result);
ArrayList<String> codes = toCodesContains(result.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("ParentA"));
// Prefix match: "pare" -> all Parent* concepts
vs = new ValueSet();
include = vs.getCompose().addInclude();
include.setSystem(URL_MY_CODE_SYSTEM);
include.addFilter().setProperty("display").setOp(FilterOperator.EQUAL).setValue("pare");
result = myValueSetDao.expand(vs, null);
logAndValidateValueSet(result);
codes = toCodesContains(result.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("ParentA", "ParentB", "ParentC"));
// ":exact" modifier disables fuzzy matching, so the prefix matches nothing
vs = new ValueSet();
include = vs.getCompose().addInclude();
include.setSystem(URL_MY_CODE_SYSTEM);
include.addFilter().setProperty("display:exact").setOp(FilterOperator.EQUAL).setValue("pare");
result = myValueSetDao.expand(vs, null);
logAndValidateValueSet(result);
codes = toCodesContains(result.getExpansion().getContains());
assertThat(codes, empty());
}
/**
 * An exclude section must remove the listed codes ("childAA", "childAAA") from a
 * whole-system expansion while leaving all other codes in place.
 */
@Test
public void testExpandWithExcludeInExternalValueSet() {
    createExternalCsAndLocalVs();

    ValueSet vs = new ValueSet();
    vs.getCompose().addInclude().setSystem(URL_MY_CODE_SYSTEM);

    ConceptSetComponent excludeComponent = vs.getCompose().addExclude();
    excludeComponent.setSystem(URL_MY_CODE_SYSTEM);
    excludeComponent.addConcept().setCode("childAA");
    excludeComponent.addConcept().setCode("childAAA");

    ValueSet expanded = myValueSetDao.expand(vs, null);
    logAndValidateValueSet(expanded);

    ArrayList<String> actualCodes = toCodesContains(expanded.getExpansion().getContains());
    assertThat(actualCodes, containsInAnyOrder("ParentA", "ParentB", "childAB", "childAAB", "ParentC", "childBA", "childCA"));
}
/**
 * An exclude section without a system must be rejected with an
 * InvalidRequestException during expansion.
 */
@Test
public void testExpandWithInvalidExclude() {
    createExternalCsAndLocalVs();

    ValueSet vs = new ValueSet();
    vs.getCompose().addInclude().setSystem(URL_MY_CODE_SYSTEM);

    // Deliberately omit setSystem(..) on the exclude — that is the invalid part
    ConceptSetComponent excludeComponent = vs.getCompose().addExclude();
    excludeComponent.addConcept().setCode("childAA");
    excludeComponent.addConcept().setCode("childAAA");

    try {
        myValueSetDao.expand(vs, null);
        fail();
    } catch (InvalidRequestException e) {
        assertEquals("ValueSet contains exclude criteria with no system defined", e.getMessage());
    }
}
/**
 * An ISA concept filter on an external code system must expand to the
 * descendants of the target code (children of "childAA"), not the code itself.
 */
@Test
public void testExpandWithIsAInExternalValueSet() {
    createExternalCsAndLocalVs();

    ValueSet vs = new ValueSet();
    ConceptSetComponent includeComponent = vs.getCompose().addInclude();
    includeComponent.setSystem(URL_MY_CODE_SYSTEM);
    includeComponent.addFilter().setOp(FilterOperator.ISA).setValue("childAA").setProperty("concept");

    ValueSet expanded = myValueSetDao.expand(vs, null);
    logAndValidateValueSet(expanded);

    ArrayList<String> actualCodes = toCodesContains(expanded.getExpansion().getContains());
    assertThat(actualCodes, containsInAnyOrder("childAAA", "childAAB"));
}
/**
 * Like testExpandWithIsAInExternalValueSet, but forces concept indexing through the
 * deferred path, reindexes all resources, and drains the deferred queue before
 * expanding — the ISA filter must still find the descendants of "childAA".
 */
@Test
public void testExpandWithIsAInExternalValueSetReindex() {
// Force all concept saves through the deferred queue for this test
BaseHapiTerminologySvc.setForceSaveDeferredAlwaysForUnitTest(true);
createExternalCsAndLocalVs();
mySystemDao.markAllResourcesForReindexing();
// Two passes so resources queued during the first pass get processed too
mySystemDao.performReindexingPass(100);
mySystemDao.performReindexingPass(100);
// Repeated calls drain the deferred-save queue in batches
myHapiTerminologySvc.saveDeferred();
myHapiTerminologySvc.saveDeferred();
myHapiTerminologySvc.saveDeferred();
ValueSet vs = new ValueSet();
ConceptSetComponent include = vs.getCompose().addInclude();
include.setSystem(URL_MY_CODE_SYSTEM);
include.addFilter().setOp(FilterOperator.ISA).setValue("childAA").setProperty("concept");
ValueSet result = myValueSetDao.expand(vs, null);
logAndValidateValueSet(result);
ArrayList<String> codes = toCodesContains(result.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("childAAA", "childAAB"));
}
/**
 * Including a code that does not exist in a known local code system must be
 * rejected with an InvalidRequestException naming the code and system.
 */
@Test
public void testExpandWithNoResultsInLocalValueSet1() {
    createLocalCsAndVs();

    ValueSet vs = new ValueSet();
    ConceptSetComponent includeComponent = vs.getCompose().addInclude();
    includeComponent.setSystem(URL_MY_CODE_SYSTEM);
    includeComponent.addConcept().setCode("ZZZZ"); // not defined in the code system

    try {
        myValueSetDao.expand(vs, null);
        fail();
    } catch (InvalidRequestException e) {
        assertEquals("Unable to find code 'ZZZZ' in code system http://example.com/my_code_system", e.getMessage());
    }
}
/**
 * Including a code from a completely unknown code system URL must be rejected
 * with an InvalidRequestException naming the unknown system.
 */
@Test
public void testExpandWithNoResultsInLocalValueSet2() {
    createLocalCsAndVs();

    ValueSet vs = new ValueSet();
    ConceptSetComponent includeComponent = vs.getCompose().addInclude();
    includeComponent.setSystem(URL_MY_CODE_SYSTEM + "AA"); // unknown system URL
    includeComponent.addConcept().setCode("A");

    try {
        myValueSetDao.expand(vs, null);
        fail();
    } catch (InvalidRequestException e) {
        assertEquals("unable to find code system http://example.com/my_code_systemAA", e.getMessage());
    }
}
/**
 * Expansion of an include that mixes an enumerated code ("A") with a display filter
 * ("AAA"): both the enumerated code and the filter match are expected, and the
 * matched entry must carry the correct code/display/system.
 */
@Test
public void testExpandWithSystemAndCodesAndFilterKeywordInLocalValueSet() {
createLocalCsAndVs();
ValueSet vs = new ValueSet();
ConceptSetComponent include = vs.getCompose().addInclude();
include.setSystem(URL_MY_CODE_SYSTEM);
include.addConcept().setCode("A");
include.addFilter().setProperty("display").setOp(FilterOperator.EQUAL).setValue("AAA");
ValueSet result = myValueSetDao.expand(vs, null);
// Technically it's not valid to expand a ValueSet with both includes and filters so the
// result fails validation because of the input.. we're being permissive by allowing both
// though, so we won't validate the input
result.setCompose(new ValueSetComposeComponent());
logAndValidateValueSet(result);
ArrayList<String> codes = toCodesContains(result.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("A", "AAA"));
// Spot-check the full contains entry for "AAA"
int idx = codes.indexOf("AAA");
assertEquals("AAA", result.getExpansion().getContains().get(idx).getCode());
assertEquals("Code AAA", result.getExpansion().getContains().get(idx).getDisplay());
assertEquals(URL_MY_CODE_SYSTEM, result.getExpansion().getContains().get(idx).getSystem());
//
}
/**
 * Expanding explicitly enumerated codes from an external code system must return
 * exactly those codes, with the terminology-service display populated.
 */
@Test
public void testExpandWithSystemAndCodesInExternalValueSet() {
    createExternalCsAndLocalVs();

    ValueSet vs = new ValueSet();
    ConceptSetComponent includeComponent = vs.getCompose().addInclude();
    includeComponent.setSystem(URL_MY_CODE_SYSTEM);
    includeComponent.addConcept().setCode("ParentA");
    includeComponent.addConcept().setCode("childAA");
    includeComponent.addConcept().setCode("childAAA");

    ValueSet expanded = myValueSetDao.expand(vs, null);
    logAndValidateValueSet(expanded);

    ArrayList<String> actualCodes = toCodesContains(expanded.getExpansion().getContains());
    assertThat(actualCodes, containsInAnyOrder("ParentA", "childAA", "childAAA"));

    // Spot-check the full contains entry for "childAA"
    int position = actualCodes.indexOf("childAA");
    assertEquals("childAA", expanded.getExpansion().getContains().get(position).getCode());
    assertEquals("Child AA", expanded.getExpansion().getContains().get(position).getDisplay());
    assertEquals(URL_MY_CODE_SYSTEM, expanded.getExpansion().getContains().get(position).getSystem());
}
/**
 * Expanding explicitly enumerated codes from a local (inline) code system must
 * return exactly those codes, with correct code/display/system on each entry.
 */
@Test
public void testExpandWithSystemAndCodesInLocalValueSet() {
    createLocalCsAndVs();

    ValueSet vs = new ValueSet();
    ConceptSetComponent includeComponent = vs.getCompose().addInclude();
    includeComponent.setSystem(URL_MY_CODE_SYSTEM);
    includeComponent.addConcept().setCode("A");
    includeComponent.addConcept().setCode("AA");
    includeComponent.addConcept().setCode("AAA");
    includeComponent.addConcept().setCode("AB");

    ValueSet expanded = myValueSetDao.expand(vs, null);
    logAndValidateValueSet(expanded);

    ArrayList<String> actualCodes = toCodesContains(expanded.getExpansion().getContains());
    assertThat(actualCodes, containsInAnyOrder("A", "AA", "AAA", "AB"));

    // Spot-check the full contains entry for "AAA"
    int position = actualCodes.indexOf("AAA");
    assertEquals("AAA", expanded.getExpansion().getContains().get(position).getCode());
    assertEquals("Code AAA", expanded.getExpansion().getContains().get(position).getDisplay());
    assertEquals(URL_MY_CODE_SYSTEM, expanded.getExpansion().getContains().get(position).getSystem());
}
/**
 * A whole-system include expanded with a blank display filter must return every
 * code in the "dogs" external code system (all five concepts).
 */
@Test
public void testExpandWithSystemAndDisplayFilterBlank() {
    CodeSystem cs = createExternalCsDogs();

    ValueSet vs = new ValueSet();
    vs.setUrl(URL_MY_VALUE_SET);
    vs.getCompose().addInclude().setSystem(cs.getUrl());

    ValueSet expanded = myValueSetDao.expand(vs, "");
    logAndValidateValueSet(expanded);

    assertEquals(5, expanded.getExpansion().getTotal());
    ArrayList<String> actualCodes = toCodesContains(expanded.getExpansion().getContains());
    assertThat(actualCodes, containsInAnyOrder("hello", "goodbye", "dogs", "labrador", "beagle"));
}
/**
 * A display EQUAL filter against an external code system must expand to the
 * single concept whose display matches ("Parent B" -> ParentB).
 */
@Test
public void testExpandWithSystemAndFilterInExternalValueSet() {
    createExternalCsAndLocalVs();

    ValueSet vs = new ValueSet();
    ConceptSetComponent includeComponent = vs.getCompose().addInclude();
    includeComponent.setSystem(URL_MY_CODE_SYSTEM);
    includeComponent.addFilter().setProperty("display").setOp(FilterOperator.EQUAL).setValue("Parent B");

    ValueSet expanded = myValueSetDao.expand(vs, null);
    logAndValidateValueSet(expanded);

    ArrayList<String> actualCodes = toCodesContains(expanded.getExpansion().getContains());
    assertThat(actualCodes, containsInAnyOrder("ParentB"));
}
/**
 * With the deferral threshold set to 1, concept indexing must not happen inline:
 * an ISA expansion initially returns nothing, then returns the full subtree once
 * deferred processing is enabled and the queue is drained.
 */
@Test
public void testIndexingIsDeferredForLargeCodeSystems() {
    myDaoConfig.setDeferIndexingForCodesystemsOfSize(1);

    myTermSvc.setProcessDeferred(false);
    createExternalCsAndLocalVs();

    // Before deferred processing runs, the hierarchy is not yet indexed
    ValueSet vs = new ValueSet();
    ConceptSetComponent includeComponent = vs.getCompose().addInclude();
    includeComponent.setSystem(URL_MY_CODE_SYSTEM);
    includeComponent.addFilter().setProperty("concept").setOp(FilterOperator.ISA).setValue("ParentA");
    ValueSet expanded = myValueSetDao.expand(vs, null);
    logAndValidateValueSet(expanded);
    assertEquals(0, expanded.getExpansion().getContains().size());

    // Drain the deferred queue (each call processes one batch)
    myTermSvc.setProcessDeferred(true);
    for (int i = 0; i < 7; i++) {
        myTermSvc.saveDeferred();
    }

    // Now the ISA filter finds all four descendants of ParentA
    vs = new ValueSet();
    includeComponent = vs.getCompose().addInclude();
    includeComponent.setSystem(URL_MY_CODE_SYSTEM);
    includeComponent.addFilter().setProperty("concept").setOp(FilterOperator.ISA).setValue("ParentA");
    expanded = myValueSetDao.expand(vs, null);
    logAndValidateValueSet(expanded);
    assertEquals(4, expanded.getExpansion().getContains().size());

    String encoded = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded);
    assertThat(encoded, containsStringIgnoringCase("<code value=\"childAAB\"/>"));
}
/**
 * $lookup against a terminology-service-backed SNOMED CT code system: after storing a
 * single concept ("ParentA"), looking it up by code+system must report it as found.
 */
@Test
public void testLookupSnomed() {
CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl("http://snomed.info/sct");
// NOTPRESENT: concepts are stored via the terminology service, not on the resource
codeSystem.setContent(CodeSystemContentMode.NOTPRESENT);
IIdType id = myCodeSystemDao.create(codeSystem, mySrd).getId().toUnqualified();
ResourceTable table = myResourceTableDao.findOne(id.getIdPartAsLong());
TermCodeSystemVersion cs = new TermCodeSystemVersion();
cs.setResource(table);
cs.setResourceVersionId(table.getVersion());
TermConcept parentA = new TermConcept(cs, "ParentA").setDisplay("Parent A");
cs.getConcepts().add(parentA);
myTermSvc.storeNewCodeSystemVersion(table.getId(), "http://snomed.info/sct", cs);
StringType code = new StringType("ParentA");
StringType system = new StringType("http://snomed.info/sct");
LookupCodeResult outcome = myCodeSystemDao.lookupCode(code, system, null, mySrd);
assertEquals(true, outcome.isFound());
}
/**
 * Can't currently abort costly
 */
// NOTE(review): disabled because expansion of built-in FHIR code systems cannot yet be
// aborted when it exceeds the configured maximum; the expected message is still a
// placeholder ("") and must be filled in when the feature lands.
@Test
@Ignore
public void testRefuseCostlyExpansionFhirCodesystem() {
createLocalCsAndVs();
myDaoConfig.setMaximumExpansionSize(1);
SearchParameterMap params = new SearchParameterMap();
params.add(AuditEvent.SP_TYPE, new TokenParam(null, "http://hl7.org/fhir/ValueSet/audit-event-type").setModifier(TokenParamModifier.IN));
try {
myAuditEventDao.search(params);
fail();
} catch (InvalidRequestException e) {
// Placeholder assertion — intended behavior not implemented yet (see @Ignore)
assertEquals("", e.getMessage());
}
}
/**
 * With the maximum expansion size set to 1, an :above token search that requires
 * expanding the local code system must be aborted with an InvalidRequestException.
 */
@Test
public void testRefuseCostlyExpansionLocalCodesystem() {
    createLocalCsAndVs();
    myDaoConfig.setMaximumExpansionSize(1);

    SearchParameterMap searchParams = new SearchParameterMap();
    searchParams.add(Observation.SP_CODE, new TokenParam(URL_MY_CODE_SYSTEM, "AAA").setModifier(TokenParamModifier.ABOVE));

    try {
        myObservationDao.search(searchParams);
        fail();
    } catch (InvalidRequestException e) {
        assertEquals("Expansion of ValueSet produced too many codes (maximum 1) - Operation aborted!", e.getMessage());
    }
}
/**
 * Smoke test: marking all resources for reindexing and running repeated reindexing
 * passes interleaved with deferred terminology saves must complete without error,
 * including when run a second time. No assertions — completion is the success signal.
 */
@Test
public void testReindex() {
createLocalCsAndVs();
ValueSet vs = new ValueSet();
ConceptSetComponent include = vs.getCompose().addInclude();
include.setSystem(URL_MY_CODE_SYSTEM);
include.addConcept().setCode("ZZZZ");
mySystemDao.markAllResourcesForReindexing();
// Interleave passes and deferred saves so work queued by one pass is picked up by the next
mySystemDao.performReindexingPass(null);
myTermSvc.saveDeferred();
mySystemDao.performReindexingPass(null);
myTermSvc.saveDeferred();
// Again
mySystemDao.markAllResourcesForReindexing();
mySystemDao.performReindexingPass(null);
myTermSvc.saveDeferred();
mySystemDao.performReindexingPass(null);
myTermSvc.saveDeferred();
}
/**
 * Token :above modifier against a local code system: searching above "AAA" must match
 * the observation coded "AA" (an ancestor), while searching above the root "A" matches
 * nothing (a root has no ancestors).
 */
@Test
public void testSearchCodeAboveLocalCodesystem() {
    createLocalCsAndVs();

    Observation obsAA = new Observation();
    obsAA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("AA");
    IIdType idAA = myObservationDao.create(obsAA, mySrd).getId().toUnqualifiedVersionless();

    // These two exist only to prove they are NOT matched below
    Observation obsBA = new Observation();
    obsBA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("BA");
    myObservationDao.create(obsBA, mySrd);

    Observation obsCA = new Observation();
    obsCA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("CA");
    myObservationDao.create(obsCA, mySrd);

    SearchParameterMap searchParams = new SearchParameterMap();
    searchParams.add(Observation.SP_CODE, new TokenParam(URL_MY_CODE_SYSTEM, "AAA").setModifier(TokenParamModifier.ABOVE));
    assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(searchParams)), containsInAnyOrder(idAA.getValue()));

    searchParams = new SearchParameterMap();
    searchParams.add(Observation.SP_CODE, new TokenParam(URL_MY_CODE_SYSTEM, "A").setModifier(TokenParamModifier.ABOVE));
    assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(searchParams)), empty());
}
/**
 * :below, :above and :in searches against a code system / value set that was never
 * created must simply return no results (not fail).
 *
 * FIX: previously all three criteria were added to the SAME SearchParameterMap, so the
 * second and third searches tested an accumulated AND of criteria rather than each
 * modifier on its own. Each modifier now gets a fresh map, matching the pattern used
 * by the sibling tests; every search still asserts an empty result.
 */
@Test
public void testSearchCodeBelowAndAboveUnknownCodeSystem() {
    SearchParameterMap params = new SearchParameterMap();
    params.add(Observation.SP_CODE, new TokenParam(URL_MY_CODE_SYSTEM, "childAA").setModifier(TokenParamModifier.BELOW));
    assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), empty());

    params = new SearchParameterMap();
    params.add(Observation.SP_CODE, new TokenParam(URL_MY_CODE_SYSTEM, "childAA").setModifier(TokenParamModifier.ABOVE));
    assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), empty());

    params = new SearchParameterMap();
    params.add(Observation.SP_CODE, new TokenParam(null, URL_MY_VALUE_SET).setModifier(TokenParamModifier.IN));
    assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), empty());
}
/**
 * Token :below modifier against the built-in allergy-clinical-status code system:
 * exact matches and :below matches on leaf codes each return only the corresponding
 * resource; unknown codes and unknown systems return nothing.
 */
@Test
public void testSearchCodeBelowBuiltInCodesystem() {
AllergyIntolerance ai1 = new AllergyIntolerance();
ai1.setClinicalStatus(AllergyIntoleranceClinicalStatus.ACTIVE);
String id1 = myAllergyIntoleranceDao.create(ai1, mySrd).getId().toUnqualifiedVersionless().getValue();
AllergyIntolerance ai2 = new AllergyIntolerance();
ai2.setClinicalStatus(AllergyIntoleranceClinicalStatus.RESOLVED);
String id2 = myAllergyIntoleranceDao.create(ai2, mySrd).getId().toUnqualifiedVersionless().getValue();
// id3 is created but never expected in any result — it proves INACTIVE is not matched
AllergyIntolerance ai3 = new AllergyIntolerance();
ai3.setClinicalStatus(AllergyIntoleranceClinicalStatus.INACTIVE);
String id3 = myAllergyIntoleranceDao.create(ai3, mySrd).getId().toUnqualifiedVersionless().getValue();
SearchParameterMap params;
// Exact token match on ACTIVE
params = new SearchParameterMap();
params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam("http://hl7.org/fhir/allergy-clinical-status", AllergyIntoleranceClinicalStatus.ACTIVE.toCode()));
assertThat(toUnqualifiedVersionlessIdValues(myAllergyIntoleranceDao.search(params)), containsInAnyOrder(id1));
// :below on a leaf code behaves like an exact match (no descendants)
params = new SearchParameterMap();
params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam("http://hl7.org/fhir/allergy-clinical-status", AllergyIntoleranceClinicalStatus.ACTIVE.toCode()).setModifier(TokenParamModifier.BELOW));
assertThat(toUnqualifiedVersionlessIdValues(myAllergyIntoleranceDao.search(params)), containsInAnyOrder(id1));
params = new SearchParameterMap();
params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam("http://hl7.org/fhir/allergy-clinical-status", AllergyIntoleranceClinicalStatus.RESOLVED.toCode()).setModifier(TokenParamModifier.BELOW));
assertThat(toUnqualifiedVersionlessIdValues(myAllergyIntoleranceDao.search(params)), containsInAnyOrder(id2));
params = new SearchParameterMap();
params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam("http://hl7.org/fhir/allergy-clinical-status", AllergyIntoleranceClinicalStatus.RESOLVED.toCode()));
assertThat(toUnqualifiedVersionlessIdValues(myAllergyIntoleranceDao.search(params)), containsInAnyOrder(id2));
// Unknown code
params = new SearchParameterMap();
params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam("http://hl7.org/fhir/allergy-clinical-status", "fooooo"));
assertThat(toUnqualifiedVersionlessIdValues(myAllergyIntoleranceDao.search(params)), empty());
// Unknown system
params = new SearchParameterMap();
params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam("http://hl7.org/fhir/allergy-clinical-status222222", "fooooo"));
assertThat(toUnqualifiedVersionlessIdValues(myAllergyIntoleranceDao.search(params)), empty());
}
/**
 * Like testSearchCodeBelowBuiltInCodesystem, but with system-less (unqualified) token
 * params: the built-in code systems must still be resolved, including a :below search
 * on the category hierarchy (MEDICATION matches both MEDICATION and BIOLOGIC holders).
 */
@Test
public void testSearchCodeBelowBuiltInCodesystemUnqualified() {
AllergyIntolerance ai1 = new AllergyIntolerance();
ai1.setClinicalStatus(AllergyIntoleranceClinicalStatus.ACTIVE);
ai1.addCategoryElement().setValue(AllergyIntoleranceCategory.MEDICATION);
String id1 = myAllergyIntoleranceDao.create(ai1, mySrd).getId().toUnqualifiedVersionless().getValue();
AllergyIntolerance ai2 = new AllergyIntolerance();
ai2.setClinicalStatus(AllergyIntoleranceClinicalStatus.RESOLVED);
ai2.addCategoryElement().setValue(AllergyIntoleranceCategory.BIOLOGIC);
String id2 = myAllergyIntoleranceDao.create(ai2, mySrd).getId().toUnqualifiedVersionless().getValue();
// id3 is created but never expected in any result — it proves INACTIVE/FOOD are not matched
AllergyIntolerance ai3 = new AllergyIntolerance();
ai3.setClinicalStatus(AllergyIntoleranceClinicalStatus.INACTIVE);
ai3.addCategoryElement().setValue(AllergyIntoleranceCategory.FOOD);
String id3 = myAllergyIntoleranceDao.create(ai3, mySrd).getId().toUnqualifiedVersionless().getValue();
SearchParameterMap params;
// Unqualified exact match on ACTIVE
params = new SearchParameterMap();
params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam(null, AllergyIntoleranceClinicalStatus.ACTIVE.toCode()));
assertThat(toUnqualifiedVersionlessIdValues(myAllergyIntoleranceDao.search(params)), containsInAnyOrder(id1));
// Unqualified :below on a leaf code behaves like an exact match
params = new SearchParameterMap();
params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam(null, AllergyIntoleranceClinicalStatus.ACTIVE.toCode()).setModifier(TokenParamModifier.BELOW));
assertThat(toUnqualifiedVersionlessIdValues(myAllergyIntoleranceDao.search(params)), containsInAnyOrder(id1));
// :below on MEDICATION matches both the MEDICATION and BIOLOGIC resources
params = new SearchParameterMap();
params.add(AllergyIntolerance.SP_CATEGORY, new TokenParam(null, AllergyIntoleranceCategory.MEDICATION.toCode()).setModifier(TokenParamModifier.BELOW));
assertThat(toUnqualifiedVersionlessIdValues(myAllergyIntoleranceDao.search(params)), containsInAnyOrder(id1, id2));
params = new SearchParameterMap();
params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam(null, AllergyIntoleranceClinicalStatus.RESOLVED.toCode()).setModifier(TokenParamModifier.BELOW));
assertThat(toUnqualifiedVersionlessIdValues(myAllergyIntoleranceDao.search(params)), containsInAnyOrder(id2));
params = new SearchParameterMap();
params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam(null, AllergyIntoleranceClinicalStatus.RESOLVED.toCode()));
assertThat(toUnqualifiedVersionlessIdValues(myAllergyIntoleranceDao.search(params)), containsInAnyOrder(id2));
// Unknown code matches nothing
params = new SearchParameterMap();
params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam(null, "FOO"));
assertThat(toUnqualifiedVersionlessIdValues(myAllergyIntoleranceDao.search(params)), empty());
}
/**
 * Token :below modifier against a local code system: searching below the root "A"
 * must match the observation coded "AA" (a descendant), while searching below the
 * leaf "AAA" matches nothing.
 */
@Test
public void testSearchCodeBelowLocalCodesystem() {
    createLocalCsAndVs();

    Observation obsAA = new Observation();
    obsAA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("AA");
    IIdType idAA = myObservationDao.create(obsAA, mySrd).getId().toUnqualifiedVersionless();

    // These two exist only to prove they are NOT matched below
    Observation obsBA = new Observation();
    obsBA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("BA");
    myObservationDao.create(obsBA, mySrd);

    Observation obsCA = new Observation();
    obsCA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("CA");
    myObservationDao.create(obsCA, mySrd);

    SearchParameterMap searchParams = new SearchParameterMap();
    searchParams.add(Observation.SP_CODE, new TokenParam(URL_MY_CODE_SYSTEM, "A").setModifier(TokenParamModifier.BELOW));
    assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(searchParams)), containsInAnyOrder(idAA.getValue()));

    searchParams = new SearchParameterMap();
    searchParams.add(Observation.SP_CODE, new TokenParam(URL_MY_CODE_SYSTEM, "AAA").setModifier(TokenParamModifier.BELOW));
    assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(searchParams)), empty());
}
/**
 * Token :in modifier with built-in ValueSets: all three clinical statuses are members
 * of allergy-clinical-status, so all three resources match; an unrelated ValueSet
 * matches nothing; an unknown ValueSet URL must throw InvalidRequestException.
 */
@Test
public void testSearchCodeInBuiltInValueSet() {
    AllergyIntolerance ai1 = new AllergyIntolerance();
    ai1.setClinicalStatus(AllergyIntoleranceClinicalStatus.ACTIVE);
    String id1 = myAllergyIntoleranceDao.create(ai1, mySrd).getId().toUnqualifiedVersionless().getValue();

    AllergyIntolerance ai2 = new AllergyIntolerance();
    ai2.setClinicalStatus(AllergyIntoleranceClinicalStatus.RESOLVED);
    String id2 = myAllergyIntoleranceDao.create(ai2, mySrd).getId().toUnqualifiedVersionless().getValue();

    AllergyIntolerance ai3 = new AllergyIntolerance();
    ai3.setClinicalStatus(AllergyIntoleranceClinicalStatus.INACTIVE);
    String id3 = myAllergyIntoleranceDao.create(ai3, mySrd).getId().toUnqualifiedVersionless().getValue();

    SearchParameterMap params;

    params = new SearchParameterMap();
    params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam(null, "http://hl7.org/fhir/ValueSet/allergy-clinical-status").setModifier(TokenParamModifier.IN));
    assertThat(toUnqualifiedVersionlessIdValues(myAllergyIntoleranceDao.search(params)), containsInAnyOrder(id1, id2, id3));

    // No codes in this one
    params = new SearchParameterMap();
    params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam(null, "http://hl7.org/fhir/ValueSet/allergy-intolerance-criticality").setModifier(TokenParamModifier.IN));
    assertThat(toUnqualifiedVersionlessIdValues(myAllergyIntoleranceDao.search(params)), empty());

    // Invalid VS
    params = new SearchParameterMap();
    params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam(null, "http://hl7.org/fhir/ValueSet/FOO").setModifier(TokenParamModifier.IN));
    try {
        myAllergyIntoleranceDao.search(params);
        // FIX: fail() was missing here — without it the test passed silently when no
        // exception was thrown, so the invalid-ValueSet path was never actually verified
        fail();
    } catch (InvalidRequestException e) {
        assertEquals("Unable to find imported value set http://hl7.org/fhir/ValueSet/FOO", e.getMessage());
    }
}
/**
 * An :in search against an empty ValueSet must return nothing, both on its own
 * and when combined with an additional status criterion.
 */
@Test
public void testSearchCodeInEmptyValueSet() {
    ValueSet emptyVs = new ValueSet();
    emptyVs.setUrl(URL_MY_VALUE_SET);
    myValueSetDao.create(emptyVs, mySrd);

    SearchParameterMap searchParams;

    ourLog.info("testSearchCodeInEmptyValueSet without status");
    searchParams = new SearchParameterMap();
    searchParams.add(Observation.SP_CODE, new TokenParam(null, URL_MY_VALUE_SET).setModifier(TokenParamModifier.IN));
    assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(searchParams)), empty());

    ourLog.info("testSearchCodeInEmptyValueSet with status");
    searchParams = new SearchParameterMap();
    searchParams.add(Observation.SP_CODE, new TokenParam(null, URL_MY_VALUE_SET).setModifier(TokenParamModifier.IN));
    searchParams.add(Observation.SP_STATUS, new TokenParam(null, "final"));
    assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(searchParams)), empty());

    ourLog.info("testSearchCodeInEmptyValueSet done");
}
/**
 * :below, :above and :in searches against an external (terminology-service backed)
 * code system: :below finds descendants of childAA, :above finds its ancestor
 * ParentA, and :in against the local ValueSet finds all members of the system.
 */
@Test
public void testSearchCodeInExternalCodesystem() {
createExternalCsAndLocalVs();
Observation obsPA = new Observation();
obsPA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("ParentA");
IIdType idPA = myObservationDao.create(obsPA, mySrd).getId().toUnqualifiedVersionless();
Observation obsAAA = new Observation();
obsAAA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("childAAA");
IIdType idAAA = myObservationDao.create(obsAAA, mySrd).getId().toUnqualifiedVersionless();
Observation obsAAB = new Observation();
obsAAB.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("childAAB");
IIdType idAAB = myObservationDao.create(obsAAB, mySrd).getId().toUnqualifiedVersionless();
// "CA" is not part of the code system — it must never be matched
Observation obsCA = new Observation();
obsCA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("CA");
IIdType idCA = myObservationDao.create(obsCA, mySrd).getId().toUnqualifiedVersionless();
// :below childAA -> its two children
SearchParameterMap params = new SearchParameterMap();
params.add(Observation.SP_CODE, new TokenParam(URL_MY_CODE_SYSTEM, "childAA").setModifier(TokenParamModifier.BELOW));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), containsInAnyOrder(idAAA.getValue(), idAAB.getValue()));
// :above childAA -> its ancestor ParentA
params = new SearchParameterMap();
params.add(Observation.SP_CODE, new TokenParam(URL_MY_CODE_SYSTEM, "childAA").setModifier(TokenParamModifier.ABOVE));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), containsInAnyOrder(idPA.getValue()));
// :in the local ValueSet -> every known member of the system (not "CA")
params = new SearchParameterMap();
params.add(Observation.SP_CODE, new TokenParam(null, URL_MY_VALUE_SET).setModifier(TokenParamModifier.IN));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), containsInAnyOrder(idPA.getValue(), idAAA.getValue(), idAAB.getValue()));
}
/**
 * :in searches against built-in FHIR ValueSets: audit-event-type includes both the
 * DICOM and the FHIR rest codes (so both "in" resources match, and the example.com
 * one does not), while the unrelated v3-PurposeOfUse ValueSet matches nothing.
 */
@Test
public void testSearchCodeInFhirCodesystem() {
createLocalCsAndVs();
AuditEvent aeIn1 = new AuditEvent();
aeIn1.getType().setSystem("http://nema.org/dicom/dicm").setCode("110102");
IIdType idIn1 = myAuditEventDao.create(aeIn1, mySrd).getId().toUnqualifiedVersionless();
AuditEvent aeIn2 = new AuditEvent();
aeIn2.getType().setSystem("http://hl7.org/fhir/audit-event-type").setCode("rest");
IIdType idIn2 = myAuditEventDao.create(aeIn2, mySrd).getId().toUnqualifiedVersionless();
// Not a member of audit-event-type — must never be matched
AuditEvent aeOut1 = new AuditEvent();
aeOut1.getType().setSystem("http://example.com").setCode("foo");
IIdType idOut1 = myAuditEventDao.create(aeOut1, mySrd).getId().toUnqualifiedVersionless();
SearchParameterMap params = new SearchParameterMap();
params.add(AuditEvent.SP_TYPE, new TokenParam(null, "http://hl7.org/fhir/ValueSet/audit-event-type").setModifier(TokenParamModifier.IN));
assertThat(toUnqualifiedVersionlessIdValues(myAuditEventDao.search(params)), containsInAnyOrder(idIn1.getValue(), idIn2.getValue()));
// Unrelated built-in ValueSet matches nothing
params = new SearchParameterMap();
params.add(AuditEvent.SP_TYPE, new TokenParam(null, "http://hl7.org/fhir/ValueSet/v3-PurposeOfUse").setModifier(TokenParamModifier.IN));
assertThat(toUnqualifiedVersionlessIdValues(myAuditEventDao.search(params)), empty());
}
@Test
public void testSearchCodeInLocalCodesystem() {
	createLocalCsAndVs();

	// Two observations with codes that are members of the local value set.
	Observation obsWithAa = new Observation();
	obsWithAa.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("AA");
	IIdType obsAaId = myObservationDao.create(obsWithAa, mySrd).getId().toUnqualifiedVersionless();

	Observation obsWithBa = new Observation();
	obsWithBa.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("BA");
	IIdType obsBaId = myObservationDao.create(obsWithBa, mySrd).getId().toUnqualifiedVersionless();

	// One observation whose code is outside the value set; must not be returned.
	Observation obsWithCa = new Observation();
	obsWithCa.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("CA");
	IIdType obsCaId = myObservationDao.create(obsWithCa, mySrd).getId().toUnqualifiedVersionless();

	// :in against the local VS returns exactly the two members.
	SearchParameterMap params = new SearchParameterMap();
	params.add(Observation.SP_CODE, new TokenParam(null, URL_MY_VALUE_SET).setModifier(TokenParamModifier.IN));
	assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), containsInAnyOrder(obsAaId.getValue(), obsBaId.getValue()));
}
/**
 * A value set that imports a non-existent value set must cause :in searches to
 * fail with {@link InvalidRequestException}; once the value set is corrected,
 * the same search must run cleanly again.
 */
@Test
public void testSearchCodeInValueSetThatImportsInvalidCodeSystem() {
	ValueSet valueSet = new ValueSet();
	valueSet.getCompose().addInclude().addValueSet("http://non_existant_VS");
	valueSet.setUrl(URL_MY_VALUE_SET);
	IIdType vsid = myValueSetDao.create(valueSet, mySrd).getId().toUnqualifiedVersionless();

	SearchParameterMap params;

	ourLog.info("testSearchCodeInValueSetThatImportsInvalidCodeSystem without status");
	params = new SearchParameterMap();
	params.add(Observation.SP_CODE, new TokenParam(null, URL_MY_VALUE_SET).setModifier(TokenParamModifier.IN));
	try {
		myObservationDao.search(params);
		// The original test passed silently if no exception was thrown at all.
		fail();
	} catch (InvalidRequestException e) {
		assertEquals("Unable to expand imported value set: Unable to find imported value set http://non_existant_VS", e.getMessage());
	}

	// Now let's update the value set so it no longer references the missing VS.
	valueSet = new ValueSet();
	valueSet.setId(vsid);
	valueSet.getCompose().addInclude().setSystem("http://hl7.org/fhir/v3/MaritalStatus").addConcept().setCode("A");
	valueSet.setUrl(URL_MY_VALUE_SET);
	myValueSetDao.update(valueSet, mySrd).getId().toUnqualifiedVersionless();

	// The search now expands successfully; no observations exist, so the result is empty.
	params = new SearchParameterMap();
	params.add(Observation.SP_CODE, new TokenParam(null, URL_MY_VALUE_SET).setModifier(TokenParamModifier.IN));
	params.add(Observation.SP_STATUS, new TokenParam(null, "final"));
	assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), empty());
}
/**
 * Todo: not yet implemented
 */
@Test
@Ignore
public void testSearchCodeNotInBuiltInValueSet() {
	AllergyIntolerance ai1 = new AllergyIntolerance();
	ai1.setClinicalStatus(AllergyIntoleranceClinicalStatus.ACTIVE);
	String id1 = myAllergyIntoleranceDao.create(ai1, mySrd).getId().toUnqualifiedVersionless().getValue();

	AllergyIntolerance ai2 = new AllergyIntolerance();
	ai2.setClinicalStatus(AllergyIntoleranceClinicalStatus.RESOLVED);
	String id2 = myAllergyIntoleranceDao.create(ai2, mySrd).getId().toUnqualifiedVersionless().getValue();

	AllergyIntolerance ai3 = new AllergyIntolerance();
	ai3.setClinicalStatus(AllergyIntoleranceClinicalStatus.INACTIVE);
	String id3 = myAllergyIntoleranceDao.create(ai3, mySrd).getId().toUnqualifiedVersionless().getValue();

	SearchParameterMap params;

	// All statuses are members of the allergy-intolerance-status VS, so :not-in matches nothing.
	params = new SearchParameterMap();
	params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam(null, "http://hl7.org/fhir/ValueSet/allergy-intolerance-status").setModifier(TokenParamModifier.NOT_IN));
	assertThat(toUnqualifiedVersionlessIdValues(myAllergyIntoleranceDao.search(params)), empty());

	// No codes in this one, so :not-in matches everything
	params = new SearchParameterMap();
	params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam(null, "http://hl7.org/fhir/ValueSet/allergy-intolerance-criticality").setModifier(TokenParamModifier.NOT_IN));
	assertThat(toUnqualifiedVersionlessIdValues(myAllergyIntoleranceDao.search(params)), containsInAnyOrder(id1, id2, id3));

	// Invalid VS must be rejected
	params = new SearchParameterMap();
	params.add(AllergyIntolerance.SP_CLINICAL_STATUS, new TokenParam(null, "http://hl7.org/fhir/ValueSet/FOO").setModifier(TokenParamModifier.NOT_IN));
	try {
		myAllergyIntoleranceDao.search(params);
		// The original test passed silently if no exception was thrown at all.
		fail();
	} catch (InvalidRequestException e) {
		assertEquals("Unable to find imported value set http://hl7.org/fhir/ValueSet/FOO", e.getMessage());
	}
}
/**
 * Extracts the code of every expansion component into a new list, preserving
 * the order of {@code theContains}.
 */
private ArrayList<String> toCodesContains(List<ValueSetExpansionContainsComponent> theContains) {
	ArrayList<String> codes = new ArrayList<String>(theContains.size());
	for (ValueSetExpansionContainsComponent component : theContains) {
		codes.add(component.getCode());
	}
	return codes;
}
/**
 * Releases all static fields held by the test infrastructure after the last
 * test in this class, so the memory can be reclaimed between test classes.
 */
@AfterClass
public static void afterClassClearContext() {
	TestUtil.clearAllStaticFieldsForUnitTest();
}
}
| |
/*
* Copyright (c) 2015. Rick Hightower, Geoff Chandler
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* QBit - The Microservice lib for Java : JSON, WebSocket, REST. Be The Web!
*/
package io.advantageous.qbit;
import io.advantageous.qbit.client.Client;
import io.advantageous.qbit.concurrent.PeriodicScheduler;
import io.advantageous.qbit.events.EventBusProxyCreator;
import io.advantageous.qbit.events.EventManager;
import io.advantageous.qbit.events.spi.EventConnector;
import io.advantageous.qbit.http.request.HttpResponseCreator;
import io.advantageous.qbit.http.request.HttpResponseDecorator;
import io.advantageous.qbit.http.HttpTransport;
import io.advantageous.qbit.http.client.HttpClient;
import io.advantageous.qbit.http.config.HttpServerOptions;
import io.advantageous.qbit.http.server.HttpServer;
import io.advantageous.qbit.json.JsonMapper;
import io.advantageous.qbit.message.MethodCall;
import io.advantageous.qbit.message.Request;
import io.advantageous.qbit.message.Response;
import io.advantageous.qbit.queue.Queue;
import io.advantageous.qbit.queue.QueueBuilder;
import io.advantageous.qbit.sender.Sender;
import io.advantageous.qbit.server.ServiceEndpointServer;
import io.advantageous.qbit.service.BeforeMethodCall;
import io.advantageous.qbit.service.ServiceBundle;
import io.advantageous.qbit.service.ServiceMethodHandler;
import io.advantageous.qbit.service.ServiceQueue;
import io.advantageous.qbit.service.discovery.ServiceDiscovery;
import io.advantageous.qbit.service.health.HealthServiceAsync;
import io.advantageous.qbit.service.impl.CallbackManager;
import io.advantageous.qbit.service.stats.StatsCollector;
import io.advantageous.qbit.spi.FactorySPI;
import io.advantageous.qbit.spi.ProtocolEncoder;
import io.advantageous.qbit.spi.ProtocolParser;
import io.advantageous.qbit.system.QBitSystemManager;
import io.advantageous.qbit.transforms.Transformer;
import io.advantageous.qbit.util.MultiMap;
import io.advantageous.qbit.util.Timer;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* Main factory for QBit. This gets used internally to create things easily.
*
* @author rhightower
*/
public interface Factory {

    /**
     * Creates a periodic scheduler backed by a pool of the given size.
     *
     * @param poolSize number of threads used to run scheduled jobs
     * @return new periodic scheduler
     */
    default PeriodicScheduler createPeriodicScheduler(int poolSize) {
        throw new IllegalStateException("Not implemented");
    }

    /**
     * @return the shared system periodic scheduler
     */
    default PeriodicScheduler periodicScheduler() {
        throw new IllegalStateException("Not implemented");
    }

    /**
     * Create a method call based on a body that we are parsing from a POST body or WebSocket message for example.
     *
     * @param address       address of method (this can override what is in the body)
     * @param returnAddress return address, which is a moniker for where we want to return the results
     * @param objectName    name of the object (optional)
     * @param methodName    name of the method (optional)
     * @param args          arguments and possibly more (could be whole message encoded)
     * @param params        params, usually request parameters
     * @return new method call object returned.
     */
    default MethodCall<Object> createMethodCallToBeParsedFromBody(String address,
                                                                  String returnAddress,
                                                                  String objectName,
                                                                  String methodName,
                                                                  Object args,
                                                                  MultiMap<String, String> params) {
        throw new UnsupportedOperationException();
    }

    /**
     * Create a method call based on a body that we are parsing from a POST body or WebSocket message for example.
     *
     * @param address       address of method (this can override what is in the body)
     * @param returnAddress return address, which is a moniker for where we want to return the results
     * @param args          arguments and possibly more (could be whole message encoded)
     * @param params        params, usually request parameters
     * @return new method call object returned.
     */
    default MethodCall<Object> createMethodCallByAddress(String address,
                                                         String returnAddress,
                                                         Object args,
                                                         MultiMap<String, String> params) {
        throw new UnsupportedOperationException();
    }

    /**
     * Create a method call based on a body that we are parsing from a POST body or WebSocket message for example.
     *
     * @param methodName    name of the method (optional)
     * @param objectName    name of the object (optional)
     * @param returnAddress return address, which is a moniker for where we want to return the results
     * @param args          arguments and possibly more (could be whole message encoded)
     * @param params        params, usually request parameters
     * @return new method call object returned.
     */
    default MethodCall<Object> createMethodCallByNames(
            String methodName, String objectName, String returnAddress, Object args,
            MultiMap<String, String> params) {
        throw new UnsupportedOperationException();
    }

    /**
     * Creates a service bundle, i.e. a collection of services sharing request/response queues.
     *
     * @param address                        root address of the bundle
     * @param requestQueueBuilder            builder for the request queue
     * @param responseQueueBuilder           builder for the response queue
     * @param webResponseQueueBuilder        builder for the web response queue
     * @param factory                        factory used to create the underlying services
     * @param asyncCalls                     true if calls should be handled asynchronously
     * @param beforeMethodCall               interceptor invoked before each method call
     * @param beforeMethodCallAfterTransform interceptor invoked after the argument transform
     * @param argTransformer                 transformer applied to incoming request arguments
     * @param invokeDynamic                  true to use dynamic (map/JSON based) invocation
     * @param systemManager                  system manager that tracks this bundle
     * @param healthService                  health service for liveness reporting
     * @param statsCollector                 collector for runtime statistics
     * @param timer                          timer used for time-based bookkeeping
     * @param statsFlushRateSeconds          how often stats are flushed, in seconds
     * @param checkTimingEveryXCalls         how often timings are sampled, in calls
     * @param callbackManager                manager for outstanding callbacks
     * @return new service bundle
     */
    default ServiceBundle createServiceBundle(String address,
                                              final QueueBuilder requestQueueBuilder,
                                              final QueueBuilder responseQueueBuilder,
                                              final QueueBuilder webResponseQueueBuilder,
                                              final Factory factory, final boolean asyncCalls,
                                              final BeforeMethodCall beforeMethodCall,
                                              final BeforeMethodCall beforeMethodCallAfterTransform,
                                              final Transformer<Request, Object> argTransformer,
                                              boolean invokeDynamic,
                                              final QBitSystemManager systemManager,
                                              final HealthServiceAsync healthService,
                                              final StatsCollector statsCollector,
                                              final Timer timer,
                                              final int statsFlushRateSeconds,
                                              final int checkTimingEveryXCalls,
                                              final CallbackManager callbackManager) {
        throw new UnsupportedOperationException();
    }

    /**
     * Creates a handler that dispatches method calls onto a service object.
     *
     * @param invokeDynamic true to use dynamic (map/JSON based) invocation
     * @return new service method handler
     */
    default ServiceMethodHandler createServiceMethodHandler(boolean invokeDynamic) {
        throw new UnsupportedOperationException();
    }

    /**
     * Creates a service queue wrapping the given object, with full control over
     * queue construction and invocation style.
     *
     * @param rootAddress         base URI
     * @param serviceAddress      service address URI
     * @param object              object that implements the service
     * @param responseQueue       the response queue
     * @param requestQueueBuilder builder for the request queue
     * @param responseQueueBuilder builder for the response queue
     * @param asyncCalls          true if calls should be handled asynchronously
     * @param invokeDynamic       true to use dynamic (map/JSON based) invocation
     * @param handleCallbacks     true if the service should manage callbacks itself
     * @param systemManager       system manager that tracks this service
     * @return new service queue that was created
     */
    default ServiceQueue createService(String rootAddress, String serviceAddress,
                                       Object object,
                                       Queue<Response<Object>> responseQueue,
                                       final QueueBuilder requestQueueBuilder,
                                       final QueueBuilder responseQueueBuilder,
                                       boolean asyncCalls,
                                       boolean invokeDynamic,
                                       boolean handleCallbacks,
                                       final QBitSystemManager systemManager) {
        throw new UnsupportedOperationException();
    }

    /**
     * Creates a service queue wrapping the given object, using default queue settings.
     *
     * @param rootAddress    base URI
     * @param serviceAddress service address URI
     * @param object         object that implements the service
     * @param responseQueue  the response queue.
     * @param systemManager  system manager
     * @return new service queue that was created
     */
    default ServiceQueue createService(String rootAddress, String serviceAddress,
                                       Object object,
                                       Queue<Response<Object>> responseQueue,
                                       final QBitSystemManager systemManager) {
        throw new UnsupportedOperationException();
    }

    /**
     * Create an encoder.
     *
     * @return encoder.
     */
    default ProtocolEncoder createEncoder() {
        throw new UnsupportedOperationException();
    }

    /**
     * Create a local client proxy
     *
     * @param serviceInterface client interface to client
     * @param serviceName      name of the client that we are proxying method calls to.
     * @param serviceBundle    name of client bundle
     * @param <T>              type of proxy
     * @return new proxy object
     */
    default <T> T createLocalProxy(Class<T> serviceInterface, String serviceName, ServiceBundle serviceBundle) {
        throw new UnsupportedOperationException();
    }

    /**
     * Create a remote proxy using a sender that knows how to forwardEvent method body over wire
     *
     * @param serviceInterface client view of client
     * @param uri              uri of client
     * @param serviceName      name of the client that we are proxying method calls to.
     * @param host             host
     * @param port             port
     * @param connected        connected
     * @param returnAddressArg return address
     * @param sender           how we are sending the message over the wire
     * @param beforeMethodCall before method call
     * @param requestBatchSize request batch size
     * @param <T>              type of client
     * @return remote proxy
     */
    default <T> T createRemoteProxyWithReturnAddress(Class<T> serviceInterface, String uri, String serviceName,
                                                     String host,
                                                     int port,
                                                     AtomicBoolean connected,
                                                     String returnAddressArg,
                                                     Sender<String> sender,
                                                     BeforeMethodCall beforeMethodCall,
                                                     int requestBatchSize) {
        throw new UnsupportedOperationException();
    }

    /**
     * Parses a method call using an address prefix and a body.
     * Useful for Websocket calls and POST calls (if you don't care about request params).
     *
     * @param addressPrefix      prefix of the address
     * @param message            message that we are sending
     * @param originatingRequest the request that caused this method to be created
     * @return method call that we just created
     */
    default MethodCall<Object> createMethodCallToBeParsedFromBody(String addressPrefix,
                                                                  Object message,
                                                                  Request<Object> originatingRequest) {
        throw new UnsupportedOperationException();
    }

    /**
     * Parses a batch of method calls from a single body using an address prefix.
     *
     * @param addressPrefix      prefix of the address
     * @param body               message body that may contain multiple calls
     * @param originatingRequest the request that caused these methods to be created
     * @return list of method calls parsed from the body
     */
    default List<MethodCall<Object>> createMethodCallListToBeParsedFromBody(
            String addressPrefix,
            Object body,
            Request<Object> originatingRequest) {
        throw new UnsupportedOperationException();
    }

    /**
     * Request request
     *
     * @param request incoming request that we want to createWithWorkers a MethodCall from.
     * @param args    args
     * @return request
     */
    default MethodCall<Object> createMethodCallFromHttpRequest(
            Request<Object> request, Object args) {
        throw new UnsupportedOperationException();
    }

    /**
     * Creates a JSON Mapper.
     *
     * @return json mapper
     */
    default JsonMapper createJsonMapper() {
        throw new UnsupportedOperationException();
    }

    /**
     * Creates an HTTP client; the default implementation delegates to the
     * {@link FactorySPI} registered HTTP client factory.
     *
     * @param host                  remote host
     * @param port                  remote port
     * @param timeOutInMilliseconds request timeout in milliseconds
     * @param poolSize              connection pool size
     * @param autoFlush             true to flush writes automatically
     * @param flushRate             flush rate used when auto flush is enabled
     * @param keepAlive             true to use HTTP keep-alive
     * @param pipeline              true to pipeline requests
     * @param ssl                   true to connect over SSL/TLS
     * @param verifyHost            true to verify the server host name
     * @param trustAll              true to trust all certificates
     * @param maxWebSocketFrameSize maximum WebSocket frame size in bytes
     * @param tryUseCompression     true to attempt compression
     * @param trustStorePath        path to the trust store
     * @param trustStorePassword    password for the trust store
     * @param tcpNoDelay            true to disable Nagle's algorithm
     * @param soLinger              socket linger value
     * @return new HTTP client
     */
    default HttpClient createHttpClient(
            String host,
            int port,
            int timeOutInMilliseconds,
            int poolSize,
            boolean autoFlush,
            int flushRate,
            boolean keepAlive,
            boolean pipeline,
            boolean ssl,
            boolean verifyHost,
            boolean trustAll,
            int maxWebSocketFrameSize,
            boolean tryUseCompression,
            String trustStorePath,
            String trustStorePassword,
            boolean tcpNoDelay,
            int soLinger
    ) {
        return FactorySPI.getHttpClientFactory().create(
                host,
                port,
                timeOutInMilliseconds,
                poolSize,
                autoFlush,
                flushRate,
                keepAlive,
                pipeline,
                ssl,
                verifyHost,
                trustAll,
                maxWebSocketFrameSize,
                tryUseCompression,
                trustStorePath,
                trustStorePassword,
                tcpNoDelay,
                soLinger
        );
    }

    /**
     * @return the system-wide event manager
     */
    default EventManager systemEventManager() {
        throw new IllegalStateException("Not implemented");
    }

    /**
     * Creates a named event manager; the default implementation delegates to the
     * {@link FactorySPI} registered event manager factory.
     *
     * @param name           name of the event manager
     * @param eventConnector connector used to forward events
     * @param statsCollector collector for runtime statistics
     * @return new event manager
     */
    default EventManager createEventManager(final String name, final EventConnector eventConnector,
                                            final StatsCollector statsCollector) {
        return FactorySPI.getEventManagerFactory().createEventManager(name, eventConnector, statsCollector);
    }

    /**
     * Creates a service endpoint server bound to the given HTTP transport.
     *
     * @param httpServer                  HTTP transport to serve on
     * @param encoder                     protocol encoder for responses
     * @param protocolParser              protocol parser for requests
     * @param serviceBundle               bundle of services to expose
     * @param jsonMapper                  JSON mapper used for (de)serialization
     * @param timeOutInSeconds            request timeout in seconds
     * @param numberOfOutstandingRequests maximum outstanding requests
     * @param batchSize                   batch size for message processing
     * @param flushInterval               flush interval for responses
     * @param systemManager               system manager that tracks this server
     * @param endpointName                name of this endpoint
     * @param serviceDiscovery            service discovery used to register the endpoint
     * @param port                        port the endpoint listens on
     * @param ttlSeconds                  TTL in seconds for service discovery registration
     * @param healthServiceAsync          health service for liveness reporting
     * @return new service endpoint server
     */
    default ServiceEndpointServer createServiceServer(final HttpTransport httpServer,
                                                      final ProtocolEncoder encoder,
                                                      final ProtocolParser protocolParser,
                                                      final ServiceBundle serviceBundle,
                                                      final JsonMapper jsonMapper,
                                                      final int timeOutInSeconds,
                                                      final int numberOfOutstandingRequests,
                                                      final int batchSize,
                                                      final int flushInterval,
                                                      final QBitSystemManager systemManager,
                                                      final String endpointName,
                                                      final ServiceDiscovery serviceDiscovery,
                                                      final int port,
                                                      final int ttlSeconds,
                                                      final HealthServiceAsync healthServiceAsync
    ) {
        throw new UnsupportedOperationException();
    }

    /**
     * Creates a remote client over an existing HTTP client connection.
     *
     * @param uri              URI of the remote service
     * @param httpClient       underlying HTTP client
     * @param requestBatchSize request batch size
     * @return new client
     */
    default Client createClient(String uri, HttpClient httpClient, int requestBatchSize) {
        throw new UnsupportedOperationException();
    }

    /**
     * @return new protocol parser
     */
    default ProtocolParser createProtocolParser() {
        throw new UnsupportedOperationException();
    }

    /**
     * @return the event manager proxy, or null if none has been created
     */
    default EventManager eventManagerProxy() {
        return null;
    }

    /**
     * Clears the cached event manager proxy. No-op by default.
     */
    default void clearEventManagerProxy() {
    }

    /**
     * @return creator used to build strongly-typed event bus proxies
     */
    default EventBusProxyCreator eventBusProxyCreator() {
        throw new UnsupportedOperationException();
    }

    /**
     * Shuts down the system event bus. No-op by default.
     */
    default void shutdownSystemEventBus() {
    }

    /**
     * Creates an HTTP server; the default implementation delegates to the
     * {@link FactorySPI} registered HTTP server factory.
     *
     * @param options                     HTTP server options
     * @param endpointName                name of this endpoint
     * @param systemManager               system manager that tracks this server
     * @param serviceDiscovery            service discovery used to register the endpoint
     * @param healthServiceAsync          health service for liveness reporting
     * @param serviceDiscoveryTtl         TTL for the service discovery registration
     * @param serviceDiscoveryTtlTimeUnit unit for the service discovery TTL
     * @param decorators                  decorators applied to outgoing responses
     * @param httpResponseCreator         creator used to build HTTP responses
     * @return new HTTP server
     */
    default HttpServer createHttpServer(HttpServerOptions options,
                                        String endpointName,
                                        QBitSystemManager systemManager,
                                        ServiceDiscovery serviceDiscovery,
                                        HealthServiceAsync healthServiceAsync,
                                        final int serviceDiscoveryTtl,
                                        final TimeUnit serviceDiscoveryTtlTimeUnit,
                                        final CopyOnWriteArrayList<HttpResponseDecorator> decorators,
                                        final HttpResponseCreator httpResponseCreator) {
        return FactorySPI.getHttpServerFactory().create(options, endpointName,
                systemManager, serviceDiscovery, healthServiceAsync,
                serviceDiscoveryTtl, serviceDiscoveryTtlTimeUnit, decorators, httpResponseCreator);
    }
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.worker.netty;
import alluxio.Configuration;
import alluxio.PropertyKey;
import alluxio.exception.status.AlluxioStatusException;
import alluxio.exception.status.InternalException;
import alluxio.exception.status.InvalidArgumentException;
import alluxio.network.protocol.RPCMessage;
import alluxio.network.protocol.RPCProtoMessage;
import alluxio.network.protocol.databuffer.DataBuffer;
import alluxio.proto.dataserver.Protocol;
import alluxio.resource.LockResource;
import alluxio.util.network.NettyUtils;
import com.google.common.base.Preconditions;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.io.IOException;
import java.util.LinkedList;
import java.util.Queue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.locks.ReentrantLock;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.NotThreadSafe;
/**
* This class handles {@link alluxio.proto.dataserver.Protocol.WriteRequest}s.
*
* Protocol: Check {@link alluxio.client.block.stream.NettyPacketWriter} for more information.
* 1. The netty channel handler streams packets from the channel and buffers them. The netty
* reader is paused if the buffer is full by turning off the auto read, and is resumed when
* the buffer is not full.
* 2. The {@link PacketWriter} polls packets from the buffer and writes to the block worker. The
* writer becomes inactive if there is nothing on the buffer to free up the executor. It is
* resumed when the buffer becomes non-empty.
* 3. An EOF or CANCEL message signifies the completion of this request.
* 4. When an error occurs, the channel is closed.
*
* Threading model:
* Only two threads are involved at a given point of time: netty I/O thread, packet writer thread.
* 1. The netty I/O thread reads packets from the wire and pushes them to the buffer if there is
* no error seen so far. This packet reading can be ended by an EOF packet, a CANCEL packet or
* an exception. When one of these 3 happens, a special packet is pushed to the buffer.
* 2. The packet writer thread keeps polling packets from the buffer and processes them.
* NOTE: it is guaranteed that there is only one packet writer thread active at a given time.
*/
@NotThreadSafe
abstract class DataServerWriteHandler extends ChannelInboundHandlerAdapter {
  private static final Logger LOG = LoggerFactory.getLogger(DataServerWriteHandler.class);

  /** Maximum buffered packets before auto-read is disabled on the channel (backpressure). */
  private static final int MAX_PACKETS_IN_FLIGHT =
      Configuration.getInt(PropertyKey.WORKER_NETWORK_NETTY_WRITER_BUFFER_SIZE_PACKETS);

  /** The executor service to run the {@link PacketWriter}s. */
  private final ExecutorService mPacketWriterExecutor;

  /**
   * Special packets used to pass control information from the I/O thread to the packet writer
   * thread. These are compared by identity (==), never by content.
   * EOF: the end of file.
   * CANCEL: the write request is cancelled by the client.
   * ABORT: a non-recoverable error is detected, abort this channel.
   */
  private static final ByteBuf EOF = Unpooled.buffer(0);
  private static final ByteBuf CANCEL = Unpooled.buffer(0);
  private static final ByteBuf ABORT = Unpooled.buffer(0);

  /** Guards mPackets, mPacketWriterActive and mError. */
  private ReentrantLock mLock = new ReentrantLock();
  /** The buffer for packets read from the channel. */
  @GuardedBy("mLock")
  private Queue<ByteBuf> mPackets = new LinkedList<>();

  /**
   * Set to true if the packet writer is active.
   *
   * The following invariants (happens-before orders) must be maintained:
   * 1. When mPacketWriterActive is true, it is guaranteed that mPackets is polled at least
   *    once after the lock is released. This is guaranteed even when there is an exception
   *    thrown when writing the packet.
   * 2. When mPacketWriterActive is false, it is guaranteed that mPackets won't be polled
   *    before someone sets it to true again.
   *
   * The above are achieved by protecting it with "mLock". It is set to true when a new packet
   * is read when it is false. It set to false when one of the these is true: 1) The mPackets queue
   * is empty; 2) The write request is fulfilled (eof or cancel is received); 3) A failure occurs.
   */
  @GuardedBy("mLock")
  private boolean mPacketWriterActive;

  /**
   * The error seen in either the netty I/O thread (e.g. failed to read from the network) or the
   * packet writer thread (e.g. failed to write the packet).
   */
  @GuardedBy("mLock")
  private Error mError;

  /**
   * An error plus whether the client should be told about it. mNotifyClient is false when the
   * channel is already gone (e.g. unregistered), in which case no response can be delivered.
   */
  private class Error {
    /** The underlying failure. */
    final AlluxioStatusException mCause;
    /** Whether an error response should be written back to the client. */
    final boolean mNotifyClient;

    Error(AlluxioStatusException cause, boolean notifyClient) {
      mCause = cause;
      mNotifyClient = notifyClient;
    }
  }

  /**
   * mRequest is initialized only once for a whole file or block in
   * {@link DataServerReadHandler#channelRead(ChannelHandlerContext, Object)}.
   * After that, it should only be used by the packet writer thread.
   * It is safe to read those final primitive fields (e.g. mId, mSessionId) if mError is not set
   * from any thread (not such usage in the code now). It is destroyed when the write request is
   * done (complete or cancel) or an error is seen.
   */
  protected volatile WriteRequestInternal mRequest;

  /**
   * Per-request state; concrete subclasses add the writer (block or UFS file) resources.
   */
  abstract class WriteRequestInternal implements Closeable {
    /** This ID can either be block ID or temp UFS file ID. */
    final long mId;
    /** The session that owns this write. */
    final long mSessionId;

    WriteRequestInternal(long id, long sessionId) {
      mId = id;
      mSessionId = sessionId;
    }

    /**
     * Cancels the request.
     */
    abstract void cancel() throws IOException;
  }

  /**
   * The next pos to queue to the buffer. This is only updated and used by the netty I/O thread.
   */
  private long mPosToQueue;
  /**
   * The next pos to write to the block worker. This is only updated by the packet writer
   * thread. The netty I/O reads this only for sanity check during initialization.
   */
  protected volatile long mPosToWrite;

  /**
   * Creates an instance of {@link DataServerWriteHandler}.
   *
   * @param executorService the executor service to run {@link PacketWriter}s
   */
  DataServerWriteHandler(ExecutorService executorService) {
    mPacketWriterExecutor = executorService;
  }

  /**
   * Accepts write-request messages, buffers their payloads, and (re)activates the packet
   * writer thread. Non write-request messages are passed down the pipeline unchanged.
   */
  @Override
  public void channelRead(ChannelHandlerContext ctx, Object object) throws Exception {
    if (!acceptMessage(object)) {
      ctx.fireChannelRead(object);
      return;
    }
    RPCProtoMessage msg = (RPCProtoMessage) object;
    Protocol.WriteRequest writeRequest = msg.getMessage().asWriteRequest();
    // Only initialize (open the readers) if this is the first packet in the block/file.
    if (writeRequest.getOffset() == 0) {
      initializeRequest(msg);
    }

    // Validate msg.
    try {
      validateRequest(msg);
    } catch (Exception e) {
      pushAbortPacket(ctx.channel(), new Error(AlluxioStatusException.from(e), true));
      return;
    }

    try (LockResource lr = new LockResource(mLock)) {
      // If we have seen an error, return early and release the data. This can only
      // happen for those mis-behaving clients who first sends some invalid requests,
      // then some random data. It can leak memory if we do not release buffers here.
      if (mError != null) {
        if (msg.getPayloadDataBuffer() != null) {
          msg.getPayloadDataBuffer().release();
        }
        return;
      }

      // Map EOF/CANCEL flags to the identity-compared control packets; everything else
      // carries real data and advances the expected queue position.
      ByteBuf buf;
      if (writeRequest.getEof()) {
        buf = EOF;
      } else if (writeRequest.getCancel()) {
        buf = CANCEL;
      } else {
        DataBuffer dataBuffer = msg.getPayloadDataBuffer();
        Preconditions.checkState(dataBuffer != null && dataBuffer.getLength() > 0);
        assert dataBuffer.getNettyOutput() instanceof ByteBuf;
        buf = (ByteBuf) dataBuffer.getNettyOutput();
        mPosToQueue += buf.readableBytes();
      }
      if (!mPacketWriterActive) {
        mPacketWriterActive = true;
        mPacketWriterExecutor.submit(new PacketWriter(ctx.channel()));
      }
      mPackets.offer(buf);
      if (tooManyPacketsInFlight()) {
        NettyUtils.disableAutoRead(ctx.channel());
      }
    }
  }

  @Override
  public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
    LOG.error("Failed to write block.", cause);
    pushAbortPacket(ctx.channel(), new Error(AlluxioStatusException.from(cause), true));
  }

  @Override
  public void channelUnregistered(ChannelHandlerContext ctx) {
    // The channel is gone, so there is no client to notify (mNotifyClient = false).
    pushAbortPacket(ctx.channel(), new Error(new InternalException("channel unregistered"), false));
    ctx.fireChannelUnregistered();
  }

  /**
   * @return true if there are too many packets in flight
   */
  @GuardedBy("mLock")
  private boolean tooManyPacketsInFlight() {
    return mPackets.size() >= MAX_PACKETS_IN_FLIGHT;
  }

  /**
   * Validates the block write request, throwing an exception if the request is invalid.
   *
   * @param msg the block write request
   * @throws InvalidArgumentException if the offset is out of order or a control message
   *         (cancel/eof) carries a data payload
   */
  private void validateRequest(RPCProtoMessage msg) {
    Protocol.WriteRequest request = msg.getMessage().asWriteRequest();
    if (request.getOffset() != mPosToQueue) {
      throw new InvalidArgumentException(String.format(
          "Offsets do not match [received: %d, expected: %d].", request.getOffset(), mPosToQueue));
    }
    if (msg.getPayloadDataBuffer() != null && msg.getPayloadDataBuffer().getLength() > 0 && (
        request.getCancel() || request.getEof())) {
      throw new InvalidArgumentException("Found data in a cancel/eof message.");
    }
  }

  /**
   * A runnable that polls from the packets queue and writes to the block worker.
   */
  private final class PacketWriter implements Runnable {
    private Channel mChannel;

    /**
     * Creates an instance of {@link PacketWriter}.
     *
     * @param channel the netty channel
     */
    PacketWriter(Channel channel) {
      mChannel = channel;
    }

    @Override
    public void run() {
      try {
        runInternal();
      } catch (Throwable e) {
        // This should never happen.
        LOG.error("Failed to run PacketWriter.", e);
        throw e;
      }
    }

    /**
     * Drains the packet queue until it is empty or a terminating packet (EOF/CANCEL/ABORT)
     * is seen, then finishes the request accordingly.
     */
    private void runInternal() {
      boolean eof;
      boolean cancel;
      boolean abort;

      while (true) {
        ByteBuf buf;
        try (LockResource lr = new LockResource(mLock)) {
          buf = mPackets.poll();
          if (buf == null || buf == EOF || buf == CANCEL || buf == ABORT) {
            eof = buf == EOF;
            cancel = buf == CANCEL;
            // mError is checked here so that we can override EOF and CANCEL if error happens
            // after we receive EOF or CANCEL signal.
            // TODO(peis): Move to the pattern used in DataServerReadHandler to avoid
            // using special packets.
            abort = mError != null;
            mPacketWriterActive = false;
            break;
          }
          // Release all the packets if we have encountered an error. We guarantee that no more
          // packets should be queued after we have received one of the done signals (EOF, CANCEL
          // or ABORT).
          if (mError != null) {
            release(buf);
            continue;
          }
          if (!tooManyPacketsInFlight()) {
            NettyUtils.enableAutoRead(mChannel);
          }
        }

        try {
          mPosToWrite += buf.readableBytes();
          incrementMetrics(buf.readableBytes());
          writeBuf(buf, mPosToWrite);
        } catch (Exception e) {
          // NOTE(review): only e.getMessage() is logged here, the stack trace is dropped —
          // consider LOG.warn("Failed to write packet.", e); verify before changing.
          LOG.warn("Failed to write packet {}", e.getMessage());
          pushAbortPacket(mChannel, new Error(AlluxioStatusException.from(e), true));
        } finally {
          release(buf);
        }
      }

      if (abort) {
        try {
          cancel();
        } catch (IOException e) {
          LOG.warn("Failed to abort, cancel or complete the write request with error {}.",
              e.getMessage());
        }
        replyError();
      } else if (cancel || eof) {
        try {
          if (cancel) {
            cancel();
            replyCancel();
          } else {
            complete();
            replySuccess();
          }
        } catch (Exception e) {
          pushAbortPacket(mChannel, new Error(AlluxioStatusException.from(e), true));
        }
      }
    }

    /**
     * Completes this write.
     */
    private void complete() throws IOException {
      if (mRequest != null) {
        mRequest.close();
        mRequest = null;
      }
      mPosToWrite = 0;
    }

    /**
     * Cancels this write.
     */
    private void cancel() throws IOException {
      if (mRequest != null) {
        mRequest.cancel();
        mRequest = null;
      }
      mPosToWrite = 0;
    }

    /**
     * Writes a response to signify the success of the write request.
     */
    private void replySuccess() {
      NettyUtils.enableAutoRead(mChannel);
      mChannel.writeAndFlush(RPCProtoMessage.createOkResponse(null))
          .addListeners(ChannelFutureListener.CLOSE_ON_FAILURE);
    }

    /**
     * Writes a response to signify the successful cancellation of the write request.
     */
    private void replyCancel() {
      NettyUtils.enableAutoRead(mChannel);
      mChannel.writeAndFlush(RPCProtoMessage.createCancelResponse())
          .addListeners(ChannelFutureListener.CLOSE_ON_FAILURE);
    }

    /**
     * Writes an error response to the channel and closes the channel after that.
     */
    private void replyError() {
      Error error;
      try (LockResource lr = new LockResource(mLock)) {
        error = Preconditions.checkNotNull(mError);
      }

      if (error.mNotifyClient) {
        mChannel.writeAndFlush(RPCProtoMessage.createResponse(error.mCause))
            .addListener(ChannelFutureListener.CLOSE);
      }
    }
  }

  /**
   * Pushes {@link DataServerWriteHandler#ABORT} to the buffer if there has been no error so far.
   *
   * @param channel the channel
   * @param error the error
   */
  private void pushAbortPacket(Channel channel, Error error) {
    try (LockResource lr = new LockResource(mLock)) {
      // Only the first error wins; later errors are ignored.
      if (mError != null) {
        return;
      }
      mError = error;
      mPackets.offer(ABORT);
      if (!mPacketWriterActive) {
        mPacketWriterActive = true;
        mPacketWriterExecutor.submit(new PacketWriter(channel));
      }
    }
  }

  /**
   * Releases a {@link ByteBuf}, skipping the shared control packets which are never released.
   *
   * @param buf the netty byte buffer
   */
  private static void release(ByteBuf buf) {
    if (buf != null && buf != EOF && buf != CANCEL && buf != ABORT) {
      buf.release();
    }
  }

  /**
   * Checks whether this object should be processed by this handler.
   *
   * @param object the object
   * @return true if this object should be processed
   */
  protected boolean acceptMessage(Object object) {
    if (!(object instanceof RPCProtoMessage)) {
      return false;
    }
    RPCProtoMessage message = (RPCProtoMessage) object;
    return message.getType() == RPCMessage.Type.RPC_WRITE_REQUEST;
  }

  /**
   * Initializes the handler if necessary. Subclasses override this to open the actual
   * writer; they must call super to reset the positions.
   *
   * @param msg the block write request
   */
  protected void initializeRequest(RPCProtoMessage msg) throws Exception {
    Preconditions.checkState(mRequest == null);
    mPosToQueue = 0;
    Preconditions.checkState(mPosToWrite == 0);
  }

  /**
   * Writes the buffer.
   *
   * @param buf the buffer
   * @param pos the pos
   */
  protected abstract void writeBuf(ByteBuf buf, long pos) throws Exception;

  /**
   * @param bytesWritten bytes written
   */
  protected abstract void incrementMetrics(long bytesWritten);
}
| |
/*******************************************************************************
* Copyright (c) 2007 University of Illinois at Urbana-Champaign and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* UIUC - Initial API and implementation
*******************************************************************************/
package fr.inria.verveine.extractor.fortran.parser.ast;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.Token;
import fortran.ofp.parser.java.FortranLexer;
import fortran.ofp.parser.java.FortranToken;
/**
* Tokens are returned by the lexical analyzer and serve as leaf nodes in the AST.
* <p>
* This is the implementation of {@link IToken} used by the Fortran parser.
*/
/**
 * Tokens are returned by the lexical analyzer and serve as leaf nodes in the AST.
 * <p>
 * This is the implementation of {@link IToken} used by the Fortran parser. It wraps an OFP
 * {@link FortranToken} and delegates every ANTLR {@link Token} operation to it, while adding
 * the {@link IASTNode} tree behavior (parent link, visitor, whitespace bookkeeping).
 */
public class ASTToken implements Token, IASTNode
{
    /** The wrapped OFP token; all {@link Token} methods delegate to it. */
    protected FortranToken actualToken = null;

    /**
     * Whitespace and whitetext appearing before this token that should be associated with this
     * token. {@code null} means "unset": {@link #getWhiteBefore()} then falls back to the
     * white text recorded on the underlying OFP token.
     */
    protected String whiteBefore = null;

    /**
     * Whitespace and whitetext appearing after this token that should be associated with this
     * token, not the next. Never {@code null}.
     */
    protected String whiteAfter = ""; //$NON-NLS-1$

    /**
     * Utility static method to create an instance from a {@link Token}.
     * <p>
     * Returns <code>null</code> if the argument is <code>null</code>. An EOF token is first
     * copied into a fresh {@link FortranToken}, because the lexer's EOF token need not be a
     * FortranToken — NOTE(review): assumes every non-EOF token IS a FortranToken, otherwise
     * the cast below fails.
     *
     * @param tok the token to wrap, possibly null
     * @return a new ASTToken, or null when {@code tok} is null
     */
    public static ASTToken with(Token tok) {
        if (tok == null) {
            return null;
        }
        if (tok.getType() == FortranLexer.EOF) {
            tok = new FortranToken(tok);
        }
        return new ASTToken((FortranToken) tok);
    }

    /**
     * Creates a leaf node wrapping the given OFP token.
     *
     * @param tok the token to wrap; must not be null
     */
    public ASTToken(FortranToken tok) {
        actualToken = tok;
    }

    /**
     * Returns whitespace and whitetext appearing before this token that should be associated
     * with this token.<BR>
     * Taken from the value set via {@link #setWhiteBefore(String)} if any, otherwise from the
     * white text associated to the underlying OFP token.
     */
    public String getWhiteBefore() {
        return (whiteBefore == null) ? actualToken.getWhiteText() : whiteBefore;
    }

    /**
     * Sets whitespace and whitetext appearing before this token that should be associated with
     * this token. {@code null} is normalized to the empty string.
     */
    public void setWhiteBefore(String value) {
        whiteBefore = (value == null ? "" : value);
    }

    /**
     * Returns whitespace and whitetext appearing after this token that should be associated
     * with this token, not the next.
     */
    public String getWhiteAfter() { return whiteAfter; }

    /**
     * Sets whitespace and whitetext appearing after this token that should be associated with
     * this token, not the next. {@code null} is normalized to the empty string.
     */
    public void setWhiteAfter(String value) { whiteAfter = value == null ? "" : value; } //$NON-NLS-1$

    /** @return the wrapped OFP token */
    public FortranToken getActualToken() {
        return actualToken;
    }

    ///////////////////////////////////////////////////////////////////////////
    // IASTNode Implementation
    ///////////////////////////////////////////////////////////////////////////

    /** Parent node in the AST; null for a detached (or root-level) token. */
    private IASTNode parent = null;

    public IASTNode getParent()
    {
        return parent;
    }

    public void setParent(IASTNode parent)
    {
        this.parent = parent;
    }

    public void accept(IASTVisitor visitor)
    {
        visitor.visitToken(this);
    }

    public <T extends IASTNode> Set<T> findAll(Class<T> targetClass)
    {
        return ASTNodeUtil.findAll(this, targetClass);
    }

    public <T extends IASTNode> T findFirst(Class<T> targetClass)
    {
        return ASTNodeUtil.findFirst(this, targetClass);
    }

    public <T extends IASTNode> T findLast(Class<T> targetClass)
    {
        return ASTNodeUtil.findLast(this, targetClass);
    }

    // A token is its own first and last token — it is a leaf.
    public ASTToken findFirstToken()
    {
        return this;
    }

    public ASTToken findLastToken()
    {
        return this;
    }

    public <T extends IASTNode> T findNearestAncestor(Class<T> targetClass)
    {
        return ASTNodeUtil.findNearestAncestor(this, targetClass);
    }

    /** Leaves have no children; always returns an empty (fresh, mutable) list. */
    public Iterable<? extends IASTNode> getChildren()
    {
        return new LinkedList<IASTNode>();
    }

    public boolean isFirstChildInList()
    {
        return ASTNodeUtil.isFirstChildInList(this);
    }

    /** Unsupported: a leaf has no children to replace. */
    public void replaceChild(IASTNode node, IASTNode withNode)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public String fullyQualifiedName() {
        // BUGFIX: the previous implementation dereferenced getParent() unconditionally and
        // threw a NullPointerException for tokens with no parent (parent defaults to null).
        final String prefix = (parent == null) ? "" : parent.fullyQualifiedName();
        return prefix + ".<" + actualToken.getText() + ">";
    }

    ///////////////////////////////////////////////////////////////////////////
    // Token delegation — all of the following forward straight to actualToken.
    ///////////////////////////////////////////////////////////////////////////

    @Override
    public String getText() { return actualToken.getText(); }

    @Override
    public void setText(String text) {
        actualToken.setText(text);
    }

    @Override
    public int getLine()
    {
        return actualToken.getLine();
    }

    @Override
    public void setLine(int line) {
        actualToken.setLine(line);
    }

    @Override
    public int getChannel() {
        return actualToken.getChannel();
    }

    @Override
    public int getCharPositionInLine() {
        return actualToken.getCharPositionInLine();
    }

    @Override
    public CharStream getInputStream() {
        return actualToken.getInputStream();
    }

    @Override
    public int getTokenIndex() {
        return actualToken.getTokenIndex();
    }

    public int getStartIndex() {
        return actualToken.getStartIndex();
    }

    public int getStopIndex() {
        return actualToken.getStopIndex();
    }

    @Override
    public int getType() {
        return actualToken.getType();
    }

    @Override
    public void setChannel(int channel) {
        actualToken.setChannel(channel);
    }

    @Override
    public void setCharPositionInLine(int pos) {
        actualToken.setCharPositionInLine(pos);
    }

    @Override
    public void setInputStream(CharStream input) {
        actualToken.setInputStream(input);
    }

    @Override
    public void setTokenIndex(int index) {
        actualToken.setTokenIndex(index);
    }

    @Override
    public void setType(int type) {
        actualToken.setType(type);
    }

    @Override
    public String toString() {
        return actualToken.toString();
    }

    /** This is a concrete token, never the null-object placeholder. */
    @Override
    public boolean isNullNode() {
        return false;
    }

    /** A token is always nested inside some node, never the top-level node. */
    @Override
    public boolean isTopLevelNode() {
        return false;
    }
}
| |
/*
* Copyright (c) 2011-2015 EPFL DATA Laboratory
* Copyright (c) 2014-2015 The Squall Collaboration (see NOTICE)
*
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.epfl.data.squall.ewh.storm_components;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.PriorityQueue;
import java.util.Random;
import java.util.TreeMap;
import org.apache.log4j.Logger;
import backtype.storm.Config;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.InputDeclarer;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import ch.epfl.data.squall.ewh.data_structures.FixedSizePriorityQueue;
import ch.epfl.data.squall.ewh.data_structures.KeyPriorityProbability;
import ch.epfl.data.squall.predicates.ComparisonPredicate;
import ch.epfl.data.squall.storm_components.StormComponent;
import ch.epfl.data.squall.storm_components.StormEmitter;
import ch.epfl.data.squall.storm_components.synchronization.TopologyKiller;
import ch.epfl.data.squall.types.NumericType;
import ch.epfl.data.squall.utilities.MyUtilities;
import ch.epfl.data.squall.utilities.SystemParameters;
import ch.epfl.data.squall.utilities.SystemParameters.HistogramType;
/**
 * Storm bolt that builds a weighted reservoir sample of the S1 (R1) relation for EWH join
 * partitioning. It consumes per-key multiplicities (d2 or d2_equi) from D2Combiner and raw
 * keys from the S1 source, assigns each R1 row a priority pow(U(0,1), 1/d2Mult), keeps the
 * top entries in a fixed-size priority queue, and forwards the sample to S1Merge plus the
 * total join-output size to the Partitioner.
 */
public class S1ReservoirGenerator<JAT extends Number & Comparable<JAT>> extends
        BaseRichBolt implements StormEmitter {
    private static final long serialVersionUID = 1L;
    private static Logger LOG = Logger.getLogger(S1ReservoirGenerator.class);

    private StormEmitter _d2Combiner, _s1Source;
    // Stringified positions of each upstream component in allCompNames; used to identify
    // the origin of incoming tuples in processNonLastTuple.
    private final String _d2CombinerIndex, _s1SourceIndex, _componentIndex,
            _partitionerIndex;
    private final String _componentName;
    private NumericType _wrapper;
    private ComparisonPredicate _comparison;
    private Map _conf;
    private OutputCollector _collector;

    // When true, this bolt receives d2_equi (deduplicated across boundaries) from
    // D2Combiner instead of the full d2 multiplicities.
    private boolean _isEWHS1Histogram;

    // Opt2: compute d2 from d2_equi in a single pass (prefix sum over _d2BeginEnd),
    // reducing the number of accesses to d2. May be true only when _isEWHS1Histogram is
    // true. If turned off, see the "If Opt2 is not used" comment near getMultiplicity.
    private static final boolean _isSinglePassD2Equi = true;

    // Debug-only cross-check of Opt2; may be true only when _isSinglePassD2Equi is true.
    // Verifies the single-pass result against a straightforward computation at the cost of
    // extra space. Never enable in production.
    private static final boolean _isSinglePassDebug = false;

    // key -> multiplicity; reference copy used only when _isSinglePassDebug = true.
    private Map<JAT, Integer> _d2OldDebug = new HashMap<JAT, Integer>();

    // private TreeMap<JAT, Integer> _d2Equi = new TreeMap<JAT, Integer>(); //
    // key, multiplicity: used only when _isSinglePassD2Equi is set
    // key -> multiplicity delta at band entry/exit points; consumed (prefix-summed in
    // place) by createD2OutOfD2Equi when _isSinglePassD2Equi is set.
    private TreeMap<JAT, Integer> _d2BeginEnd = new TreeMap<JAT, Integer>();

    private int _band;
    private int _hierarchyPosition;
    private int _firstNumOfBuckets, _secondNumOfBuckets;
    private int _numRemainingParents;
    // Sum over sampled-eligible keys of r1Mult * d2Mult, i.e. the total join output size.
    private long _computedTotalOutputs;

    // Opt1: keyed by JAT instead of String (key -> multiplicity). Must be a TreeMap for
    // the floorEntry lookup in getMultiplicity; if Opt2 is not used, a HashMap suffices.
    private TreeMap<JAT, Integer> _d2 = new TreeMap<JAT, Integer>();

    // R1 key -> multiplicity.
    private Map<String, Integer> _r1 = new HashMap<String, Integer>();

    // Fixed-capacity reservoir, ordered by sampling priority.
    private PriorityQueue<KeyPriorityProbability> _reservoir;
    private Random _rndGen = new Random();

    /**
     * Wires this bolt into the topology: subscribes to D2Combiner (hash or filtered range
     * multicast depending on {@code isEWHS1Histogram}), to the S1 source, and optionally to
     * the Partitioner (for an auto-computed output sample size).
     */
    public S1ReservoirGenerator(
            StormEmitter d2Reduce,
            StormEmitter s1Source,
            String componentName,
            String partitionerName,
            boolean isEWHS1Histogram, // receives either d2 or d2equi from D2Combiner
            NumericType<JAT> wrapper, ComparisonPredicate comparison,
            int firstNumOfBuckets, int secondNumOfBuckets,
            List<String> allCompNames, int hierarchyPosition,
            TopologyBuilder builder, TopologyKiller killer, Config conf) {
        _d2Combiner = d2Reduce;
        _d2CombinerIndex = String.valueOf(allCompNames.indexOf(d2Reduce
                .getName()));
        _s1Source = s1Source;
        _s1SourceIndex = String
                .valueOf(allCompNames.indexOf(s1Source.getName()));
        _partitionerIndex = String.valueOf(allCompNames
                .indexOf(partitionerName));
        _componentName = componentName;
        _componentIndex = String.valueOf(allCompNames.indexOf(componentName));
        _hierarchyPosition = hierarchyPosition;
        _conf = conf;
        _comparison = comparison;
        _wrapper = wrapper;
        _isEWHS1Histogram = isEWHS1Histogram;
        _firstNumOfBuckets = firstNumOfBuckets;
        _secondNumOfBuckets = secondNumOfBuckets;
        _band = _comparison.getInclusiveDiff();
        final int parallelism = SystemParameters.getInt(conf, componentName
                + "_PAR");

        // connecting with previous level
        InputDeclarer currentBolt = builder.setBolt(componentName, this,
                parallelism);
        if (!_isEWHS1Histogram) {
            currentBolt = MyUtilities.attachEmitterHash(
                    SystemParameters.D2_TO_S1_STREAM, conf, null, currentBolt,
                    d2Reduce);
            // acks from d2Reduce and all the data from s1Source
            currentBolt = MyUtilities.attachEmitterHash(conf, null,
                    currentBolt, d2Reduce, s1Source);
        } else {
            HistogramType dstHistType = HistogramType.S1_RES_HIST;
            // if this histogram exists, the other must exist as well
            if (!SystemParameters.getBooleanIfExist(_conf,
                    HistogramType.D2_COMB_HIST.readConfEntryName())) {
                throw new RuntimeException(
                        "If S1Reservoir Histogram is set, D2Combiner histogram must also be set in the config file!");
            }
            HistogramType srcHistType = HistogramType.D2_COMB_HIST;
            currentBolt = MyUtilities.attachEmitterFilteredRangeMulticast(
                    SystemParameters.D2_TO_S1_STREAM, conf, comparison,
                    wrapper, dstHistType, srcHistType, d2Reduce.getName(),
                    currentBolt, d2Reduce);
            // acks from d2Reduce
            currentBolt = MyUtilities.attachEmitterHash(conf, null,
                    currentBolt, d2Reduce);
            // all the data from s1Source
            currentBolt = MyUtilities.attachEmitterRange(_conf, _wrapper,
                    dstHistType, currentBolt, s1Source);
        }
        if (MyUtilities.isAutoOutputSampleSize(conf)) {
            // from Partitioner we receive the size of output sample
            currentBolt = MyUtilities.attachEmitterBroadcast(
                    SystemParameters.FROM_PARTITIONER, currentBolt,
                    partitionerName);
        }

        // manually setting reservoir size
        if (!MyUtilities.isAutoOutputSampleSize(conf)) {
            int outputSampleSize = MyUtilities.computeManualSampleSize(conf,
                    _firstNumOfBuckets, _secondNumOfBuckets);
            LOG.info("Manually setting outputSampleSize to " + outputSampleSize);
            constructReservoir(outputSampleSize);
        }

        if (_hierarchyPosition == StormComponent.FINAL_COMPONENT) {
            killer.registerComponent(this, componentName, parallelism);
        }
    }

    /** Allocates the fixed-capacity, priority-ordered reservoir. */
    private void constructReservoir(int outputSampleSize) {
        _reservoir = new FixedSizePriorityQueue<KeyPriorityProbability>(
                outputSampleSize,
                new KeyPriorityProbability.KeyPriorityComparator());
    }

    /**
     * Dispatches a data tuple according to its origin: D2Combiner multiplicities update _d2
     * (or the _d2BeginEnd deltas under Opt2), S1 keys update _r1, and a Partitioner tuple
     * sizes the reservoir and is forwarded to S1Merge.
     */
    private void processNonLastTuple(String inputComponentIndex,
            String sourceStreamId, List<String> tuple) {
        if (inputComponentIndex.equals(_d2CombinerIndex)) {
            // tuple = (key, multiplicity)
            String strKey = tuple.get(0);
            JAT key = (JAT) _wrapper.fromString(strKey);
            String strMult = tuple.get(1);
            int mult = Integer.parseInt(strMult);
            if (!_isEWHS1Histogram) {
                maxMultiplicity(_d2, key, mult);
            } else {
                // d2_equi arrives, without duplicates (across boundaries)
                if (_isSinglePassD2Equi) {
                    // For now record only the band entry/exit deltas; _d2 is built later
                    // in a single pass by createD2OutOfD2Equi.
                    // addMultiplicity(_d2Equi, key, mult);
                    JAT beginKey = (JAT) _wrapper.getOffset(key, -_band);
                    addMultiplicity(_d2BeginEnd, beginKey, mult);
                    JAT endKey = (JAT) _wrapper.getOffset(key, _band + 1);
                    addMultiplicity(_d2BeginEnd, endKey, -mult);
                    if (_isSinglePassDebug) {
                        addMultiplicityJoin(_d2OldDebug, key, mult);
                    }
                } else {
                    addMultiplicityJoin(_d2, key, mult);
                }
            }
        } else if (inputComponentIndex.equals(_s1SourceIndex)) {
            String strKey = tuple.get(0);
            addMultiplicity(_r1, strKey, 1);
        } else if (inputComponentIndex.equals(_partitionerIndex)) {
            String strSampleSize = tuple.get(1);
            int outputSampleSize = Integer.parseInt(strSampleSize);
            constructReservoir(outputSampleSize);
            // sending output sample size information to S1Merge
            List<Integer> hashIndexes = new ArrayList<Integer>(Arrays.asList(0)); // hash index value does not matter here
            tupleSend(SystemParameters.RESERVOIR_TO_MERGE, tuple, hashIndexes);
        } else {
            throw new RuntimeException("In S1ReduceBolt, unrecognized source "
                    + inputComponentIndex);
        }
    }

    /** Adds {@code mult} to the multiplicity recorded for {@code key} (0 if absent). */
    private <T> void addMultiplicity(Map<T, Integer> stats, T key, int mult) {
        if (stats.containsKey(key)) {
            mult += stats.get(key);
        }
        stats.put(key, mult);
    }

    /** Adds {@code mult} to every key joinable with {@code key} under the band predicate. */
    private void addMultiplicityJoin(Map<JAT, Integer> stats, JAT key, int mult) {
        List<JAT> joinableKeys = _comparison.getJoinableKeys(key);
        for (JAT joinableKey : joinableKeys) {
            // String joinableStrKey = MyUtilities.toSpecialString(joinableKey,
            // _wrapper); not needed anymore
            addMultiplicity(stats, joinableKey, mult);
        }
    }

    // Keeps the maximum multiplicity seen for a key. At D2Combiner, one machine which is
    // assigned that very key will get all the joinable set from R2 (R2->D2Combiner uses
    // RangeMulticast), so max — not sum — is the correct merge.
    private void maxMultiplicity(Map<JAT, Integer> stats, JAT key, int mult) {
        if (stats.containsKey(key)) {
            if (mult > stats.get(key)) {
                stats.put(key, mult);
            }
        } else {
            stats.put(key, mult);
        }
    }

    /**
     * Runs once all parents have acked: finalizes _d2 (under Opt2), fills the reservoir with
     * priority-sampled R1 keys, streams the reservoir to S1Merge, and reports the total
     * output size to the Partitioner.
     */
    private void finalizeProcessing() {
        LOG.info("Received data from from all D2Combiner/R1 bolts. About to start creating reservoir...");
        if (_isEWHS1Histogram && _isSinglePassD2Equi) {
            // we compute _d2 out of the recorded d2_equi deltas
            createD2OutOfD2Equi();
        }
        if (SystemParameters.getBooleanIfExist(_conf, "D2_DEBUG_MODE")) {
            // this is just for local debugging
            LOG.info("d2 in S1ReservoirGenerator is " + _d2);
        }

        // creating reservoir
        if (!_d2.isEmpty()) {
            // no need to iterate over _r1 if _d2 is empty: _d2 could be empty
            // as equi-depth on R1 and on R2 can vastly differ (see Bicd)
            int r2RelationSize = getR2RelationSize();
            for (Map.Entry<String, Integer> entry : _r1.entrySet()) {
                String r1StrKey = entry.getKey();
                JAT r1Key = (JAT) _wrapper.fromString(r1StrKey);
                int r1Mult = entry.getValue();
                int d2Mult;
                if ((d2Mult = getMultiplicity(_d2, r1Key)) > 0) {
                    // Only keys with at least one joinable d2 tuple are sampled; for
                    // d2Mult == 0 the weight and priority would be 0, so they are skipped.
                    // (Previous comment here described the opposite branch.)
                    double d2KeyProbability = ((double) d2Mult)
                            / r2RelationSize;
                    double power = 1.0 / d2Mult;
                    for (int i = 0; i < r1Mult; i++) {
                        // There may be multiple rows from R1 with the same key; each gets
                        // a separate random generator invocation.
                        double priority = Math.pow(_rndGen.nextDouble(), power);
                        KeyPriorityProbability keyPriority = new KeyPriorityProbability(
                                r1StrKey, priority, d2KeyProbability);
                        // FixedSizePriorityQueue.add evicts the lowest-priority element,
                        // so the capacity constraint is never violated.
                        _reservoir.add(keyPriority);
                    }
                    // totalOutputs update
                    _computedTotalOutputs += ((long) r1Mult) * d2Mult;
                }
            }
        }

        // sending reservoir
        KeyPriorityProbability kp;
        LOG.info("I am sending reservoir of size " + _reservoir.size()
                + " (should be (at most) outputSampleSize large).");
        while ((kp = _reservoir.poll()) != null) {
            String strKey = kp.getKey();
            double priority = kp.getPriority();
            String strPriority = String.valueOf(priority);
            double d2KeyProbability = kp.getD2KeyProbability();
            String strD2KeyProbability = String.valueOf(d2KeyProbability);
            List<String> tuple = new ArrayList<String>(Arrays.asList(strKey,
                    strPriority, strD2KeyProbability));
            List<Integer> hashIndexes = new ArrayList<Integer>(Arrays.asList(0));
            tupleSend(SystemParameters.RESERVOIR_TO_MERGE, tuple, hashIndexes);
            if (SystemParameters.getBooleanIfExist(_conf, "DEBUG_MODE")) {
                // this is just for local debugging
                LOG.info("Reservoir element in S1ReservoirGenerator is "
                        + tuple);
            }
        }

        // sending total number of output tuples
        Values totalOutpuSize = MyUtilities.createTotalOutputSizeTuple(
                _componentIndex, _computedTotalOutputs);
        _collector.emit(SystemParameters.PARTITIONER, totalOutpuSize);
        LOG.info("Reservoir creation and sending is completed.");
    }

    /**
     * Multiplicity lookup over the prefix-summed TreeMap: keys not explicitly present take
     * the value of their closest left neighbor (floorEntry), or 0 left of the first entry.
     */
    private int getMultiplicity(TreeMap<JAT, Integer> d2, JAT r1Key) {
        Entry<JAT, Integer> repositionedEntry = d2.floorEntry(r1Key);
        if (repositionedEntry == null) {
            return 0;
        } else {
            return repositionedEntry.getValue();
        }
    }

    /*
     * If Opt2 is not used, _d2 holds exact per-key counts and the lookup above degenerates
     * to a plain containsKey/get (no floorEntry semantics).
     */

    /**
     * Builds _d2 from the band entry/exit deltas in _d2BeginEnd via an in-place prefix sum.
     * Total number of accesses is 2 * #(d2Equi).
     */
    private void createD2OutOfD2Equi() {
        if (!_d2BeginEnd.isEmpty()) {
            int multInRange = 0;
            // basically we are doing prefix sum
            for (Map.Entry<JAT, Integer> entry : _d2BeginEnd.entrySet()) {
                multInRange += entry.getValue(); // can be negative when exiting
                // out of scope of a key
                entry.setValue(multInRange); // updating in place: we won't need
                // previous values anymore
            }
            _d2 = _d2BeginEnd;
            if (_isSinglePassDebug) {
                LOG.info("Don't use it in production: checking whether single pass computes d2 correctly...");
                // checking if the result is as expected
                StringBuilder sb = new StringBuilder();
                if (!MyUtilities.isMapsEqual(_d2, _d2OldDebug, sb)) {
                    throw new RuntimeException(
                            "_d2 and _d2OldDebug are not equal! "
                                    + sb.toString());
                }
            }
        }
    }

    // An older _d2Equi-based implementation iterated key-by-key over the whole band range;
    // it was replaced by the prefix-sum approach above (see version control history).

    /** @return the configured size of the R2 relation (which side depends on IS_FIRST_D2). */
    private int getR2RelationSize() {
        if (SystemParameters.getBoolean(_conf, "IS_FIRST_D2")) {
            return SystemParameters.getInt(_conf, "FIRST_REL_SIZE");
        } else {
            return SystemParameters.getInt(_conf, "SECOND_REL_SIZE");
        }
    }

    // BaseRichSpout
    @Override
    public void prepare(Map map, TopologyContext tc, OutputCollector collector) {
        _collector = collector;
        _numRemainingParents = MyUtilities.getNumParentTasks(tc,
                Arrays.asList(_d2Combiner, _s1Source));
        if (MyUtilities.isAutoOutputSampleSize(_conf)) {
            _numRemainingParents += 1; // partitioner parallelism is always 1
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        if (_hierarchyPosition == StormComponent.FINAL_COMPONENT) {
            declarer.declareStream(SystemParameters.EOF_STREAM, new Fields(
                    SystemParameters.EOF));
        } else {
            final List<String> outputFields = new ArrayList<String>();
            outputFields.add(StormComponent.COMP_INDEX);
            outputFields.add(StormComponent.TUPLE); // list of string
            outputFields.add(StormComponent.HASH);
            declarer.declareStream(SystemParameters.DATA_STREAM, new Fields(
                    outputFields));

            final List<String> outputFieldsMerge = new ArrayList<String>();
            outputFieldsMerge.add(StormComponent.COMP_INDEX);
            outputFieldsMerge.add(StormComponent.TUPLE); // list of string
            outputFieldsMerge.add(StormComponent.HASH);
            declarer.declareStream(SystemParameters.RESERVOIR_TO_MERGE,
                    new Fields(outputFieldsMerge));

            final List<String> outputFieldsPart = new ArrayList<String>();
            outputFieldsPart.add(StormComponent.COMP_INDEX);
            outputFieldsPart.add(StormComponent.TUPLE); // list of string
            outputFieldsPart.add(StormComponent.HASH);
            declarer.declareStream(SystemParameters.PARTITIONER, new Fields(
                    outputFieldsPart));
        }
    }

    // ----------- below you don't need to change --------------
    @Override
    public void execute(Tuple stormTupleRcv) {
        final String inputComponentIndex = stormTupleRcv
                .getStringByField(StormComponent.COMP_INDEX); // getString(0);
        final List<String> tuple = (List<String>) stormTupleRcv
                .getValueByField(StormComponent.TUPLE); // getValue(1);
        String sourceStreamId = stormTupleRcv.getSourceStreamId();

        // if this is a final ack, we are done with this tuple
        if (processFinalAck(tuple, stormTupleRcv))
            return;
        processNonLastTuple(inputComponentIndex, sourceStreamId, tuple);
        _collector.ack(stormTupleRcv);
    }

    /** Emits a tuple on the default data stream. */
    private void tupleSend(List<String> tuple, List<Integer> hashIndexes) {
        final Values stormTupleSnd = MyUtilities.createTupleValues(tuple, 0,
                _componentIndex, hashIndexes, null, _conf);
        MyUtilities.sendTuple(stormTupleSnd, null, _collector, _conf);
    }

    /** Emits a tuple on the given named stream. */
    private void tupleSend(String streamId, List<String> tuple,
            List<Integer> hashIndexes) {
        final Values stormTupleSnd = MyUtilities.createTupleValues(tuple, 0,
                _componentIndex, hashIndexes, null, _conf);
        MyUtilities.sendTuple(streamId, stormTupleSnd, null, _collector, _conf);
    }

    /**
     * Handles a final-ack tuple: decrements the remaining-parents count, finalizes once all
     * parents have acked, and propagates the ack downstream (incl. the PARTITIONER stream).
     *
     * @return true if the tuple was a final ack (caller should stop processing it)
     */
    protected boolean processFinalAck(List<String> tuple, Tuple stormTupleRcv) {
        if (MyUtilities.isFinalAck(tuple, _conf)) {
            _numRemainingParents--;
            if (_numRemainingParents == 0) {
                finalizeProcessing();
            }
            if (_numRemainingParents < 0) {
                throw new RuntimeException(
                        "Negative number of remaining parents: "
                                + _numRemainingParents);
            }
            MyUtilities.processFinalAck(_numRemainingParents,
                    _hierarchyPosition, _conf, stormTupleRcv, _collector);
            MyUtilities.processFinalAckCustomStream(
                    SystemParameters.PARTITIONER, _numRemainingParents,
                    StormComponent.INTERMEDIATE, _conf, stormTupleRcv,
                    _collector);
            return true;
        }
        return false;
    }

    // from IRichBolt
    @Override
    public Map<String, Object> getComponentConfiguration() {
        return _conf;
    }

    @Override
    public String[] getEmitterIDs() {
        return new String[] { _componentName };
    }

    @Override
    public String getName() {
        return _componentName;
    }

    /** Not supported for this component. */
    @Override
    public String getInfoID() {
        throw new RuntimeException("Should not be here!");
    }
}
| |
package akechi.projectl;
import android.accounts.Account;
import android.accounts.AccountManager;
import android.app.Application;
import android.content.Context;
import android.content.SharedPreferences;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.support.v7.app.ActionBar;
import android.widget.TextView;
import com.deploygate.sdk.DeployGate;
import com.google.api.client.extensions.android.http.AndroidHttp;
import com.google.api.client.repackaged.com.google.common.base.Strings;
import com.google.common.base.Functions;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import akechi.projectl.async.InlineImageHandler;
import jp.michikusa.chitose.lingr.LingrClient;
import jp.michikusa.chitose.lingr.LingrClientFactory;
import static com.google.common.base.Preconditions.checkNotNull;
public class AppContext
extends Application
{
/**
 * How the action bar is presented. Each constant knows how to configure a given
 * {@link ActionBar} via {@link #applyActionBar(Context, ActionBar)}.
 */
public static enum ActionBarMode
{
    /** Shows only the application logo. */
    DEFAULT
    {
        @Override
        public void applyActionBar(Context ctx, ActionBar bar)
        {
            showLogo(bar);
        }
    },
    /**
     * Shows the logo plus the current room id as the title; when more than one account is
     * configured the account name is appended so rooms can be told apart.
     */
    CURRENT_ROOM
    {
        @Override
        public void applyActionBar(Context ctx, ActionBar bar)
        {
            showLogo(bar);
            final AppContext appContext= (AppContext)ctx.getApplicationContext();
            final Account account= appContext.getAccount();
            if(Iterables.size(appContext.getAccounts()) <= 1)
            {
                bar.setTitle(appContext.getRoomId(account));
            }
            else
            {
                bar.setTitle(String.format("%s / %s", appContext.getRoomId(account), account.name));
            }
        }
    },
    /** Hides the action bar entirely. */
    HIDDEN
    {
        @Override
        public void applyActionBar(Context ctx, ActionBar bar)
        {
            bar.hide();
        }
    },
    ;
    /** Common display setup shared by the logo-showing modes (was duplicated per constant). */
    private static void showLogo(ActionBar bar)
    {
        bar.setDisplayShowHomeEnabled(true);
        bar.setDisplayShowCustomEnabled(true);
        bar.setDisplayUseLogoEnabled(true);
        bar.setLogo(R.drawable.icon_logo);
        bar.setIcon(R.drawable.icon_logo);
    }
    /**
     * Applies this mode's presentation to the given action bar.
     *
     * @param ctx any context whose application context is the {@link AppContext}
     * @param bar the action bar to configure
     */
    public abstract void applyActionBar(Context ctx, ActionBar bar);
}
/**
 * Policy controlling when inline images are loaded into message views.
 */
public static enum InlineImageMode
{
    /** Always load inline images. */
    ALWAYS
    {
        @Override
        public void doWork(TextView view)
        {
            new InlineImageHandler(view);
        }
    },
    /** Load inline images only while the active network is Wi-Fi. */
    WIFI_ONLY
    {
        @Override
        public void doWork(TextView view)
        {
            final ConnectivityManager connMan= (ConnectivityManager)view.getContext().getSystemService(Context.CONNECTIVITY_SERVICE);
            final NetworkInfo netInfo= connMan.getActiveNetworkInfo();
            if(netInfo == null)
            {
                // no active network: skip loading images
                return;
            }
            // BUGFIX: ConnectivityManager network types are discrete int constants, not
            // bit flags. The previous test ((getType() & TYPE_WIFI) == TYPE_WIFI) matched
            // every type constant with the low bit set (e.g. TYPE_MOBILE_HIPRI = 5,
            // TYPE_ETHERNET = 9), so images could be loaded on non-Wi-Fi networks.
            if(netInfo.getType() == ConnectivityManager.TYPE_WIFI)
            {
                ALWAYS.doWork(view);
            }
        }
    },
    /** Never load inline images. */
    NEVER
    {
        @Override
        public void doWork(TextView view)
        {
            // do nothing
        }
    },
    ;
    /**
     * Loads (or skips) inline images for the given message view according to this policy.
     *
     * @param view the text view holding the message content
     */
    public abstract void doWork(TextView view);
}
/**
 * Returns every Lingr account registered with the system {@link AccountManager}.
 *
 * @return all accounts of type {@code com.lingr}; possibly empty, never null
 */
public Iterable<Account> getAccounts()
{
    final Account[] found= AccountManager.get(this).getAccountsByType("com.lingr");
    return Arrays.asList(found);
}
/**
 * Returns the currently selected account: the account whose name matches the stored
 * selection, falling back to the first available account, or null when none exist.
 */
public Account getAccount()
{
    final Iterable<Account> accounts= this.getAccounts();
    if(Iterables.isEmpty(accounts))
    {
        return null;
    }
    final String wanted= this.accountName;
    for(final Account candidate : accounts)
    {
        if(candidate.name.equals(wanted))
        {
            return candidate;
        }
    }
    // No account matches the remembered name; fall back to the first one.
    return Iterables.getFirst(accounts, null);
}
/**
 * Remembers the given account as the current selection.
 *
 * @param account the account to select; must not be null
 */
public void setAccount(Account account)
{
    this.accountName= checkNotNull(account, "account is null").name;
}
/**
 * Returns the room id remembered for the given account.
 *
 * @param account the account, possibly null
 * @return the stored room id, or null when the account is null or nothing is stored
 */
public String getRoomId(Account account)
{
    if(account == null)
    {
        return null;
    }
    return AccountManager.get(this).getUserData(account, "roomId");
}
/**
 * Remembers the given room id for the given account.
 *
 * @param account the account; must not be null
 * @param roomId the room id to store; must not be null
 */
public void setRoomId(Account account, CharSequence roomId)
{
    checkNotNull(account, "account is null");
    AccountManager.get(this).setUserData(account, "roomId", roomId.toString());
}
/**
 * Returns the room ids stored for the given account (comma-separated in account user data).
 *
 * @param account the account; must not be null
 * @return the stored room ids; empty when nothing is stored
 */
public Iterable<String> getRoomIds(Account account)
{
    checkNotNull(account);
    final String joined= AccountManager.get(this).getUserData(account, "roomIdList");
    if(Strings.isNullOrEmpty(joined))
    {
        return Collections.emptyList();
    }
    return Splitter.on(',').split(joined);
}
/**
 * Stores the given room ids for the given account as a comma-separated string.
 *
 * @param account the account; must not be null
 * @param roomIds the room ids; elements must not be null
 */
public void setRoomIds(Account account, Iterable<? extends CharSequence> roomIds)
{
    checkNotNull(account);
    // Joiner stringifies each element via toString, matching the old transform+join.
    final String joined= Joiner.on(',').join(roomIds);
    AccountManager.get(this).setUserData(account, "roomIdList", joined);
}
/**
 * Whether icon caching is enabled (defaults to true). The preference is read once and then
 * served from the in-memory cache.
 */
public boolean isIconCacheEnabled()
{
    Boolean cached= this.iconCacheEnabled;
    if(cached == null)
    {
        final SharedPreferences prefs= this.getSharedPreferences("prefs", Context.MODE_PRIVATE);
        cached= prefs.getBoolean("iconCacheEnabled", true);
        this.iconCacheEnabled= cached;
    }
    return cached;
}
/**
 * Persists the icon-cache flag and updates the in-memory cache.
 * NOTE(review): commit() is synchronous, matching the other setters in this class.
 */
public void setIconCacheEnabled(boolean value)
{
    this.getSharedPreferences("prefs", Context.MODE_PRIVATE)
        .edit()
        .putBoolean("iconCacheEnabled", value)
        .commit();
    this.iconCacheEnabled= value;
}
/** @return the directory used for cached icons, under the app's cache dir */
public File getIconCacheDir()
{
    final File cacheRoot= this.getCacheDir();
    return new File(cacheRoot, "icons");
}
/**
 * Returns the configured action bar mode (defaults to {@link ActionBarMode#DEFAULT}).
 * Read once from preferences, then served from the in-memory cache.
 */
public ActionBarMode getActionBarMode()
{
    ActionBarMode mode= this.actionBarMode;
    if(mode == null)
    {
        final SharedPreferences prefs= this.getSharedPreferences("prefs", Context.MODE_PRIVATE);
        final String stored= prefs.getString("actionBarMode", ActionBarMode.DEFAULT.name());
        mode= ActionBarMode.valueOf(stored);
        this.actionBarMode= mode;
    }
    return mode;
}
/** Persists the action bar mode and updates the in-memory cache. */
public void setActionBarMode(ActionBarMode value)
{
    this.getSharedPreferences("prefs", Context.MODE_PRIVATE)
        .edit()
        .putString("actionBarMode", value.name())
        .commit();
    this.actionBarMode= value;
}
/**
 * Returns the configured inline image mode, loading it from shared
 * preferences on first access (default {@link InlineImageMode#WIFI_ONLY})
 * and memoizing it.
 *
 * @return the inline image mode, never null
 */
public InlineImageMode getInlineImageMode()
{
    final InlineImageMode cached = this.inlineImageMode;
    if (cached != null)
    {
        return cached;
    }
    final SharedPreferences prefs = this.getSharedPreferences("prefs", Context.MODE_PRIVATE);
    final String stored = prefs.getString("inlineImageMode", InlineImageMode.WIFI_ONLY.name());
    InlineImageMode value;
    try
    {
        value = InlineImageMode.valueOf(stored);
    }
    catch (IllegalArgumentException e)
    {
        // A stale preference (e.g. an enum constant renamed/removed in an app
        // update) would otherwise crash here; fall back to the default mode.
        value = InlineImageMode.WIFI_ONLY;
    }
    this.inlineImageMode = value;
    return value;
}
/**
 * Persists the inline image mode to shared preferences and updates the
 * in-memory cache field.
 *
 * @param value the new mode (must not be null; {@code name()} is stored)
 */
public void setInlineImageMode(InlineImageMode value)
{
    final SharedPreferences prefs = this.getSharedPreferences("prefs", Context.MODE_PRIVATE);
    // apply() writes asynchronously; commit() would block the calling
    // (typically main/UI) thread on disk I/O and its result was ignored anyway.
    prefs.edit()
            .putString("inlineImageMode", value.name())
            .apply();
    this.inlineImageMode = value;
}
/**
 * Whether the background service is enabled. Read from shared preferences on
 * first access (default {@code true}) and memoized afterwards.
 *
 * @return true if the background service is enabled
 */
public boolean isBackgroundServiceEnabled()
{
    final Boolean cached = this.backgroundServiceEnabled;
    if (cached == null)
    {
        final boolean loaded = this.getSharedPreferences("prefs", Context.MODE_PRIVATE)
                .getBoolean("backgroundServiceEnabled", true);
        this.backgroundServiceEnabled = loaded;
        return loaded;
    }
    return cached;
}
/**
 * Enables or disables the background service, persisting the flag to shared
 * preferences and updating the in-memory cache field.
 *
 * @param value the new background-service setting
 */
public void setBackgroundServiceEnabled(boolean value)
{
    final SharedPreferences prefs = this.getSharedPreferences("prefs", Context.MODE_PRIVATE);
    // apply() writes asynchronously; commit() would block the calling
    // (typically main/UI) thread on disk I/O and its result was ignored anyway.
    prefs.edit()
            .putBoolean("backgroundServiceEnabled", value)
            .apply();
    this.backgroundServiceEnabled = value;
}
/**
 * Returns the highlight pattern, loading it from shared preferences on first
 * access (default empty string) and memoizing it.
 *
 * @return the highlight pattern, never null
 */
public String getHighlightPattern()
{
    final String cached = this.highlightPattern;
    if (cached != null)
    {
        return cached;
    }
    final String loaded = this.getSharedPreferences("prefs", Context.MODE_PRIVATE)
            .getString("highlightPattern", "");
    this.highlightPattern = loaded;
    return Strings.nullToEmpty(loaded);
}
/**
 * Persists the highlight pattern to shared preferences and updates the
 * in-memory cache field. A null argument is stored as the empty string.
 *
 * @param value the pattern to store, may be null
 */
public void setHighlightPattern(CharSequence value)
{
    final String sval = (value != null)
            ? value.toString()
            : "";
    final SharedPreferences prefs = this.getSharedPreferences("prefs", Context.MODE_PRIVATE);
    // apply() writes asynchronously; commit() would block the calling
    // (typically main/UI) thread on disk I/O and its result was ignored anyway.
    prefs.edit()
            .putString("highlightPattern", sval)
            .apply();
    this.highlightPattern = sval;
}
/**
 * Returns the stored unread-message marker for the given account and room.
 *
 * @param account the account to read from (must not be null)
 * @param roomId the room id (must not be null)
 * @return the stored message id, or null when none has been stored
 */
public String getUnreadMessageId(Account account, CharSequence roomId)
{
    checkNotNull(account);
    checkNotNull(roomId);
    final String key = "unreadMessageId." + roomId;
    return AccountManager.get(this).getUserData(account, key);
}
/**
 * Stores the unread-message marker for the given account and room. A null
 * message id is stored as the empty string.
 *
 * @param account the account to write to (must not be null)
 * @param roomId the room id (must not be null)
 * @param messageId the message id to store, may be null
 */
public void setUnreadMessageId(Account account, CharSequence roomId, CharSequence messageId)
{
    checkNotNull(account);
    checkNotNull(roomId);
    final String stored = (messageId != null) ? messageId.toString() : "";
    AccountManager.get(this).setUserData(account, "unreadMessageId." + roomId, stored);
}
/**
 * Creates a fresh Lingr client from the shared factory.
 *
 * @return a new {@link LingrClient} instance per call
 */
public LingrClient getLingrClient()
{
    final LingrClient client = lingrFactory.newLingrClient();
    return client;
}
@Override
public void onCreate()
{
// Framework initialization must run first, then hook up the DeployGate
// deployment/monitoring SDK for this application instance.
super.onCreate();
DeployGate.install(this);
}
// Shared factory for Lingr API clients, bound to an Android-compatible HTTP transport.
private static final LingrClientFactory lingrFactory= LingrClientFactory.newLingrClientFactory(AndroidHttp.newCompatibleTransport());
// Currently selected account name (lazily loaded from preferences).
private String accountName;
// Memoized preference values; null means "not loaded from SharedPreferences yet".
private Boolean iconCacheEnabled;
private ActionBarMode actionBarMode;
private InlineImageMode inlineImageMode;
private Boolean backgroundServiceEnabled;
private String highlightPattern;
}
| |
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bremersee.actuator.security.authentication.resourceserver.reactive;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.nio.charset.StandardCharsets;
import org.bremersee.actuator.security.authentication.resourceserver.reactive.app.TestConfiguration;
import org.bremersee.test.security.authentication.WithJwtAuthenticationToken;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.TestInstance.Lifecycle;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
import org.springframework.boot.web.server.LocalServerPort;
import org.springframework.context.ApplicationContext;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.test.web.reactive.server.WebTestClient;
import org.springframework.web.reactive.function.BodyInserters;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
/**
 * The reactive jwt and in memory test.
 *
 * @author Christian Bremer
 */
@SpringBootTest(
    classes = TestConfiguration.class,
    webEnvironment = WebEnvironment.RANDOM_PORT,
    properties = {
        "spring.main.web-application-type=reactive",
        "spring.application.name=resourceserver-jwt-in-memory",
        "spring.security.oauth2.resourceserver.jwt.jwk-set-uri=http://localhost/jwk",
        "management.endpoints.web.exposure.include=*",
        "bremersee.actuator.auth.enable=auto",
        // was listed twice; the duplicate entry has been removed
        "bremersee.auth.resource-server=auto",
        "bremersee.auth.any-access-mode=deny_all",
        "bremersee.auth.path-matchers[0].ant-pattern=/public/**",
        "bremersee.auth.path-matchers[0].access-mode=permit_all",
        "bremersee.auth.path-matchers[1].ant-pattern=/protected/**",
        "bremersee.auth.path-matchers[1].http-method=POST",
        "bremersee.auth.path-matchers[1].roles=ROLE_ADMIN",
        "bremersee.auth.path-matchers[2].ant-pattern=/protected/**",
        "bremersee.auth.path-matchers[2].roles=ROLE_USER",
        "bremersee.auth.in-memory-users[0].name=user",
        "bremersee.auth.in-memory-users[0].password=user",
        "bremersee.auth.in-memory-users[0].authorities=ROLE_USER",
        "bremersee.auth.in-memory-users[1].name=admin",
        "bremersee.auth.in-memory-users[1].password=admin",
        "bremersee.auth.in-memory-users[1].authorities=ROLE_ADMIN,ROLE_ACTUATOR_ADMIN",
        "bremersee.auth.in-memory-users[2].name=someone",
        "bremersee.auth.in-memory-users[2].password=someone",
        "bremersee.auth.in-memory-users[2].authorities=ROLE_SOMETHING",
        "bremersee.auth.in-memory-users[3].name=actuator",
        "bremersee.auth.in-memory-users[3].password=actuator",
        "bremersee.auth.in-memory-users[3].authorities=ROLE_ACTUATOR",
        "bremersee.exception-mapping.api-paths=/**"
    })
@TestInstance(Lifecycle.PER_CLASS) // allows us to use @BeforeAll with a non-static method
public class ReactiveJwtAndInMemoryTest {

  /**
   * The local server port.
   */
  @LocalServerPort
  int port;

  /**
   * The application context.
   */
  @Autowired
  ApplicationContext context;

  /**
   * The test web client (security configuration is by-passed).
   */
  @SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection")
  @Autowired
  WebTestClient webClient;

  /**
   * Setup tests.
   */
  @BeforeAll
  void setUp() {
    // https://docs.spring.io/spring-security/site/docs/current/reference/html/test-webflux.html
    WebTestClient
        .bindToApplicationContext(this.context)
        .configureClient()
        .build();
  }

  /**
   * Base url of the local server.
   *
   * @return the base url of the local server
   */
  String baseUrl() {
    return "http://localhost:" + port;
  }

  /**
   * Creates a new web client, that uses the real security configuration.
   *
   * @return the web client
   */
  WebClient newWebClient() {
    return WebClient.builder()
        .baseUrl(baseUrl())
        .build();
  }

  /**
   * Gets public.
   */
  @Test
  void getPublic() {
    webClient
        .get()
        .uri("/public")
        .exchange()
        .expectStatus()
        .isOk()
        .expectBody(String.class)
        .value(response -> assertEquals("public", response));
  }

  /**
   * Gets protected.
   */
  @Test
  @WithJwtAuthenticationToken(roles = "ROLE_USER")
  void getProtected() {
    webClient
        .get()
        .uri("/protected")
        .exchange()
        .expectStatus()
        .isOk()
        .expectBody(String.class)
        .value(response -> assertEquals("protected", response));
  }

  /**
   * Gets protected and expect forbidden.
   */
  @Test
  @WithJwtAuthenticationToken(roles = "ROLE_SOMETHING")
  void getProtectedAndExpectForbidden() {
    webClient
        .get()
        .uri("/protected")
        .exchange()
        .expectStatus()
        .isForbidden();
  }

  /**
   * Post protected.
   */
  @Test
  @WithJwtAuthenticationToken(roles = "ROLE_ADMIN")
  void postProtected() {
    webClient
        .post()
        .uri("/protected")
        .contentType(MediaType.TEXT_PLAIN)
        .accept(MediaType.TEXT_PLAIN)
        .body(BodyInserters.fromValue("hello"))
        .exchange()
        .expectStatus()
        .isOk()
        .expectBody(String.class)
        .value(response -> assertEquals("hello", response));
  }

  /**
   * Gets health.
   */
  @Test
  void getHealth() {
    StepVerifier.create(newWebClient()
        .get()
        .uri("/actuator/health")
        .retrieve()
        .bodyToMono(String.class))
        .assertNext(body -> assertTrue(body.contains("\"UP\"")))
        .verifyComplete();
  }

  /**
   * Gets metrics.
   */
  @Test
  void getMetrics() {
    StepVerifier.create(newWebClient()
        .get()
        .uri("/actuator/metrics")
        .headers(httpHeaders -> httpHeaders
            .setBasicAuth("actuator", "actuator", StandardCharsets.UTF_8))
        .exchangeToMono(clientResponse -> Mono.just(clientResponse.statusCode())))
        .assertNext(status -> assertEquals(HttpStatus.OK, status))
        .verifyComplete();
  }

  /**
   * Gets metrics and expect forbidden.
   */
  @Test
  void getMetricsAndExpectForbidden() {
    StepVerifier.create(newWebClient()
        .get()
        .uri("/actuator/metrics")
        .headers(httpHeaders -> httpHeaders
            .setBasicAuth("someone", "someone", StandardCharsets.UTF_8))
        .exchangeToMono(clientResponse -> Mono.just(clientResponse.statusCode())))
        .assertNext(status -> assertEquals(HttpStatus.FORBIDDEN, status))
        .verifyComplete();
  }
}
| |
/**
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "{}"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright {yyyy} {name of copyright owner}
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.deleidos.rtws.commons.monitor.notification;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.log4j.Logger;
import org.trimou.Mustache;
import org.trimou.engine.MustacheEngine;
import org.trimou.engine.MustacheEngineBuilder;
import org.trimou.engine.locator.FileSystemTemplateLocator;
import com.amazonaws.AmazonClientException;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.services.simpleemail.AmazonSimpleEmailService;
import com.amazonaws.services.simpleemail.AmazonSimpleEmailServiceClient;
import com.amazonaws.services.simpleemail.model.Body;
import com.amazonaws.services.simpleemail.model.Content;
import com.amazonaws.services.simpleemail.model.Destination;
import com.amazonaws.services.simpleemail.model.Message;
import com.amazonaws.services.simpleemail.model.SendEmailRequest;
import com.amazonaws.services.simpleemail.model.SendEmailResult;
import com.deleidos.rtws.commons.config.ExecutionEnvironment;
import com.deleidos.rtws.commons.config.RtwsConfig;
import com.deleidos.rtws.commons.config.UserDataProperties;
import com.deleidos.rtws.commons.monitor.core.ManagedMonitor.MonitorStatus;
/**
 * Sends system status change notifications through Amazon Simple Email Service (SES).
 *
 * <p>The HTML body is rendered from the Trimou mustache template {@code systemStatus};
 * a plain-text fallback body is read from configuration for mail clients that cannot
 * display HTML.
 *
 * Note: Static content used by the email templates are hosted by a Cloudfront distribution in the AWS-DEV account
 */
public class SimpleEmailStatusNotifier implements StatusNotifier {

    private Logger logger = Logger.getLogger(getClass());

    // Lazily initialized on the first notification; location of the template
    // directory depends on whether we run in an operational environment.
    private MustacheEngine engine = null;

    private Mustache mustache = null;

    /**
     * Renders and sends a status-change email to the given recipients.
     * No-op unless the configured cloud provider is "AWS". All failures are
     * logged and swallowed (notification is best-effort).
     *
     * @param credentials AWS credentials used to create the SES client
     * @param status the new monitor status
     * @param statusDetails per-process-group details, used for the ERROR template slots
     * @param recipients recipient addresses (an optional "mailto:" prefix is stripped)
     */
    @Override
    public void sendStatusNotification(AWSCredentials credentials, MonitorStatus status,
            Set<StatusDetails> statusDetails, String... recipients) {
        // TODO support Eucalyptus
        if (!RtwsConfig.getInstance().getString("rtws.cloud.provider", "UNKNOWN").equals("AWS"))
            return;
        AmazonSimpleEmailService ses = null;
        try {
            if (engine == null) {
                /**
                 * Workaround for https://github.com/trimou/trimou/issues/50 which is not in the 1.6.x versions required
                 * due to the current dependency on java 1.6
                 */
                if (ExecutionEnvironment.isInOperationalEnvironment()) {
                    engine = MustacheEngineBuilder
                            .newBuilder()
                            .addTemplateLocator(
                                    new FileSystemTemplateLocator(1,
                                            "/usr/local/rtws/commons-core/bin/boot/email/templates", "html")).build();
                } else {
                    engine = MustacheEngineBuilder
                            .newBuilder()
                            .addTemplateLocator(
                                    new FileSystemTemplateLocator(1, "./src/main/script/boot/email/templates", "html"))
                            .build();
                }
                mustache = engine.getMustache("systemStatus");
            }
            ses = new AmazonSimpleEmailServiceClient(credentials);
            List<String> toAddresses = new ArrayList<String>();
            for (String recipient : recipients) {
                toAddresses.add(recipient.replaceAll("mailto:", ""));
            }
            Destination destination = new Destination(toAddresses);
            Content subject = new Content(String.format("System Status Change Notification System: %s Status: %s",
                    UserDataProperties.getInstance().getString(UserDataProperties.RTWS_DOMAIN, "Unknown System Domain")
                            .split("\\.")[0], status));
            Content messageContentHtml = new Content();
            Map<String, String> data = new HashMap<String, String>();
            data.put("system_domain",
                    UserDataProperties.getInstance().getString(UserDataProperties.RTWS_DOMAIN, "Unknown System Domain"));
            data.put("mc_url", String.format("%s://%s",
                    RtwsConfig.getInstance().getString("webapp.repository.tenant.manager.url.scheme"), RtwsConfig
                            .getInstance().getString("webapp.repository.tenant.manager.url.host")));
            switch (status) {
            case STARTING:
                data.put("state_image", "starting.png");
                break;
            case STOPPED:
                data.put("state_image", "down.png");
                break;
            case OK:
                data.put("state_image", "ok.png");
                break;
            case UNKNOWN:
                data.put("state_image", "error.png");
                break;
            case ERROR:
                data.put("state_image", "error.png");
                // Pre-fill the four template slots with empty strings so the
                // template never sees a missing key, then overwrite as needed.
                for (int i = 0; i <= 3; i++) {
                    data.put(String.format("error_pg_%s", i), "");
                    data.put(String.format("error_msg_%s", i), "");
                }
                int z = 0;
                for (StatusDetails detail : statusDetails) {
                    if (logger.isDebugEnabled())
                        logger.debug(detail);
                    data.put(String.format("error_pg_%s", z), detail.getProcessGroup());
                    data.put(String.format("error_msg_%s", z), detail.getMessage());
                    z++;
                }
                break;
            default:
                break;
            }
            messageContentHtml.setData(mustache.render(data));
            Content messageContentText = new Content(RtwsConfig.getInstance().getString(
                    "system.status.email.text.template", "Sorry, there was a problem generating this text message."));
            Body body = new Body();
            body.setHtml(messageContentHtml);
            body.setText(messageContentText); // fallback for those email clients which don't support html
            Message message = new Message(subject, body);
            SendEmailRequest sendEmailRequest = new SendEmailRequest("digitaledge-support@leidos.com", destination,
                    message);
            SendEmailResult sesResult = ses.sendEmail(sendEmailRequest);
            if (sesResult.getMessageId() != null)
                logger.info(sesResult.getMessageId());
        } catch (AmazonClientException e) {
            // include the throwable so the stack trace is not lost in the log
            logger.error(e.getMessage(), e);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        } finally {
            if (ses != null)
                ses.shutdown();
        }
    }
}
| |
package com.ctrip.hermes.metaserver.rest.resource;
import java.util.HashMap;
import java.util.Map;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.fluent.Request;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.entity.ContentType;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.alibaba.fastjson.JSON;
import com.ctrip.hermes.core.bo.HostPort;
import com.ctrip.hermes.core.bo.Tpg;
import com.ctrip.hermes.core.lease.LeaseAcquireResponse;
import com.ctrip.hermes.core.service.SystemClockService;
import com.ctrip.hermes.core.utils.PlexusComponentLocator;
import com.ctrip.hermes.core.utils.StringUtils;
import com.ctrip.hermes.metaserver.broker.BrokerLeaseAllocator;
import com.ctrip.hermes.metaserver.cluster.ClusterStateHolder;
import com.ctrip.hermes.metaserver.commons.ClientContext;
import com.ctrip.hermes.metaserver.config.MetaServerConfig;
import com.ctrip.hermes.metaserver.consumer.ConsumerLeaseAllocator;
import com.ctrip.hermes.metaserver.consumer.ConsumerLeaseAllocatorLocator;
import com.ctrip.hermes.metaserver.meta.MetaServerAssignmentHolder;
/**
*
* @author Leo Liang(jhliang@ctrip.com)
*
*/
@Path("/lease/")
@Singleton
@Produces(MediaType.APPLICATION_JSON)
public class LeaseResource {
private static final Logger log = LoggerFactory.getLogger(LeaseResource.class);
// Delay (millis) told to the client before retrying when no lease strategy/allocator exists.
private static final long NO_STRATEGY_DELAY_TIME_MILLIS = 20 * 1000L;
// Delay (millis) before retrying when no meta-server assignment exists for the topic.
private static final long NO_ASSIGNMENT_DELAY_TIME_MILLIS = 5 * 1000L;
// Delay (millis) before retrying when proxying the request to another server failed.
private static final long PROXY_PASS_FAIL_DELAY_TIME_MILLIS = 5 * 1000L;
// Delay (millis) before retrying when lease allocation threw an exception.
private static final long EXCEPTION_CAUGHT_DELAY_TIME_MILLIS = 5 * 1000L;
// Collaborators resolved via Plexus in the constructor.
private ConsumerLeaseAllocatorLocator m_consumerLeaseAllocatorLocator;
private BrokerLeaseAllocator m_brokerLeaseAllocator;
private SystemClockService m_systemClockService;
private MetaServerAssignmentHolder m_metaServerAssignmentHolder;
private ClusterStateHolder m_clusterStateHolder;
private MetaServerConfig m_config;
/**
 * Resolves all collaborators through the Plexus component locator.
 * The lookups are independent of each other, so ordering is not significant.
 */
public LeaseResource() {
    m_clusterStateHolder = PlexusComponentLocator.lookup(ClusterStateHolder.class);
    m_config = PlexusComponentLocator.lookup(MetaServerConfig.class);
    m_metaServerAssignmentHolder = PlexusComponentLocator.lookup(MetaServerAssignmentHolder.class);
    m_systemClockService = PlexusComponentLocator.lookup(SystemClockService.class);
    m_brokerLeaseAllocator = PlexusComponentLocator.lookup(BrokerLeaseAllocator.class);
    m_consumerLeaseAllocatorLocator = PlexusComponentLocator.lookup(ConsumerLeaseAllocatorLocator.class);
}
/**
 * Tries to acquire a consumer lease for the given topic/partition/group.
 * The request is proxied to the responsible meta server when this instance
 * is not assigned to the topic; otherwise the local allocator handles it.
 *
 * @param tpg topic/partition/group the lease is requested for
 * @param sessionId the consumer session id
 * @param host the client host ("-" means: derive from the servlet request)
 * @param req the servlet request (used to resolve the remote address)
 * @return the lease acquire response, never null
 */
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Path("consumer/acquire")
public LeaseAcquireResponse tryAcquireConsumerLease(//
      Tpg tpg, //
      @QueryParam("sessionId") String sessionId,//
      @QueryParam("host") @DefaultValue("-") String host,//
      @Context HttpServletRequest req) {
    Map<String, String> params = new HashMap<>();
    params.put("sessionId", sessionId);
    params.put("host", getRemoteAddr(host, req));
    LeaseAcquireResponse proxied = proxyConsumerLeaseRequestIfNecessary(tpg.getTopic(),
          "/consumer/acquire", params, tpg);
    if (proxied != null) {
        return proxied;
    }
    // This server is responsible: delegate to the local allocator.
    ConsumerLeaseAllocator allocator = m_consumerLeaseAllocatorLocator.findAllocator(tpg.getTopic(),
          tpg.getGroupId());
    if (allocator == null) {
        return new LeaseAcquireResponse(false, null, m_systemClockService.now() + NO_STRATEGY_DELAY_TIME_MILLIS);
    }
    try {
        return allocator.tryAcquireLease(tpg, sessionId, getRemoteAddr(host, req));
    } catch (Exception e) {
        return new LeaseAcquireResponse(false, null, m_systemClockService.now()
              + EXCEPTION_CAUGHT_DELAY_TIME_MILLIS);
    }
}
/**
 * Tries to renew an existing consumer lease. The request is proxied to the
 * responsible meta server when this instance is not assigned to the topic;
 * otherwise the local allocator handles it.
 *
 * @param tpg topic/partition/group the lease belongs to
 * @param leaseId the id of the lease to renew
 * @param sessionId the consumer session id
 * @param host the client host ("-" means: derive from the servlet request)
 * @param req the servlet request (used to resolve the remote address)
 * @return the lease acquire response, never null
 */
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Path("consumer/renew")
public LeaseAcquireResponse tryRenewConsumerLease(//
      Tpg tpg, //
      @QueryParam("leaseId") long leaseId,//
      @QueryParam("sessionId") String sessionId,//
      @QueryParam("host") @DefaultValue("-") String host,//
      @Context HttpServletRequest req) {
    Map<String, String> params = new HashMap<>();
    params.put("sessionId", sessionId);
    params.put("leaseId", Long.toString(leaseId));
    params.put("host", getRemoteAddr(host, req));
    LeaseAcquireResponse proxied = proxyConsumerLeaseRequestIfNecessary(tpg.getTopic(),
          "/consumer/renew", params, tpg);
    if (proxied != null) {
        return proxied;
    }
    // This server is responsible: delegate to the local allocator.
    ConsumerLeaseAllocator allocator = m_consumerLeaseAllocatorLocator.findAllocator(tpg.getTopic(),
          tpg.getGroupId());
    if (allocator == null) {
        return new LeaseAcquireResponse(false, null, m_systemClockService.now() + NO_STRATEGY_DELAY_TIME_MILLIS);
    }
    try {
        return allocator.tryRenewLease(tpg, sessionId, leaseId, getRemoteAddr(host, req));
    } catch (Exception e) {
        return new LeaseAcquireResponse(false, null, m_systemClockService.now()
              + EXCEPTION_CAUGHT_DELAY_TIME_MILLIS);
    }
}
/**
 * Tries to acquire a broker lease for the given topic partition. The request
 * is proxied to the cluster leader when this instance does not hold
 * leadership; otherwise the local broker lease allocator handles it.
 *
 * @param topic the topic name
 * @param partition the partition id
 * @param sessionId the broker session id
 * @param port the broker port
 * @param host the client host ("-" means: derive from the servlet request)
 * @param req the servlet request (used to resolve the remote address)
 * @return the lease acquire response, never null
 */
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Path("broker/acquire")
public LeaseAcquireResponse tryAcquireBrokerLease(//
      @QueryParam("topic") String topic,//
      @QueryParam("partition") int partition,//
      @QueryParam("sessionId") String sessionId,//
      @QueryParam("brokerPort") int port, //
      @QueryParam("host") @DefaultValue("-") String host,// FIXME use empty string as default value
      @Context HttpServletRequest req) {
    Map<String, String> params = new HashMap<>();
    params.put("topic", topic);
    params.put("partition", Integer.toString(partition));
    params.put("sessionId", sessionId);
    params.put("brokerPort", Integer.toString(port));
    params.put("host", getRemoteAddr(host, req));
    LeaseAcquireResponse proxied = proxyBrokerLeaseRequestIfNecessary("/broker/acquire", params, null);
    if (proxied != null) {
        return proxied;
    }
    // This server is the leader: allocate locally.
    try {
        return m_brokerLeaseAllocator.tryAcquireLease(topic, partition, sessionId, getRemoteAddr(host, req), port);
    } catch (Exception e) {
        return new LeaseAcquireResponse(false, null, m_systemClockService.now()
              + EXCEPTION_CAUGHT_DELAY_TIME_MILLIS);
    }
}
/**
 * Tries to renew an existing broker lease. The request is proxied to the
 * cluster leader when this instance does not hold leadership; otherwise the
 * local broker lease allocator handles it.
 *
 * @param topic the topic name
 * @param partition the partition id
 * @param leaseId the id of the lease to renew
 * @param sessionId the broker session id
 * @param port the broker port
 * @param host the client host ("-" means: derive from the servlet request)
 * @param req the servlet request (used to resolve the remote address)
 * @return the lease acquire response, never null
 */
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Path("broker/renew")
public LeaseAcquireResponse tryRenewBrokerLease(//
      @QueryParam("topic") String topic,//
      @QueryParam("partition") int partition, //
      @QueryParam("leaseId") long leaseId,//
      @QueryParam("sessionId") String sessionId,//
      @QueryParam("brokerPort") int port,//
      @QueryParam("host") @DefaultValue("-") String host,//
      @Context HttpServletRequest req) {
    Map<String, String> params = new HashMap<>();
    params.put("topic", topic);
    params.put("partition", Integer.toString(partition));
    params.put("leaseId", Long.toString(leaseId));
    params.put("sessionId", sessionId);
    params.put("brokerPort", Integer.toString(port));
    params.put("host", getRemoteAddr(host, req));
    LeaseAcquireResponse proxied = proxyBrokerLeaseRequestIfNecessary("/broker/renew", params, null);
    if (proxied != null) {
        return proxied;
    }
    // This server is the leader: renew locally.
    try {
        return m_brokerLeaseAllocator.tryRenewLease(topic, partition, sessionId, leaseId, getRemoteAddr(host, req),
              port);
    } catch (Exception e) {
        return new LeaseAcquireResponse(false, null, m_systemClockService.now()
              + EXCEPTION_CAUGHT_DELAY_TIME_MILLIS);
    }
}
/**
 * Forwards a broker lease request to the current leader when this node does
 * not hold leadership.
 *
 * @return the leader's response when the call was proxied, or {@code null}
 *         when this node is the leader and should handle the request itself
 */
private LeaseAcquireResponse proxyBrokerLeaseRequestIfNecessary(String uri, Map<String, String> params,
      Object payload) {
    if (!m_clusterStateHolder.hasLeadership()) {
        HostPort leader = m_clusterStateHolder.getLeader();
        return proxyPass(leader.getHost(), leader.getPort(), uri, params, payload);
    }
    // Leader: signal the caller to process the request locally.
    return null;
}
/**
 * Forwards a consumer lease request to the meta server that owns the topic's
 * assignment, unless that server is this one.
 *
 * @return {@code null} when this server owns the assignment (handle locally),
 *         the proxied response when another server owns it, or a rejection
 *         with the no-assignment delay when no assignment exists
 */
private LeaseAcquireResponse proxyConsumerLeaseRequestIfNecessary(String topic, String uri,
      Map<String, String> params, Object payload) {
    Map<String, ClientContext> assignment = m_metaServerAssignmentHolder.getAssignment(topic);
    if (assignment == null || assignment.isEmpty()) {
        return new LeaseAcquireResponse(false, null, m_systemClockService.now() + NO_ASSIGNMENT_DELAY_TIME_MILLIS);
    }

    // Only the first assigned responder is consulted.
    ClientContext owner = assignment.values().iterator().next();
    if (owner == null) {
        return new LeaseAcquireResponse(false, null, m_systemClockService.now() + NO_ASSIGNMENT_DELAY_TIME_MILLIS);
    }

    boolean ownedByThisServer = m_config.getMetaServerHost().equals(owner.getIp())
          && m_config.getMetaServerPort() == owner.getPort();
    return ownedByThisServer ? null : proxyPass(owner.getIp(), owner.getPort(), uri, params, payload);
}
/**
 * POSTs the lease request to another server under the {@code /lease} prefix and
 * parses the JSON body into a {@link LeaseAcquireResponse}.
 *
 * <p>Any transport error, non-200 status, or blank body is converted into a
 * rejection carrying the proxy-pass fail delay, so callers never see an
 * exception from this method.
 *
 * @param host    target server host
 * @param port    target server port
 * @param uri     lease endpoint path (prefixed with {@code /lease} here)
 * @param params  query parameters to append; may be {@code null}
 * @param payload optional JSON body; {@code null} sends no body
 */
private LeaseAcquireResponse proxyPass(String host, int port, String uri, Map<String, String> params, Object payload) {
    uri = "/lease" + uri;
    if (log.isDebugEnabled()) {
        log.debug("Proxy pass request to http://{}:{}{}(params={}, payload={})", host, port, uri, params,
              JSON.toJSONString(payload));
    }
    try {
        URIBuilder uriBuilder = new URIBuilder()//
              .setScheme("http")//
              .setHost(host)//
              .setPort(port)//
              .setPath(uri);
        if (params != null) {
            for (Map.Entry<String, String> entry : params.entrySet()) {
                uriBuilder.addParameter(entry.getKey(), entry.getValue());
            }
        }

        // Build the request once; only the optional body differs.
        Request request = Request.Post(uriBuilder.build())//
              .connectTimeout(m_config.getProxyPassConnectTimeout())//
              .socketTimeout(m_config.getProxyPassReadTimeout());
        if (payload != null) {
            request = request.bodyString(JSON.toJSONString(payload), ContentType.APPLICATION_JSON);
        }
        HttpResponse response = request.execute().returnResponse();

        if (response != null && response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
            String responseContent = EntityUtils.toString(response.getEntity());
            if (!StringUtils.isBlank(responseContent)) {
                return JSON.parseObject(responseContent, LeaseAcquireResponse.class);
            }
            // 200 with an empty body is treated as a failed proxy pass.
            return new LeaseAcquireResponse(false, null, m_systemClockService.now()
                  + PROXY_PASS_FAIL_DELAY_TIME_MILLIS);
        }

        if (log.isDebugEnabled()) {
            // FIX: previously dereferenced 'response' here even though this branch is
            // reachable with response == null (NPE); also removed the stray '}' that
            // corrupted the "status={}}" placeholder in the log format string.
            Object status = response == null ? "no response" : response.getStatusLine().getStatusCode();
            log.debug("Response error while proxy passing to http://{}:{}{}.(status={}).", host, port, uri, status);
        }
        return new LeaseAcquireResponse(false, null, m_systemClockService.now() + PROXY_PASS_FAIL_DELAY_TIME_MILLIS);
    } catch (Exception e) {
        // Deliberate best-effort: log at debug and fall back to a delayed rejection.
        if (log.isDebugEnabled()) {
            log.debug("Failed to proxy pass to http://{}:{}{}.", host, port, uri, e);
        }
        return new LeaseAcquireResponse(false, null, m_systemClockService.now() + PROXY_PASS_FAIL_DELAY_TIME_MILLIS);
    }
}
/**
 * Returns the caller-supplied host, falling back to the servlet request's
 * remote address when the host is the {@code "-"} sentinel default.
 */
private String getRemoteAddr(String host, HttpServletRequest req) {
    if ("-".equals(host)) {
        return req.getRemoteAddr();
    }
    return host;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.websocket;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Map;
import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.impl.DefaultEndpoint;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.jsse.SSLContextParameters;
import org.eclipse.jetty.server.Handler;
/**
* The websocket component provides websocket endpoints for communicating with clients using websocket.
*
* This component uses Jetty as the websocket implementation.
*/
@UriEndpoint(scheme = "websocket", title = "Jetty Websocket", syntax = "websocket:host:port/resourceUri", consumerClass = WebsocketConsumer.class, label = "websocket")
public class WebsocketEndpoint extends DefaultEndpoint {

    // Owning component; connect/disconnect delegate to it directly.
    private WebsocketComponent component;
    // Parsed form of the endpoint URI passed to the constructor.
    private URI uri;
    // Extra Jetty handlers to install; not exposed as a URI option.
    private List<Handler> handlers;

    @UriPath(defaultValue = "0.0.0.0")
    private String host;
    @UriPath(defaultValue = "9292")
    private Integer port;
    @UriPath @Metadata(required = "true")
    private String resourceUri;

    @UriParam(label = "producer")
    private Boolean sendToAll;
    @UriParam
    private boolean enableJmx;
    @UriParam
    private boolean sessionSupport;
    @UriParam
    private boolean crossOriginFilterOn;
    @UriParam
    private SSLContextParameters sslContextParameters;
    @UriParam
    private String allowedOrigins;
    @UriParam
    private String filterPath;
    @UriParam
    private String staticResources;
    @UriParam(defaultValue = "8192")
    private Integer bufferSize;
    @UriParam(defaultValue = "300000")
    private Integer maxIdleTime;
    @UriParam
    private Integer maxTextMessageSize;
    @UriParam(defaultValue = "-1")
    private Integer maxBinaryMessageSize;
    @UriParam(defaultValue = "13")
    private Integer minVersion;

    /**
     * Creates the endpoint and eagerly parses {@code uri}, so a malformed URI
     * fails fast with an {@link IllegalArgumentException} at construction time.
     */
    public WebsocketEndpoint(WebsocketComponent component, String uri, String resourceUri, Map<String, Object> parameters) {
        super(uri, component);
        this.resourceUri = resourceUri;
        this.component = component;
        try {
            this.uri = new URI(uri);
        } catch (URISyntaxException e) {
            throw new IllegalArgumentException(e);
        }
    }

    /** Returns the owning component, asserting that it has been set. */
    @Override
    public WebsocketComponent getComponent() {
        ObjectHelper.notNull(component, "component");
        return (WebsocketComponent) super.getComponent();
    }

    /** Creates and configures a websocket consumer bound to this endpoint. */
    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        ObjectHelper.notNull(component, "component");
        WebsocketConsumer consumer = new WebsocketConsumer(this, processor);
        configureConsumer(consumer);
        return consumer;
    }

    /** Creates a websocket producer bound to this endpoint. */
    @Override
    public Producer createProducer() throws Exception {
        return new WebsocketProducer(this);
    }

    /** Registers the consumer with the component's websocket infrastructure. */
    public void connect(WebsocketConsumer consumer) throws Exception {
        component.connect(consumer);
    }

    /** Unregisters the consumer from the component's websocket infrastructure. */
    public void disconnect(WebsocketConsumer consumer) throws Exception {
        component.disconnect(consumer);
    }

    /** Registers the producer with the component's websocket infrastructure. */
    public void connect(WebsocketProducer producer) throws Exception {
        component.connect(producer);
    }

    /** Unregisters the producer from the component's websocket infrastructure. */
    public void disconnect(WebsocketProducer producer) throws Exception {
        component.disconnect(producer);
    }

    /** Endpoints are singletons: one instance is shared per endpoint URI. */
    @Override
    public boolean isSingleton() {
        return true;
    }

    /** Returns the parsed endpoint URI. */
    public URI getUri() {
        return uri;
    }

    public Integer getPort() {
        return port;
    }

    public String getHost() {
        return host;
    }

    /**
     * The hostname. The default value is <tt>0.0.0.0</tt>.
     * Setting this option on the component will use the component configured value as default.
     */
    public void setHost(String host) {
        this.host = host;
    }

    /**
     * The port number. The default value is <tt>9292</tt>.
     * Setting this option on the component will use the component configured value as default.
     */
    public void setPort(int port) {
        this.port = port;
    }

    public String getStaticResources() {
        return staticResources;
    }

    /**
     * Set a resource path for static resources (such as .html files etc).
     * <p/>
     * The resources can be loaded from classpath, if you prefix with <tt>classpath:</tt>,
     * otherwise the resources is loaded from file system or from JAR files.
     * <p/>
     * For example to load from root classpath use <tt>classpath:.</tt>, or
     * <tt>classpath:WEB-INF/static</tt>
     * <p/>
     * If not configured (eg <tt>null</tt>) then no static resource is in use.
     */
    public void setStaticResources(String staticResources) {
        this.staticResources = staticResources;
    }

    public Boolean getSendToAll() {
        return sendToAll;
    }

    /**
     * To send to all websocket subscribers. Can be used to configure on endpoint level, instead of having to use the WebsocketConstants.SEND_TO_ALL header on the message.
     */
    public void setSendToAll(Boolean sendToAll) {
        this.sendToAll = sendToAll;
    }

    /** Returns the scheme of the endpoint URI (e.g. <tt>websocket</tt>). */
    public String getProtocol() {
        return uri.getScheme();
    }

    /** Returns the path component of the endpoint URI. */
    public String getPath() {
        return uri.getPath();
    }

    /**
     * Whether to enable session support which enables HttpSession for each http request.
     */
    public void setSessionSupport(boolean support) {
        sessionSupport = support;
    }

    public boolean isSessionSupport() {
        return sessionSupport;
    }

    public Integer getBufferSize() {
        return bufferSize;
    }

    /**
     * Set the buffer size of the websocketServlet, which is also the max frame byte size (default 8192)
     */
    public void setBufferSize(Integer bufferSize) {
        this.bufferSize = bufferSize;
    }

    public Integer getMaxIdleTime() {
        return maxIdleTime;
    }

    /**
     * Set the time in ms that the websocket created by the websocketServlet may be idle before closing. (default is 300000)
     */
    public void setMaxIdleTime(Integer maxIdleTime) {
        this.maxIdleTime = maxIdleTime;
    }

    public Integer getMaxTextMessageSize() {
        return maxTextMessageSize;
    }

    /**
     * Can be used to set the size in characters that the websocket created by the websocketServlet may be accept before closing.
     */
    public void setMaxTextMessageSize(Integer maxTextMessageSize) {
        this.maxTextMessageSize = maxTextMessageSize;
    }

    public Integer getMaxBinaryMessageSize() {
        return maxBinaryMessageSize;
    }

    /**
     * Can be used to set the size in bytes that the websocket created by the websocketServlet may be accept before closing. (Default is -1 - or unlimited)
     */
    public void setMaxBinaryMessageSize(Integer maxBinaryMessageSize) {
        this.maxBinaryMessageSize = maxBinaryMessageSize;
    }

    public Integer getMinVersion() {
        return minVersion;
    }

    /**
     * Can be used to set the minimum protocol version accepted for the websocketServlet. (Default 13 - the RFC6455 version)
     */
    public void setMinVersion(Integer minVersion) {
        this.minVersion = minVersion;
    }

    public List<Handler> getHandlers() {
        return handlers;
    }

    public void setHandlers(List<Handler> handlers) {
        this.handlers = handlers;
    }

    public SSLContextParameters getSslContextParameters() {
        return sslContextParameters;
    }

    /**
     * To configure security using SSLContextParameters
     */
    public void setSslContextParameters(SSLContextParameters sslContextParameters) {
        this.sslContextParameters = sslContextParameters;
    }

    public boolean isEnableJmx() {
        return this.enableJmx;
    }

    /**
     * If this option is true, Jetty JMX support will be enabled for this endpoint. See Jetty JMX support for more details.
     */
    public void setEnableJmx(boolean enableJmx) {
        this.enableJmx = enableJmx;
    }

    public String getAllowedOrigins() {
        return allowedOrigins;
    }

    /**
     * The CORS allowed origins. Use * to allow all.
     */
    public void setAllowedOrigins(String allowedOrigins) {
        this.allowedOrigins = allowedOrigins;
    }

    public boolean isCrossOriginFilterOn() {
        return crossOriginFilterOn;
    }

    /**
     * Whether to enable CORS
     */
    public void setCrossOriginFilterOn(boolean crossOriginFilterOn) {
        this.crossOriginFilterOn = crossOriginFilterOn;
    }

    public String getFilterPath() {
        return filterPath;
    }

    /**
     * Context path for filtering CORS
     */
    public void setFilterPath(String filterPath) {
        this.filterPath = filterPath;
    }

    public String getResourceUri() {
        return resourceUri;
    }

    /**
     * Name of the websocket channel to use
     */
    public void setResourceUri(String resourceUri) {
        this.resourceUri = resourceUri;
    }
}
| |
/**
* This class is generated by jOOQ
*/
package edu.kit.ipd.crowdcontrol.workerservice.database.model.tables.records;
import edu.kit.ipd.crowdcontrol.workerservice.database.model.tables.RatingConstraint;
import javax.annotation.Generated;
import org.jooq.Field;
import org.jooq.Record1;
import org.jooq.Record3;
import org.jooq.Row3;
import org.jooq.impl.UpdatableRecordImpl;
/**
* This class is generated by jOOQ.
*/
@Generated(
    value = {
        "http://www.jooq.org",
        "jOOQ version:3.7.3"
    },
    comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class RatingConstraintRecord extends UpdatableRecordImpl<RatingConstraintRecord> implements Record3<Integer, Integer, Integer> {

    // NOTE: generated by jOOQ from the crowdcontrol.Rating_Constraint table.
    // Hand edits will be lost on regeneration; change the schema/generator instead.

    private static final long serialVersionUID = 1134107666;

    /**
     * Setter for <code>crowdcontrol.Rating_Constraint.id_rating_Constraints</code>.
     */
    public void setIdRatingConstraints(Integer value) {
        setValue(0, value);
    }

    /**
     * Getter for <code>crowdcontrol.Rating_Constraint.id_rating_Constraints</code>.
     */
    public Integer getIdRatingConstraints() {
        return (Integer) getValue(0);
    }

    /**
     * Setter for <code>crowdcontrol.Rating_Constraint.ref_rating</code>.
     */
    public void setRefRating(Integer value) {
        setValue(1, value);
    }

    /**
     * Getter for <code>crowdcontrol.Rating_Constraint.ref_rating</code>.
     */
    public Integer getRefRating() {
        return (Integer) getValue(1);
    }

    /**
     * Setter for <code>crowdcontrol.Rating_Constraint.off_constraint</code>.
     */
    public void setOffConstraint(Integer value) {
        setValue(2, value);
    }

    /**
     * Getter for <code>crowdcontrol.Rating_Constraint.off_constraint</code>.
     */
    public Integer getOffConstraint() {
        return (Integer) getValue(2);
    }

    // -------------------------------------------------------------------------
    // Primary key information
    // -------------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public Record1<Integer> key() {
        return (Record1) super.key();
    }

    // -------------------------------------------------------------------------
    // Record3 type implementation
    // -------------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public Row3<Integer, Integer, Integer> fieldsRow() {
        return (Row3) super.fieldsRow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Row3<Integer, Integer, Integer> valuesRow() {
        return (Row3) super.valuesRow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Integer> field1() {
        return RatingConstraint.RATING_CONSTRAINT.ID_RATING_CONSTRAINTS;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Integer> field2() {
        return RatingConstraint.RATING_CONSTRAINT.REF_RATING;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Integer> field3() {
        return RatingConstraint.RATING_CONSTRAINT.OFF_CONSTRAINT;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer value1() {
        return getIdRatingConstraints();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer value2() {
        return getRefRating();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer value3() {
        return getOffConstraint();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public RatingConstraintRecord value1(Integer value) {
        setIdRatingConstraints(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public RatingConstraintRecord value2(Integer value) {
        setRefRating(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public RatingConstraintRecord values(Integer value1, Integer value2, Integer value3) {
        value1(value1);
        value2(value2);
        value3(value3);
        return this;
    }

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    /**
     * Create a detached RatingConstraintRecord
     */
    public RatingConstraintRecord() {
        super(RatingConstraint.RATING_CONSTRAINT);
    }

    /**
     * Create a detached, initialised RatingConstraintRecord
     */
    public RatingConstraintRecord(Integer idRatingConstraints, Integer refRating, Integer offConstraint) {
        super(RatingConstraint.RATING_CONSTRAINT);
        setValue(0, idRatingConstraints);
        setValue(1, refRating);
        setValue(2, offConstraint);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public RatingConstraintRecord value3(Integer value) {
        setOffConstraint(value);
        return this;
    }
}
| |
/*
* Copyright 2013 Jacob Klinker
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.klnker.android.actionbutton;
import android.animation.ObjectAnimator;
import android.app.Activity;
import android.content.Context;
import android.graphics.Point;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.StateListDrawable;
import android.graphics.drawable.shapes.OvalShape;
import android.os.Handler;
import android.util.TypedValue;
import android.view.Display;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.DecelerateInterpolator;
import android.widget.FrameLayout;
import android.widget.ImageButton;
/**
* Class which allows simple adding an interaction of an onscreen, circular image button
*/
public class ActionButton extends ImageButton {

    private static final String LOGTAG = "ActionButton";

    // some default constants for initializing the ActionButton
    public static final int DEFAULT_DISTANCE_FROM_BOTTOM = 50;
    public static final int DEFAULT_DISTANCE_FROM_RIGHT = 10;
    public static final int DEFAULT_SLIDE_ANIMATION_TIME = 150;
    public static final int DEFAULT_FADE_ANIMATION_TIME = 700;
    public static final int DEFAULT_WIDTH = 50;
    public static final int DEFAULT_HEIGHT = 50;
    public static final int DEFAULT_COLOR = 0xFFCC0000;
    public static final int DEFAULT_COLOR_SELECTED = 0xFFD94B4B;

    // animations that can be set for the ActionButton when it is showing and hiding
    public enum ActionButtonAnimation {
        SLIDE_FROM_BOTTOM, SLIDE_FROM_RIGHT, FADE, NONE
    }

    // set up default values (distances and sizes are in dp; converted to px on use)
    private int distanceFromBottom = DEFAULT_DISTANCE_FROM_BOTTOM;
    private int distanceFromRight = DEFAULT_DISTANCE_FROM_RIGHT;
    private int width = DEFAULT_WIDTH;
    private int height = DEFAULT_HEIGHT;
    private boolean isShowing = false;
    private ViewGroup parent = null;
    private ActionButtonAnimation animation = ActionButtonAnimation.SLIDE_FROM_BOTTOM;

    /**
     * Default constructor
     *
     * @param context the context where ActionButton will be used
     */
    public ActionButton(Context context) {
        super(context);

        // set colors to their defaults in case user doesn't specifically implement them
        setColors(DEFAULT_COLOR, DEFAULT_COLOR_SELECTED);
    }

    /**
     * Creates a simple circle background to be applied behind the button
     *
     * @param color the main color of the circle
     * @param selectedColor the color to be displayed when button has been clicked
     */
    public void setColors(int color, int selectedColor) {
        // create an oval and set it to the main color
        ShapeDrawable normal = new ShapeDrawable(new OvalShape());
        normal.getPaint().setColor(color);

        // create a second oval and set it to selected color
        ShapeDrawable selected = new ShapeDrawable(new OvalShape());
        selected.getPaint().setColor(selectedColor);

        // create a state drawable which displays appropriate drawable according to the
        // current state of the ActionButton
        StateListDrawable back = new StateListDrawable();
        back.addState(new int[] {android.R.attr.state_pressed},
                selected);
        back.addState(new int[] {},
                normal);

        // set the background for this button
        setBackgroundDrawable(back);
    }

    /**
     * Sets how far away from the bottom of the screen the button should be displayed.
     * Distance should be the value in DP, it will be converted to the appropriate pixel value
     *
     * @param distance the distance from the bottom in dp
     */
    public void setDistanceFromBottom(int distance) {
        this.distanceFromBottom = distance;
    }

    /**
     * Sets how far away from the right side of the screen the button should be displayed.
     * Distance should be the value in DP, it will be converted to the appropriate pixel value
     *
     * @param distance the distance from the right in dp
     */
    public void setDistanceFromRight(int distance) {
        this.distanceFromRight = distance;
    }

    /**
     * Sets the width of the button. Distance should be the value in DP, it will be
     * converted to the appropriate pixel value
     *
     * @param width the width of the circle in dp
     */
    public void setWidth(int width) {
        this.width = width;
    }

    /**
     * Gets the width of the button
     *
     * @return width of the button
     */
    public int getButtonWidth() {
        return this.width;
    }

    /**
     * Sets the height of the button. Distance should be the value in DP, it will be
     * converted to the appropriate pixel value
     *
     * @param height the height of the circle in dp
     */
    public void setHeight(int height) {
        this.height = height;
    }

    /**
     * Sets the view that the buttons should be attached to
     *
     * @param parent the parent view to attach buttons to
     */
    public void setParent(ViewGroup parent) {
        this.parent = parent;
    }

    /**
     * Gets the height of the button
     *
     * @return height of the button
     */
    public int getButtonHeight() {
        return this.height;
    }

    /**
     * Tells whether or not the button is currently showing on the screen.
     *
     * @return true if ActionButton is showing, false otherwise
     */
    public boolean isShowing() {
        return isShowing;
    }

    /**
     * Sets the animation for when the button shows and hides
     *
     * @param anim the animation to be used
     */
    public void setAnimation(ActionButtonAnimation anim) {
        this.animation = anim;
    }

    /**
     * Animates the ActionButton onto the screen so that the user may interact.
     * Animation occurs from the bottom of the screen, moving up until it reaches the
     * appropriate distance from the bottom.
     */
    public void show() {
        final Activity activity = (Activity) getContext();

        // set the correct width and height for ActionButton
        ViewGroup.LayoutParams params = new FrameLayout.LayoutParams(toPx(activity, width), toPx(activity, height));
        this.setLayoutParams(params);

        if (parent == null) {
            // get the current content FrameLayout and add ActionButton to the top
            parent = (FrameLayout) activity.findViewById(android.R.id.content);
        }

        // FIX: calling show() twice in a row used to crash with
        // "The specified child already has a parent" — only attach when detached.
        if (getParent() == null) {
            parent.addView(this);
        }

        // get the size of the screen so we know where to animate from and to
        float frameWidth = parent.getWidth();
        float frameHeight = parent.getHeight();

        // FIX: a prior hide() with the FADE or NONE animation leaves alpha at 0,
        // which made a subsequent slide/none show() invisible. Reset it here;
        // the FADE show animator below drives alpha itself.
        setAlpha(1.0f);

        // perform the animation with an object animator, default to sliding up from bottom
        setTranslationX(frameWidth - toPx(activity, distanceFromRight) - toPx(activity, width));
        ObjectAnimator animator = null;
        switch (animation) {
            case FADE:
                setTranslationY(frameHeight - toPx(activity, distanceFromBottom) - toPx(activity, height));
                animator = ObjectAnimator.ofFloat(this, View.ALPHA, 0.0f, 1.0f);
                animator.setDuration(DEFAULT_FADE_ANIMATION_TIME);
                break;
            case SLIDE_FROM_RIGHT:
                setTranslationY(frameHeight - toPx(activity, distanceFromBottom) - toPx(activity, height));
                animator = ObjectAnimator.ofFloat(this, View.X, frameWidth, frameWidth - toPx(activity, distanceFromRight) - toPx(activity, width));
                animator.setInterpolator(new DecelerateInterpolator());
                animator.setDuration(DEFAULT_SLIDE_ANIMATION_TIME);
                break;
            case SLIDE_FROM_BOTTOM:
                animator = ObjectAnimator.ofFloat(this, View.Y, frameHeight, frameHeight - toPx(activity, distanceFromBottom) - toPx(activity, height));
                animator.setInterpolator(new DecelerateInterpolator());
                animator.setDuration(DEFAULT_SLIDE_ANIMATION_TIME);
                break;
            default:
                // don't animate anything
                setTranslationY(frameHeight - toPx(activity, distanceFromBottom) - toPx(activity, height));
        }

        if (animator != null) {
            animator.start();
        }

        isShowing = true;
    }

    /**
     * Animates the ActionButton off of the screen. Animation will go from its current position and
     * down until it is no longer being shown to the user.
     */
    public void hide() {
        // FIX: hide() used to dereference 'parent' unconditionally and threw a
        // NullPointerException when called before show() ever attached the button.
        if (parent == null) {
            isShowing = false;
            return;
        }

        final Activity activity = (Activity) getContext();

        // get size of screen
        float frameWidth = parent.getWidth();
        float frameHeight = parent.getHeight();

        // perform animation
        setTranslationX(frameWidth - toPx(activity, distanceFromRight) - toPx(activity, width));
        ObjectAnimator animator = null;
        int animTime = 0;
        switch (animation) {
            case FADE:
                animator = ObjectAnimator.ofFloat(this, View.ALPHA, 1.0f, 0.0f);
                animTime = DEFAULT_FADE_ANIMATION_TIME;
                break;
            case SLIDE_FROM_RIGHT:
                setTranslationY(frameHeight - toPx(activity, distanceFromBottom) - toPx(activity, height));
                animator = ObjectAnimator.ofFloat(this, View.X, frameWidth - toPx(activity, distanceFromRight) - toPx(activity, width), frameWidth);
                animator.setInterpolator(new DecelerateInterpolator());
                animator.setDuration(DEFAULT_SLIDE_ANIMATION_TIME);
                animTime = DEFAULT_SLIDE_ANIMATION_TIME;
                break;
            case SLIDE_FROM_BOTTOM:
                animator = ObjectAnimator.ofFloat(this, View.Y, frameHeight - toPx(activity, distanceFromBottom) - toPx(activity, height), frameHeight + toPx(activity, height));
                animator.setInterpolator(new AccelerateInterpolator());
                animTime = DEFAULT_SLIDE_ANIMATION_TIME;
                break;
            default:
                // don't animate anything
                setAlpha(0.0f);
        }

        if (animator != null) {
            animator.setDuration(animTime);
            animator.start();
        }

        // After animation has finished, remove the ActionButton from the content frame
        new Handler().postDelayed(new Runnable() {
            @Override
            public void run() {
                parent.removeView(ActionButton.this);
            }
        }, animTime);

        isShowing = false;
    }

    /**
     * Converts a dip value into a pixel value
     *
     * @param context the current context of the application
     * @param num the number to be converted to PX
     * @return the value of the number in PX
     */
    public static int toPx(Context context, int num) {
        return (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, num, context.getResources().getDisplayMetrics());
    }
}
| |
package org.apereo.cas.adaptors.trusted.config;
import org.apereo.cas.adaptors.trusted.authentication.handler.support.PrincipalBearingCredentialsAuthenticationHandler;
import org.apereo.cas.adaptors.trusted.authentication.principal.PrincipalBearingPrincipalResolver;
import org.apereo.cas.adaptors.trusted.authentication.principal.RemoteRequestPrincipalAttributesExtractor;
import org.apereo.cas.adaptors.trusted.authentication.principal.ShibbolethServiceProviderRequestPrincipalAttributesExtractor;
import org.apereo.cas.adaptors.trusted.web.flow.ChainingPrincipalFromRequestNonInteractiveCredentialsAction;
import org.apereo.cas.adaptors.trusted.web.flow.PrincipalFromRequestExtractorAction;
import org.apereo.cas.adaptors.trusted.web.flow.PrincipalFromRequestHeaderNonInteractiveCredentialsAction;
import org.apereo.cas.adaptors.trusted.web.flow.PrincipalFromRequestRemoteUserNonInteractiveCredentialsAction;
import org.apereo.cas.adaptors.trusted.web.flow.PrincipalFromRequestUserPrincipalNonInteractiveCredentialsAction;
import org.apereo.cas.authentication.AuthenticationEventExecutionPlanConfigurer;
import org.apereo.cas.authentication.AuthenticationHandler;
import org.apereo.cas.authentication.CoreAuthenticationUtils;
import org.apereo.cas.authentication.PrincipalElectionStrategy;
import org.apereo.cas.authentication.adaptive.AdaptiveAuthenticationPolicy;
import org.apereo.cas.authentication.principal.PrincipalFactory;
import org.apereo.cas.authentication.principal.PrincipalFactoryUtils;
import org.apereo.cas.authentication.principal.PrincipalResolver;
import org.apereo.cas.authentication.principal.resolvers.ChainingPrincipalResolver;
import org.apereo.cas.authentication.principal.resolvers.EchoingPrincipalResolver;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.services.ServicesManager;
import org.apereo.cas.util.CollectionUtils;
import org.apereo.cas.web.flow.resolver.CasDelegatingWebflowEventResolver;
import org.apereo.cas.web.flow.resolver.CasWebflowEventResolver;
import lombok.val;
import org.apereo.services.persondir.IPersonAttributeDao;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.ScopedProxyMode;
/**
* This is {@link TrustedAuthenticationConfiguration}.
*
* @author Misagh Moayyed
* @author Dmitriy Kopylenko
* @since 5.0.0
*/
@EnableConfigurationProperties(CasConfigurationProperties.class)
@Configuration(value = "TrustedAuthenticationConfiguration", proxyBeanMethods = false)
public class TrustedAuthenticationConfiguration {
@Configuration(value = "TrustedAuthenticationHandlerConfiguration", proxyBeanMethods = false)
@EnableConfigurationProperties(CasConfigurationProperties.class)
public static class TrustedAuthenticationHandlerConfiguration {

    /**
     * Builds the trusted-authentication handler from the {@code cas.authn.trusted}
     * properties (handler name and order), wiring in the services manager and the
     * trusted principal factory.
     */
    @Bean
    @RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
    @ConditionalOnMissingBean(name = "principalBearingCredentialsAuthenticationHandler")
    public AuthenticationHandler principalBearingCredentialsAuthenticationHandler(
        final CasConfigurationProperties casProperties,
        @Qualifier("trustedPrincipalFactory")
        final PrincipalFactory trustedPrincipalFactory,
        @Qualifier(ServicesManager.BEAN_NAME)
        final ServicesManager servicesManager) {
        val trusted = casProperties.getAuthn().getTrusted();
        return new PrincipalBearingCredentialsAuthenticationHandler(trusted.getName(),
            servicesManager,
            trustedPrincipalFactory,
            trusted.getOrder());
    }
}
@Configuration(value = "TrustedAuthenticationPrincipalConfiguration", proxyBeanMethods = false)
@EnableConfigurationProperties(CasConfigurationProperties.class)
public static class TrustedAuthenticationPrincipalConfiguration {

    /** Default principal factory used by the trusted authentication beans. */
    @ConditionalOnMissingBean(name = "trustedPrincipalFactory")
    @Bean
    @RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
    public PrincipalFactory trustedPrincipalFactory() {
        return PrincipalFactoryUtils.newPrincipalFactory();
    }
}
@Configuration(value = "TrustedAuthenticationPrincipalResolutionConfiguration", proxyBeanMethods = false)
@EnableConfigurationProperties(CasConfigurationProperties.class)
public static class TrustedAuthenticationPrincipalResolutionConfiguration {

    /**
     * Chaining principal resolver for trusted authentication: the chain runs an
     * {@link EchoingPrincipalResolver} first, then a person-directory resolver
     * built for {@link PrincipalBearingPrincipalResolver} from the
     * {@code cas.authn.trusted}, person-directory and attribute-repository
     * merger settings.
     */
    @Bean
    @RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
    @ConditionalOnMissingBean(name = "trustedPrincipalResolver")
    public PrincipalResolver trustedPrincipalResolver(
        @Qualifier(PrincipalElectionStrategy.BEAN_NAME)
        final PrincipalElectionStrategy principalElectionStrategy,
        final CasConfigurationProperties casProperties,
        @Qualifier("trustedPrincipalFactory")
        final PrincipalFactory trustedPrincipalFactory,
        @Qualifier(PrincipalResolver.BEAN_NAME_ATTRIBUTE_REPOSITORY)
        final IPersonAttributeDao attributeRepository) {
        val resolver = new ChainingPrincipalResolver(principalElectionStrategy, casProperties);
        val personDirectory = casProperties.getPersonDirectory();
        val trusted = casProperties.getAuthn().getTrusted();
        val bearingPrincipalResolver = CoreAuthenticationUtils.newPersonDirectoryPrincipalResolver(trustedPrincipalFactory,
            attributeRepository,
            CoreAuthenticationUtils.getAttributeMerger(casProperties.getAuthn().getAttributeRepository().getCore().getMerger()),
            PrincipalBearingPrincipalResolver.class,
            trusted,
            personDirectory);
        resolver.setChain(CollectionUtils.wrapList(new EchoingPrincipalResolver(),
            bearingPrincipalResolver));
        return resolver;
    }
}
/**
 * Declares the extractor that pulls principal attributes from the remote request.
 * Overridable via a bean named {@code remoteRequestPrincipalAttributesExtractor}.
 */
@Configuration(value = "TrustedAuthenticationExtractorConfiguration", proxyBeanMethods = false)
@EnableConfigurationProperties(CasConfigurationProperties.class)
public static class TrustedAuthenticationExtractorConfiguration {
@ConditionalOnMissingBean(name = "remoteRequestPrincipalAttributesExtractor")
@Bean
@RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
public RemoteRequestPrincipalAttributesExtractor remoteRequestPrincipalAttributesExtractor() {
// Default implementation reads attributes the way a Shibboleth SP publishes them.
return new ShibbolethServiceProviderRequestPrincipalAttributesExtractor();
}
}
/**
 * Registers the trusted-authentication handler and its principal resolver with the
 * CAS authentication execution plan.
 */
@Configuration(value = "TrustedAuthenticationExecutionPlanConfiguration", proxyBeanMethods = false)
@EnableConfigurationProperties(CasConfigurationProperties.class)
public static class TrustedAuthenticationExecutionPlanConfiguration {
@ConditionalOnMissingBean(name = "trustedAuthenticationEventExecutionPlanConfigurer")
@Bean
@RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
public AuthenticationEventExecutionPlanConfigurer trustedAuthenticationEventExecutionPlanConfigurer(
@Qualifier("principalBearingCredentialsAuthenticationHandler")
final AuthenticationHandler principalBearingCredentialsAuthenticationHandler,
@Qualifier("trustedPrincipalResolver")
final PrincipalResolver trustedPrincipalResolver) {
// Pair the handler with its resolver so the plan can route trusted credentials.
return plan -> plan.registerAuthenticationHandlerWithPrincipalResolver(principalBearingCredentialsAuthenticationHandler,
trustedPrincipalResolver);
}
}
/**
 * Declares the webflow actions that extract a principal from the incoming request
 * (REMOTE_USER, the request user principal, or a configured header) and a chaining
 * action that tries each extractor in turn.
 */
@Configuration(value = "TrustedAuthenticationActionConfiguration", proxyBeanMethods = false)
@EnableConfigurationProperties(CasConfigurationProperties.class)
public static class TrustedAuthenticationActionConfiguration {
/**
 * Action that builds a principal from the servlet REMOTE_USER value.
 */
@Bean
@RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
@ConditionalOnMissingBean(name = "principalFromRemoteUserAction")
public PrincipalFromRequestExtractorAction principalFromRemoteUserAction(
@Qualifier("trustedPrincipalFactory")
final PrincipalFactory trustedPrincipalFactory,
@Qualifier("remoteRequestPrincipalAttributesExtractor")
final RemoteRequestPrincipalAttributesExtractor remoteRequestPrincipalAttributesExtractor,
@Qualifier("adaptiveAuthenticationPolicy")
final AdaptiveAuthenticationPolicy adaptiveAuthenticationPolicy,
@Qualifier("serviceTicketRequestWebflowEventResolver")
final CasWebflowEventResolver serviceTicketRequestWebflowEventResolver,
@Qualifier("initialAuthenticationAttemptWebflowEventResolver")
final CasDelegatingWebflowEventResolver initialAuthenticationAttemptWebflowEventResolver) {
return new PrincipalFromRequestRemoteUserNonInteractiveCredentialsAction(initialAuthenticationAttemptWebflowEventResolver,
serviceTicketRequestWebflowEventResolver,
adaptiveAuthenticationPolicy,
trustedPrincipalFactory,
remoteRequestPrincipalAttributesExtractor);
}
/**
 * Action that builds a principal from the request's user principal object.
 */
@Bean
@RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
@ConditionalOnMissingBean(name = "principalFromRemoteUserPrincipalAction")
public PrincipalFromRequestExtractorAction principalFromRemoteUserPrincipalAction(
@Qualifier("trustedPrincipalFactory")
final PrincipalFactory trustedPrincipalFactory,
@Qualifier("remoteRequestPrincipalAttributesExtractor")
final RemoteRequestPrincipalAttributesExtractor remoteRequestPrincipalAttributesExtractor,
@Qualifier("adaptiveAuthenticationPolicy")
final AdaptiveAuthenticationPolicy adaptiveAuthenticationPolicy,
@Qualifier("serviceTicketRequestWebflowEventResolver")
final CasWebflowEventResolver serviceTicketRequestWebflowEventResolver,
@Qualifier("initialAuthenticationAttemptWebflowEventResolver")
final CasDelegatingWebflowEventResolver initialAuthenticationAttemptWebflowEventResolver) {
return new PrincipalFromRequestUserPrincipalNonInteractiveCredentialsAction(initialAuthenticationAttemptWebflowEventResolver,
serviceTicketRequestWebflowEventResolver,
adaptiveAuthenticationPolicy,
trustedPrincipalFactory,
remoteRequestPrincipalAttributesExtractor);
}
/**
 * Action that builds a principal from the header named by
 * {@code cas.authn.trusted.remote-principal-header}.
 */
@Bean
@RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
@ConditionalOnMissingBean(name = "principalFromRemoteHeaderPrincipalAction")
public PrincipalFromRequestExtractorAction principalFromRemoteHeaderPrincipalAction(
final CasConfigurationProperties casProperties,
@Qualifier("trustedPrincipalFactory")
final PrincipalFactory trustedPrincipalFactory,
@Qualifier("remoteRequestPrincipalAttributesExtractor")
final RemoteRequestPrincipalAttributesExtractor remoteRequestPrincipalAttributesExtractor,
@Qualifier("adaptiveAuthenticationPolicy")
final AdaptiveAuthenticationPolicy adaptiveAuthenticationPolicy,
@Qualifier("serviceTicketRequestWebflowEventResolver")
final CasWebflowEventResolver serviceTicketRequestWebflowEventResolver,
@Qualifier("initialAuthenticationAttemptWebflowEventResolver")
final CasDelegatingWebflowEventResolver initialAuthenticationAttemptWebflowEventResolver) {
val trusted = casProperties.getAuthn().getTrusted();
return new PrincipalFromRequestHeaderNonInteractiveCredentialsAction(initialAuthenticationAttemptWebflowEventResolver,
serviceTicketRequestWebflowEventResolver,
adaptiveAuthenticationPolicy,
trustedPrincipalFactory,
remoteRequestPrincipalAttributesExtractor,
trusted.getRemotePrincipalHeader());
}
/**
 * Chaining action that tries, in order: REMOTE_USER, the request user principal,
 * then the configured remote-principal header.
 */
@ConditionalOnMissingBean(name = "remoteUserAuthenticationAction")
@Bean
@RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
public PrincipalFromRequestExtractorAction remoteUserAuthenticationAction(
@Qualifier("trustedPrincipalFactory")
final PrincipalFactory trustedPrincipalFactory,
@Qualifier("remoteRequestPrincipalAttributesExtractor")
final RemoteRequestPrincipalAttributesExtractor remoteRequestPrincipalAttributesExtractor,
@Qualifier("principalFromRemoteUserAction")
final PrincipalFromRequestExtractorAction principalFromRemoteUserAction,
@Qualifier("principalFromRemoteUserPrincipalAction")
final PrincipalFromRequestExtractorAction principalFromRemoteUserPrincipalAction,
@Qualifier("principalFromRemoteHeaderPrincipalAction")
final PrincipalFromRequestExtractorAction principalFromRemoteHeaderPrincipalAction,
@Qualifier("adaptiveAuthenticationPolicy")
final AdaptiveAuthenticationPolicy adaptiveAuthenticationPolicy,
@Qualifier("serviceTicketRequestWebflowEventResolver")
final CasWebflowEventResolver serviceTicketRequestWebflowEventResolver,
@Qualifier("initialAuthenticationAttemptWebflowEventResolver")
final CasDelegatingWebflowEventResolver initialAuthenticationAttemptWebflowEventResolver) {
val chain = new ChainingPrincipalFromRequestNonInteractiveCredentialsAction(initialAuthenticationAttemptWebflowEventResolver,
serviceTicketRequestWebflowEventResolver,
adaptiveAuthenticationPolicy,
trustedPrincipalFactory,
remoteRequestPrincipalAttributesExtractor);
// Order matters: actions are attempted in registration order.
chain.addAction(principalFromRemoteUserAction);
chain.addAction(principalFromRemoteUserPrincipalAction);
chain.addAction(principalFromRemoteHeaderPrincipalAction);
return chain;
}
}
}
| |
/*
* android-plugin-host-sdk-for-locale https://github.com/twofortyfouram/android-plugin-host-sdk-for-locale
* Copyright 2015 two forty four a.m. LLC
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twofortyfouram.locale.sdk.host.internal;
import android.content.pm.ActivityInfo;
import android.content.pm.ApplicationInfo;
import android.content.pm.ResolveInfo;
import android.support.annotation.NonNull;
import android.support.test.InstrumentationRegistry;
import android.support.test.filters.SmallTest;
import android.support.test.runner.AndroidJUnit4;
import com.twofortyfouram.locale.sdk.host.model.PluginType;
import com.twofortyfouram.spackle.AppBuildInfo;
import com.twofortyfouram.test.assertion.MoarAsserts;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.Collection;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
/**
 * Instrumentation tests for {@code PluginPackageScanner}.
 *
 * <p>Fix: {@code getResolveInfoWithInstallLocation} was the only non-static helper; it
 * is now {@code static} and annotated {@code @NonNull}, consistent with its siblings.
 * The misplaced {@code $NON-NLS-1$} marker in
 * {@code isComponentPermissionGranted_false} was also moved onto the string literal's line.
 */
@RunWith(AndroidJUnit4.class)
public final class PluginPackageScannerTest {

    /** The scanner is a utility class and must not be instantiable. */
    @SmallTest
    @Test
    public void nonInstantiable() {
        MoarAsserts.assertNoninstantiable(PluginPackageScanner.class);
    }

    @SmallTest
    @Test
    public void findActivities_conditions() {
        final Collection<ResolveInfo> infos = PluginPackageScanner.findActivities(
                InstrumentationRegistry.getContext(),
                PluginType.CONDITION, null);
        assertThat(infos, notNullValue());
        assertThat(infos.size(), greaterThanOrEqualTo(1));
    }

    @SmallTest
    @Test
    public void findActivities_settings() {
        final Collection<ResolveInfo> infos = PluginPackageScanner
                .findActivities(InstrumentationRegistry.getContext(),
                        PluginType.SETTING, null);
        assertThat(infos, notNullValue());
        assertThat(infos.size(), greaterThanOrEqualTo(1));
    }

    @SmallTest
    @Test
    public void findActivities_debug_condition() {
        // Scoping the scan to this test package should yield exactly one match.
        final Collection<ResolveInfo> infos = PluginPackageScanner
                .findActivities(InstrumentationRegistry.getContext(),
                        PluginType.CONDITION,
                        InstrumentationRegistry.getContext().getPackageName());
        assertThat(infos, notNullValue());
        assertThat(infos.size(), is(1));
    }

    @SmallTest
    @Test
    public void findActivities_debug_setting() {
        final Collection<ResolveInfo> infos = PluginPackageScanner
                .findActivities(InstrumentationRegistry.getContext(),
                        PluginType.SETTING, InstrumentationRegistry.getContext().getPackageName());
        assertThat(infos, notNullValue());
        assertThat(infos.size(), is(1));
    }

    @SmallTest
    @Test
    public void findReceivers_conditions() {
        final Collection<ResolveInfo> infos = PluginPackageScanner
                .findReceivers(InstrumentationRegistry.getContext(),
                        PluginType.CONDITION, null);
        assertThat(infos, notNullValue());
        assertThat(infos.size(), greaterThanOrEqualTo(1));
    }

    @SmallTest
    @Test
    public void findReceivers_settings() {
        final Collection<ResolveInfo> infos = PluginPackageScanner
                .findReceivers(InstrumentationRegistry.getContext(),
                        PluginType.SETTING, null);
        assertThat(infos, notNullValue());
        assertThat(infos.size(), greaterThanOrEqualTo(1));
    }

    @SmallTest
    @Test
    public void findReceivers_debug_condition() {
        final Collection<ResolveInfo> infos = PluginPackageScanner
                .findReceivers(InstrumentationRegistry.getContext(),
                        PluginType.CONDITION,
                        InstrumentationRegistry.getContext().getPackageName());
        assertThat(infos, notNullValue());
        assertThat(infos.size(), is(1));
    }

    @SmallTest
    @Test
    public void findReceivers_debug_setting() {
        final Collection<ResolveInfo> infos = PluginPackageScanner
                .findReceivers(InstrumentationRegistry.getContext(),
                        PluginType.SETTING, InstrumentationRegistry.getContext().getPackageName());
        assertThat(infos, notNullValue());
        assertThat(infos.size(), is(1));
    }

    /** An unknown package must map to the sentinel version code -1. */
    @SmallTest
    @Test
    public void getVersionCode_unknown() {
        final int actualVersionCode = PluginPackageScanner
                .getVersionCode(InstrumentationRegistry.getContext()
                        .getPackageManager(), "com.twofortyfouram.locale.bork"); //$NON-NLS-1$
        assertThat(actualVersionCode, is(-1));
    }

    @SmallTest
    @Test
    public void getVersionCode_known() {
        final int expectedVersionCode = AppBuildInfo
                .getVersionCode(InstrumentationRegistry.getContext());
        final int actualVersionCode = PluginPackageScanner
                .getVersionCode(InstrumentationRegistry.getContext()
                        .getPackageManager(),
                        InstrumentationRegistry.getContext().getPackageName());
        assertThat(actualVersionCode, is(expectedVersionCode));
    }

    @SmallTest
    @Test
    public void isTargetSdkCorrect_true() {
        assertThat(PluginPackageScanner
                .isTargetSdkCorrect(InstrumentationRegistry.getContext(),
                        getResolveInfoWithTargetSdkVersion(InstrumentationRegistry.getContext()
                                .getApplicationInfo().targetSdkVersion)), is(true));
    }

    @SmallTest
    @Test
    public void isTargetSdkCorrect_false() {
        // One below this app's own target SDK must be rejected.
        assertThat(PluginPackageScanner
                .isTargetSdkCorrect(InstrumentationRegistry.getContext(),
                        getResolveInfoWithTargetSdkVersion(InstrumentationRegistry.getContext()
                                .getApplicationInfo().targetSdkVersion - 1)), is(false));
    }

    @SmallTest
    @Test
    public void isApplicationEnabled_true() {
        assertThat(PluginPackageScanner
                .isApplicationEnabled(getResolveInfoWithApplicationEnabled(true)), is(true));
    }

    @SmallTest
    @Test
    public void isApplicationEnabled_false() {
        assertThat(PluginPackageScanner
                .isApplicationEnabled(getResolveInfoWithApplicationEnabled(false)), is(false));
    }

    @SmallTest
    @Test
    public void isComponentEnabled_true() {
        assertThat(
                PluginPackageScanner.isComponentEnabled(getResolveInfoWithActivityEnabled(true)),
                is(true));
    }

    @SmallTest
    @Test
    public void isComponentEnabled_false() {
        assertThat(PluginPackageScanner
                .isComponentEnabled(getResolveInfoWithActivityEnabled(false)), is(false));
    }

    @SmallTest
    @Test
    public void isComponentExported_true() {
        assertThat(PluginPackageScanner
                .isComponentExported(getResolveInfoWithActivityExported(true)), is(true));
    }

    @SmallTest
    @Test
    public void isComponentExported_false() {
        assertThat(PluginPackageScanner
                .isComponentExported(getResolveInfoWithActivityExported(false)), is(false));
    }

    /** A component requiring no permission is always considered granted. */
    @SmallTest
    @Test
    public void isComponentPermissionGranted_true() {
        assertThat(PluginPackageScanner
                .isComponentPermissionGranted(InstrumentationRegistry.getContext(),
                        getResolveInfoWithPermission(null)), is(true));
    }

    @SmallTest
    @Test
    public void isComponentPermissionGranted_false() {
        assertThat(PluginPackageScanner
                .isComponentPermissionGranted(InstrumentationRegistry.getContext(),
                        getResolveInfoWithPermission(
                                "com.nefarious.app.permission.NO_SOUP_FOR_YOU") //$NON-NLS-1$
                ), is(false));
    }

    @SmallTest
    @Test
    public void isInstallLocationCorrect_internal() {
        assertThat(
                PluginPackageScanner.isInstallLocationCorrect(InstrumentationRegistry.getContext(),
                        getResolveInfoWithInstallLocation(false)), is(true));
    }

    @SmallTest
    @Test
    public void isInstallLocationCorrect_external() {
        assertThat(
                PluginPackageScanner.isInstallLocationCorrect(InstrumentationRegistry.getContext(),
                        getResolveInfoWithInstallLocation(true)), is(false));
    }

    /** Builds a minimal ResolveInfo whose application declares the given target SDK. */
    @NonNull
    private static ResolveInfo getResolveInfoWithTargetSdkVersion(final int targetSdkVersion) {
        final ResolveInfo info = new ResolveInfo();
        info.activityInfo = new ActivityInfo();
        info.activityInfo.applicationInfo = new ApplicationInfo();
        info.activityInfo.applicationInfo.targetSdkVersion = targetSdkVersion;
        return info;
    }

    /** Builds a minimal ResolveInfo whose application enabled flag is as given. */
    @NonNull
    private static ResolveInfo getResolveInfoWithApplicationEnabled(final boolean isApplicationEnabled) {
        final ResolveInfo info = new ResolveInfo();
        info.activityInfo = new ActivityInfo();
        info.activityInfo.applicationInfo = new ApplicationInfo();
        info.activityInfo.applicationInfo.enabled = isApplicationEnabled;
        return info;
    }

    /** Builds a minimal ResolveInfo whose activity enabled flag is as given. */
    @NonNull
    private static ResolveInfo getResolveInfoWithActivityEnabled(final boolean isActivityEnabled) {
        final ResolveInfo info = new ResolveInfo();
        info.activityInfo = new ActivityInfo();
        info.activityInfo.enabled = isActivityEnabled;
        return info;
    }

    /** Builds a minimal ResolveInfo whose activity exported flag is as given. */
    @NonNull
    private static ResolveInfo getResolveInfoWithActivityExported(final boolean isActivityExported) {
        final ResolveInfo info = new ResolveInfo();
        info.activityInfo = new ActivityInfo();
        info.activityInfo.exported = isActivityExported;
        return info;
    }

    /** Builds a minimal ResolveInfo whose activity requires the given permission (null for none). */
    @NonNull
    private static ResolveInfo getResolveInfoWithPermission(final String permissionString) {
        final ResolveInfo info = new ResolveInfo();
        info.activityInfo = new ActivityInfo();
        info.activityInfo.permission = permissionString;
        return info;
    }

    /**
     * Builds a minimal ResolveInfo for this test package, flagged as installed on external
     * storage when {@code isExternal} is true.
     *
     * <p>Made {@code static} for consistency with the other helpers; it reads no instance state.
     */
    @NonNull
    private static ResolveInfo getResolveInfoWithInstallLocation(final boolean isExternal) {
        final ResolveInfo info = new ResolveInfo();
        info.activityInfo = new ActivityInfo();
        info.activityInfo.applicationInfo = new ApplicationInfo();
        info.activityInfo.packageName = InstrumentationRegistry.getContext().getPackageName();
        info.activityInfo.applicationInfo.flags = isExternal ? ApplicationInfo.FLAG_EXTERNAL_STORAGE
                : 0;
        return info;
    }
}
| |
/*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
* Copyright (c) 2008, Nationwide Health Information Network (NHIN) Connect. All rights reserved.
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* - Neither the name of the NHIN Connect Project nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* END OF TERMS AND CONDITIONS
*/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package mil.navy.med.dzreg.model;
import java.io.Serializable;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Collection;
import java.util.Date;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Id;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
/**
*
* @author kim
*/
/**
 * JPA entity mapped to the {@code DZ_POC} table (point-of-contact records).
 *
 * <p>Fix: {@link #toString()} previously reported the stale package name
 * {@code mil.navy.med.dzreg.jpa.DzPoc}; it now derives the fully-qualified name from
 * {@code getClass()} so it always matches this class's actual package.
 *
 * @author kim
 */
@Entity
@Table(name = "DZ_POC")
@NamedQueries({
@NamedQuery(name = "DzPoc.findAll", query = "SELECT d FROM DzPoc d"),
@NamedQuery(name = "DzPoc.findByPocid", query = "SELECT d FROM DzPoc d WHERE d.pocid = :pocid"),
@NamedQuery(name = "DzPoc.findByPoc", query = "SELECT d FROM DzPoc d WHERE d.poc = :poc")
// @NamedQuery(name = "DzPoc.findByInsertedDt", query = "SELECT d FROM DzPoc d WHERE d.insertedDt = :insertedDt"),
// @NamedQuery(name = "DzPoc.findByDataSource", query = "SELECT d FROM DzPoc d WHERE d.dataSource = :dataSource"),
// @NamedQuery(name = "DzPoc.findByHospitalLocationInactiveDt", query = "SELECT d FROM DzPoc d WHERE d.hospitalLocationInactiveDt = :hospitalLocationInactiveDt"),
// @NamedQuery(name = "DzPoc.findByDivisionId", query = "SELECT d FROM DzPoc d WHERE d.divisionId = :divisionId"),
// @NamedQuery(name = "DzPoc.findByDivisionDmis", query = "SELECT d FROM DzPoc d WHERE d.divisionDmis = :divisionDmis"),
// @NamedQuery(name = "DzPoc.findByFlag", query = "SELECT d FROM DzPoc d WHERE d.flag = :flag")
})
public class DzPoc implements Serializable {
private static final long serialVersionUID = 1L;
// Primary key.
@Id
@Basic(optional = false)
@Column(name = "POCID")
private Long pocid;
@Column(name = "POC")
private String poc;
@Column(name = "INSERTED_DT")
@Temporal(TemporalType.DATE)
private Date insertedDt;
// Required (non-nullable) column.
@Basic(optional = false)
@Column(name = "DATA_SOURCE")
private String dataSource;
@Column(name = "HOSPITAL_LOCATION_INACTIVE_DT")
@Temporal(TemporalType.DATE)
private Date hospitalLocationInactiveDt;
@Column(name = "DIVISION_ID")
private Integer divisionId;
@Column(name = "DIVISION_DMIS")
private String divisionDmis;
@Column(name = "FLAG")
private Integer flag;
//@OneToMany(mappedBy = "pocid", fetch = FetchType.EAGER)
//private Collection<DzAppt> dzApptCollection;
//@OneToMany(mappedBy = "pocid", fetch = FetchType.EAGER)
//private Collection<DzPatients> dzPatientsCollection;

/** No-arg constructor required by JPA. */
public DzPoc() {
}

public DzPoc(Long pocid) {
this.pocid = pocid;
}

public DzPoc(Long pocid, String dataSource) {
this.pocid = pocid;
this.dataSource = dataSource;
}

public Long getPocid() {
return pocid;
}

public void setPocid(Long pocid) {
this.pocid = pocid;
}

public String getPoc() {
return poc;
}

public void setPoc(String poc) {
this.poc = poc;
}

public Date getInsertedDt() {
return insertedDt;
}

public void setInsertedDt(Date insertedDt) {
this.insertedDt = insertedDt;
}

public String getDataSource() {
return dataSource;
}

public void setDataSource(String dataSource) {
this.dataSource = dataSource;
}

public Date getHospitalLocationInactiveDt() {
return hospitalLocationInactiveDt;
}

public void setHospitalLocationInactiveDt(Date hospitalLocationInactiveDt) {
this.hospitalLocationInactiveDt = hospitalLocationInactiveDt;
}

public Integer getDivisionId() {
return divisionId;
}

public void setDivisionId(Integer divisionId) {
this.divisionId = divisionId;
}

public String getDivisionDmis() {
return divisionDmis;
}

public void setDivisionDmis(String divisionDmis) {
this.divisionDmis = divisionDmis;
}

public Integer getFlag() {
return flag;
}

public void setFlag(Integer flag) {
this.flag = flag;
}

/** Identity is based solely on the primary key, matching {@link #equals(Object)}. */
@Override
public int hashCode() {
return pocid != null ? pocid.hashCode() : 0;
}

/**
 * Equality is based solely on {@code pocid}.
 * NOTE: unreliable for transient instances whose id has not been assigned yet.
 */
@Override
public boolean equals(Object object) {
if (!(object instanceof DzPoc)) {
return false;
}
DzPoc other = (DzPoc) object;
return pocid == null ? other.pocid == null : pocid.equals(other.pocid);
}

@Override
public String toString() {
// Derive the FQCN so the text can never drift from the actual package again.
return getClass().getName() + "[pocid=" + pocid + "]";
}
}
| |
package ai.verta.modeldb.entities;
import ai.verta.common.Artifact;
import ai.verta.modeldb.App;
import ai.verta.modeldb.ModelDBConstants;
import java.io.Serializable;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
/**
 * Hibernate entity for the {@code artifact} table. An artifact row may belong to a
 * project, an experiment, or an experiment run (mutually exclusive foreign keys);
 * for code-version and observation owners the association is mapped from the other side.
 */
@Entity
@Table(name = "artifact")
public class ArtifactEntity implements Serializable {
/** No-arg constructor required by Hibernate. */
public ArtifactEntity() {}
/**
 * Builds an artifact row from the proto message and attaches it to its owner entity.
 *
 * @param entity    owning entity; must be a ProjectEntity, ExperimentEntity,
 *                  ExperimentRunEntity, CodeVersionEntity, or ObservationEntity,
 *                  otherwise an IllegalStateException is thrown
 * @param fieldType discriminator describing which field of the owner this artifact fills
 * @param artifact  source proto message
 */
public ArtifactEntity(Object entity, String fieldType, Artifact artifact) {
var app = App.getInstance();
var artifactStoreConfig = app.mdbConfig.artifactStoreConfig;
setKey(artifact.getKey());
setPath(artifact.getPath());
// Store-prefixed path is only recorded for artifacts managed by the store (not path-only).
if (!artifact.getPathOnly()) {
setStore_type_path(artifactStoreConfig.storeTypePathPrefix() + artifact.getPath());
}
setArtifact_type(artifact.getArtifactTypeValue());
setPath_only(artifact.getPathOnly());
setLinked_artifact_id(artifact.getLinkedArtifactId());
setFilename_extension(artifact.getFilenameExtension());
this.serialization = artifact.getSerialization();
this.artifact_subtype = artifact.getArtifactSubtype();
if (entity instanceof ProjectEntity) {
setProjectEntity(entity);
} else if (entity instanceof ExperimentEntity) {
setExperimentEntity(entity);
} else if (entity instanceof ExperimentRunEntity) {
setExperimentRunEntity(entity);
} else if (entity instanceof CodeVersionEntity) {
// OneToOne mapping, do nothing
} else if (entity instanceof ObservationEntity) {
// OneToOne mapping, do nothing
} else {
throw new IllegalStateException("unexpected entity class " + entity.getClass());
}
this.field_type = fieldType;
// Non-S3 stores need no multipart upload, so the upload is considered complete
// immediately; for S3 it is complete only if the proto says so.
var uploadCompleted = !artifactStoreConfig.getArtifactStoreType().equals(ModelDBConstants.S3);
if (artifact.getUploadCompleted()) {
uploadCompleted = true;
}
setUploadCompleted(uploadCompleted);
}
// Surrogate primary key.
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "id", updatable = false, nullable = false)
private Long id;
@Column(name = "ar_key", columnDefinition = "TEXT")
private String key;
@Column(name = "ar_path", columnDefinition = "TEXT")
private String path;
// Path with the artifact-store prefix; null for path-only artifacts (see constructor).
@Column(name = "store_type_path", columnDefinition = "TEXT")
private String store_type_path;
@Column(name = "artifact_type")
private Integer artifact_type;
@Column(name = "path_only")
private Boolean path_only;
@Column(name = "linked_artifact_id")
private String linked_artifact_id;
@Column(name = "filename_extension", length = 50)
private String filename_extension;
// Multipart-upload id; only set via setUploadId, presumably for S3 uploads — TODO confirm.
@Column(name = "upload_id")
private String uploadId;
@Column(name = "upload_completed")
private boolean uploadCompleted;
@Column(name = "serialization", columnDefinition = "TEXT")
private String serialization;
@Column(name = "artifact_subtype")
private String artifact_subtype;
// Owner associations: exactly one of the three is set by the constructor.
@ManyToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY)
@JoinColumn(name = "project_id")
private ProjectEntity projectEntity;
@ManyToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY)
@JoinColumn(name = "experiment_id")
private ExperimentEntity experimentEntity;
@ManyToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY)
@JoinColumn(name = "experiment_run_id")
private ExperimentRunEntity experimentRunEntity;
// Simple class name of the owning entity (set alongside the association above).
@Column(name = "entity_name", length = 50)
private String entityName;
@Column(name = "field_type", length = 50)
private String field_type;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
public String getStore_type_path() {
return store_type_path;
}
public void setStore_type_path(String store_type_path) {
this.store_type_path = store_type_path;
}
public Integer getArtifact_type() {
return artifact_type;
}
public void setArtifact_type(Integer artifactType) {
this.artifact_type = artifactType;
}
public Boolean getPath_only() {
return path_only;
}
public void setPath_only(Boolean path_only) {
this.path_only = path_only;
}
public String getLinked_artifact_id() {
return linked_artifact_id;
}
public void setLinked_artifact_id(String linked_artifact_id) {
this.linked_artifact_id = linked_artifact_id;
}
public String getFilename_extension() {
return filename_extension;
}
public void setFilename_extension(String filename_extension) {
this.filename_extension = filename_extension;
}
public ProjectEntity getProjectEntity() {
return projectEntity;
}
// Also records the owner's simple class name in entityName.
private void setProjectEntity(Object entity) {
this.projectEntity = (ProjectEntity) entity;
this.entityName = this.projectEntity.getClass().getSimpleName();
}
public ExperimentEntity getExperimentEntity() {
return experimentEntity;
}
private void setExperimentEntity(Object entity) {
this.experimentEntity = (ExperimentEntity) entity;
this.entityName = this.experimentEntity.getClass().getSimpleName();
}
public ExperimentRunEntity getExperimentRunEntity() {
return experimentRunEntity;
}
private void setExperimentRunEntity(Object entity) {
this.experimentRunEntity = (ExperimentRunEntity) entity;
this.entityName = this.experimentRunEntity.getClass().getSimpleName();
}
public String getField_type() {
return field_type;
}
public void setUploadId(String uploadId) {
this.uploadId = uploadId;
}
public String getUploadId() {
return uploadId;
}
public boolean isUploadCompleted() {
return uploadCompleted;
}
public void setUploadCompleted(boolean uploadCompleted) {
this.uploadCompleted = uploadCompleted;
}
/**
 * Converts this row back into its proto representation.
 * NOTE(review): proto builder setters reject null, and path_only is unboxed —
 * rows loaded with null columns (e.g. key, path, path_only) would throw NPE here;
 * confirm whether the schema guarantees these columns are populated.
 */
public Artifact getProtoObject() {
return Artifact.newBuilder()
.setKey(key)
.setPath(path)
.setArtifactTypeValue(artifact_type)
.setPathOnly(path_only)
.setLinkedArtifactId(linked_artifact_id)
.setFilenameExtension(filename_extension)
.setSerialization(serialization)
.setArtifactSubtype(artifact_subtype)
.setUploadCompleted(this.uploadCompleted)
.build();
}
}
| |
package com.miscitems.MiscItemsAndBlocks.Container.Utils;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.GuiScreen;
import net.minecraft.client.renderer.OpenGlHelper;
import net.minecraft.client.renderer.RenderHelper;
import net.minecraft.client.renderer.entity.RenderItem;
import net.minecraft.client.renderer.texture.TextureMap;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.inventory.Container;
import net.minecraft.inventory.Slot;
import net.minecraft.item.ItemStack;
import net.minecraft.util.EnumChatFormatting;
import net.minecraft.util.IIcon;
import net.minecraft.util.MathHelper;
import org.lwjgl.input.Keyboard;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL12;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
@SideOnly(Side.CLIENT)
public abstract class ModGuiContainer extends GuiScreen
{
// Shared renderer used to draw item stacks inside the GUI.
protected static RenderItem itemRenderer = new RenderItem();
// GUI dimensions in pixels (defaults match the vanilla inventory texture).
protected int xSize = 176;
protected int ySize = 166;
// Container whose slots this screen displays.
public ActiveContainer container;
// Top-left corner of the GUI on screen, computed in initGui.
protected int guiLeft;
protected int guiTop;
// Slot currently under the mouse cursor (set each frame in drawScreen).
protected Slot mainSlot;
protected Slot clickedSlot = null;
protected boolean isRightMouseClick = false;
// Stack being dragged with the mouse, if any.
protected ItemStack draggedStack = null;
// Origin x/y of the returning-stack animation (see usage in drawScreen).
protected int field_85049_r = 0;
protected int field_85048_s = 0;
// State for the "stack flying back to its slot" animation.
protected Slot returningStackDestSlot = null;
protected long returningStackTime = 0L;
protected ItemStack returningStack = null;
// Obfuscated (MCP) fields below; meanings inferred from drawScreen usage where possible.
protected Slot field_92033_y = null;
protected long field_92032_z = 0L;
// Slots touched during a drag-split operation — presumably; TODO confirm.
@SuppressWarnings("rawtypes")
protected final Set field_94077_p = new HashSet();
// True while a multi-slot drag-split is in progress — TODO confirm.
protected boolean field_94076_q;
protected int field_94071_C = 0;
protected int field_94067_D = 0;
protected boolean field_94068_E = false;
// Remaining stack size shown on the cursor during a drag-split (see drawScreen).
protected int field_94069_F;
protected long field_94070_G = 0L;
protected Slot field_94072_H = null;
protected int field_94073_I = 0;
protected boolean field_94074_J;
protected ItemStack field_94075_K = null;
/**
 * Creates a container screen bound to the given container.
 *
 * @param container the container whose slots this GUI renders
 */
public ModGuiContainer(ActiveContainer container)
{
this.container = container;
// field_94068_E is set true on construction; exact semantics unclear — TODO confirm.
this.field_94068_E = true;
}
/**
 * Initializes the screen: binds the container to the player and centers the GUI.
 */
public void initGui ()
{
super.initGui();
// Make this the player's open container so slot clicks route to it server-side.
this.mc.thePlayer.openContainer = this.container;
// Center the GUI within the current screen resolution.
this.guiLeft = (this.width - this.xSize) / 2;
this.guiTop = (this.height - this.ySize) / 2;
}
/**
 * Renders the whole container screen for one frame: background layer, every
 * active slot (with a hover highlight), the stack attached to the cursor or
 * being dragged, the "fly back to slot" animation for a cancelled drag, and
 * finally the tooltip of the hovered stack. The GL enable/disable sequence is
 * order-sensitive — do not reorder.
 */
public void drawScreen (int mouseX, int mouseY, float par3)
{
    this.drawDefaultBackground();
    int gLeft = this.guiLeft;
    int gTop = this.guiTop;
    this.drawGuiContainerBackgroundLayer(par3, mouseX, mouseY);
    GL11.glDisable(GL12.GL_RESCALE_NORMAL);
    RenderHelper.disableStandardItemLighting();
    GL11.glDisable(GL11.GL_LIGHTING);
    GL11.glDisable(GL11.GL_DEPTH_TEST);
    super.drawScreen(mouseX, mouseY, par3);
    RenderHelper.enableGUIStandardItemLighting();
    GL11.glPushMatrix();
    // Everything below is drawn in GUI-local coordinates.
    GL11.glTranslatef((float) gLeft, (float) gTop, 0.0F);
    GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
    GL11.glEnable(GL12.GL_RESCALE_NORMAL);
    this.mainSlot = null;
    // Full-bright lightmap coordinates so items render unshaded in the GUI.
    short short1 = 240;
    short short2 = 240;
    OpenGlHelper.setLightmapTextureCoords(OpenGlHelper.lightmapTexUnit, (float) short1 / 1.0F, (float) short2 / 1.0F);
    GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
    int slotXPos;
    int slotYPos;

    // Draw every slot that is not an inactive ActiveSlot, remembering the one
    // under the cursor and overlaying it with a translucent highlight.
    for (int slotIter = 0; slotIter < this.container.inventorySlots.size(); ++slotIter)
    {
        Slot slot = (Slot) this.container.inventorySlots.get(slotIter);

        if (!(slot instanceof ActiveSlot) || ((ActiveSlot) slot).getActive())
        {
            this.drawSlotInventory(slot);

            if (this.isMouseOverSlot(slot, mouseX, mouseY))
            {
                this.mainSlot = slot;
                GL11.glDisable(GL11.GL_LIGHTING);
                GL11.glDisable(GL11.GL_DEPTH_TEST);
                slotXPos = slot.xDisplayPosition;
                slotYPos = slot.yDisplayPosition;
                // Translucent white rectangle marks the hovered slot.
                this.drawGradientRect(slotXPos, slotYPos, slotXPos + 16, slotYPos + 16, -2130706433, -2130706433);
                GL11.glEnable(GL11.GL_LIGHTING);
                GL11.glEnable(GL11.GL_DEPTH_TEST);
            }
        }
    }

    this.drawGuiContainerForegroundLayer(mouseX, mouseY);
    InventoryPlayer inventoryplayer = this.mc.thePlayer.inventory;
    // Prefer the touchscreen drag stack over the stack held on the cursor.
    ItemStack itemstack = this.draggedStack == null ? inventoryplayer.getItemStack() : this.draggedStack;

    if (itemstack != null)
    {
        byte b0 = 8;
        slotYPos = this.draggedStack == null ? 8 : 16;
        String s = null;

        if (this.draggedStack != null && this.isRightMouseClick)
        {
            // Right-click drag carries half of the stack (rounded up).
            itemstack = itemstack.copy();
            itemstack.stackSize = MathHelper.ceiling_float_int((float) itemstack.stackSize / 2.0F);
        }
        else if (this.field_94076_q && this.field_94077_p.size() > 1)
        {
            // Paint-drag preview: show what would remain on the cursor after distribution.
            itemstack = itemstack.copy();
            itemstack.stackSize = this.field_94069_F;

            if (itemstack.stackSize == 0)
            {
                s = "" + EnumChatFormatting.YELLOW + "0";
            }
        }

        this.drawItemStack(itemstack, mouseX - gLeft - b0, mouseY - gTop - slotYPos, s);
    }

    if (this.returningStack != null)
    {
        // Animate a cancelled drag flying back to its source slot over ~100 ms.
        float f1 = (float) (Minecraft.getSystemTime() - this.returningStackTime) / 100.0F;

        if (f1 >= 1.0F)
        {
            f1 = 1.0F;
            this.returningStack = null;
        }

        slotXPos = this.returningStackDestSlot.xDisplayPosition - this.field_85049_r;
        slotYPos = this.returningStackDestSlot.yDisplayPosition - this.field_85048_s;
        int xPos = this.field_85049_r + (int) ((float) slotXPos * f1);
        int yPos = this.field_85048_s + (int) ((float) slotYPos * f1);
        this.drawItemStack(this.returningStack, xPos, yPos, (String) null);
    }

    GL11.glPopMatrix();

    // Tooltip for the hovered stack, but only when nothing is held on the cursor.
    if (inventoryplayer.getItemStack() == null && this.mainSlot != null && this.mainSlot.getHasStack())
    {
        ItemStack itemstack1 = this.mainSlot.getStack();
        this.drawItemStackTooltip(itemstack1, mouseX, mouseY);
    }

    GL11.glEnable(GL11.GL_LIGHTING);
    GL11.glEnable(GL11.GL_DEPTH_TEST);
    RenderHelper.enableStandardItemLighting();
}
/**
 * Renders a floating item stack (cursor-held or animating) at z-level 200 so
 * it draws above the slots.
 *
 * @param par4Str optional text overriding the stack-count label; may be null
 */
protected void drawItemStack (ItemStack par1ItemStack, int par2, int par3, String par4Str)
{
    GL11.glTranslatef(0.0F, 0.0F, 32.0F);
    this.zLevel = 200.0F;
    itemRenderer.zLevel = 200.0F;
    itemRenderer.renderItemAndEffectIntoGUI(this.fontRendererObj, this.mc.renderEngine, par1ItemStack, par2, par3);
    // While touch-dragging, the count overlay is lifted 8 px so it stays visible.
    itemRenderer.renderItemOverlayIntoGUI(this.fontRendererObj, this.mc.renderEngine, par1ItemStack, par2, par3 - (this.draggedStack == null ? 0 : 8), par4Str);
    this.zLevel = 0.0F;
    itemRenderer.zLevel = 0.0F;
}
/**
 * Builds the tooltip lines for a stack and renders them via func_102021_a().
 * Lines after the first are prefixed grey; the vanilla rarity-color prefix for
 * the first line is commented out (appears to be disabled on purpose — the
 * empty branch is kept so the loop structure matches vanilla).
 */
@SuppressWarnings("unchecked")
protected void drawItemStackTooltip (ItemStack par1ItemStack, int par2, int par3)
{
    List list = par1ItemStack.getTooltip(this.mc.thePlayer, this.mc.gameSettings.advancedItemTooltips);

    for (int k = 0; k < list.size(); ++k)
    {
        if (k == 0)
        {
            // Vanilla would recolor the title line by item rarity here.
            // list.set(k, "\u00a7" + Integer.toHexString(par1ItemStack.getRarity().rarityColor) + (String) list.get(k));
        }
        else
        {
            list.set(k, EnumChatFormatting.GRAY + (String) list.get(k));
        }
    }

    this.func_102021_a(list, par2, par3);
}
/**
 * Draws a tooltip box containing the given text lines near the cursor,
 * flipping left / clamping up when the box would overflow the screen.
 * The sequence of gradient rects builds the dark background plus border
 * pixel-for-pixel like the vanilla tooltip — do not reorder them.
 *
 * @param par1List list of String lines; nothing is drawn for an empty list
 * @param par2     cursor x in screen coordinates
 * @param par3     cursor y in screen coordinates
 */
protected void func_102021_a (List par1List, int par2, int par3)
{
    if (!par1List.isEmpty())
    {
        GL11.glDisable(GL12.GL_RESCALE_NORMAL);
        RenderHelper.disableStandardItemLighting();
        GL11.glDisable(GL11.GL_LIGHTING);
        GL11.glDisable(GL11.GL_DEPTH_TEST);
        // k = width in pixels of the widest line.
        int k = 0;
        Iterator iterator = par1List.iterator();

        while (iterator.hasNext())
        {
            String s = (String) iterator.next();
            int l = this.fontRendererObj.getStringWidth(s);

            if (l > k)
            {
                k = l;
            }
        }

        // Box anchor: 12 px right and 12 px up from the cursor.
        int i1 = par2 + 12;
        int j1 = par3 - 12;
        int k1 = 8;

        if (par1List.size() > 1)
        {
            // 2 px extra gap after the title line, then 10 px per additional line.
            k1 += 2 + (par1List.size() - 1) * 10;
        }

        // Keep the box on screen: flip to the left of the cursor / clamp upwards.
        if (i1 + k > this.width)
        {
            i1 -= 28 + k;
        }

        if (j1 + k1 + 6 > this.height)
        {
            j1 = this.height - k1 - 6;
        }

        this.zLevel = 300.0F;
        itemRenderer.zLevel = 300.0F;
        int l1 = -267386864;
        this.drawGradientRect(i1 - 3, j1 - 4, i1 + k + 3, j1 - 3, l1, l1);
        this.drawGradientRect(i1 - 3, j1 + k1 + 3, i1 + k + 3, j1 + k1 + 4, l1, l1);
        this.drawGradientRect(i1 - 3, j1 - 3, i1 + k + 3, j1 + k1 + 3, l1, l1);
        this.drawGradientRect(i1 - 4, j1 - 3, i1 - 3, j1 + k1 + 3, l1, l1);
        this.drawGradientRect(i1 + k + 3, j1 - 3, i1 + k + 4, j1 + k1 + 3, l1, l1);
        int i2 = 1347420415;
        // j2 = i2 with its RGB halved (keeps alpha) — the bottom border shade.
        int j2 = (i2 & 16711422) >> 1 | i2 & -16777216;
        this.drawGradientRect(i1 - 3, j1 - 3 + 1, i1 - 3 + 1, j1 + k1 + 3 - 1, i2, j2);
        this.drawGradientRect(i1 + k + 2, j1 - 3 + 1, i1 + k + 3, j1 + k1 + 3 - 1, i2, j2);
        this.drawGradientRect(i1 - 3, j1 - 3, i1 + k + 3, j1 - 3 + 1, i2, i2);
        this.drawGradientRect(i1 - 3, j1 + k1 + 2, i1 + k + 3, j1 + k1 + 3, j2, j2);

        for (int k2 = 0; k2 < par1List.size(); ++k2)
        {
            String s1 = (String) par1List.get(k2);
            this.fontRendererObj.drawStringWithShadow(s1, i1, j1, -1);

            if (k2 == 0)
            {
                j1 += 2;
            }

            j1 += 10;
        }

        this.zLevel = 0.0F;
        itemRenderer.zLevel = 0.0F;
        GL11.glEnable(GL11.GL_LIGHTING);
        GL11.glEnable(GL11.GL_DEPTH_TEST);
        RenderHelper.enableStandardItemLighting();
        GL11.glEnable(GL12.GL_RESCALE_NORMAL);
    }
}
/**
 * Hook for subclasses to draw text/widgets over the slots (coordinates are
 * GUI-local). The default implementation draws nothing.
 */
protected void drawGuiContainerForegroundLayer (int par1, int par2)
{
}

/**
 * Subclasses draw the screen's background texture here; called before the
 * slots are rendered each frame.
 */
protected abstract void drawGuiContainerBackgroundLayer (float f, int i, int j);
/**
 * Renders a single slot: its stack (or the slot's background icon when empty),
 * plus the paint-drag preview — a white overlay and the clamped count the slot
 * would receive — when the slot is part of an in-progress drag.
 */
protected void drawSlotInventory (Slot par1Slot)
{
    int i = par1Slot.xDisplayPosition;
    int j = par1Slot.yDisplayPosition;
    ItemStack itemstack = par1Slot.getStack();
    boolean flag = false;
    // True while this slot's stack is being touch-dragged away, so it is not drawn here.
    boolean flag1 = par1Slot == this.clickedSlot && this.draggedStack != null && !this.isRightMouseClick;
    ItemStack itemstack1 = this.mc.thePlayer.inventory.getItemStack();
    String s = null;

    if (par1Slot == this.clickedSlot && this.draggedStack != null && this.isRightMouseClick && itemstack != null)
    {
        // Right-click touch drag: the source slot shows the half left behind.
        itemstack = itemstack.copy();
        itemstack.stackSize /= 2;
    }
    else if (this.field_94076_q && this.field_94077_p.contains(par1Slot) && itemstack1 != null)
    {
        // A single-slot drag is rendered as the plain cursor stack instead.
        if (this.field_94077_p.size() == 1)
        {
            return;
        }

        if (Container.func_94527_a(par1Slot, itemstack1, true) && this.container.canDragIntoSlot(par1Slot))
        {
            // Preview the stack this slot would receive; when clamped by the item's
            // max stack size or the slot's limit, show the clamp value in yellow.
            itemstack = itemstack1.copy();
            flag = true;
            Container.func_94525_a(this.field_94077_p, this.field_94071_C, itemstack, par1Slot.getStack() == null ? 0 : par1Slot.getStack().stackSize);

            if (itemstack.stackSize > itemstack.getMaxStackSize())
            {
                s = EnumChatFormatting.YELLOW + "" + itemstack.getMaxStackSize();
                itemstack.stackSize = itemstack.getMaxStackSize();
            }

            if (itemstack.stackSize > par1Slot.getSlotStackLimit())
            {
                s = EnumChatFormatting.YELLOW + "" + par1Slot.getSlotStackLimit();
                itemstack.stackSize = par1Slot.getSlotStackLimit();
            }
        }
        else
        {
            // The slot is no longer a valid drag target; drop it and recompute the preview.
            this.field_94077_p.remove(par1Slot);
            this.func_94066_g();
        }
    }

    this.zLevel = 100.0F;
    itemRenderer.zLevel = 100.0F;

    if (itemstack == null)
    {
        IIcon icon = par1Slot.getBackgroundIconIndex();

        if (icon != null)
        {
            GL11.glDisable(GL11.GL_LIGHTING);
            this.mc.getTextureManager().bindTexture(TextureMap.locationItemsTexture);
            this.drawTexturedModelRectFromIcon(i, j, icon, 16, 16);
            GL11.glEnable(GL11.GL_LIGHTING);
            // Background icon drawn; suppress the (null) stack rendering below.
            flag1 = true;
        }
    }

    if (!flag1)
    {
        if (flag)
        {
            // Translucent overlay marking a slot included in the paint-drag.
            drawRect(i, j, i + 16, j + 16, -2130706433);
        }

        GL11.glEnable(GL11.GL_DEPTH_TEST);
        itemRenderer.renderItemAndEffectIntoGUI(this.fontRendererObj, this.mc.renderEngine, itemstack, i, j);
        itemRenderer.renderItemOverlayIntoGUI(this.fontRendererObj, this.mc.renderEngine, itemstack, i, j, s);
    }

    itemRenderer.zLevel = 0.0F;
    this.zLevel = 0.0F;
}
/**
 * Recomputes field_94069_F: how many items would remain on the cursor after
 * distributing the held stack over every slot collected by the paint-drag.
 * Pure preview math on copies — no slot contents are modified.
 */
protected void func_94066_g ()
{
    ItemStack itemstack = this.mc.thePlayer.inventory.getItemStack();

    if (itemstack != null && this.field_94076_q)
    {
        this.field_94069_F = itemstack.stackSize;
        ItemStack itemstack1;
        int i;

        // Note: the for-loop's update clause subtracts each slot's share
        // (new size minus what the slot already held) from the remainder.
        for (Iterator iterator = this.field_94077_p.iterator(); iterator.hasNext(); this.field_94069_F -= itemstack1.stackSize - i)
        {
            Slot slot = (Slot) iterator.next();
            itemstack1 = itemstack.copy();
            i = slot.getStack() == null ? 0 : slot.getStack().stackSize;
            Container.func_94525_a(this.field_94077_p, this.field_94071_C, itemstack1, i);

            // Clamp to the item's max stack size and the slot's own limit,
            // mirroring the clamping done when the drag is committed.
            if (itemstack1.stackSize > itemstack1.getMaxStackSize())
            {
                itemstack1.stackSize = itemstack1.getMaxStackSize();
            }

            if (itemstack1.stackSize > slot.getSlotStackLimit())
            {
                itemstack1.stackSize = slot.getSlotStackLimit();
            }
        }
    }
}
/**
 * Returns the slot under the given screen coordinates, or null when the
 * cursor is not over any slot.
 */
protected Slot getSlotAtPosition (int mouseX, int mouseY)
{
    for (int index = 0; index < this.container.inventorySlots.size(); ++index)
    {
        Slot candidate = (Slot) this.container.inventorySlots.get(index);

        if (!this.isMouseOverSlot(candidate, mouseX, mouseY))
        {
            continue;
        }

        return candidate;
    }

    return null;
}
/**
 * Handles a mouse press inside the GUI: pick-block button, plain left/right
 * clicks (with shift meaning quick-move and clicks outside the window meaning
 * drop), touchscreen drag initiation, and starting a paint-drag when a stack
 * is already on the cursor. Also records time/slot/button for double-click
 * detection consumed in mouseMovedOrUp().
 */
protected void mouseClicked (int mouseX, int mouseY, int mouseButton)
{
    super.mouseClicked(mouseX, mouseY, mouseButton);
    // A key binding bound to a mouse button is encoded as (keycode + 100).
    boolean flag = mouseButton == this.mc.gameSettings.keyBindPickBlock.getKeyCode() + 100;
    Slot slot = this.getSlotAtPosition(mouseX, mouseY);
    long l = Minecraft.getSystemTime();
    // Double-click = same slot, same button, within 250 ms of the previous click.
    this.field_94074_J = this.field_94072_H == slot && l - this.field_94070_G < 250L && this.field_94073_I == mouseButton;
    this.field_94068_E = false;

    if (mouseButton == 0 || mouseButton == 1 || flag)
    {
        int gLeft = this.guiLeft;
        int gTop = this.guiTop;
        // flag1: the click landed outside the GUI window entirely.
        boolean flag1 = mouseX < gLeft || mouseY < gTop || mouseX >= gLeft + this.xSize || mouseY >= gTop + this.ySize;
        int k1 = -1;

        if (slot != null)
        {
            k1 = slot.slotNumber;
        }

        if (flag1)
        {
            // -999 is the protocol's "clicked outside the window" (drop) slot id.
            k1 = -999;
        }

        // Touchscreen: tapping outside with an empty cursor closes the screen.
        if (this.mc.gameSettings.touchscreen && flag1 && this.mc.thePlayer.inventory.getItemStack() == null)
        {
            this.mc.displayGuiScreen((GuiScreen) null);
            return;
        }

        if (k1 != -1)
        {
            if (this.mc.gameSettings.touchscreen)
            {
                // Touchscreen: remember the source slot; the move happens on release.
                if (slot != null && slot.getHasStack())
                {
                    this.clickedSlot = slot;
                    this.draggedStack = null;
                    this.isRightMouseClick = mouseButton == 1;
                }
                else
                {
                    this.clickedSlot = null;
                }
            }
            else if (!this.field_94076_q)
            {
                if (this.mc.thePlayer.inventory.getItemStack() == null)
                {
                    if (mouseButton == this.mc.gameSettings.keyBindPickBlock.getKeyCode() + 100)
                    {
                        // Click mode 3 = clone / pick-block.
                        this.handleMouseClick(slot, k1, mouseButton, 3);
                    }
                    else
                    {
                        // Shift held (keycodes 42/54 = left/right shift) => quick-move
                        // (mode 1); outside the window => throw (mode 4); else mode 0.
                        boolean flag2 = k1 != -999 && (Keyboard.isKeyDown(42) || Keyboard.isKeyDown(54));
                        byte b0 = 0;

                        if (flag2)
                        {
                            this.field_94075_K = slot != null && slot.getHasStack() ? slot.getStack() : null;
                            b0 = 1;
                        }
                        else if (k1 == -999)
                        {
                            b0 = 4;
                        }

                        this.handleMouseClick(slot, k1, mouseButton, b0);
                    }

                    this.field_94068_E = true;
                }
                else
                {
                    // A stack is held on the cursor: this press starts a paint-drag.
                    this.field_94076_q = true;
                    this.field_94067_D = mouseButton;
                    this.field_94077_p.clear();

                    if (mouseButton == 0)
                    {
                        this.field_94071_C = 0;
                    }
                    else if (mouseButton == 1)
                    {
                        this.field_94071_C = 1;
                    }
                }
            }
        }
    }

    this.field_94072_H = slot;
    this.field_94070_G = l;
    this.field_94073_I = mouseButton;
}
/**
 * Called while the mouse is dragged with a button held. In touchscreen mode a
 * long hover (>500 ms) over a compatible slot deposits one item from the
 * dragged stack; otherwise it extends an in-progress paint-drag to the
 * hovered slot.
 */
protected void func_85041_a (int par1, int par2, int par3, long par4)
{
    Slot slot = this.getSlotAtPosition(par1, par2);
    ItemStack itemstack = this.mc.thePlayer.inventory.getItemStack();

    if (this.clickedSlot != null && this.mc.gameSettings.touchscreen)
    {
        if (par3 == 0 || par3 == 1)
        {
            if (this.draggedStack == null)
            {
                // Drag starts once the finger leaves the source slot.
                if (slot != this.clickedSlot)
                {
                    this.draggedStack = this.clickedSlot.getStack().copy();
                }
            }
            else if (this.draggedStack.stackSize > 1 && slot != null && Container.func_94527_a(slot, this.draggedStack, false))
            {
                long i1 = Minecraft.getSystemTime();

                if (this.field_92033_y == slot)
                {
                    if (i1 - this.field_92032_z > 500L)
                    {
                        // Hovered the same slot for >500 ms: deposit one item by
                        // picking up the source, right-clicking once on the target,
                        // then putting the remainder back.
                        this.handleMouseClick(this.clickedSlot, this.clickedSlot.slotNumber, 0, 0);
                        this.handleMouseClick(slot, slot.slotNumber, 1, 0);
                        this.handleMouseClick(this.clickedSlot, this.clickedSlot.slotNumber, 0, 0);
                        // Next deposit into the same slot needs an extra 750 ms.
                        this.field_92032_z = i1 + 750L;
                        --this.draggedStack.stackSize;
                    }
                }
                else
                {
                    // New hover target: restart the timer.
                    this.field_92033_y = slot;
                    this.field_92032_z = i1;
                }
            }
        }
    }
    else if (this.field_94076_q && slot != null && itemstack != null && itemstack.stackSize > this.field_94077_p.size() && Container.func_94527_a(slot, itemstack, true)
            && slot.isItemValid(itemstack) && this.container.canDragIntoSlot(slot))
    {
        // Paint-drag: collect the slot and recompute the in-hand preview count.
        this.field_94077_p.add(slot);
        this.func_94066_g();
    }
}
/**
 * Handles mouse release (and move-without-press). Finishes, in priority
 * order: double-click collect-all, paint-drag cancellation/commit,
 * touchscreen drag completion, and plain clicks made with a stack on the
 * cursor. par3 is the button being released.
 */
protected void mouseMovedOrUp (int par1, int par2, int par3)
{
    Slot slot = this.getSlotAtPosition(par1, par2);
    int l = this.guiLeft;
    int i1 = this.guiTop;
    // flag: the release happened outside the GUI window.
    boolean flag = par1 < l || par2 < i1 || par1 >= l + this.xSize || par2 >= i1 + this.ySize;
    int j1 = -1;

    if (slot != null)
    {
        j1 = slot.slotNumber;
    }

    if (flag)
    {
        // -999 is the protocol's "outside the window" (drop) slot id.
        j1 = -999;
    }

    Slot slot1;
    Iterator iterator;

    if (this.field_94074_J && slot != null && par3 == 0 && this.container.func_94530_a((ItemStack) null, slot))
    {
        // Double-click: with shift, quick-move every matching stack from the
        // same inventory (mode 1); otherwise collect all matching (mode 6).
        if (isShiftKeyDown())
        {
            if (slot != null && slot.inventory != null && this.field_94075_K != null)
            {
                iterator = this.container.inventorySlots.iterator();

                while (iterator.hasNext())
                {
                    slot1 = (Slot) iterator.next();

                    if (slot1 != null && slot1.canTakeStack(this.mc.thePlayer) && slot1.getHasStack() && slot1.inventory == slot.inventory
                            && Container.func_94527_a(slot1, this.field_94075_K, true))
                    {
                        this.handleMouseClick(slot1, slot1.slotNumber, par3, 1);
                    }
                }
            }
        }
        else
        {
            this.handleMouseClick(slot, j1, par3, 6);
        }

        this.field_94074_J = false;
        this.field_94070_G = 0L;
    }
    else
    {
        // Releasing a different button than the one that started the
        // paint-drag cancels it.
        if (this.field_94076_q && this.field_94067_D != par3)
        {
            this.field_94076_q = false;
            this.field_94077_p.clear();
            this.field_94068_E = true;
            return;
        }

        // Swallow the release belonging to the click that opened this screen.
        if (this.field_94068_E)
        {
            this.field_94068_E = false;
            return;
        }

        boolean flag1;

        if (this.clickedSlot != null && this.mc.gameSettings.touchscreen)
        {
            // Touchscreen drag release: pick up from the source, drop on the
            // target, and animate any remainder flying back to the source slot.
            if (par3 == 0 || par3 == 1)
            {
                if (this.draggedStack == null && slot != this.clickedSlot)
                {
                    this.draggedStack = this.clickedSlot.getStack();
                }

                flag1 = Container.func_94527_a(slot, this.draggedStack, false);

                if (j1 != -1 && this.draggedStack != null && flag1)
                {
                    this.handleMouseClick(this.clickedSlot, this.clickedSlot.slotNumber, par3, 0);
                    this.handleMouseClick(slot, j1, 0, 0);

                    if (this.mc.thePlayer.inventory.getItemStack() != null)
                    {
                        // Target couldn't take everything: return the rest.
                        this.handleMouseClick(this.clickedSlot, this.clickedSlot.slotNumber, par3, 0);
                        this.field_85049_r = par1 - l;
                        this.field_85048_s = par2 - i1;
                        this.returningStackDestSlot = this.clickedSlot;
                        this.returningStack = this.draggedStack;
                        this.returningStackTime = Minecraft.getSystemTime();
                    }
                    else
                    {
                        this.returningStack = null;
                    }
                }
                else if (this.draggedStack != null)
                {
                    // Invalid drop target: animate the stack back to its source.
                    this.field_85049_r = par1 - l;
                    this.field_85048_s = par2 - i1;
                    this.returningStackDestSlot = this.clickedSlot;
                    this.returningStack = this.draggedStack;
                    this.returningStackTime = Minecraft.getSystemTime();
                }

                this.draggedStack = null;
                this.clickedSlot = null;
            }
        }
        else if (this.field_94076_q && !this.field_94077_p.isEmpty())
        {
            // Commit the paint-drag: the protocol is begin (0), one click per
            // collected slot (1), then end (2), each encoded with the drag mode
            // via Container.func_94534_d and sent as click mode 5.
            this.handleMouseClick((Slot) null, -999, Container.func_94534_d(0, this.field_94071_C), 5);
            iterator = this.field_94077_p.iterator();

            while (iterator.hasNext())
            {
                slot1 = (Slot) iterator.next();
                this.handleMouseClick(slot1, slot1.slotNumber, Container.func_94534_d(1, this.field_94071_C), 5);
            }

            this.handleMouseClick((Slot) null, -999, Container.func_94534_d(2, this.field_94071_C), 5);
        }
        else if (this.mc.thePlayer.inventory.getItemStack() != null)
        {
            if (par3 == this.mc.gameSettings.keyBindPickBlock.getKeyCode() + 100)
            {
                // Pick-block with a held stack = clone click (mode 3).
                this.handleMouseClick(slot, j1, par3, 3);
            }
            else
            {
                // Shift (keycodes 42/54) = quick-move (mode 1); else plain click.
                flag1 = j1 != -999 && (Keyboard.isKeyDown(42) || Keyboard.isKeyDown(54));

                if (flag1)
                {
                    this.field_94075_K = slot != null && slot.getHasStack() ? slot.getStack() : null;
                }

                this.handleMouseClick(slot, j1, par3, flag1 ? 1 : 0);
            }
        }
    }

    if (this.mc.thePlayer.inventory.getItemStack() == null)
    {
        this.field_94070_G = 0L;
    }

    this.field_94076_q = false;
}
/**
 * Returns true when the mouse is over the given slot's 16x16 area.
 * Inactive ActiveSlots are never hit.
 */
protected boolean isMouseOverSlot (Slot slot, int mouseX, int mouseY)
{
    boolean inactive = slot instanceof ActiveSlot && !((ActiveSlot) slot).getActive();

    if (inactive)
    {
        return false;
    }

    return this.isPointInRegion(slot.xDisplayPosition, slot.yDisplayPosition, 16, 16, mouseX, mouseY);
}
/**
 * Returns true when a screen-space point lies within a GUI-local rectangle.
 * The hit region is expanded by one pixel on every side, matching the
 * original behaviour.
 */
protected boolean isPointInRegion (int par1, int par2, int par3, int par4, int par5, int par6)
{
    // Translate the screen point into GUI-local coordinates.
    int localX = par5 - this.guiLeft;
    int localY = par6 - this.guiTop;

    boolean insideX = localX >= par1 - 1 && localX < par1 + par3 + 1;
    boolean insideY = localY >= par2 - 1 && localY < par2 + par4 + 1;
    return insideX && insideY;
}
/**
 * Forwards a slot interaction to the server via the player controller.
 * When a real slot is supplied, its own slot number takes precedence over
 * the caller-provided index.
 */
protected void handleMouseClick (Slot par1Slot, int par2, int par3, int par4)
{
    int slotNumber = par1Slot == null ? par2 : par1Slot.slotNumber;
    this.mc.playerController.windowClick(this.container.windowId, slotNumber, par3, par4, this.mc.thePlayer);
}
/**
 * Keyboard handling: Escape (keycode 1) or the inventory key closes the
 * screen; hotbar number keys swap the hovered stack into the hotbar; the
 * pick-block and drop keys act on the hovered slot.
 */
protected void keyTyped (char par1, int par2)
{
    if (par2 == 1 || par2 == this.mc.gameSettings.keyBindInventory.getKeyCode())
    {
        this.mc.thePlayer.closeScreen();
    }

    this.checkHotbarKeys(par2);

    if (this.mainSlot != null && this.mainSlot.getHasStack())
    {
        if (par2 == this.mc.gameSettings.keyBindPickBlock.getKeyCode())
        {
            // Mode 3 = clone / pick-block.
            this.handleMouseClick(this.mainSlot, this.mainSlot.slotNumber, 0, 3);
        }
        else if (par2 == this.mc.gameSettings.keyBindDrop.getKeyCode())
        {
            // Ctrl+drop throws the whole stack (button 1) instead of one item (button 0).
            this.handleMouseClick(this.mainSlot, this.mainSlot.slotNumber, isCtrlKeyDown() ? 1 : 0, 4);
        }
    }
}
/**
 * If a hotbar number key is pressed while hovering a slot with an empty
 * cursor, swaps the hovered stack with that hotbar slot (click mode 2).
 *
 * @param par1 the raw keyboard keycode
 * @return true when the key was consumed as a hotbar swap
 */
protected boolean checkHotbarKeys (int par1)
{
    if (this.mc.thePlayer.inventory.getItemStack() == null && this.mainSlot != null)
    {
        // Keyboard keycodes for "1".."9" are 2..10, so hotbar index = keycode - 2.
        int hotbarIndex = par1 - 2;

        if (hotbarIndex >= 0 && hotbarIndex < 9)
        {
            this.handleMouseClick(this.mainSlot, this.mainSlot.slotNumber, hotbarIndex, 2);
            return true;
        }
    }

    return false;
}
/**
 * Notifies the container when the screen closes so it can perform its
 * close-time cleanup (e.g. returning the cursor stack to the player).
 */
public void onGuiClosed ()
{
    if (this.mc.thePlayer != null)
    {
        this.container.onContainerClosed(this.mc.thePlayer);
    }
}
/**
 * Container screens never pause the game; the world keeps ticking while open.
 */
public boolean doesGuiPauseGame ()
{
    return false;
}
/**
 * Per-tick update: closes the screen if the player died while it was open.
 */
public void updateScreen ()
{
    super.updateScreen();

    if (!this.mc.thePlayer.isEntityAlive() || this.mc.thePlayer.isDead)
    {
        this.mc.thePlayer.closeScreen();
    }
}
| |
/**
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.model;
import org.hibernate.annotations.ForeignKey;
import org.hibernate.annotations.Index;
import org.hibernate.annotations.Type;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
import java.util.*;
/**
* Created by <a href="mailto:matejonnet@gmail.com">Matej Lazar</a> on 2014-11-23.
* <p>
* This class contains the build result of a project configuration, and contains additional metadata, as the build script, the
* starting and ending time of a build, the status of the build, the sources url used, the user that triggered the build, plus
* all the Artifacts that were built and all the Artifacts that were used for the final build. It stores also the buildDriverID
 * that was used to run the build, the system image where it was run, and is mapped to a BuildRecordSet, which encapsulates
* the set of buildRecord that compose a Product
*/
@Entity
public class BuildRecord implements GenericEntity<Integer> {
// NOTE(review): serialVersionUID implies this entity is Serializable,
// presumably via the GenericEntity interface — confirm.
private static final long serialVersionUID = -5472083609387609797L;

// Name of the DB sequence used to generate BuildRecord ids.
public static final String SEQUENCE_NAME = "build_record_id_seq";

@Id
private Integer id;

/**
 * Link to the latest version of the configuration settings for building this project.
 * These settings may have been updated since this record was created, so this
 * can not be used to run an exact rebuild, but it is convenient for reference
 * if a new build of the same project needs to be executed.
 * The join column "buildconfiguration_id" is the same db field used by
 * buildConfigurationAudited, thus this is a read-only field which automatically
 * changes when the buildConfigurationAudited is changed.
 */
@NotNull
@ManyToOne(cascade = { CascadeType.REFRESH })
@JoinColumn(name = "buildconfiguration_id", insertable = false, updatable = false)
@ForeignKey(name = "fk_buildrecord_buildconfiguration")
@Index(name="idx_buildrecord_buildconfiguration")
private BuildConfiguration latestBuildConfiguration;

/**
 * Contains the settings that were used at the time the build was executed.
 * Hibernate envers identifies each audited record using the "id" of the
 * original db record along with a revision number. This can be used to
 * re-run the build with the exact same settings used previously.
 */
@NotNull
@ManyToOne(cascade = { CascadeType.REFRESH })
@JoinColumns({ @JoinColumn(name = "buildconfiguration_id", referencedColumnName = "id"),
        @JoinColumn(name = "buildconfiguration_rev", referencedColumnName = "rev") })
@ForeignKey(name = "fk_buildrecord_buildconfiguration_aud")
@Index(name="idx_buildrecord_buildconfiguration_aud")
private BuildConfigurationAudited buildConfigurationAudited;

// Identifier used when accessing repository or other content stored via
// external services (see setBuildContentId).
private String buildContentId;

/**
 * The time which the build was submitted to the system.
 */
@NotNull
@Column(columnDefinition="timestamp with time zone")
private Date submitTime;

/**
 * The time when the build execution started. Note that it's possible for this to
 * be null in the case of a system error before the build is started.
 */
@Column(columnDefinition="timestamp with time zone")
private Date startTime;

/**
 * The time when the build completed. Note that it's possible for this to be null
 * if the build never finished.
 */
@Column(columnDefinition="timestamp with time zone")
private Date endTime;

// The user that triggered the build.
// @NotNull //TODO uncomment
@ManyToOne
@ForeignKey(name = "fk_buildrecord_user")
@Index(name="idx_buildrecord_user")
private User user;

/**
 * The scm repository URL used for executing the build. Note, this can be different
 * than the repository URL contained in the linked build configuration due to pre-build
 * processing tasks such as repository mirroring and automated build changes.
 */
private String scmRepoURL;

/**
 * The scm revision used for build execution. Note, this can be different than the
 * revision submitted by the user due to automated build processing steps which modify
 * the sources before executing the build. This should always be an unmodifiable commit ID
 * and should never be a tag or branch.
 */
private String scmRevision;

// Full build log; stored as a CLOB and fetched lazily because it can be large.
@Lob
// org.hibernate.type.StringClobType works for all DBs
// Use "org.hibernate.type.MaterializedClobType" from Hibernate 4.2.x
@Type(type = "org.hibernate.type.StringClobType")
@Basic(fetch = FetchType.LAZY)
private String buildLog;

// Outcome of the build; persisted by enum name so values can be reordered safely.
@Enumerated(value = EnumType.STRING)
private BuildStatus status;

/**
 * Artifacts which were produced by this build
 */
@ManyToMany
@JoinTable(name = "build_record_built_artifact_map", joinColumns = {
        @JoinColumn(name = "build_record_id", referencedColumnName = "id") }, inverseJoinColumns = {
        @JoinColumn(name = "built_artifact_id", referencedColumnName = "id") }, uniqueConstraints = @UniqueConstraint(name = "uk_build_record_id_built_artifact_id", columnNames = {
        "build_record_id", "built_artifact_id" }) )
@ForeignKey(name = "fk_build_record_built_artifact_map")
@Index(name = "idx_build_record_built_artifact_map")
private List<BuiltArtifact> builtArtifacts;

/**
 * Artifacts which are required external dependencies of this build
 */
@ManyToMany
@JoinTable(name = "build_record_artifact_dependencies_map", joinColumns = {
        @JoinColumn(name = "build_record_id", referencedColumnName = "id") }, inverseJoinColumns = {
        @JoinColumn(name = "dependency_artifact_id", referencedColumnName = "id") }, uniqueConstraints = @UniqueConstraint(name = "uk_build_record_id_dependency_artifact_id", columnNames = {
        "build_record_id", "dependency_artifact_id" }) )
@ForeignKey(name = "fk_build_record_artifact_dependencies_map")
@Index(name = "idx_build_record_artifact_dependencies_map")
private List<Artifact> dependencies;

/**
 * Driver that was used to run the build.
 */
private String buildDriverId;

/**
 * Image that was used to instantiate a build server.
 */
@ManyToOne
@ForeignKey(name = "fk_buildrecord_systemimage")
@Index(name="idx_buildrecord_systemimage")
private BuildEnvironment systemImage;

/**
 * Sets of related build records in which this build record is included
 */
@ManyToMany(mappedBy = "buildRecords")
private Set<BuildRecordSet> buildRecordSets;

/**
 * If this build was executed as part of a set, this will contain the link to the overall results of the set. Otherwise,
 * this field will be null.
 */
@ManyToOne
@ForeignKey(name = "fk_buildrecord_buildconfigsetrecord")
@Index(name="idx_buildrecord_buildconfigsetrecord")
private BuildConfigSetRecord buildConfigSetRecord;

/**
 * Contains the ID of the matching build record stored in an external system. For example, Koji/Brew build ID.
 */
private Integer externalArchiveId;
/**
 * Instantiates a new project build result with empty (never null) collections.
 */
public BuildRecord() {
    buildRecordSets = new HashSet<>();
    dependencies = new ArrayList<>();
    builtArtifacts = new ArrayList<>();
}

/**
 * JPA lifecycle callback: before this record is deleted, detach it from every
 * BuildRecordSet so the owning side of the relation stays consistent.
 */
@PreRemove
private void removeBuildRecordFromSets() {
    for (BuildRecordSet brs : buildRecordSets) {
        brs.getBuildRecords().remove(this);
    }
}
/**
 * Gets the record id.
 *
 * @return the id
 */
public Integer getId() {
    return id;
}

/**
 * Sets the record id.
 *
 * @param id the new id
 */
@Override
public void setId(Integer id) {
    this.id = id;
}

/**
 * The time when the build was submitted.
 *
 * @return the submit time
 */
public Date getSubmitTime() {
    return submitTime;
}

public void setSubmitTime(Date submitTime) {
    this.submitTime = submitTime;
}

/**
 * The time when the build execution was started.
 * The build task wait time can be determined by the difference
 * between the startTime and the submitTime.
 *
 * @return the start time, possibly null if the build never started
 */
public Date getStartTime() {
    return startTime;
}

public void setStartTime(Date startTime) {
    this.startTime = startTime;
}

/**
 * Get the time when the build finished.
 * The build duration can be determined by the difference
 * between the endTime and the startTime.
 *
 * @return the end time, possibly null if the build never finished
 */
public Date getEndTime() {
    return endTime;
}

public void setEndTime(Date endTime) {
    this.endTime = endTime;
}

/**
 * Gets the user that triggered the build.
 *
 * @return the user
 */
public User getUser() {
    return user;
}

/**
 * Sets the user.
 *
 * @param user the new user
 */
public void setUser(User user) {
    this.user = user;
}

// The actual SCM URL the build was executed from (may differ from the
// configured URL; see the field javadoc).
public String getScmRepoURL() {
    return scmRepoURL;
}

public void setScmRepoURL(String scmRepoURL) {
    this.scmRepoURL = scmRepoURL;
}

// The exact SCM revision the build was executed from.
public String getScmRevision() {
    return scmRevision;
}

public void setScmRevision(String scmRevision) {
    this.scmRevision = scmRevision;
}
/**
 * Gets the build log.
 *
 * @return the full log produced by the build
 */
public String getBuildLog() {
    return buildLog;
}

/**
 * Sets the build log.
 *
 * @param buildLog the new build log
 */
public void setBuildLog(String buildLog) {
    this.buildLog = buildLog;
}

/**
 * Gets the build status.
 *
 * @return the status
 */
public BuildStatus getStatus() {
    return status;
}

/**
 * Sets the build status.
 *
 * @param status the new status
 */
public void setStatus(BuildStatus status) {
    this.status = status;
}

/**
 * Gets the built artifacts.
 *
 * @return the artifacts produced by this build
 */
public List<BuiltArtifact> getBuiltArtifacts() {
    return builtArtifacts;
}

// Appends a single artifact to the list of artifacts produced by this build.
public void addBuiltArtifact(BuiltArtifact builtArtifact) {
    builtArtifacts.add(builtArtifact);
}

/**
 * Sets the built artifacts.
 *
 * @param builtArtifacts the new built artifacts
 */
public void setBuiltArtifacts(List<BuiltArtifact> builtArtifacts) {
    this.builtArtifacts = builtArtifacts;
}
/**
 * Gets the dependencies.
 *
 * @return the external artifacts this build depended on
 */
public List<Artifact> getDependencies() {
    return dependencies;
}

// Appends a single artifact to the list of build dependencies.
public void addDependency(Artifact artifact) {
    dependencies.add(artifact);
}

/**
 * Sets the dependencies.
 *
 * @param dependencies the new dependencies
 */
public void setDependencies(List<Artifact> dependencies) {
    this.dependencies = dependencies;
}

/**
 * Gets the build driver id.
 *
 * @return the id of the driver that ran the build
 */
public String getBuildDriverId() {
    return buildDriverId;
}

/**
 * Sets the build driver id.
 *
 * @param buildDriverId the new build driver id
 */
public void setBuildDriverId(String buildDriverId) {
    this.buildDriverId = buildDriverId;
}

/**
 * Gets the system image.
 *
 * @return the image the build server was instantiated from
 */
public BuildEnvironment getSystemImage() {
    return systemImage;
}

/**
 * Sets the system image.
 *
 * @param systemImage the new system image
 */
public void setSystemImage(BuildEnvironment systemImage) {
    this.systemImage = systemImage;
}
/**
 * @return The latest version of the build configuration used to create this build record
 */
public BuildConfiguration getLatestBuildConfiguration() {
    return latestBuildConfiguration;
}

public void setLatestBuildConfiguration(BuildConfiguration latestBuildConfiguration) {
    this.latestBuildConfiguration = latestBuildConfiguration;
}

/**
 * @return The audited version of the build configuration used to create this build record
 */
public BuildConfigurationAudited getBuildConfigurationAudited() {
    return buildConfigurationAudited;
}

public void setBuildConfigurationAudited(BuildConfigurationAudited buildConfigurationAudited) {
    this.buildConfigurationAudited = buildConfigurationAudited;
}

/**
 * @return the buildRecordSets
 */
public Set<BuildRecordSet> getBuildRecordSets() {
    return buildRecordSets;
}

/**
 * Never stores null: a null argument resets the field to an empty set.
 *
 * @param buildRecordSets the buildRecordSets to set
 */
public void setBuildRecordSets(Set<BuildRecordSet> buildRecordSets) {
    if (buildRecordSets == null) {
        this.buildRecordSets = new HashSet<>();
    } else {
        this.buildRecordSets = buildRecordSets;
    }
}

/**
 * Add this build record to a build record set, keeping both sides of the
 * bidirectional relation in sync.
 *
 * @param buildRecordSet The set to which this record will be added
 * @return True if the build record was successfully added to the given set
 */
public boolean addBuildRecordSet(BuildRecordSet buildRecordSet) {
    if (!buildRecordSet.getBuildRecords().contains(this)) {
        buildRecordSet.getBuildRecords().add(this);
    }
    return this.buildRecordSets.add(buildRecordSet);
}

public String getBuildContentId() {
    return buildContentId;
}

/**
 * @param buildContentId The identifier to use when accessing repository or other content stored via external services.
 */
public void setBuildContentId(String buildContentId) {
    this.buildContentId = buildContentId;
}

// Link to the overall result of the build set this record was part of, if any.
public BuildConfigSetRecord getBuildConfigSetRecord() {
    return buildConfigSetRecord;
}

public void setBuildConfigSetRecord(BuildConfigSetRecord buildConfigSetRecord) {
    this.buildConfigSetRecord = buildConfigSetRecord;
}
/**
 * Human-readable summary used in logs.
 *
 * Null-safe: the no-arg constructor leaves buildConfigurationAudited null, so
 * the previous unguarded buildConfigurationAudited.getProject().getName()
 * chain threw a NullPointerException for a freshly created record.
 */
@Override
public String toString() {
    String projectName = null;
    if (buildConfigurationAudited != null && buildConfigurationAudited.getProject() != null) {
        projectName = buildConfigurationAudited.getProject().getName();
    }
    return "BuildRecord [id=" + id + ", project=" + projectName
            + ", buildConfiguration=" + buildConfigurationAudited + "]";
}
// Id of the matching build record in an external system (e.g. Koji/Brew build ID).
public Integer getExternalArchiveId() {
    return externalArchiveId;
}

public void setExternalArchiveId(Integer externalArchiveId) {
    this.externalArchiveId = externalArchiveId;
}
public static class Builder {
// Mirrors every settable field of BuildRecord; collections start empty, not null.
private Integer id;

private String buildContentId;

private Date submitTime;

private Date startTime;

private Date endTime;

private BuildConfiguration latestBuildConfiguration;

private BuildConfigurationAudited buildConfigurationAudited;

private User user;

private String scmRepoURL;

private String scmRevision;

private String buildLog;

private BuildStatus status;

private List<BuiltArtifact> builtArtifacts;

private List<Artifact> dependencies;

private String buildDriverId;

private BuildEnvironment systemImage;

private Set<BuildRecordSet> buildRecordSets;

private Integer externalArchiveId;

private BuildConfigSetRecord buildConfigSetRecord;

public Builder() {
    builtArtifacts = new ArrayList<>();
    dependencies = new ArrayList<>();
    buildRecordSets = new HashSet<>();
}

// Static factory so call sites read BuildRecord.Builder.newBuilder()...build().
public static Builder newBuilder() {
    return new Builder();
}
/**
 * Assembles the BuildRecord and wires up both sides of the bidirectional
 * artifact relations. The buildRecordSet relation is only set on this side;
 * the owning BuildRecordSet must persist new memberships.
 *
 * @return the populated BuildRecord
 */
public BuildRecord build() {
    BuildRecord buildRecord = new BuildRecord();
    buildRecord.setId(id);
    buildRecord.setBuildContentId(buildContentId);
    buildRecord.setSubmitTime(submitTime);
    buildRecord.setStartTime(startTime);
    buildRecord.setEndTime(endTime);
    buildRecord.setLatestBuildConfiguration(latestBuildConfiguration);
    buildRecord.setBuildConfigurationAudited(buildConfigurationAudited);
    buildRecord.setUser(user);
    buildRecord.setScmRepoURL(scmRepoURL);
    buildRecord.setScmRevision(scmRevision);
    buildRecord.setBuildLog(buildLog);
    buildRecord.setStatus(status);
    buildRecord.setBuildDriverId(buildDriverId);
    buildRecord.setSystemImage(systemImage);
    buildRecord.setExternalArchiveId(externalArchiveId);

    if (buildConfigSetRecord != null) {
        buildRecord.setBuildConfigSetRecord(buildConfigSetRecord);
    }

    // Set the bi-directional mapping
    for (BuiltArtifact artifact : builtArtifacts) {
        artifact.addBuildRecord(buildRecord);
    }
    buildRecord.setBuiltArtifacts(builtArtifacts);

    // Set the bi-directional mapping
    for (Artifact artifact : dependencies) {
        artifact.addDependantBuildRecord(buildRecord);
    }
    buildRecord.setDependencies(dependencies);

    // Note: the buildRecordSet is the owning side, so new relations
    // need to be saved by buildRecordSet
    buildRecord.setBuildRecordSets(buildRecordSets);

    return buildRecord;
}
public Builder id(Integer id) {
this.id = id;
return this;
}
public Builder buildContentId(String buildContentId) {
this.buildContentId = buildContentId;
return this;
}
public Builder submitTime(Date submitTime) {
this.submitTime = submitTime;
return this;
}
public Builder startTime(Date startTime) {
this.startTime = startTime;
return this;
}
public Builder endTime(Date endTime) {
this.endTime = endTime;
return this;
}
public Builder latestBuildConfiguration(BuildConfiguration latestBuildConfiguration) {
this.latestBuildConfiguration = latestBuildConfiguration;
return this;
}
public Builder buildConfigurationAudited(BuildConfigurationAudited buildConfigurationAudited) {
this.buildConfigurationAudited = buildConfigurationAudited;
return this;
}
public Builder user(User user) {
this.user = user;
return this;
}
public Builder scmRepoURL(String scmRepoURL) {
this.scmRepoURL = scmRepoURL;
return this;
}
public Builder scmRevision(String scmRevision) {
this.scmRevision = scmRevision;
return this;
}
public Builder buildLog(String buildLog) {
this.buildLog = buildLog;
return this;
}
public Builder status(BuildStatus status) {
this.status = status;
return this;
}
public Builder builtArtifact(BuiltArtifact builtArtifact) {
this.builtArtifacts.add(builtArtifact);
return this;
}
public Builder builtArtifacts(List<BuiltArtifact> builtArtifacts) {
this.builtArtifacts = builtArtifacts;
return this;
}
public Builder dependency(Artifact artifact) {
this.dependencies.add(artifact);
return this;
}
public Builder dependencies(List<Artifact> dependencies) {
this.dependencies = dependencies;
return this;
}
public Builder buildDriverId(String buildDriverId) {
this.buildDriverId = buildDriverId;
return this;
}
public Builder systemImage(BuildEnvironment systemImage) {
this.systemImage = systemImage;
return this;
}
public Builder buildRecordSets(Set<BuildRecordSet> buildRecordSets) {
this.buildRecordSets = buildRecordSets;
return this;
}
public Builder buildConfigSetRecord(BuildConfigSetRecord buildConfigSetRecord) {
this.buildConfigSetRecord = buildConfigSetRecord;
return this;
}
public Builder externalArchiveId(Integer externalArchiveId) {
this.externalArchiveId = externalArchiveId;
return this;
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package parquet.hadoop;
import io.netty.buffer.ByteBuf;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.drill.common.exceptions.DrillRuntimeException;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
import org.apache.drill.exec.store.parquet.DirectCodecFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import parquet.bytes.BytesInput;
import parquet.column.ColumnDescriptor;
import parquet.column.page.DataPage;
import parquet.column.page.DataPageV1;
import parquet.column.page.DataPageV2;
import parquet.column.page.DictionaryPage;
import parquet.column.page.PageReadStore;
import parquet.column.page.PageReader;
import parquet.format.DataPageHeaderV2;
import parquet.format.PageHeader;
import parquet.format.Util;
import parquet.format.converter.ParquetMetadataConverter;
import parquet.hadoop.CodecFactory.BytesDecompressor;
import parquet.hadoop.metadata.ColumnChunkMetaData;
import parquet.hadoop.util.CompatibilityUtil;
import static parquet.format.converter.ParquetMetadataConverter.fromParquetStatistics;
/**
 * {@link PageReadStore} implementation that reads each column chunk's pages
 * incrementally from an open {@link FSDataInputStream} instead of
 * materialising whole chunks up front. One stream is opened per column via
 * {@link #addColumn(ColumnDescriptor, ColumnChunkMetaData)}; all streams and
 * any outstanding page buffer are released by {@link #close()}.
 */
public class ColumnChunkIncReadStore implements PageReadStore {

    // stateless converter; safe to share across instances
    private static final ParquetMetadataConverter parquetMetadataConverter = new ParquetMetadataConverter();

    private final DirectCodecFactory codecFactory;
    private final BufferAllocator allocator;
    private final FileSystem fs;
    private final Path path;
    private final long rowCount;
    // one open stream per addColumn() call; closed in close()
    private final List<FSDataInputStream> streams = new ArrayList<>();

    public ColumnChunkIncReadStore(long rowCount, DirectCodecFactory codecFactory, BufferAllocator allocator,
        FileSystem fs, Path path) {
        this.codecFactory = codecFactory;
        this.allocator = allocator;
        this.fs = fs;
        this.path = path;
        this.rowCount = rowCount;
    }

    /**
     * {@link PageReader} over a single column chunk, backed by a stream that
     * the caller has positioned at the chunk's starting offset.
     */
    public class ColumnChunkIncPageReader implements PageReader {

        ColumnChunkMetaData metaData;
        ColumnDescriptor columnDescriptor;
        long fileOffset;
        long size;
        private long valueReadSoFar = 0;

        private DictionaryPage dictionaryPage;
        private FSDataInputStream in;
        private BytesDecompressor decompressor;

        // buffer backing the page most recently returned by readPage();
        // released on the next readPage() call or in close()
        private ByteBuf lastPage;

        public ColumnChunkIncPageReader(ColumnChunkMetaData metaData, ColumnDescriptor columnDescriptor, FSDataInputStream in) {
            this.metaData = metaData;
            this.columnDescriptor = columnDescriptor;
            this.size = metaData.getTotalSize();
            this.fileOffset = metaData.getStartingPos();
            this.in = in;
            this.decompressor = codecFactory.getDecompressor(metaData.getCodec());
        }

        /**
         * Reads the chunk's dictionary page if the chunk starts with one.
         * When the first page is not a dictionary page the stream position is
         * restored so {@link #readPage()} sees that header again.
         *
         * @return the (cached) dictionary page, or {@code null} if the chunk has none
         */
        @Override
        public DictionaryPage readDictionaryPage() {
            if (dictionaryPage == null) {
                PageHeader pageHeader = new PageHeader();
                long pos = 0;
                try {
                    pos = in.getPos();
                    pageHeader = Util.readPageHeader(in);
                    if (pageHeader.getDictionary_page_header() == null) {
                        // not a dictionary page; rewind so the header can be re-read
                        in.seek(pos);
                        return null;
                    }
                    dictionaryPage =
                        new DictionaryPage(
                            decompressor.decompress(BytesInput.from(in, pageHeader.compressed_page_size), pageHeader.getUncompressed_page_size()),
                            pageHeader.getDictionary_page_header().getNum_values(),
                            parquetMetadataConverter.getEncoding(pageHeader.dictionary_page_header.encoding)
                        );
                } catch (Exception e) {
                    throw new DrillRuntimeException("Error reading dictionary page." +
                        "\nFile path: " + path.toUri().getPath() +
                        "\nRow count: " + rowCount +
                        "\nColumn Chunk Metadata: " + metaData +
                        "\nPage Header: " + pageHeader +
                        "\nFile offset: " + fileOffset +
                        "\nSize: " + size +
                        "\nValue read so far: " + valueReadSoFar +
                        "\nPosition: " + pos, e);
                }
            }
            return dictionaryPage;
        }

        @Override
        public long getTotalValueCount() {
            return metaData.getValueCount();
        }

        /**
         * Returns the next data page of the chunk, or {@code null} once all of
         * the chunk's values have been read. A dictionary page encountered
         * mid-stream is captured on first occurrence (skipped afterwards) and
         * unknown page types are skipped.
         */
        @Override
        public DataPage readPage() {
            PageHeader pageHeader = new PageHeader();
            try {
                if (lastPage != null) {
                    // free the buffer handed out by the previous call
                    lastPage.release();
                    lastPage = null;
                }
                while (valueReadSoFar < metaData.getValueCount()) {
                    pageHeader = Util.readPageHeader(in);
                    int uncompressedPageSize = pageHeader.getUncompressed_page_size();
                    int compressedPageSize = pageHeader.getCompressed_page_size();
                    switch (pageHeader.type) {
                        case DICTIONARY_PAGE:
                            if (dictionaryPage == null) {
                                // NOTE(review): the second constructor argument is the page's
                                // uncompressed size here, while readDictionaryPage() passes the
                                // dictionary's num_values -- confirm which one is intended.
                                dictionaryPage =
                                    new DictionaryPage(
                                        decompressor.decompress(BytesInput.from(in, compressedPageSize), uncompressedPageSize),
                                        uncompressedPageSize,
                                        parquetMetadataConverter.getEncoding(pageHeader.dictionary_page_header.encoding)
                                    );
                            } else {
                                in.skip(compressedPageSize);
                            }
                            break;
                        case DATA_PAGE:
                            valueReadSoFar += pageHeader.data_page_header.getNum_values();
                            ByteBuf buf = allocator.buffer(compressedPageSize);
                            lastPage = buf;
                            ByteBuffer buffer = buf.nioBuffer(0, compressedPageSize);
                            // a single getBuf call may fill only part of the buffer;
                            // loop until the whole page body is read
                            while (buffer.remaining() > 0) {
                                CompatibilityUtil.getBuf(in, buffer, compressedPageSize);
                            }
                            return new DataPageV1(
                                decompressor.decompress(BytesInput.from(buffer, 0, compressedPageSize), uncompressedPageSize),
                                pageHeader.data_page_header.num_values,
                                uncompressedPageSize,
                                fromParquetStatistics(pageHeader.data_page_header.statistics, columnDescriptor.getType()),
                                parquetMetadataConverter.getEncoding(pageHeader.data_page_header.repetition_level_encoding),
                                parquetMetadataConverter.getEncoding(pageHeader.data_page_header.definition_level_encoding),
                                parquetMetadataConverter.getEncoding(pageHeader.data_page_header.encoding)
                            );
                        // TODO - finish testing this with more files
                        case DATA_PAGE_V2:
                            valueReadSoFar += pageHeader.data_page_header_v2.getNum_values();
                            buf = allocator.buffer(compressedPageSize);
                            lastPage = buf;
                            buffer = buf.nioBuffer(0, compressedPageSize);
                            while (buffer.remaining() > 0) {
                                CompatibilityUtil.getBuf(in, buffer, compressedPageSize);
                            }
                            DataPageHeaderV2 dataHeaderV2 = pageHeader.getData_page_header_v2();
                            // the repetition/definition level blocks sit at the front of the
                            // page body; the remainder is the (possibly compressed) data
                            int dataSize = compressedPageSize - dataHeaderV2.getRepetition_levels_byte_length() - dataHeaderV2.getDefinition_levels_byte_length();
                            BytesInput decompressedPageData =
                                decompressor.decompress(
                                    BytesInput.from(buffer, 0, compressedPageSize),
                                    uncompressedPageSize);
                            return new DataPageV2(
                                dataHeaderV2.getNum_rows(),
                                dataHeaderV2.getNum_nulls(),
                                dataHeaderV2.getNum_values(),
                                BytesInput.from(decompressedPageData.toByteBuffer(), 0, dataHeaderV2.getRepetition_levels_byte_length()),
                                BytesInput.from(decompressedPageData.toByteBuffer(),
                                    dataHeaderV2.getRepetition_levels_byte_length(),
                                    dataHeaderV2.getDefinition_levels_byte_length()),
                                parquetMetadataConverter.getEncoding(dataHeaderV2.getEncoding()),
                                BytesInput.from(decompressedPageData.toByteBuffer(),
                                    dataHeaderV2.getRepetition_levels_byte_length() + dataHeaderV2.getDefinition_levels_byte_length(),
                                    dataSize),
                                uncompressedPageSize,
                                fromParquetStatistics(dataHeaderV2.getStatistics(), columnDescriptor.getType()),
                                dataHeaderV2.isIs_compressed()
                            );
                        default:
                            in.skip(compressedPageSize);
                            break;
                    }
                }
                // all values consumed; release the stream. NOTE(review): the same
                // stream is also closed by ColumnChunkIncReadStore.close(); the
                // double close is assumed harmless for FSDataInputStream -- confirm.
                in.close();
                return null;
            } catch (OutOfMemoryRuntimeException e) {
                throw e; // throw as it is
            } catch (Exception e) {
                throw new DrillRuntimeException("Error reading page." +
                    "\nFile path: " + path.toUri().getPath() +
                    "\nRow count: " + rowCount +
                    "\nColumn Chunk Metadata: " + metaData +
                    "\nPage Header: " + pageHeader +
                    "\nFile offset: " + fileOffset +
                    "\nSize: " + size +
                    "\nValue read so far: " + valueReadSoFar, e);
            }
        }

        /** Releases the buffer backing the last page handed out, if any. */
        void close() {
            if (lastPage != null) {
                lastPage.release();
                lastPage = null;
            }
        }
    }

    private Map<ColumnDescriptor, ColumnChunkIncPageReader> columns = new HashMap<>();

    /**
     * Opens a dedicated stream for the given column chunk, seeks it to the
     * chunk's starting position and registers a page reader for it.
     */
    public void addColumn(ColumnDescriptor descriptor, ColumnChunkMetaData metaData) throws IOException {
        FSDataInputStream in = fs.open(path);
        streams.add(in);
        in.seek(metaData.getStartingPos());
        ColumnChunkIncPageReader reader = new ColumnChunkIncPageReader(metaData, descriptor, in);
        columns.put(descriptor, reader);
    }

    /** Closes all column streams and releases any outstanding page buffers. */
    public void close() throws IOException {
        for (FSDataInputStream stream : streams) {
            stream.close();
        }
        for (ColumnChunkIncPageReader reader : columns.values()) {
            reader.close();
        }
    }

    @Override
    public PageReader getPageReader(ColumnDescriptor descriptor) {
        return columns.get(descriptor);
    }

    @Override
    public long getRowCount() {
        return rowCount;
    }
}
| |
/*
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*
*/
package org.nd4j.linalg.ops.transforms;
import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.ScalarOp;
import org.nd4j.linalg.api.ops.TransformOp;
import org.nd4j.linalg.api.ops.impl.accum.distances.CosineSimilarity;
import org.nd4j.linalg.api.ops.impl.scalar.ScalarMax;
import org.nd4j.linalg.api.ops.impl.transforms.*;
import org.nd4j.linalg.api.ops.impl.transforms.comparison.Eps;
import org.nd4j.linalg.api.ops.impl.transforms.comparison.GreaterThanOrEqual;
import org.nd4j.linalg.api.ops.impl.transforms.comparison.LessThanOrEqual;
import org.nd4j.linalg.convolution.Convolution;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.INDArrayIndex;
import org.nd4j.linalg.indexing.NDArrayIndex;
import org.nd4j.linalg.util.ArrayUtil;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Functional interface for the different op classes
*
* @author Adam Gibson
*/
public class Transforms {
/**
 * Max pooling over a 4d tensor.
 *
 * @param input the 4d input ndarray; the first two dimensions are flattened
 *              into a single leading dimension before pooling
 * @param ds the down-sampling factors for the two trailing (image) dimensions
 * @param ignoreBorder whether to ignore the borders of images
 * @return the max-pooled ndarray, with the reshaped signal's shape
 */
public static INDArray maxPool(INDArray input, int[] ds, boolean ignoreBorder) {
assert input.length() >= 2 : "Max pooling requires an ndarray of >= length 2";
assert ds.length == 2 : "Down sampling must be of length 2 (the factors used for each image size";
assert input.shape().length == 4 : "Only supports 4 dimensional tensors";
// collapse the first two dimensions into one batch dimension
int batchSize = ArrayUtil.prod(new int[]{input.size(0) * input.size(1)});
//possibly look at a input implementation instead (looping over the outer dimension slice wise with calling input repeatedly)
//use the given rows and columns if ignoring borders
int rows = input.size(2);
int cols = input.size(3);
INDArray signalNDArray = input.reshape(batchSize, 1, rows, cols);
INDArray zz = Nd4j.create(signalNDArray.shape());
// NOTE(review): dividing by ds[i]^2 (Math.pow(ds[i], 2)) rather than ds[i]
// looks suspicious for a pooling window of size ds[i] -- confirm the bound.
int rowIter = ignoreBorder ? (int) (rows / Math.pow(ds[0], 2)) : rows;
int colIter = ignoreBorder ? (int) (cols / Math.pow(ds[1], 2)) : cols;
rowIter = Math.max(1,rowIter);
colIter = Math.max(1, colIter);
for (int i = 0; i < signalNDArray.size(0); i++) {
for (int j = 0; j < signalNDArray.size(1); j++) {
for (int k = 0; k < rowIter; k++) {
int zk = k / ds[0];
for (int l = 0; l < colIter; l++) {
int zl = l / ds[1];
// NOTE(review): reads from 'input' while iterating the reshaped
// signalNDArray's dimensions -- verify this indexing is intended.
double num = input.getDouble(i, j, k, l);
double zzGet = zz.getDouble(i, j, zk, zl);
// keep the running maximum for the target cell (zk, zl)
zz.putScalar(new int[]{i, j, zk, zl}, Math.max(num, zzGet));
}
}
}
}
return zz.reshape(signalNDArray.shape());
}
/**
 * Down sampling a signal over the first stride dimensions by convolving with
 * an averaging kernel and then keeping every stride-th element.
 *
 * @param d1 the data to down sample
 * @param stride the stride at which to downsample
 * @return the down sampled ndarray
 */
public static INDArray downSample(INDArray d1, int[] stride) {
// averaging kernel: ones(stride) / prod(stride)
INDArray d = Nd4j.ones(stride);
d.divi(ArrayUtil.prod(stride));
// pad the lower-rank operand with leading singleton dimensions so both
// convolution operands have the same rank
if(stride.length != d1.shape().length) {
if(stride.length > d1.shape().length) {
int[] newShape = new int[stride.length];
Arrays.fill(newShape, 1);
int delta = Math.abs(d.shape().length - newShape.length);
// NOTE(review): this copies the kernel's shape (d.shape()) into the new
// shape for d1 -- confirm d1.shape() was not intended here.
for(int i = newShape.length - 1; i >= delta; i--)
newShape[i] = d.shape()[i - delta];
d1 = d1.reshape(newShape);
}
else {
int[] newStride = new int[d1.shape().length];
Arrays.fill(newStride, 1);
int delta = Math.abs(d.shape().length - newStride.length);
for(int i = newStride.length - 1; i >= delta; i--)
newStride[i] = d.shape()[i - delta];
d = d.reshape(newStride);
}
}
// valid convolution with the averaging kernel
INDArray ret = Convolution.convn(d1, d, Convolution.Type.VALID);
// then sub-sample: every stride[i]-th element along the strided dimensions,
// the full extent along the remaining dimensions
INDArrayIndex[] indices = new INDArrayIndex[d1.shape().length];
for(int i = 0; i < indices.length; i++) {
if(i < stride.length) {
indices[i] = NDArrayIndex.interval(0,stride[i],d1.size(i) ,true);
}
else {
indices[i] = NDArrayIndex.interval(0,d1.size(i) ,true);
}
}
ret = ret.get(indices);
return ret;
}
/**
 * Pooled expectations (average) over the last two dimensions.
 *
 * @param toPool the ndarray to pool
 * @param stride the 2d stride across the ndarray: {row stride, column stride}
 * @return an ndarray of the input's shape holding the pooled block values
 */
public static INDArray avgPooling(INDArray toPool, int[] stride) {
    int nDims = toPool.shape().length;
    assert nDims >= 3 : "NDArray must have 3 dimensions";
    int nRows = toPool.shape()[nDims - 2];
    int nCols = toPool.shape()[nDims - 1];
    int yStride = stride[0], xStride = stride[1];
    INDArray blocks = Nd4j.create(toPool.shape());
    // Use floating-point division before Math.ceil: with the previous integer
    // division (nRows / yStride) the ceil was a no-op, so a trailing partial
    // block was silently skipped.
    for (int iR = 0; iR < Math.ceil((double) nRows / yStride); iR++) {
        INDArrayIndex rows = NDArrayIndex.interval(iR * yStride, iR * yStride, true);
        for (int jC = 0; jC < Math.ceil((double) nCols / xStride); jC++) {
            INDArrayIndex cols = NDArrayIndex.interval(jC * xStride, (jC * xStride) + 1, true);
            // NOTE(review): the row interval uses identical start/end while the
            // column interval spans one element more -- confirm this asymmetry.
            INDArray blockVal = toPool.get(rows, cols).sum(toPool.shape().length - 1).mean(toPool.shape().length - 1);
            blocks.put(
                    new INDArrayIndex[]{rows, cols},
                    blockVal.permute(new int[]{1, 2, 0}))
                    .repmat(new int[]{rows.length(), cols.length()});
        }
    }
    return blocks;
}
/**
 * Pooled expectations (sum) over the last two dimensions.
 *
 * @param toPool the ndarray to pool
 * @param stride the 2d stride across the ndarray: {row stride, column stride}
 * @return an ndarray of the input's shape holding the pooled block values
 */
public static INDArray sumPooling(INDArray toPool, int[] stride) {
    int nDims = toPool.shape().length;
    assert nDims >= 3 : "NDArray must have 3 dimensions";
    int nRows = toPool.shape()[nDims - 2];
    int nCols = toPool.shape()[nDims - 1];
    int yStride = stride[0], xStride = stride[1];
    INDArray blocks = Nd4j.create(toPool.shape());
    // Use floating-point division before Math.ceil: with the previous integer
    // division (nRows / yStride) the ceil was a no-op, so a trailing partial
    // block was silently skipped.
    for (int iR = 0; iR < Math.ceil((double) nRows / yStride); iR++) {
        INDArrayIndex rows = NDArrayIndex.interval(iR * yStride, iR * yStride, true);
        for (int jC = 0; jC < Math.ceil((double) nCols / xStride); jC++) {
            INDArrayIndex cols = NDArrayIndex.interval(jC * xStride, (jC * xStride) + 1, true);
            // NOTE(review): the row interval uses identical start/end while the
            // column interval spans one element more -- confirm this asymmetry.
            INDArray blockVal = toPool.get(rows, cols).sum(toPool.shape().length - 1).sum(toPool.shape().length - 1);
            blocks.put(
                    new INDArrayIndex[]{rows, cols},
                    blockVal.permute(new int[]{1, 2, 0}))
                    .repmat(new int[]{rows.length(), cols.length()});
        }
    }
    return blocks;
}
/**
 * Upsampling a signal (specifically the first 2 dimensions).
 *
 * @param d the data to upsample
 * @param scale the per-dimension scale factors
 * @return an ndarray with each dimension of {@code d} multiplied by its scale
 *     factor. NOTE(review): the value-scatter step was never implemented
 *     upstream, so the returned array currently holds only the values
 *     {@code Nd4j.create} initialises it with.
 */
public static INDArray upSample(INDArray d, INDArray scale) {
    // per dimension: the source index that each upsampled position maps to,
    // built by marking every scale-th slot and taking a cumulative sum
    List<INDArray> idx = new ArrayList<>();
    for (int i = 0; i < d.shape().length; i++) {
        INDArray tmp = Nd4j.zeros(d.size(i) * (int) scale.getDouble(i), 1);
        int[] indices = ArrayUtil.range(0, (int) scale.getDouble(i) * d.size(i), (int) scale.getDouble(i));
        NDArrayIndex index = new NDArrayIndex(indices);
        tmp.put(new NDArrayIndex[]{index}, 1);
        INDArray put = tmp.cumsum(0);
        idx.add(put.sub(1));
    }
    // result shape: element-wise product of d's shape and the scale factors
    INDArray ret = Nd4j.create(ArrayUtil.toInts(ArrayUtil.toNDArray(d.shape()).muli(scale)));
    // The original implementation looped over ret and idx here with an empty
    // loop body (pure dead code); those loops were removed. The step that
    // should scatter d's values into ret via the idx maps is still missing.
    return ret;
}
/**
 * Cosine similarity between two vectors.
 *
 * @param d1 the first vector
 * @param d2 the second vector
 * @return the cosine similarity of the two arrays
 */
public static double cosineSim(INDArray d1, INDArray d2) {
    CosineSimilarity op = new CosineSimilarity(d1, d2, d1.length());
    Number result = Nd4j.getExecutioner().execAndReturn(op).getFinalResult();
    return result.doubleValue();
}
/**
 * Normalizes data in place to zero mean and unit variance per column:
 * subtracts the column mean and divides by the column standard deviation.
 *
 * @param toNormalize the ndarray to normalize (mutated)
 * @return the same (now normalized) ndarray
 */
public static INDArray normalizeZeroMeanAndUnitVariance(INDArray toNormalize) {
    INDArray columnMeans = toNormalize.mean(0);
    INDArray columnStds = toNormalize.std(0);
    toNormalize.subiRowVector(columnMeans);
    // pad the standard deviations so columns with zero variance do not divide by zero
    columnStds.addi(Nd4j.EPS_THRESHOLD);
    toNormalize.diviRowVector(columnStds);
    return toNormalize;
}
/**
 * Scales the array by 1 / norm2, yielding a unit-length vector.
 *
 * @param toScale the ndarray to scale
 * @return the scaled ndarray, or the input unchanged when its norm is not positive
 */
public static INDArray unitVec(INDArray toScale) {
    double norm = toScale.norm2Number().doubleValue();
    if (!(norm > 0)) {
        // zero (or NaN) norm: nothing to scale
        return toScale;
    }
    if (toScale.data().dataType() == (DataBuffer.Type.FLOAT)) {
        return Nd4j.getBlasWrapper().scal(1.0f / (float) norm, toScale);
    }
    return Nd4j.getBlasWrapper().scal(1.0 / norm, toScale);
}
/**
 * Returns the negative of an ndarray.
 *
 * @param ndArray the ndarray to take the negative of
 * @return the element-wise negation (copy behaviour follows {@code Nd4j.copyOnOps})
 */
public static INDArray neg(INDArray ndArray) {
return neg(ndArray, Nd4j.copyOnOps);
}
/**
 * Element-wise floor function.
 *
 * @param ndArray the input ndarray
 * @return the floored ndarray (copy behaviour follows {@code Nd4j.copyOnOps})
 */
public static INDArray floor(INDArray ndArray) {
return floor(ndArray, Nd4j.copyOnOps);
}
/**
 * Element-wise ceiling function.
 *
 * @param ndArray the input ndarray
 * @return the ceiled ndarray (copy behaviour follows {@code Nd4j.copyOnOps})
 */
public static INDArray ceiling(INDArray ndArray) {
return ceiling(ndArray, Nd4j.copyOnOps);
}
/**
 * Element-wise ceiling function.
 *
 * @param ndArray the input ndarray
 * @param copyOnOps whether to write the result into a duplicate (true) or in place (false)
 * @return the ceiled ndarray
 */
public static INDArray ceiling(INDArray ndArray, boolean copyOnOps) {
return exec(copyOnOps ? new Ceil(ndArray, ndArray.dup()) : new Ceil(ndArray, ndArray));
}
/**
 * Signum function of this ndarray.
 *
 * @param toSign the input ndarray
 * @return the element-wise sign (copy behaviour follows {@code Nd4j.copyOnOps})
 */
public static INDArray sign(INDArray toSign) {
return sign(toSign, Nd4j.copyOnOps);
}
/** Stabilizes values with factor k (copy behaviour follows {@code Nd4j.copyOnOps}). */
public static INDArray stabilize(INDArray ndArray, double k) {
return stabilize(ndArray, k, Nd4j.copyOnOps);
}
/**
 * Abs function.
 *
 * <p>Note: unlike the other wrappers this one always duplicates
 * ({@code dup = true}) instead of following {@code Nd4j.copyOnOps}.
 *
 * @param ndArray the input ndarray
 * @return the element-wise absolute value
 */
public static INDArray abs(INDArray ndArray) {
return abs(ndArray, true);
}
/** Element-wise exponential (copy behaviour follows {@code Nd4j.copyOnOps}). */
public static INDArray exp(INDArray ndArray) {
return exp(ndArray, Nd4j.copyOnOps);
}
/** Element-wise hard tanh (copy behaviour follows {@code Nd4j.copyOnOps}). */
public static INDArray hardTanh(INDArray ndArray) {
return hardTanh(ndArray, Nd4j.copyOnOps);
}
/**
 * Identity transform.
 *
 * @param ndArray the input ndarray
 * @return the ndarray itself or a copy, per {@code Nd4j.copyOnOps}
 */
public static INDArray identity(INDArray ndArray) {
return identity(ndArray, Nd4j.copyOnOps);
}
/**
 * Pow function.
 *
 * @param ndArray the ndarray to raise the power of
 * @param power the power to raise by
 * @return the ndarray raised to this power (copy behaviour follows {@code Nd4j.copyOnOps})
 */
public static INDArray pow(INDArray ndArray, Number power) {
return pow(ndArray, power, Nd4j.copyOnOps);
}
/**
 * Element-wise rounding function.
 *
 * @param ndArray the input ndarray
 * @return the rounded ndarray (copy behaviour follows {@code Nd4j.copyOnOps})
 */
public static INDArray round(INDArray ndArray) {
return round(ndArray, Nd4j.copyOnOps);
}
/**
 * Sigmoid function.
 *
 * @param ndArray the input ndarray
 * @return the element-wise sigmoid (copy behaviour follows {@code Nd4j.copyOnOps})
 */
public static INDArray sigmoid(INDArray ndArray) {
return sigmoid(ndArray, Nd4j.copyOnOps);
}
/**
 * Sqrt function.
 *
 * @param ndArray the input ndarray
 * @return the element-wise square root (copy behaviour follows {@code Nd4j.copyOnOps})
 */
public static INDArray sqrt(INDArray ndArray) {
return sqrt(ndArray, Nd4j.copyOnOps);
}
/**
 * Tanh function.
 *
 * @param ndArray the input ndarray
 * @return the element-wise tanh (copy behaviour follows {@code Nd4j.copyOnOps})
 */
public static INDArray tanh(INDArray ndArray) {
return tanh(ndArray, Nd4j.copyOnOps);
}
/** Element-wise natural logarithm (copy behaviour follows {@code Nd4j.copyOnOps}). */
public static INDArray log(INDArray ndArray) {
return log(ndArray, Nd4j.copyOnOps);
}
/** Eps op (copy behaviour follows {@code Nd4j.copyOnOps}). */
public static INDArray eps(INDArray ndArray) {
return eps(ndArray, Nd4j.copyOnOps);
}
/**
 * Element-wise greater-than-or-equal comparison of two arrays.
 *
 * @param first the left-hand array
 * @param ndArray the right-hand array
 * @return the comparison result (copy behaviour follows {@code Nd4j.copyOnOps})
 */
public static INDArray greaterThanOrEqual(INDArray first, INDArray ndArray) {
return greaterThanOrEqual(first, ndArray, Nd4j.copyOnOps);
}
/**
 * Element-wise less-than-or-equal comparison of two arrays.
 *
 * @param first the left-hand array
 * @param ndArray the right-hand array
 * @return the comparison result (copy behaviour follows {@code Nd4j.copyOnOps})
 */
public static INDArray lessThanOrEqual(INDArray first, INDArray ndArray) {
return lessThanOrEqual(first, ndArray, Nd4j.copyOnOps);
}
/**
 * Element-wise less-than-or-equal comparison of two arrays.
 * (The previous Javadoc mislabelled this as "Eps function".)
 *
 * @param first the left-hand array
 * @param ndArray the right-hand array
 * @param dup whether to operate on a duplicate of {@code first}
 * @return the comparison result
 */
public static INDArray lessThanOrEqual(INDArray first, INDArray ndArray, boolean dup) {
return exec(dup ? new LessThanOrEqual(first.dup(), ndArray) : new LessThanOrEqual(first, ndArray));
}
/**
 * Element-wise greater-than-or-equal comparison of two arrays.
 * (The previous Javadoc mislabelled this as "Eps function".)
 *
 * @param first the left-hand array
 * @param ndArray the right-hand array
 * @param dup whether to operate on a duplicate of {@code first}
 * @return the comparison result
 */
public static INDArray greaterThanOrEqual(INDArray first, INDArray ndArray, boolean dup) {
return exec(dup ? new GreaterThanOrEqual(first.dup(), ndArray) : new GreaterThanOrEqual(first, ndArray));
}
/**
 * Eps op.
 *
 * @param ndArray the input ndarray
 * @param dup whether to operate on a duplicate, leaving the input untouched
 * @return the result ndarray
 */
public static INDArray eps(INDArray ndArray, boolean dup) {
return exec(dup ? new Eps(ndArray.dup()) : new Eps(ndArray));
}
/**
 * Element-wise floor function.
 *
 * @param ndArray the input ndarray
 * @param dup whether to operate on a duplicate, leaving the input untouched
 * @return the floored ndarray
 */
public static INDArray floor(INDArray ndArray, boolean dup) {
return exec(dup ? new Floor(ndArray.dup()) : new Floor(ndArray));
}
/**
 * Signum function of this ndarray.
 *
 * @param toSign the input ndarray
 * @param dup whether to write the result into a duplicate (true) or in place (false)
 * @return the element-wise sign
 */
public static INDArray sign(INDArray toSign, boolean dup) {
return exec(dup ? new Sign(toSign, toSign.dup()) : new Sign(toSign));
}
/**
 * Element-wise maximum of the ndarray and the scalar k.
 * (The previous Javadoc mislabelled this as "Stabilize".)
 *
 * @param ndArray the ndarray
 * @param k the scalar to compare against
 * @param dup whether to operate on a duplicate, leaving the input untouched
 * @return the result ndarray
 */
public static INDArray max(INDArray ndArray, double k, boolean dup) {
return exec(dup ? new ScalarMax(ndArray.dup(), k) : new ScalarMax(ndArray, k));
}
/**
 * Element-wise maximum of the ndarray and the scalar k
 * (copy behaviour follows {@code Nd4j.copyOnOps}).
 *
 * @param ndArray the ndarray
 * @param k the scalar to compare against
 * @return the result ndarray
 */
public static INDArray max(INDArray ndArray, double k) {
return max(ndArray, k, Nd4j.copyOnOps);
}
/**
 * Stabilize values with factor k.
 *
 * @param ndArray the ndarray
 * @param k the stabilization factor
 * @param dup whether to write the result into a duplicate (true) or in place (false)
 * @return the stabilized ndarray
 */
public static INDArray stabilize(INDArray ndArray, double k, boolean dup) {
return exec(dup ? new Stabilize(ndArray, ndArray.dup(), k) : new Stabilize(ndArray, k));
}
/**
 * Abs function.
 *
 * @param ndArray the input ndarray
 * @param dup whether to write the result into a duplicate (true) or in place (false)
 * @return the element-wise absolute value
 */
public static INDArray abs(INDArray ndArray, boolean dup) {
return exec(dup ? new Abs(ndArray, ndArray.dup()) : new Abs(ndArray));
}
/**
 * Element-wise exponential.
 *
 * @param ndArray the input ndarray
 * @param dup whether to write the result into a duplicate (true) or in place (false)
 * @return the result ndarray
 */
public static INDArray exp(INDArray ndArray, boolean dup) {
return exec(dup ? new Exp(ndArray, ndArray.dup()) : new Exp(ndArray));
}
/**
 * Hard tanh.
 *
 * @param ndArray the input
 * @param dup whether to duplicate the ndarray and return it as the result
 * @return the output
 */
public static INDArray hardTanh(INDArray ndArray, boolean dup) {
return exec(dup ? new HardTanh(ndArray, ndArray.dup()) : new HardTanh(ndArray));
}
/**
 * Identity function.
 *
 * @param ndArray the input ndarray
 * @param dup whether to write the result into a duplicate (true) or in place (false)
 * @return the result ndarray
 */
public static INDArray identity(INDArray ndArray, boolean dup) {
return exec(dup ? new Identity(ndArray, ndArray.dup()) : new Identity(ndArray));
}
/**
 * Pow function.
 *
 * @param ndArray the ndarray to raise the power of
 * @param power the power to raise by
 * @param dup whether to write the result into a duplicate (true) or in place (false)
 * @return the result ndarray
 */
public static INDArray pow(INDArray ndArray, Number power, boolean dup) {
return exec(dup ? new Pow(ndArray, ndArray.dup(), power.doubleValue()) : new Pow(ndArray, power.doubleValue()));
}
/**
 * Element-wise rounding function.
 *
 * @param ndArray the ndarray
 * @param dup whether to write the result into a duplicate (true) or in place (false)
 * @return the rounded ndarray
 */
public static INDArray round(INDArray ndArray, boolean dup) {
return exec(dup ? new Round(ndArray, ndArray.dup()) : new Round(ndArray));
}
/**
 * Sigmoid function.
 *
 * @param ndArray the input ndarray
 * @param dup whether to write the result into a duplicate (true) or in place (false)
 * @return the result ndarray
 */
public static INDArray sigmoid(INDArray ndArray, boolean dup) {
return exec(dup ? new Sigmoid(ndArray, ndArray.dup()) : new Sigmoid(ndArray));
}
/**
 * Sqrt function.
 *
 * @param ndArray the input ndarray
 * @param dup whether to write the result into a duplicate (true) or in place (false)
 * @return the result ndarray
 */
public static INDArray sqrt(INDArray ndArray, boolean dup) {
return exec(dup ? new Sqrt(ndArray, ndArray.dup()) : new Sqrt(ndArray));
}
/**
 * Tanh function.
 *
 * @param ndArray the input ndarray
 * @param dup whether to write the result into a duplicate (true) or in place (false)
 * @return the result ndarray
 */
public static INDArray tanh(INDArray ndArray, boolean dup) {
return exec(dup ? new Tanh(ndArray, ndArray.dup()) : new Tanh(ndArray));
}
/**
 * Element-wise natural logarithm.
 *
 * @param ndArray the input ndarray
 * @param dup whether to write the result into a duplicate (true) or in place (false)
 * @return the result ndarray
 */
public static INDArray log(INDArray ndArray, boolean dup) {
return exec(dup ? new Log(ndArray, ndArray.dup()) : new Log(ndArray));
}
/**
 * Element-wise negation.
 *
 * @param ndArray the input ndarray
 * @param dup whether to write the result into a duplicate (true) or in place (false)
 * @return the result ndarray
 */
public static INDArray neg(INDArray ndArray, boolean dup) {
return exec(dup ? new Negative(ndArray, ndArray.dup()) : new Negative(ndArray));
}
/**
 * Executes the given scalar op on the default executioner.
 *
 * @param op the op to execute (not a factory, as the old Javadoc claimed)
 * @return the op's result ndarray (its z argument)
 * @throws IllegalStateException if the op's input ndarray has already been freed
 */
private static INDArray exec(ScalarOp op) {
if(op.x().isCleanedUp())
throw new IllegalStateException("NDArray already freed");
return Nd4j.getExecutioner().exec(op).z();
}
/**
 * Executes the given transform op on the default executioner.
 *
 * @param op the op to execute (not a factory, as the old Javadoc claimed)
 * @return the op's result ndarray
 * @throws IllegalStateException if the op's input ndarray has already been freed
 */
private static INDArray exec(TransformOp op) {
if(op.x().isCleanedUp())
throw new IllegalStateException("NDArray already freed");
return Nd4j.getExecutioner().execAndReturn(op);
}
}
| |
/**
* Copyright T Jake Luciani
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package lucandra;
import java.io.*;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.math.BigInteger;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.zip.DataFormatException;
import java.util.zip.Inflater;
import org.apache.cassandra.config.ConfigurationException;
import org.apache.cassandra.config.Schema;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.filter.QueryPath;
import org.apache.cassandra.service.AbstractCassandraDaemon;
import org.apache.cassandra.service.StorageProxy;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.thrift.*;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.FieldCache;
import org.apache.thrift.TException;
import org.xerial.snappy.Snappy;
import org.xerial.snappy.SnappyException;
public class CassandraUtils
{
private static final Logger logger = Logger.getLogger(CassandraUtils.class);
public static final Properties properties;
public static final String keySpace;
public static int retryAttempts;
public static int retryAttemptSleep;
// how often to check for cache invalidation
public static int cacheInvalidationInterval;
public static final ConsistencyLevel consistency;
public static boolean useCompression;
public static final int port;
public static final String webHost;
// Initialize logging in such a way that it checks for config changes every
// 10 seconds.
static
{
String config = System.getProperty("log4j.configuration", "log4j.properties");
URL configLocation = null;
try
{
// try loading from a physical location first.
configLocation = new URL(config);
}
catch (MalformedURLException ex)
{
// load from the classpath.
configLocation = AbstractCassandraDaemon.class.getClassLoader().getResource(config);
if (configLocation == null)
throw new RuntimeException("Couldn't figure out log4j configuration.");
}
// watch the file and re-read it every 10s so log levels can be changed at runtime
PropertyConfigurator.configureAndWatch(configLocation.getFile(), 10000);
org.apache.log4j.Logger.getLogger(AbstractCassandraDaemon.class).info("Logging initialized");
}
// Solandra global properties init
static
{
try
{
properties = new Properties();
properties.load(CassandraUtils.class.getClassLoader().getResourceAsStream("solandra.properties"));
keySpace = properties.getProperty("solandra.keyspace", "L");
retryAttempts = Integer.valueOf(properties.getProperty("cassandra.retries", "1024"));
retryAttemptSleep = Integer.valueOf(properties.getProperty("cassandra.retries.sleep", "100"));
// how often to check for cache invalidation
cacheInvalidationInterval = Integer.valueOf(properties.getProperty(
"solandra.cache.invalidation.check.interval", "1000"));
consistency = ConsistencyLevel.valueOf(properties.getProperty("solandra.consistency", ConsistencyLevel.ONE
.name()));
useCompression = Boolean.valueOf(properties.getProperty("solandra.compression", "true"));
port = Integer.parseInt(properties.getProperty("solandra.port", "8983"));
webHost = properties.getProperty("solandra.host", "0.0.0.0");
/*try
{
setFinalStatic(FieldCache.class.getDeclaredField("DEFAULT"), new org.apache.lucene.search.LucandraFieldCache());
logger.info("Sucessfully Hijacked FieldCacheImpl");
}
catch (SecurityException e)
{
logger.info("Unable to hijack the FieldCache");
}
catch (NoSuchFieldException e)
{
throw new RuntimeException(e);
}
catch (Exception e)
{
throw new RuntimeException(e);
}*/
}
catch (FileNotFoundException e)
{
throw new RuntimeException("Can't locate solandra.properties file");
}
catch (IOException e)
{
throw new RuntimeException("Error reading solandra.properties file");
}
}
    // Column family names.
    public static final String termVecColumnFamily = "TI";
    public static final String docColumnFamily = "Docs";
    public static final String metaInfoColumnFamily = "TL";
    public static final String fieldCacheColumnFamily = "FC";
    public static final String schemaInfoColumnFamily = "SI";
    // Well-known column/key names within those families.
    public static final String positionVectorKey = "P";
    public static final String offsetVectorKey = "O";
    public static final String termFrequencyKey = "F";
    public static final String normsKey = "N";
    public static final String schemaKey = "S";
    public static final String cachedCol = "CC";
    public static final ByteBuffer cachedColBytes = ByteBufferUtil.bytes(cachedCol);
    public static final ByteBuffer positionVectorKeyBytes = ByteBufferUtil.bytes(positionVectorKey);
    public static final ByteBuffer offsetVectorKeyBytes = ByteBufferUtil.bytes(offsetVectorKey);
    public static final ByteBuffer termFrequencyKeyBytes = ByteBufferUtil.bytes(termFrequencyKey);
    public static final ByteBuffer normsKeyBytes = ByteBufferUtil.bytes(normsKey);
    public static final ByteBuffer schemaKeyBytes = ByteBufferUtil.bytes(schemaKey);
    public static final List<Number> emptyArray = Arrays.asList(new Number[] {});
    // Key-component delimiter (U+FFFF).
    public static final String delimeter = new String("\uffff");
    public static final byte[] delimeterBytes = ByteBufferUtil.bytes(delimeter).array();
    // Sentinel token used as an upper bound in term scans.
    public static final String finalToken = new String("\ufffe\ufffe");
    public static final ByteBuffer finalTokenBytes = ByteBufferUtil.bytes(finalToken);
    // Column under which per-document metadata is stored.
    public static final String documentMetaField = delimeter + "META" + delimeter;
    public static final ByteBuffer documentMetaFieldBytes = ByteBufferUtil.bytes(documentMetaField);
    // When true, hashKeyBytes() MD5-hashes the index-name key component.
    public static final boolean indexHashingEnabled = Boolean.valueOf(System.getProperty("index.hashing",
            "true"));
    public static final QueryPath metaColumnPath = new QueryPath(CassandraUtils.docColumnFamily);
    public static final Charset UTF_8 = Charset.forName("UTF-8");
    // Lifecycle state; guarded by the synchronized startup/stop methods below.
    private static boolean cassandraStarted = false;
    private static CassandraDaemon daemon = null;
    public static String fakeToken = String.valueOf(System.nanoTime());
public static synchronized void setStartup()
{
if (cassandraStarted)
{
throw new RuntimeException("You attempted to set the casandra started flag after it has started");
}
cassandraStarted = true;
}
    /**
     * Joins the cluster as a client-mode (non-data) node via gossip. No-op if
     * Cassandra was already started. Initialization failures terminate the JVM
     * with exit code 2.
     *
     * @throws IOException declared for API compatibility; I/O failures
     *         currently exit the JVM instead of propagating
     */
    public static synchronized void startupClient() throws IOException
    {
        if (cassandraStarted)
            return;
        cassandraStarted = true;
        try
        {
            System.setProperty("cassandra-foreground", "1");
            StorageService.instance.initClient();
            logger.info("Started Solandra in client mode... waiting for gossip information");
            // Fixed wait for gossip/ring state — presumably long enough in
            // practice; TODO confirm there is no explicit readiness signal.
            Thread.sleep(10000);
            // createCassandraSchema();
        }
        catch (IOException e2)
        {
            e2.printStackTrace();
            System.exit(2);
        }
        catch (ConfigurationException e2)
        {
            e2.printStackTrace();
            System.exit(2);
        }
        catch (InterruptedException e)
        {
            e.printStackTrace();
            System.exit(2);
        }
        return;
    }
// Start Cassandra up!!!
public static synchronized void startupServer() throws IOException
{
if (cassandraStarted)
return;
cassandraStarted = true;
System.setProperty("cassandra-foreground", "1");
daemon = new CassandraDaemon();
try
{
// run in own thread
new Thread(new Runnable() {
public void run()
{
daemon.activate();
}
}).start();
}
catch (Throwable e)
{
e.printStackTrace();
System.exit(2);
}
// wait for startup to complete
try
{
daemon.getStartedLatch().await(1, TimeUnit.HOURS);
createCassandraSchema();
}
catch (InterruptedException e1)
{
logger.error("Cassandra not started after 1 hour");
System.exit(3);
}
}
public static synchronized void stopServer()
{
if (!cassandraStarted)
return;
daemon.deactivate();
daemon = null;
cassandraStarted = false;
}
    /**
     * Creates the Solandra keyspace and its column families (Docs, TI, FC,
     * TL, SI) if they do not already exist. Requires Cassandra to be started
     * first. A random sleep staggers concurrent creation attempts from
     * multiple nodes; the existence check is repeated after the sleep.
     *
     * @throws IOException if the keyspace definition is rejected
     */
    public static void createCassandraSchema() throws IOException
    {
        if (!cassandraStarted)
        {
            logger.error("start cassandra before adding schema");
            return;
        }
        if (Schema.instance.getNonSystemTables().contains(keySpace))
        {
            logger.info("Found Solandra specific schema");
            return;
        }
        try
        {
            Thread.sleep(1000);
            int sleep = new Random().nextInt(6000);
            logger.info("\nSleeping " + sleep + "ms to stagger solandra schema creation\n");
            Thread.sleep(sleep);
        }
        catch (InterruptedException e1)
        {
            e1.printStackTrace();
            System.exit(2);
        }
        // Re-check: another node may have created the schema while we slept.
        if (Schema.instance.getNonSystemTables().contains(keySpace))
        {
            logger.info("Found Solandra specific schema");
            return;
        }
        List<CfDef> cfs = new ArrayList<CfDef>();
        CfDef cf = new CfDef();
        cf.setName(docColumnFamily);
        cf.setComparator_type("BytesType");
        cf.setKey_cache_size(0);
        cf.setRow_cache_size(0);
        cf.setComment("Stores the document and field data for each doc with docId as key");
        cf.setKeyspace(keySpace);
        cfs.add(cf);
        cf = new CfDef();
        cf.setName(termVecColumnFamily);
        cf.setComparator_type("lucandra.VIntType");
        cf.setKey_cache_size(0);
        cf.setRow_cache_size(0);
        cf.setComment("Stores term information with indexName/field/term as composite key");
        cf.setKeyspace(keySpace);
        cfs.add(cf);
        cf = new CfDef();
        cf.setName(fieldCacheColumnFamily);
        cf.setComparator_type("lucandra.VIntType");
        cf.setKey_cache_size(0);
        cf.setRow_cache_size(0);
        cf.setComment("Stores term per doc per field");
        cf.setKeyspace(keySpace);
        cfs.add(cf);
        cf = new CfDef();
        cf.setName(metaInfoColumnFamily);
        cf.setComparator_type("BytesType");
        cf.setKey_cache_size(0);
        cf.setRow_cache_size(0);
        cf.setComment("Stores ordered list of terms for a given field with indexName/field as composite key");
        cf.setKeyspace(keySpace);
        cfs.add(cf);
        cf = new CfDef();
        cf.setName(schemaInfoColumnFamily);
        cf.setColumn_type("Super");
        cf.setComparator_type("BytesType");
        cf.setKey_cache_size(0);
        cf.setRow_cache_size(0);
        cf.setComment("Stores solr and index id information");
        cf.setKeyspace(keySpace);
        cfs.add(cf);
        // Single-datacenter SimpleStrategy with replication factor 1.
        Map<String, String> strategyOptions = new HashMap<String, String>();
        strategyOptions.put("replication_factor", "1");
        KsDef solandraKS = new KsDef()
                .setName(keySpace)
                .setStrategy_class("org.apache.cassandra.locator.SimpleStrategy")
                .setStrategy_options(strategyOptions)
                .setCf_defs(cfs);
        CassandraServer cs = new CassandraServer();
        try
        {
            cs.system_add_keyspace(solandraKS);
        }
        catch (InvalidRequestException e)
        {
            throw new IOException(e);
        }
        catch (TException e)
        {
            throw new IOException(e);
        }
        catch (Exception e)
        {
            throw new IOException(e);
        }
        logger.info("Added Solandra specific schema");
    }
public static ByteBuffer createColumnName(Fieldable field)
{
return ByteBuffer.wrap(createColumnName(field.name(), field.stringValue()));
}
public static ByteBuffer createColumnName(Term term)
{
return ByteBuffer.wrap(createColumnName(term.field(), term.text()));
}
public static byte[] createColumnName(String field, String text)
{
// case of all terms
if (field.equals("") || text == null)
return delimeterBytes;
try
{
return (field + delimeter + text).getBytes("UTF-8");
}
catch (UnsupportedEncodingException e)
{
throw new RuntimeException("JVM doesn't support UTF-8", e);
}
}
public static Term parseTerm(String termStr)
{
int index = termStr.indexOf(delimeter);
if (index < 0)
{
throw new RuntimeException("invalid term format: " + index + " " + termStr);
}
return new Term(termStr.substring(0, index), termStr.substring(index + delimeter.length()));
}
public static final byte[] intToByteArray(int value)
{
return new byte[] { (byte) (value >>> 24), (byte) (value >>> 16), (byte) (value >>> 8), (byte) value };
}
public static final int byteArrayToInt(ByteBuffer b)
{
return (b.get(b.position() + 0) << 24) + ((b.get(b.position() + 1) & 0xFF) << 16)
+ ((b.get(b.position() + 2) & 0xFF) << 8) + (b.get(b.position() + 3) & 0xFF);
}
public static final ByteBuffer intVectorToByteArray(List<Number> intVector)
{
if (intVector.size() == 0)
return ByteBufferUtil.EMPTY_BYTE_BUFFER;
if (intVector.get(0) instanceof Byte)
return ByteBuffer.wrap(new byte[] { intVector.get(0).byteValue() });
ByteBuffer buffer = ByteBuffer.allocate(4 * (intVector.size() + 1));
// Number of int's
buffer.putInt(intVector.size());
for (Number i : intVector)
{
buffer.putInt(i.intValue());
}
buffer.flip();
return buffer;
}
public static final int[] byteArrayToIntArray(ByteBuffer b)
{
if (b.remaining() % 4 != 0)
throw new RuntimeException("Not a valid int array:" + b.remaining());
int[] intArray = new int[b.remaining() / 4];
int idx = 0;
for (int i = b.position(); i < b.limit(); i += 4)
{
intArray[idx++] = (b.get(i) << 24) + ((b.get(i + 1) & 0xFF) << 16) + ((b.get(i + 2) & 0xFF) << 8)
+ (b.get(i + 3) & 0xFF);
}
return intArray;
}
public static void addMutations(Map<ByteBuffer, RowMutation> mutationList, String columnFamily, byte[] column,
ByteBuffer key, byte[] value)
{
addMutations(mutationList, columnFamily, ByteBuffer.wrap(column), key, ByteBuffer.wrap(value));
}
public static void addMutations(Map<ByteBuffer, RowMutation> mutationList, String columnFamily, byte[] column,
ByteBuffer key, ByteBuffer value)
{
addMutations(mutationList, columnFamily, ByteBuffer.wrap(column), key, value);
}
    /**
     * Adds an insert or delete to the per-row-key mutation batch. A null value
     * means delete: the single column when {@code column} is non-null,
     * otherwise the entire column family. Deletes are timestamped 1 ms in the
     * past — presumably so an insert issued in the same millisecond wins;
     * TODO confirm.
     *
     * @param mutationList batch being built, keyed by row key
     */
    public static void addMutations(Map<ByteBuffer, RowMutation> mutationList, String columnFamily, ByteBuffer column,
            ByteBuffer key, ByteBuffer value)
    {
        // Find or create row mutation
        RowMutation rm = mutationList.get(key);
        if (rm == null)
        {
            rm = new RowMutation(CassandraUtils.keySpace, key);
            mutationList.put(key, rm);
        }
        if (value == null)
        { // remove
            if (column != null)
            {
                rm.delete(new QueryPath(columnFamily, null, column), System.currentTimeMillis()-1);
            }
            else
            {
                rm.delete(new QueryPath(columnFamily), System.currentTimeMillis()-1);
            }
        }
        else
        { // insert
            rm.add(new QueryPath(columnFamily, null, column), value, System.currentTimeMillis());
        }
    }
public static void robustInsert(ConsistencyLevel cl, RowMutation... mutations)
{
int attempts = 0;
while (attempts++ < retryAttempts)
{
try
{
StorageProxy.mutate(Arrays.asList(mutations), cl);
return;
}
catch (UnavailableException e)
{
}
catch (TimeoutException e)
{
}
try
{
Thread.sleep(retryAttemptSleep);
}
catch (InterruptedException e)
{
}
}
throw new RuntimeException("insert failed after 10 attempts");
}
public static List<Row> robustRead(ConsistencyLevel cl, ReadCommand... rc) throws IOException
{
List<Row> rows = null;
int attempts = 0;
while (attempts++ < retryAttempts)
{
try
{
rows = StorageProxy.read(Arrays.asList(rc), cl);
return rows;
}
catch (UnavailableException e1)
{
}
catch (TimeoutException e1)
{
}
catch (InvalidRequestException e)
{
throw new IOException(e);
}
try
{
Thread.sleep(retryAttemptSleep);
}
catch (InterruptedException e)
{
}
}
throw new IOException("Read command failed after " + retryAttempts + "attempts");
}
public static List<Row> robustRead(ByteBuffer key, QueryPath qp, List<ByteBuffer> columns, ConsistencyLevel cl)
throws IOException
{
ReadCommand rc = new SliceByNamesReadCommand(CassandraUtils.keySpace, key, qp, columns);
return robustRead(cl, rc);
}
public static BigInteger md5hash(ByteBuffer data)
{
byte[] result = FBUtilities.hash(data);
BigInteger hash = new BigInteger(result);
return hash.abs();
}
public static ByteBuffer hashBytes(byte[] key)
{
byte[] hashBytes = null;
try
{
hashBytes = md5hash(ByteBuffer.wrap(key)).toString().getBytes("UTF-8");
}
catch (UnsupportedEncodingException e)
{
throw new RuntimeException(e);
}
ByteBuffer hashBuf = ByteBuffer.allocate(hashBytes.length + delimeterBytes.length);
hashBuf.put(hashBytes);
hashBuf.put(delimeterBytes);
hashBuf.flip();
return hashBuf;
}
    /**
     * Builds a row key by concatenating the given components. The
     * second-to-last component must be the delimiter. When index hashing is
     * enabled, the first component (the index name) is replaced by its hash —
     * which already ends in a delimiter (see hashBytes) — so the first
     * explicit delimiter component is skipped during concatenation.
     *
     * @throws IllegalStateException if the key shape is malformed
     */
    public static ByteBuffer hashKeyBytes(byte[]... keys)
    {
        byte hashedKey[] = null;
        if (keys.length <= 1 || !Arrays.equals(keys[keys.length - 2], delimeterBytes))
            throw new IllegalStateException("malformed key");
        byte[] indexName = keys[0];
        if (indexHashingEnabled)
        {
            // Count delimiter components (the trailing one is assumed).
            int delimiterCount = 1;
            for (int i = 0; i < keys.length - 2; i++)
            {
                if (Arrays.equals(keys[i], delimeterBytes))
                {
                    delimiterCount++;
                }
            }
            if (delimiterCount > 2)
                throw new IllegalStateException("key contains too many delimiters");
            indexName = hashBytes(indexName).array(); // safe, uses .wrap
        }
        // no hashing, just combine the arrays together
        int totalBytes = indexName.length;
        for (int i = 1; i < keys.length; i++)
        {
            // for index hashing we've already add the delimiter
            if (indexHashingEnabled && i == 1)
                continue;
            totalBytes += keys[i].length;
        }
        hashedKey = new byte[totalBytes];
        System.arraycopy(indexName, 0, hashedKey, 0, indexName.length);
        int currentLen = indexName.length;
        for (int i = 1; i < keys.length; i++)
        {
            // for index hashing we've already add the delimiter
            if (indexHashingEnabled && i == 1)
                continue;
            System.arraycopy(keys[i], 0, hashedKey, currentLen, keys[i].length);
            currentLen += keys[i].length;
        }
        return ByteBuffer.wrap(hashedKey);
    }
    /**
     * Decodes a variable-length int, consuming bytes from the buffer (the
     * position advances). Returns 0 for an empty buffer. See readVInt for the
     * non-consuming variant.
     */
    public static int mreadVInt(ByteBuffer buf)
    {
        int length = buf.remaining();
        if (length == 0)
            return 0;
        byte b = buf.get();
        int i = b & 0x7F;
        // High bit (0x80) set means another 7-bit group follows.
        for (int pos = 1, shift = 7; (b & 0x80) != 0 && pos < length; shift += 7, pos++)
        {
            b = buf.get();
            i |= (b & 0x7F) << shift;
        }
        return i;
    }
    /**
     * Decodes a variable-length int using absolute gets, leaving the buffer's
     * position unchanged. Returns 0 for an empty buffer. See mreadVInt for
     * the consuming variant.
     */
    public static int readVInt(ByteBuffer buf)
    {
        int length = buf.remaining();
        if (length == 0)
            return 0;
        byte b = buf.get(buf.position());
        int i = b & 0x7F;
        // High bit (0x80) set means another 7-bit group follows.
        for (int pos = 1, shift = 7; (b & 0x80) != 0 && pos < length; shift += 7, pos++)
        {
            b = buf.get(buf.position() + pos);
            i |= (b & 0x7F) << shift;
        }
        return i;
    }
public static byte[] writeVInt(int i)
{
int length = 0;
int p = i;
while ((p & ~0x7F) != 0)
{
p >>>= 7;
length++;
}
length++;
byte[] buf = new byte[length];
int pos = 0;
while ((i & ~0x7F) != 0)
{
buf[pos] = ((byte) ((i & 0x7f) | 0x80));
i >>>= 7;
pos++;
}
buf[pos] = (byte) i;
return buf;
}
public static byte[] compress(byte[] input) throws IOException
{
if(!useCompression)
return input;
return Snappy.compress(input);
}
public static byte[] decompress(byte[] input) throws IOException
{
if(!useCompression)
return input;
return Snappy.uncompress(input);
}
    //Java lets you do EVIL things
    // Reflectively overwrites a static final field by first stripping its
    // FINAL modifier through Field's private "modifiers" field.
    // NOTE(review): this trick depends on internal reflection details that
    // newer JVMs restrict — confirm it still works on the target runtime
    // before reusing (currently only referenced from dead code).
    public static void setFinalStatic(Field field, Object newValue) throws Exception {
        field.setAccessible(true);
        Field modifiersField = Field.class.getDeclaredField("modifiers");
        modifiersField.setAccessible(true);
        modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL);
        field.set(null, newValue);
    }
}
| |
package core.resources.opd;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Set;
import javax.swing.text.DateFormatter;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.apache.log4j.Logger;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import core.ErrorConstants;
import core.classes.hr.HrAttendance;
import core.classes.opd.Queue;
import core.classes.opd.Visit;
import flexjson.JSONSerializer;
import flexjson.transformer.DateTransformer;
import lib.driver.api.driver_class.user.UserDBDriver;
import lib.driver.hr.driver_class.HrAttendanceDBDriver;
import lib.driver.opd.driver_class.QueueDBDriver;
import lib.driver.opd.driver_class.VisitDBDriver;
import core.classes.api.user.AdminPermission;
import core.classes.api.user.AdminUser;
import core.classes.api.user.AdminUserroles;
import core.resources.api.user.UserResource;
/**
 * Defines the generic REST services for managing the OPD patient queue:
 * manual and automatic (round-robin) assignment of patients to doctors,
 * check-in/check-out, and queue status/type administration.
 *
 * @author Prabhath Jayampathi
 * @version 1.0
 */
@Path("Queue")
public class QueueResource {
    final static Logger logger = Logger.getLogger(QueueResource.class);
    //
    // Daily cap of queued patients per doctor.
    public final static int MAX_PATIENT_PER_DAY = 5;
    // Round-robin cursor used by addToQueueAuto().
    public static int rotationNumber = 0;
    public static class QueueStatus {
        public int user;
        // NOTE(review): static, so this value is shared by every QueueStatus
        // instance even though callers write it through instance references
        // (qs.qStatus = 1) — confirm per-user status is not expected here.
        public static int qStatus = 0; // 0 : Open , 1 : Full , 2 : OnHold , 3 : Redirect
    }
    //
    public static int qType = 0; // 0 : Regular , 1 : Visit
    // Doctors whose queues are currently full/on-hold/redirected.
    public static ArrayList<QueueStatus> queueStatusList = new ArrayList<QueueStatus>();
    //
    // NOTE(review): name carries a typo ("Dcotor") but the field is public,
    // so renaming would break external references.
    public static int lastAssignedDcotor = -1;
    QueueDBDriver queueDBDriver = new QueueDBDriver();
    HrAttendanceDBDriver hrAttendanceDBDriver = new HrAttendanceDBDriver();
    /**
     * Adds a patient to a specific doctor's queue. If that doctor has already
     * reached MAX_PATIENT_PER_DAY, marks the doctor's queue full instead.
     *
     * @param qJson JSON with "queueRemarks", "patient" (patient id),
     *              "queueAssignedBy" (user id) and "queueAssignedTo"
     *              (employee id, translated to a user id)
     * @return serialized queue entry with status "True", a status "False"
     *         object naming the full doctor, an error JSON object, or null
     * @throws JSONException declared for the framework; JSON errors are
     *         caught and reported in the returned error object
     */
    @POST
    @Path("/addPatientToQueue")
    @Produces(MediaType.TEXT_PLAIN)
    @Consumes(MediaType.APPLICATION_JSON)
    public String addToQueue(JSONObject qJson) throws JSONException {
        logger.info("add patient to queue");
        UserDBDriver userDBDriver = new UserDBDriver();
        Queue queue = new Queue();
        try {
            queue.setQueueRemarks(qJson.getString("queueRemarks"));
            queue.setQueueTokenAssignTime(new Date());
            queue.setQueueStatus("Waiting");
            int patientID = qJson.getInt("patient");
            int assignedBy = qJson.getInt("queueAssignedBy");
            // "queueAssignedTo" arrives as an employee id; map it to a user id.
            int assignedTo = userDBDriver.getUserByEmpId(qJson.getInt("queueAssignedTo")).getUserId();
            lastAssignedDcotor = assignedTo;
            if((new QueueDBDriver().getQueuePatientsByUserID(assignedTo).size()) == MAX_PATIENT_PER_DAY)
            {
                System.out.println("Making Q Full for " + assignedTo);
                QueueStatus qs = new QueueStatus();
                qs.user = assignedTo;
                // NOTE(review): qStatus is static, so this write affects the
                // shared flag, not just this entry — confirm intended.
                qs.qStatus = 1;
                hrAttendanceDBDriver.UpdateAttendance(qs.qStatus, qs.user);
                queueStatusList.add(qs);
                JSONObject jsonobj = new JSONObject();
                jsonobj.put("status","False");
                jsonobj.put("full",assignedTo);
                return jsonobj.toString();
            }
            else
            {
                queueDBDriver.addToQueue(queue, patientID, assignedBy, assignedTo);
                logger.info("successfully queue added");
                JSONSerializer jsonSerializer = new JSONSerializer();
                JSONObject jsonobje=new JSONObject(jsonSerializer.include("patient").serialize(queue)).put("status","True").put("full","");
                return jsonobje.toString();
            }
        } catch (JSONException e) {
            logger.error("error adding queue: "+e.getMessage());
            JSONObject jsonErrorObject = new JSONObject();
            jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
            jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
            return jsonErrorObject.toString();
        }catch (RuntimeException e)
        {
            logger.error("error adding queue: "+e.getMessage());
            JSONObject jsonErrorObject = new JSONObject();
            jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
            jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
            return jsonErrorObject.toString();
        } catch (Exception e) {
            logger.error("error adding queue: "+e.getMessage());
            return null;
        }
    }
    /**
     * Adds a patient to a queue, choosing the doctor automatically by
     * round-robin over the doctors currently available for the given visit
     * type. If the chosen doctor has reached MAX_PATIENT_PER_DAY, the queue is
     * marked full instead.
     *
     * @param qJson     JSON with "queueRemarks", "patient" and "queueAssignedBy"
     * @param visittype visit type used to filter available doctors
     * @return serialized queue entry, a status/availability JSON object, an
     *         error JSON object, or null
     */
    @POST
    @Path("/addPatientToQueueAuto/{visittype}")
    @Produces(MediaType.TEXT_PLAIN)
    @Consumes(MediaType.APPLICATION_JSON)
    public String addToQueueAuto(JSONObject qJson, @PathParam("visittype") int visittype) throws JSONException {
        logger.info("add patient to queue automatically");
        UserDBDriver userDBDriver = new UserDBDriver();
        Queue queue = new Queue();
        try {
            queue.setQueueRemarks(qJson.getString("queueRemarks"));
            queue.setQueueTokenAssignTime(new Date());
            queue.setQueueStatus("Waiting");
            int patientID = qJson.getInt("patient");
            int assignedBy = qJson.getInt("queueAssignedBy");
            List<HrAttendance> attendants = hrAttendanceDBDriver.getAllAvailableAttendanceByType(visittype);
            int empId = 0;
            // Round-robin selection driven by the shared rotationNumber cursor.
            if(rotationNumber < attendants.size())
            {
                empId = attendants.get(rotationNumber).getHrEmployee().getEmpId();
                rotationNumber++;
            }
            else
            {
                if(attendants.size() == 0)
                {
                    // No doctor is available for this visit type.
                    JSONObject jsonobj = new JSONObject();
                    jsonobj.put("status","False");
                    jsonobj.put("full","");
                    jsonobj.put("available", "false");
                    return jsonobj.toString();
                }
                // Cursor ran past the list; wrap around to the first doctor.
                rotationNumber = 0;
                empId = attendants.get(rotationNumber).getHrEmployee().getEmpId();
            }
            AdminUser user = userDBDriver.getUserByEmpId(empId);
            //////////
            int assignedTo = user.getUserId();//qJson.getInt("queueAssignedTo");
            lastAssignedDcotor = assignedTo;
            if((new QueueDBDriver().getQueuePatientsByUserID(assignedTo).size()) == MAX_PATIENT_PER_DAY)
            {
                System.out.println("Making Q Full for " + assignedTo);
                QueueStatus qs = new QueueStatus();
                qs.user = assignedTo;
                // NOTE(review): qStatus is static, so this write affects the
                // shared flag, not just this entry — confirm intended.
                qs.qStatus = 1;
                hrAttendanceDBDriver.UpdateAttendance(qs.qStatus, qs.user);
                queueStatusList.add(qs);
                JSONObject jsonobj = new JSONObject();
                jsonobj.put("status","False");
                jsonobj.put("full",assignedTo);
                jsonobj.put("available", "true");
                return jsonobj.toString();
            }
            else
            {
                queueDBDriver.addToQueue(queue, patientID, assignedBy, assignedTo);
                logger.info("successfully queue added automatically");
                JSONSerializer jsonSerializer = new JSONSerializer();
                JSONObject jsonobje=new JSONObject(jsonSerializer.include("patient").exclude("queueAssignedTo.*").serialize(queue)).put("status","True").put("full","");
                return jsonobje.toString();
            }
        } catch (JSONException e) {
            logger.error("error adding queue automatically: "+e.getMessage());
            JSONObject jsonErrorObject = new JSONObject();
            jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
            jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
            return jsonErrorObject.toString();
        }catch (RuntimeException e)
        {
            logger.error("error adding queue automatically: "+e.getMessage());
            JSONObject jsonErrorObject = new JSONObject();
            jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
            jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
            return jsonErrorObject.toString();
        } catch (Exception e) {
            logger.error("error adding queue: "+e.getMessage());
            return null;
        }
    }
/**
* @param pID
* @return
* @throws JSONException
*/
@GET
@Path("/checkinPatient/{PID}")
@Produces(MediaType.TEXT_PLAIN)
public String checkinPatient(@PathParam("PID") int P) throws JSONException {
logger.info("checkin patient");
try {
int status = queueDBDriver.checkInPatient(P);
if (status == 1){
logger.info("successfully checked patient");
return String.valueOf(status);
}
else{
logger.info("patient not checked");
return String.valueOf(status);
}
}catch (RuntimeException e)
{
logger.error("error checking in patient: "+e.getMessage());
JSONObject jsonErrorObject = new JSONObject();
jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
return jsonErrorObject.toString();
} catch (Exception e) {
logger.error("error checking in patient: "+e.getMessage());
return null;
}
}
/**
* @param pID
* @return
* @throws JSONException
*/
@GET
@Path("/checkoutPatient/{PID}/{userId}")
@Produces(MediaType.TEXT_PLAIN)
public String checkoutPatient(@PathParam("PID") int pID, @PathParam("userId") int userId) throws JSONException {
logger.info("checkout patient");
try {
int status = queueDBDriver.checkoutPatient(pID);
if((new QueueDBDriver().getQueuePatientsByUserID(userId).size()) < MAX_PATIENT_PER_DAY)
{
hrAttendanceDBDriver.UpdateAttendance(0, userId);
for (QueueStatus queuestatus : queueStatusList) {
if (queuestatus.user == userId) {
queuestatus.qStatus = 0;
}
}
}
if (status == 1){
logger.info("successfully checked patient");
return String.valueOf(status);
}
else
logger.info("patient not checked");
return String.valueOf(status);
} catch (RuntimeException e)
{
logger.error("error checking out patient: "+e.getMessage());
JSONObject jsonErrorObject = new JSONObject();
jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
return jsonErrorObject.toString();
}catch (Exception e) {
logger.error("error checking out patient: "+e.getMessage());
return null;
}
}
@GET
@Path("/getQueuePatientsByUserID/{userid}")
@Produces(MediaType.APPLICATION_JSON)
public String getQueuePatientsByUserID(@PathParam("userid") int userid) throws JSONException {
logger.info("get queue patient by user id");
try {
List<Queue> queueRecord = queueDBDriver
.getQueuePatientsByUserID(userid);
JSONSerializer serializer = new JSONSerializer();
logger.info("successfully getting queue patient");
return serializer
.include("patient.patientGender", "patient.patientTitle",
"patient.patientFullName", "patient.patientID","patient.patientHIN",
"queueTokenNo", "queueStatus").exclude("*")
.serialize(queueRecord);
} catch (RuntimeException e)
{
logger.error("error getting queue: "+e.getMessage());
JSONObject jsonErrorObject = new JSONObject();
jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
return jsonErrorObject.toString();
}catch (Exception e) {
logger.error("error getting queue: "+e.getMessage());
return null;
}
}
@GET
@Path("/getQueuePatientsByDoctorID/{doctorid}")
@Produces(MediaType.APPLICATION_JSON)
public String getQueuePatientsByDoctorID(@PathParam("doctorid") int doctorid) throws JSONException {
logger.info("get queue patient by user id");
try {
List<Queue> queueRecord = queueDBDriver
.getQueuePatientsByDoctorID(doctorid);
JSONSerializer serializer = new JSONSerializer();
logger.info("successfully getting queue patient");
return serializer
.include("patient.patientGender", "patient.patientTitle",
"patient.patientFullName", "patient.patientID","patient.patientHIN",
"queueTokenNo", "queueStatus").exclude("*")
.serialize(queueRecord);
} catch (RuntimeException e)
{
logger.error("error getting queue: "+e.getMessage());
JSONObject jsonErrorObject = new JSONObject();
jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
return jsonErrorObject.toString();
}catch (Exception e) {
logger.error("error getting queue: "+e.getMessage());
return null;
}
}
@GET
@Path("/isPatientInQueue/{patientID}")
@Produces(MediaType.APPLICATION_JSON)
public String isPatientInQueue(@PathParam("patientID") int patientID) throws JSONException {
logger.info("is patient in queue");
try {
JSONSerializer serializer = new JSONSerializer();
Queue q = queueDBDriver.isPatientInQueue(patientID);
logger.info("successfully checked patient in queue");
return serializer
.include("patient.patientID","patient.patientFullName","patient.patientTitle", "queueStatus", "queueTokenNo","queueAssignedTo.hrEmployee.firstName","queueAssignedTo.hrEmployee.lastName")
.exclude("*").serialize(q);
} catch (RuntimeException e)
{
logger.error("error checking patient in queue: "+e.getMessage());
JSONObject jsonErrorObject = new JSONObject();
jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
return jsonErrorObject.toString();
}catch (Exception e) {
logger.error("error checking patient in queue: "+e.getMessage());
return null;
}
}
@GET
@Path("/getCurrentInPatient/{doctor}")
@Produces(MediaType.APPLICATION_JSON)
public String getCurrentInPatient(@PathParam("doctor") int doctor) throws JSONException {
logger.info("get current in patient");
try {
JSONSerializer serializer = new JSONSerializer();
Queue q = queueDBDriver.getCurrentInPatient(doctor);
logger.info("successfully getting current patient");
return serializer
.include("patient.patientID", "queueStatus", "queueTokenNo")
.exclude("*").serialize(q);
} catch (RuntimeException e)
{
logger.error("error getting current patient: "+e.getMessage());
JSONObject jsonErrorObject = new JSONObject();
jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
return jsonErrorObject.toString();
}catch (Exception e) {
logger.error("error getting current patient: "+e.getMessage());
return e.getMessage();
}
}
@GET
@Path("/getTreatedPatients/{userid}")
@Produces(MediaType.APPLICATION_JSON)
public String getTreatedPatients(@PathParam("userid") int userid) throws JSONException {
logger.info("get treated patients");
try {
List<Queue> queueRecord = queueDBDriver.getTreatedPatients(userid);
JSONSerializer serializer = new JSONSerializer();
logger.info("successfully getting treated patient");
return serializer.include("patient.patientID", "queueTokenNo")
.exclude("*").serialize(queueRecord);
} catch (RuntimeException e)
{
logger.error("error getting treated patient: "+e.getMessage());
JSONObject jsonErrorObject = new JSONObject();
jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
return jsonErrorObject.toString();
}catch (Exception e) {
logger.error("error getting treated patient: "+e.getMessage());
return e.getMessage();
}
}
    /**
     * Toggles queue redirection for a user. When the shared QueueStatus.qStatus
     * is not 3 (Redirect), redirects the user's queue and records status 3;
     * otherwise clears the redirect, removes the user's entry from
     * queueStatusList and resets attendance to 0.
     *
     * NOTE(review): qStatus is a static field on QueueStatus, so this toggle
     * is global across all users — confirm that is intended.
     */
    @GET
    @Path("/redirectQueue/{userid}/{type}")
    @Produces(MediaType.TEXT_PLAIN)
    public String redirectQueue(@PathParam("userid") int userid,@PathParam("type") int type) throws JSONException {
        logger.info("redirect queue");
        HrAttendanceDBDriver hrAttendanceDB = new HrAttendanceDBDriver();
        try {
            if(QueueStatus.qStatus != 3)
            {
                int status = queueDBDriver.redirectQueue(userid,type);
                hrAttendanceDB.UpdateAttendance(3, userid);
                if (status == 1){
                    logger.info("successfully redirect queue");
                    return String.valueOf(status);
                }
                else{
                    logger.info("not redirect queue");
                    return String.valueOf(status);
                }
            }
            else
            {
                QueueStatus.qStatus = 0;
                // Remove this user's entry; the break right after the removal
                // avoids iterating past a structural modification.
                for(QueueStatus queue : queueStatusList)
                {
                    if(queue.user == userid)
                    {
                        queueStatusList.remove(queue);
                        break;
                    }
                }
                hrAttendanceDB.UpdateAttendance(0, userid);
                return String.valueOf(QueueStatus.qStatus);
            }
        } catch (RuntimeException e)
        {
            logger.error("error getting redirect queue: "+e.getMessage());
            JSONObject jsonErrorObject = new JSONObject();
            jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
            jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
            return jsonErrorObject.toString();
        }catch (Exception e) {
            logger.error("error getting redirect queue: "+e.getMessage());
            return null;
        }
    }
@GET
@Path("/getUserQStatus/{userid}")
@Produces(MediaType.TEXT_PLAIN)
public String getUserQStatus(@PathParam("userid") int userid) throws JSONException {
logger.info("get user queue status");
try {
int val = hrAttendanceDBDriver.getStatus(userid);
for (QueueStatus status : queueStatusList) {
if (status.user == userid)
val = hrAttendanceDBDriver.getStatus(userid);
logger.info("successfully getting queue status");
return String.valueOf(val);
}
return String.valueOf(val);
} catch (RuntimeException e)
{
logger.error("error getting queue status: "+e.getMessage());
JSONObject jsonErrorObject = new JSONObject();
jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
return jsonErrorObject.toString();
}catch (Exception e) {
logger.error("error getting queue status: "+e.getMessage());
return "0";
}
}
@GET
@Path("/setQueueType")
@Produces(MediaType.TEXT_PLAIN)
public String setQueueType() throws JSONException {
logger.info("set queue type");
try {
if (qType == 0)
qType = 1;
else
qType = 0;
logger.info("successfully setting queue type");
return String.valueOf(qType);
} catch (RuntimeException e)
{
logger.error("error setting queue type: "+e.getMessage());
JSONObject jsonErrorObject = new JSONObject();
jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
return jsonErrorObject.toString();
}catch (Exception e) {
logger.error("error setting queue type: "+e.getMessage());
return null;
}
}
/**
 * Returns the current global queue type as plain text.
 *
 * @return the queue type, or an error JSON / null on failure
 * @throws JSONException if the error response cannot be built
 */
@GET
@Path("/getQueueType")
@Produces(MediaType.TEXT_PLAIN)
public String getQueueType() throws JSONException {
logger.info("get queue type");
try {
return String.valueOf(qType);
}catch (RuntimeException e)
{
// FIX: log messages previously said "setting" — copy-paste from setQueueType.
logger.error("error getting queue type: "+e.getMessage());
JSONObject jsonErrorObject = new JSONObject();
jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
return jsonErrorObject.toString();
} catch (Exception e) {
logger.error("error getting queue type: "+e.getMessage());
return null;
}
}
/**
 * Updates the attendance/queue type for one doctor.
 *
 * @param type new queue type value
 * @param userid doctor's user id
 * @return result code from the attendance update, or an error JSON / null on failure
 * @throws JSONException if the error response cannot be built
 */
@GET
@Path("/setQueueType/{type}/{userid}")
@Produces(MediaType.TEXT_PLAIN)
public String setQueueTypeForDoctor(@PathParam("type") int type,@PathParam("userid") int userid) throws JSONException {
logger.info("set queue type for doctor");
try {
final int updated = hrAttendanceDBDriver.UpdateAttendanceType(type, userid);
logger.info("successfully setting queue type for doctor");
return Integer.toString(updated);
} catch (RuntimeException e)
{
logger.error("error setting queue type for doctor: "+e.getMessage());
JSONObject jsonErrorObject = new JSONObject();
jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
return jsonErrorObject.toString();
} catch (Exception e) {
logger.error("error setting queue type for doctor: "+e.getMessage());
return null;
}
}
/**
 * Returns the queue type recorded for one doctor.
 *
 * @param userid doctor's user id
 * @return the doctor's queue type, or an error JSON / null on failure
 * @throws JSONException if the error response cannot be built
 */
@GET
@Path("/getQueueType/{userid}")
@Produces(MediaType.TEXT_PLAIN)
public String getQueueTypeForDoctor(@PathParam("userid") int userid) throws JSONException {
logger.info("get queue type for doctor");
try {
return String.valueOf(hrAttendanceDBDriver.getType(userid));
}catch (RuntimeException e)
{
// FIX: log messages previously said "setting" — copy-paste from the setter.
logger.error("error getting queue type for doctor: "+e.getMessage());
JSONObject jsonErrorObject = new JSONObject();
jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
return jsonErrorObject.toString();
} catch (Exception e) {
logger.error("error getting queue type for doctor: "+e.getMessage());
return null;
}
}
/**
 * Toggles the "hold" state of a doctor's queue.
 * If the user already has an in-memory queue entry: status 2 is cleared
 * (un-hold, entry removed) and status 0 is promoted to 2 (hold). A user with
 * no entry gets a fresh entry with status 2. Each transition is persisted via
 * HrAttendanceDBDriver.UpdateAttendance.
 *
 * @param userid doctor's user id
 * @return the resulting qStatus for a toggled entry, otherwise the userid;
 *         error JSON / null on failure
 * @throws JSONException if the error response cannot be built
 */
@GET
@Path("/holdQueue/{userid}")
@Produces(MediaType.TEXT_PLAIN)
public String holdQueue(@PathParam("userid") int userid) throws JSONException {
logger.info("hold queue, UserId = " +userid);
HrAttendanceDBDriver hrAttendanceDB = new HrAttendanceDBDriver();
try {
boolean bExists = false;
for (QueueStatus status : queueStatusList) {
if (status.user == userid) {
bExists = true;
// qStatus 2 appears to mean "held" — clear it and drop the entry.
if (status.qStatus == 2)
{
hrAttendanceDB.UpdateAttendance(0, userid);
// Removing inside the enhanced-for is safe only because we return
// immediately and never advance the iterator again.
queueStatusList.remove(status);
return String.valueOf(status.qStatus);
}
// qStatus 0 appears to mean "active" — put the queue on hold.
else if (status.qStatus == 0)
{
status.qStatus = 2;
hrAttendanceDB.UpdateAttendance(status.qStatus, userid);
return String.valueOf(status.qStatus);
}
// NOTE(review): any other qStatus value falls through and is handled by
// the plain "return userid" below — confirm that is intended.
}
}
// No entry yet: create one already in the "held" state.
if(bExists == false)
{
QueueStatus qstat = new QueueStatus();
qstat.user = userid;
qstat.qStatus = 2;
hrAttendanceDB.UpdateAttendance(qstat.qStatus, userid);
queueStatusList.add(qstat);
}
logger.info("successfully holding queue");
//return "True";
return String.valueOf(userid);
} catch (RuntimeException e)
{
logger.error("error holding queue: "+e.getMessage());
JSONObject jsonErrorObject = new JSONObject();
jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
return jsonErrorObject.toString();
}catch (Exception e) {
logger.error("error holding queue: "+e.getMessage());
return null;
}
}
/**
 * Chooses the next doctor to assign for a patient on a given date.
 *
 * @param patientID id of the patient being assigned
 * @param date attendance date used to filter available doctors
 * @return JSON with the selected doctor's details, or an error object
 * @throws JSONException if the error response cannot be built
 */
@GET
@Path("/getNextAssignDoctor/{patientID}/{date}")
@Produces(MediaType.APPLICATION_JSON)
public String getNextAssignDoctor(@PathParam("patientID") int patientID, @PathParam("date") String date) throws JSONException{
logger.info("get next assign doctor");
try {
JSONSerializer serializer = new JSONSerializer();
String roleName="Doctor";
// Doctors who are marked present on the requested date.
List<HrAttendance> attendenceList = new HrAttendanceDBDriver().getAllAttendance(date);
List<AdminUser> adminUserList = new UserDBDriver().getUserDetailsByUserRole(roleName);
List<AdminUser> userList = new ArrayList<AdminUser>();
for(AdminUser user : adminUserList)
{
for(HrAttendance attendant : attendenceList)
{
if(attendant.getHrEmployee().getEmpId() == user.getHrEmployee().getEmpId())
{
userList.add(user);
}
}
}
// FIX: replaced System.out.println debug output with the class logger,
// consistent with the rest of this resource.
logger.info("queueStatusList " + queueStatusList.toString());
// Drop doctors whose queue is currently held. Removing inside the loop is
// safe only because we break out of the inner loop immediately afterwards.
for(QueueStatus qstat : queueStatusList)
{
for(AdminUser user : userList)
{
if(qstat.user == user.getUserId())
{
logger.info("Removing " + user.getUserName()+ " " + qstat.qStatus );
userList.remove(user);
break;
}
}
}
logger.info("userList " + userList.toString());
// qType == 1: prefer the doctor from the patient's most recent visit.
if(qType == 1 )
{
// NOTE(review): retrieveRecent(...).get(0) throws if the list is empty, so
// the null check below can never trigger — confirm intent with the driver.
Visit visit = new VisitDBDriver().retrieveRecent(patientID ).get(0);
if(visit !=null)
{
// recent visit doctor
AdminUser user = visit.getVisitDoctor();
for(int i=0;i<userList.size();i++)
{
if(userList.get(i).getUserId() == user.getUserId())
{
return serializer.include("hrEmployee.firstName","hrEmployee.lastName","hrEmployee.empId","userId").exclude("*").serialize(user);
}
}
// Recent doctor unavailable: round-robin after the last assigned doctor.
user = userList.get(0);
for(int i=0;i < userList.size();i++)
{
if(userList.get(i).getUserId() == lastAssignedDcotor)
{
user = (i + 1) < userList.size() ? userList.get(i+1) : userList.get(0);
}
}
return serializer.include("hrEmployee.firstName","hrEmployee.lastName","hrEmployee.empId","userId").exclude("*").serialize(user);
}else
{
// No recent visit: round-robin after the last assigned doctor.
AdminUser user = userList.get(0);
for(int i=0;i < userList.size();i++)
{
if(userList.get(i).getUserId() == lastAssignedDcotor)
{
user = (i + 1) < userList.size() ? userList.get(i+1) : userList.get(0);
}
}
return serializer.include("hrEmployee.firstName","hrEmployee.lastName","hrEmployee.empId","userId").exclude("*").serialize(user);
}
} else
{
// qType != 1: pure round-robin over the available doctors.
if(lastAssignedDcotor == -1)
{
AdminUser user = userList.get(0);
return serializer.include("hrEmployee.firstName","hrEmployee.lastName","hrEmployee.empId","userId").exclude("*").serialize(user);
}else
{
AdminUser user = userList.get(0);
for(int i=0;i < userList.size();i++)
{
if(userList.get(i).getUserId() == lastAssignedDcotor)
{
user = (i + 1) < userList.size() ? userList.get(i+1) : userList.get(0);
}
}
return serializer.include("hrEmployee.firstName","hrEmployee.lastName","hrEmployee.empId","userId").exclude("*").serialize(user);
}
}
} catch (RuntimeException e)
{
logger.error("error getting next assign doctor: "+e.getMessage());
JSONObject jsonErrorObject = new JSONObject();
jsonErrorObject.put("errorcode", ErrorConstants.FILL_REQUIRED_FIELDS.getCode());
jsonErrorObject.put("message", ErrorConstants.FILL_REQUIRED_FIELDS.getMessage());
return jsonErrorObject.toString();
}catch (Exception e) {
logger.error("error getting next assign doctor: "+e.getMessage());
return null;
}
}
//********
/**
 * Internal variant of getNextAssignDoctor: returns only the chosen doctor's
 * user id, filtering today's attendance by visit type.
 *
 * @param patientID id of the patient being assigned
 * @param visitType attendance/visit type used to filter doctors
 * @return the selected doctor's user id, or -1 on any failure
 */
public int getNextAssignDoctorID(int patientID, int visitType){
try {
String roleName="Doctor";
DateFormat df = new SimpleDateFormat("yyyy-MM-dd");
Date today = Calendar.getInstance().getTime();
// Doctors present today for the requested visit type.
List<HrAttendance> attendenceList = new HrAttendanceDBDriver().getAllAttendanceByType(df.format(today), visitType);
List<AdminUser> adminUserList = new UserDBDriver().getUserDetailsByUserRole(roleName);
List<AdminUser> userList = new ArrayList<AdminUser>();
for(AdminUser user : adminUserList)
{
for(HrAttendance attendant : attendenceList)
{
if(attendant.getHrEmployee().getEmpId() == user.getHrEmployee().getEmpId())
{
userList.add(user);
}
}
}
// FIX: replaced System.out.println debug output with the class logger,
// consistent with the rest of this resource.
logger.info("queueStatusList " + queueStatusList.toString());
// Drop doctors whose queue is currently held. Removing inside the loop is
// safe only because we break out of the inner loop immediately afterwards.
for(QueueStatus qstat : queueStatusList)
{
for(AdminUser user : userList)
{
if(qstat.user == user.getUserId())
{
logger.info("Removing " + user.getUserName()+ " " + qstat.qStatus );
userList.remove(user);
break;
}
}
}
logger.info("userList " + userList.toString());
// qType == 1: prefer the doctor from the patient's most recent visit.
if(qType == 1 )
{
// NOTE(review): retrieveRecent(...).get(0) throws if the list is empty, so
// the null check below can never trigger — confirm intent with the driver.
Visit visit = new VisitDBDriver().retrieveRecent(patientID ).get(0);
if(visit !=null)
{
// recent visit doctor
AdminUser user = visit.getVisitDoctor();
for(int i=0;i<userList.size();i++)
{
if(userList.get(i).getUserId() == user.getUserId())
{
return user.getUserId();
}
}
// Recent doctor unavailable: round-robin after the last assigned doctor.
user = userList.get(0);
for(int i=0;i < userList.size();i++)
{
if(userList.get(i).getUserId() == lastAssignedDcotor)
{
user = (i + 1) < userList.size() ? userList.get(i+1) : userList.get(0);
}
}
return user.getUserId();
}else
{
// No recent visit: round-robin after the last assigned doctor.
AdminUser user = userList.get(0);
for(int i=0;i < userList.size();i++)
{
if(userList.get(i).getUserId() == lastAssignedDcotor)
{
user = (i + 1) < userList.size() ? userList.get(i+1) : userList.get(0);
}
}
return user.getUserId();
}
} else
{
// qType != 1: pure round-robin over the available doctors.
if(lastAssignedDcotor == -1)
{
AdminUser user = userList.get(0);
return user.getUserId();
}else
{
AdminUser user = userList.get(0);
for(int i=0;i < userList.size();i++)
{
if(userList.get(i).getUserId() == lastAssignedDcotor)
{
user = (i + 1) < userList.size() ? userList.get(i+1) : userList.get(0);
}
}
return user.getUserId();
}
}
} catch (Exception e) {
logger.error("error getting next assign doctor: "+e.getMessage());
return -1;
}
}
}
| |
package com.sothree.slidinguppanel;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.v4.view.MotionEventCompat;
import android.support.v4.view.ViewCompat;
import android.support.v4.widget.ViewDragHelper;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SoundEffectConstants;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.accessibility.AccessibilityEvent;
public class SlidingUpPanelLayout extends ViewGroup {
private static final String TAG = SlidingUpPanelLayout.class.getSimpleName();
/**
 * Default peeking out panel height
 */
private static final int DEFAULT_PANEL_HEIGHT = 68; // dp;
/**
 * Default height of the shadow above the peeking out panel
 */
private static final int DEFAULT_SHADOW_HEIGHT = 4; // dp;
/**
 * If no fade color is given, fade to black at ~60% opacity (alpha 0x99).
 */
private static final int DEFAULT_FADE_COLOR = 0x99000000;
/**
 * Minimum velocity that will be detected as a fling
 */
private static final int MIN_FLING_VELOCITY = 400; // dips per second
/**
 * The fade color used for the panel covered by the slider. 0 = no fading.
 */
private int mCoveredFadeColor = DEFAULT_FADE_COLOR;
/**
 * The paint used to dim the main layout when sliding
 */
private final Paint mCoveredFadePaint = new Paint();
/**
 * Drawable used to draw the shadow between panes.
 */
private Drawable mShadowDrawable;
/**
 * The size of the overhang in pixels.
 */
private int mPanelHeight;
/**
 * The size of the shadow in pixels.
 */
private final int mShadowHeight;
/**
 * True if a panel can slide with the current measurements
 */
private boolean mCanSlide;
/**
 * If provided, the panel can be dragged by only this view. Otherwise, the entire panel can be
 * used for dragging.
 */
private View mDragView;
/**
 * The child view that can slide, if any.
 */
private View mSlideableView;
/**
 * How far the panel is offset from its expanded position.
 * range [0, 1] where 0 = expanded, 1 = collapsed.
 */
private float mSlideOffset;
/**
 * How far in pixels the slideable panel may move.
 */
private int mSlideRange;
/**
 * A panel view is locked into internal scrolling or another condition that
 * is preventing a drag.
 */
private boolean mIsUnableToDrag;
/**
 * Flag indicating that sliding feature is enabled\disabled
 */
private boolean mIsSlidingEnabled;
/**
 * Flag indicating if a drag view can have its own touch events. If set
 * to true, a drag view can scroll horizontally and have its own click listener.
 *
 * Default is set to false.
 */
private boolean mIsUsingDragViewTouchEvents;
/**
 * Threshold to tell if there was a scroll touch event.
 */
private int mScrollTouchSlop;
// Touch-down coordinates recorded in onInterceptTouchEvent/onTouchEvent.
private float mInitialMotionX;
private float mInitialMotionY;
// Whether the current gesture started on the drag view (or the slideable view).
private boolean mDragViewHit;
// Intermediate stop position for the panel; 0 disables anchoring.
private float mAnchorPoint = 0.f;
private PanelSlideListener mPanelSlideListener;
private final ViewDragHelper mDragHelper;
/**
 * Stores whether or not the pane was expanded the last time it was slideable.
 * If expand/collapse operations are invoked this state is modified. Used by
 * instance state save/restore.
 */
private boolean mPreservedExpandedState;
private boolean mFirstLayout = true;
private final Rect mTmpRect = new Rect();
/**
 * Listener for monitoring events about sliding panes.
 */
public interface PanelSlideListener {
/**
 * Called when a sliding pane's position changes.
 * @param panel The child view that was moved
 * @param slideOffset The new offset of this sliding pane within its range, from 0-1
 */
public void onPanelSlide(View panel, float slideOffset);
/**
 * Called when a sliding pane has fully collapsed. The pane may or may not
 * be interactive at this point depending on if it's shown or hidden
 * @param panel The child view that was slid to an collapsed position, revealing other panes
 */
public void onPanelCollapsed(View panel);
/**
 * Called when a sliding pane has fully expanded. The pane is now guaranteed
 * to be interactive. It may now obscure other views in the layout.
 * @param panel The child view that was slid to a expanded position
 */
public void onPanelExpanded(View panel);
/**
 * Called when a sliding pane settles at the intermediate anchor point.
 * @param panel The child view that was slid to the anchor position
 */
public void onPanelAnchored(View panel);
}
/**
 * No-op stubs for {@link PanelSlideListener}. If you only want to implement a subset
 * of the listener methods you can extend this instead of implement the full interface.
 */
public static class SimplePanelSlideListener implements PanelSlideListener {
@Override
public void onPanelSlide(View panel, float slideOffset) {
}
@Override
public void onPanelCollapsed(View panel) {
}
@Override
public void onPanelExpanded(View panel) {
}
@Override
public void onPanelAnchored(View panel) {
}
}
public SlidingUpPanelLayout(Context context) {
this(context, null);
}
public SlidingUpPanelLayout(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
// NOTE(review): attrs is currently unused here (no TypedArray is read), so the
// panel height and shadow height cannot be set from XML — confirm if intended.
public SlidingUpPanelLayout(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
final float density = context.getResources().getDisplayMetrics().density;
// Convert the dp defaults to pixels, rounding to nearest.
mPanelHeight = (int) (DEFAULT_PANEL_HEIGHT * density + 0.5f);
mShadowHeight = (int) (DEFAULT_SHADOW_HEIGHT * density + 0.5f);
// We draw the scrim and shadow ourselves in drawChild()/draw().
setWillNotDraw(false);
mDragHelper = ViewDragHelper.create(this, 0.5f, new DragHelperCallback());
mDragHelper.setMinVelocity(MIN_FLING_VELOCITY * density);
mCanSlide = true;
mIsSlidingEnabled = true;
setCoveredFadeColor(DEFAULT_FADE_COLOR);
ViewConfiguration vc = ViewConfiguration.get(context);
mScrollTouchSlop = vc.getScaledTouchSlop();
}
/**
 * Set the color used to fade the pane covered by the sliding pane out when the pane
 * will become fully covered in the expanded state.
 *
 * @param color An ARGB-packed color value
 */
public void setCoveredFadeColor(int color) {
mCoveredFadeColor = color;
invalidate();
}
/**
 * @return The ARGB-packed color value used to fade the fixed pane
 */
public int getCoveredFadeColor() {
return mCoveredFadeColor;
}
/**
 * Set the collapsed panel height in pixels
 *
 * @param val A height in pixels
 */
public void setPanelHeight(int val) {
mPanelHeight = val;
requestLayout();
}
/**
 * @return The current collapsed panel height
 */
public int getPanelHeight() {
return mPanelHeight;
}
/**
 * Sets the listener notified of slide/expand/collapse/anchor events.
 *
 * @param listener listener to notify, or null to clear
 */
public void setPanelSlideListener(PanelSlideListener listener) {
mPanelSlideListener = listener;
}
/**
 * Set the draggable view portion. Set to null to allow the whole panel to be draggable
 *
 * @param dragView A view that will be used to drag the panel.
 */
public void setDragView(View dragView) {
mDragView = dragView;
}
/**
 * Set an anchor point where the panel can stop during sliding
 *
 * @param anchorPoint A value between 0 and 1, determining the position of the anchor point
 * starting from the top of the layout. Values outside (0, 1) are ignored.
 */
public void setAnchorPoint(float anchorPoint) {
if (anchorPoint > 0 && anchorPoint < 1)
mAnchorPoint = anchorPoint;
}
/**
 * Set the shadow for the sliding panel
 *
 * @param drawable drawable drawn just above the panel's top edge
 */
public void setShadowDrawable(Drawable drawable) {
mShadowDrawable = drawable;
}
/** Notifies the listener, if any, that the panel offset changed. */
void dispatchOnPanelSlide(View panel) {
    final PanelSlideListener listener = mPanelSlideListener;
    if (listener != null) {
        listener.onPanelSlide(panel, mSlideOffset);
    }
}
/** Notifies the listener that the panel fully expanded, and announces the change for accessibility. */
void dispatchOnPanelExpanded(View panel) {
    final PanelSlideListener listener = mPanelSlideListener;
    if (listener != null) {
        listener.onPanelExpanded(panel);
    }
    sendAccessibilityEvent(AccessibilityEvent.TYPE_WINDOW_STATE_CHANGED);
}
/** Notifies the listener that the panel fully collapsed, and announces the change for accessibility. */
void dispatchOnPanelCollapsed(View panel) {
    final PanelSlideListener listener = mPanelSlideListener;
    if (listener != null) {
        listener.onPanelCollapsed(panel);
    }
    sendAccessibilityEvent(AccessibilityEvent.TYPE_WINDOW_STATE_CHANGED);
}
/** Notifies the listener that the panel settled at the anchor, and announces the change for accessibility. */
void dispatchOnPanelAnchored(View panel) {
    final PanelSlideListener listener = mPanelSlideListener;
    if (listener != null) {
        listener.onPanelAnchored(panel);
    }
    sendAccessibilityEvent(AccessibilityEvent.TYPE_WINDOW_STATE_CHANGED);
}
// Hides the main (first) child when it is completely covered by an opaque
// slideable view, so it isn't drawn pointlessly; otherwise makes it visible.
void updateObscuredViewVisibility() {
if (getChildCount() == 0) {
return;
}
// Visible area of this layout, excluding padding.
final int leftBound = getPaddingLeft();
final int rightBound = getWidth() - getPaddingRight();
final int topBound = getPaddingTop();
final int bottomBound = getHeight() - getPaddingBottom();
final int left;
final int right;
final int top;
final int bottom;
// Only an opaque slider can actually obscure the child underneath.
if (mSlideableView != null && hasOpaqueBackground(mSlideableView)) {
left = mSlideableView.getLeft();
right = mSlideableView.getRight();
top = mSlideableView.getTop();
bottom = mSlideableView.getBottom();
} else {
left = right = top = bottom = 0;
}
View child = getChildAt(0);
// Clamp the child's bounds to the padded area before comparing.
final int clampedChildLeft = Math.max(leftBound, child.getLeft());
final int clampedChildTop = Math.max(topBound, child.getTop());
final int clampedChildRight = Math.min(rightBound, child.getRight());
final int clampedChildBottom = Math.min(bottomBound, child.getBottom());
final int vis;
// INVISIBLE only when the child lies entirely within the slider's bounds.
if (clampedChildLeft >= left && clampedChildTop >= top &&
clampedChildRight <= right && clampedChildBottom <= bottom) {
vis = INVISIBLE;
} else {
vis = VISIBLE;
}
child.setVisibility(vis);
}
// Re-shows any child hidden by updateObscuredViewVisibility(), e.g. before a drag.
void setAllChildrenVisible() {
    final int count = getChildCount();
    for (int index = 0; index < count; index++) {
        final View view = getChildAt(index);
        if (view.getVisibility() == INVISIBLE) {
            view.setVisibility(VISIBLE);
        }
    }
}
// True when the view's background drawable exists and is fully opaque.
private static boolean hasOpaqueBackground(View v) {
    final Drawable background = v.getBackground();
    return background != null && background.getOpacity() == PixelFormat.OPAQUE;
}
@Override
protected void onAttachedToWindow() {
super.onAttachedToWindow();
// Force the next layout pass to re-run first-layout positioning.
mFirstLayout = true;
}
@Override
protected void onDetachedFromWindow() {
super.onDetachedFromWindow();
// Reset so a re-attach behaves like a fresh first layout.
mFirstLayout = true;
}
/**
 * Measures the main content (child 0) and the sliding panel (child 1).
 * Both width and height must be EXACT. The main content is given the layout
 * height minus the collapsed panel height; the panel gets the full height.
 */
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
final int widthMode = MeasureSpec.getMode(widthMeasureSpec);
final int widthSize = MeasureSpec.getSize(widthMeasureSpec);
final int heightMode = MeasureSpec.getMode(heightMeasureSpec);
final int heightSize = MeasureSpec.getSize(heightMeasureSpec);
if (widthMode != MeasureSpec.EXACTLY) {
throw new IllegalStateException("Width must have an exact value or MATCH_PARENT");
} else if (heightMode != MeasureSpec.EXACTLY) {
throw new IllegalStateException("Height must have an exact value or MATCH_PARENT");
}
int layoutHeight = heightSize - getPaddingTop() - getPaddingBottom();
int panelHeight = mPanelHeight;
final int childCount = getChildCount();
if (childCount > 2) {
Log.e(TAG, "onMeasure: More than two child views are not supported.");
} else if (childCount == 2 && getChildAt(1).getVisibility() == GONE) {
// FIX: previously getChildAt(1) was dereferenced even with fewer than two
// children, crashing with a NullPointerException. Guard on childCount == 2.
panelHeight = 0;
}
// We'll find the current one below.
mSlideableView = null;
mCanSlide = false;
// First pass. Measure based on child LayoutParams width/height.
for (int i = 0; i < childCount; i++) {
final View child = getChildAt(i);
final LayoutParams lp = (LayoutParams) child.getLayoutParams();
int height = layoutHeight;
if (child.getVisibility() == GONE) {
lp.dimWhenOffset = false;
continue;
}
if (i == 1) {
// The second child is the slideable panel.
lp.slideable = true;
lp.dimWhenOffset = true;
mSlideableView = child;
mCanSlide = true;
} else {
// Main content is shortened by the collapsed-panel overhang.
height -= panelHeight;
}
int childWidthSpec;
if (lp.width == LayoutParams.WRAP_CONTENT) {
childWidthSpec = MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.AT_MOST);
} else if (lp.width == LayoutParams.MATCH_PARENT) {
childWidthSpec = MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY);
} else {
childWidthSpec = MeasureSpec.makeMeasureSpec(lp.width, MeasureSpec.EXACTLY);
}
int childHeightSpec;
if (lp.height == LayoutParams.WRAP_CONTENT) {
childHeightSpec = MeasureSpec.makeMeasureSpec(height, MeasureSpec.AT_MOST);
} else if (lp.height == LayoutParams.MATCH_PARENT) {
childHeightSpec = MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY);
} else {
childHeightSpec = MeasureSpec.makeMeasureSpec(lp.height, MeasureSpec.EXACTLY);
}
child.measure(childWidthSpec, childHeightSpec);
}
setMeasuredDimension(widthSize, heightSize);
}
// Positions the main content at the top and the slideable panel offset
// downward by mSlideRange * mSlideOffset (0 = expanded, 1 = collapsed).
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
final int paddingLeft = getPaddingLeft();
final int paddingTop = getPaddingTop();
final int childCount = getChildCount();
int yStart = paddingTop;
int nextYStart = yStart;
if (mFirstLayout) {
// On the very first layout, restore the saved expanded/collapsed position.
mSlideOffset = mCanSlide && mPreservedExpandedState ? 0.f : 1.f;
}
for (int i = 0; i < childCount; i++) {
final View child = getChildAt(i);
if (child.getVisibility() == GONE) {
continue;
}
final LayoutParams lp = (LayoutParams) child.getLayoutParams();
int childHeight = child.getMeasuredHeight();
if (lp.slideable) {
// The panel can travel its height minus the always-visible overhang.
mSlideRange = childHeight - mPanelHeight;
yStart += (int) (mSlideRange * mSlideOffset);
} else {
yStart = nextYStart;
}
final int childTop = yStart;
final int childBottom = childTop + childHeight;
final int childLeft = paddingLeft;
final int childRight = childLeft + child.getMeasuredWidth();
child.layout(childLeft, childTop, childRight, childBottom);
// NOTE(review): uses getHeight() (post-layout) rather than getMeasuredHeight();
// equal after the layout() call above, but confirm this is deliberate.
nextYStart += child.getHeight();
}
if (mFirstLayout) {
updateObscuredViewVisibility();
}
mFirstLayout = false;
}
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
// Recalculate sliding panes and their details; a height change invalidates
// the slide range, so treat the next layout like a first layout.
if (h != oldh) {
mFirstLayout = true;
}
}
/**
 * Set sliding enabled flag
 * @param enabled flag value
 */
public void setSlidingEnabled(boolean enabled) {
mIsSlidingEnabled = enabled;
}
/**
 * Set if the drag view can have its own touch events. If set
 * to true, a drag view can scroll horizontally and have its own click listener.
 *
 * Default is set to false.
 */
public void setEnableDragViewTouchEvents(boolean enabled) {
mIsUsingDragViewTouchEvents = enabled;
}
// True when the layout-local point (x, y) lies inside the drag handle
// (mDragView if set, otherwise the whole slideable view). Comparison is done
// in screen coordinates so nested offsets are handled for free.
private boolean isDragViewHit(int x, int y) {
    final View target = mDragView != null ? mDragView : mSlideableView;
    if (target == null) {
        return false;
    }
    final int[] targetOnScreen = new int[2];
    target.getLocationOnScreen(targetOnScreen);
    final int[] layoutOnScreen = new int[2];
    getLocationOnScreen(layoutOnScreen);
    final int touchX = layoutOnScreen[0] + x;
    final int touchY = layoutOnScreen[1] + y;
    final boolean insideX =
            touchX >= targetOnScreen[0] && touchX < targetOnScreen[0] + target.getWidth();
    final boolean insideY =
            touchY >= targetOnScreen[1] && touchY < targetOnScreen[1] + target.getHeight();
    return insideX && insideY;
}
@Override
public void requestChildFocus(View child, View focused) {
super.requestChildFocus(child, focused);
// When not slideable, remember which pane holds focus (keyboard navigation)
// so the expanded state can be restored once sliding becomes possible.
if (!isInTouchMode() && !mCanSlide) {
mPreservedExpandedState = child == mSlideableView;
}
}
// Decides whether this layout steals the gesture from its children: taps on
// the drag area and vertical drags are intercepted; horizontal scrolls are
// passed through when the drag view handles its own touches.
@Override
public boolean onInterceptTouchEvent(MotionEvent ev) {
final int action = MotionEventCompat.getActionMasked(ev);
// Bail out entirely when sliding is impossible or an earlier move disabled it.
if (!mCanSlide || !mIsSlidingEnabled || (mIsUnableToDrag && action != MotionEvent.ACTION_DOWN)) {
mDragHelper.cancel();
return super.onInterceptTouchEvent(ev);
}
if (action == MotionEvent.ACTION_CANCEL || action == MotionEvent.ACTION_UP) {
mDragHelper.cancel();
return false;
}
final float x = ev.getX();
final float y = ev.getY();
boolean interceptTap = false;
switch (action) {
case MotionEvent.ACTION_DOWN: {
mIsUnableToDrag = false;
mInitialMotionX = x;
mInitialMotionY = y;
mDragViewHit = isDragViewHit((int) x, (int) y);
// A plain tap on the drag area toggles the panel (see onTouchEvent).
if (mDragViewHit && !mIsUsingDragViewTouchEvents) {
interceptTap = true;
}
break;
}
case MotionEvent.ACTION_MOVE: {
final float adx = Math.abs(x - mInitialMotionX);
final float ady = Math.abs(y - mInitialMotionY);
final int dragSlop = mDragHelper.getTouchSlop();
// Handle any horizontal scrolling on the drag view.
if (mIsUsingDragViewTouchEvents) {
if (adx > mScrollTouchSlop && ady < mScrollTouchSlop) {
return super.onInterceptTouchEvent(ev);
}
// Intercept the touch if the drag view has any vertical scroll.
// onTouchEvent will determine if the view should drag vertically.
else if (ady > mScrollTouchSlop) {
interceptTap = mDragViewHit;
}
}
// Mostly-horizontal movement beyond slop: give up the drag for this gesture.
if (ady > dragSlop && adx > ady) {
mDragHelper.cancel();
mIsUnableToDrag = true;
return false;
}
break;
}
}
final boolean interceptForDrag = mDragViewHit && mDragHelper.shouldInterceptTouchEvent(ev);
return interceptForDrag || interceptTap;
}
// Feeds the gesture to the drag helper and treats an UP within touch slop of
// the DOWN as a tap on the drag area, toggling expand/collapse/anchor.
@Override
public boolean onTouchEvent(MotionEvent ev) {
if (!mCanSlide || !mIsSlidingEnabled) {
return super.onTouchEvent(ev);
}
mDragHelper.processTouchEvent(ev);
final int action = ev.getAction();
boolean wantTouchEvents = true;
switch (action & MotionEventCompat.ACTION_MASK) {
case MotionEvent.ACTION_DOWN: {
final float x = ev.getX();
final float y = ev.getY();
mInitialMotionX = x;
mInitialMotionY = y;
break;
}
case MotionEvent.ACTION_UP: {
final float x = ev.getX();
final float y = ev.getY();
final float dx = x - mInitialMotionX;
final float dy = y - mInitialMotionY;
final int slop = mDragHelper.getTouchSlop();
// Movement stayed within slop and ended on the drag area => a click.
if (dx * dx + dy * dy < slop * slop &&
isDragViewHit((int) x, (int) y)) {
View v = mDragView != null ? mDragView : mSlideableView;
v.playSoundEffect(SoundEffectConstants.CLICK);
// Collapsed => slide to the anchor point; expanded/anchored => collapse.
if (!isExpanded() && !isAnchored()) {
expandPane(mSlideableView, 0, mAnchorPoint);
} else {
collapsePane();
}
break;
}
break;
}
}
return wantTouchEvents;
}
/**
 * Slides the panel to the given offset (0 = fully expanded).
 *
 * @param pane unused; the slide always targets mSlideableView via smoothSlideTo
 * @param initialVelocity starting velocity for the settle animation
 * @param slideOffset target offset in [0, 1]
 * @return true if the slide started (or first layout will apply it)
 */
private boolean expandPane(View pane, int initialVelocity, float slideOffset) {
    // FIX: the parameter was named mSlideOffset, shadowing the field of the
    // same name — renamed to avoid the confusion. Behavior is unchanged.
    if (mFirstLayout || smoothSlideTo(slideOffset, initialVelocity)) {
        mPreservedExpandedState = true;
        return true;
    }
    return false;
}
// Slides the panel to the fully collapsed position (offset 1) and records the
// collapsed state on success.
private boolean collapsePane(View pane, int initialVelocity) {
    final boolean collapsing = mFirstLayout || smoothSlideTo(1.f, initialVelocity);
    if (collapsing) {
        mPreservedExpandedState = false;
    }
    return collapsing;
}
/**
 * Collapse the sliding pane if it is currently slideable. If first layout
 * has already completed this will animate.
 *
 * @return true if the pane was slideable and is now collapsed/in the process of collapsing
 */
public boolean collapsePane() {
return collapsePane(mSlideableView, 0);
}
/**
 * Expand the sliding pane if it is currently slideable. If first layout
 * has already completed this will animate.
 *
 * @return true if the pane was slideable and is now expanded/in the process of expanding
 */
public boolean expandPane() {
return expandPane(0);
}
/**
 * Partially expand the sliding pane up to a specific offset.
 * Shows the pane first if it is currently hidden.
 *
 * @param slideOffset Value between 0 and 1, where 0 is completely expanded.
 * @return true if the pane was slideable and is now expanded/in the process of expanding
 */
public boolean expandPane(float slideOffset) {
    // FIX: the parameter was named mSlideOffset, shadowing the field of the
    // same name — renamed. Java call sites are positional, so this is
    // source/binary compatible.
    if (!isPaneVisible()) {
        showPane();
    }
    return expandPane(mSlideableView, 0, slideOffset);
}
/**
 * Check if the layout is completely expanded.
 * Before the first layout, this reports the saved/preserved state.
 *
 * @return true if sliding panels are completely expanded
 */
public boolean isExpanded() {
return mFirstLayout && mPreservedExpandedState
|| !mFirstLayout && mCanSlide && mSlideOffset == 0;
}
/**
 * Check if the layout is anchored in an intermediate point.
 *
 * @return true if sliding panels are anchored
 */
public boolean isAnchored() {
// Truncate to whole pixels first so the comparison matches the pixel
// position the panel actually settles at.
int anchoredTop = (int)(mAnchorPoint*mSlideRange);
// NOTE(review): exact float equality — relies on the settle landing exactly
// on anchoredTop/mSlideRange; confirm rounding cannot break this.
return !mFirstLayout && mCanSlide
&& mSlideOffset == (float)anchoredTop/(float)mSlideRange;
}
/**
 * Check if the content in this layout cannot fully fit side by side and therefore
 * the content pane can be slid back and forth.
 *
 * @return true if content in this layout can be expanded
 */
public boolean isSlideable() {
return mCanSlide;
}
// True when a second child exists and its visibility is VISIBLE.
public boolean isPaneVisible() {
    if (getChildCount() < 2) {
        return false;
    }
    return getChildAt(1).getVisibility() == View.VISIBLE;
}
// Makes the sliding pane (second child) visible again and relayouts.
public void showPane() {
    if (getChildCount() < 2) {
        return;
    }
    final View pane = getChildAt(1);
    pane.setVisibility(View.VISIBLE);
    requestLayout();
}
// Removes the sliding pane from layout (GONE) and relayouts.
public void hidePane() {
    final View pane = mSlideableView;
    if (pane == null) {
        return;
    }
    pane.setVisibility(View.GONE);
    requestLayout();
}
// Converts the panel's new top position into a [0, 1] offset and notifies listeners.
private void onPanelDragged(int newTop) {
final int topBound = getPaddingTop();
mSlideOffset = (float) (newTop - topBound) / mSlideRange;
dispatchOnPanelSlide(mSlideableView);
}
// Draws the main content clipped to the area above the panel, then dims it
// with the fade color proportionally to how expanded the panel is.
@Override
protected boolean drawChild(Canvas canvas, View child, long drawingTime) {
final LayoutParams lp = (LayoutParams) child.getLayoutParams();
boolean result;
final int save = canvas.save(Canvas.CLIP_SAVE_FLAG);
boolean drawScrim = false;
if (mCanSlide && !lp.slideable && mSlideableView != null) {
// Clip against the slider; no sense drawing what will immediately be covered.
canvas.getClipBounds(mTmpRect);
mTmpRect.bottom = Math.min(mTmpRect.bottom, mSlideableView.getTop());
canvas.clipRect(mTmpRect);
// Any expansion at all (offset < 1) dims the content underneath.
if (mSlideOffset < 1) {
drawScrim = true;
}
}
result = super.drawChild(canvas, child, drawingTime);
canvas.restoreToCount(save);
if (drawScrim) {
// Scale the fade color's alpha by how far the panel is expanded.
final int baseAlpha = (mCoveredFadeColor & 0xff000000) >>> 24;
final int imag = (int) (baseAlpha * (1 - mSlideOffset));
final int color = imag << 24 | (mCoveredFadeColor & 0xffffff);
mCoveredFadePaint.setColor(color);
canvas.drawRect(mTmpRect, mCoveredFadePaint);
}
return result;
}
/**
 * Smoothly animate mDraggingPane to the target X position within its range.
 *
 * @param slideOffset position to animate to, in [0, 1] (0 = expanded)
 * @param velocity initial velocity in case of fling, or 0.
 * @return true if the settle animation was started
 */
boolean smoothSlideTo(float slideOffset, int velocity) {
if (!mCanSlide) {
// Nothing to do.
return false;
}
final int topBound = getPaddingTop();
// Map the fractional offset to an absolute top coordinate for the panel.
int y = (int) (topBound + slideOffset * mSlideRange);
if (mDragHelper.smoothSlideViewTo(mSlideableView, mSlideableView.getLeft(), y)) {
setAllChildrenVisible();
ViewCompat.postInvalidateOnAnimation(this);
return true;
}
return false;
}
// Advances the drag helper's settle animation each frame; aborts the
// animation if sliding has become impossible mid-settle.
@Override
public void computeScroll() {
    if (!mDragHelper.continueSettling(true)) {
        return;
    }
    if (!mCanSlide) {
        mDragHelper.abort();
        return;
    }
    ViewCompat.postInvalidateOnAnimation(this);
}
// After normal drawing, paints the drop shadow along the panel's top edge.
@Override
public void draw(Canvas c) {
    super.draw(c);
    final View slider = mSlideableView;
    if (slider == null) {
        // No sliding panel means no edge to shadow.
        return;
    }
    if (mShadowDrawable != null) {
        final int shadowBottom = slider.getTop();
        final int shadowTop = shadowBottom - mShadowHeight;
        mShadowDrawable.setBounds(slider.getLeft(), shadowTop, slider.getRight(), shadowBottom);
        mShadowDrawable.draw(c);
    }
}
/**
 * Tests scrollability within child views of v given a delta of dx.
 *
 * @param v View to test for horizontal scrollability
 * @param checkV Whether the view v passed should itself be checked for scrollability (true),
 * or just its children (false).
 * @param dx Delta scrolled in pixels
 * @param x X coordinate of the active touch point
 * @param y Y coordinate of the active touch point
 * @return true if child views of v can be scrolled by delta of dx.
 */
protected boolean canScroll(View v, boolean checkV, int dx, int x, int y) {
if (v instanceof ViewGroup) {
final ViewGroup group = (ViewGroup) v;
final int scrollX = v.getScrollX();
final int scrollY = v.getScrollY();
final int count = group.getChildCount();
// Count backwards - let topmost views consume scroll distance first.
for (int i = count - 1; i >= 0; i--) {
final View child = group.getChildAt(i);
// Recurse only into the child actually under the touch point,
// translating the point into the child's coordinate space.
if (x + scrollX >= child.getLeft() && x + scrollX < child.getRight() &&
y + scrollY >= child.getTop() && y + scrollY < child.getBottom() &&
canScroll(child, true, dx, x + scrollX - child.getLeft(),
y + scrollY - child.getTop())) {
return true;
}
}
}
return checkV && ViewCompat.canScrollHorizontally(v, -dx);
}
@Override
protected ViewGroup.LayoutParams generateDefaultLayoutParams() {
    // The no-arg LayoutParams constructor defaults to MATCH_PARENT in both
    // dimensions.
    return new LayoutParams();
}
@Override
protected ViewGroup.LayoutParams generateLayoutParams(ViewGroup.LayoutParams p) {
    // Preserve margins when the source params carry them.
    if (p instanceof MarginLayoutParams) {
        return new LayoutParams((MarginLayoutParams) p);
    }
    return new LayoutParams(p);
}
@Override
protected boolean checkLayoutParams(ViewGroup.LayoutParams p) {
    // Accept only our own LayoutParams subtype, then defer to the base check.
    if (!(p instanceof LayoutParams)) {
        return false;
    }
    return super.checkLayoutParams(p);
}
@Override
public ViewGroup.LayoutParams generateLayoutParams(AttributeSet attrs) {
    // Inflate layout params straight from the XML attribute set.
    return new LayoutParams(getContext(), attrs);
}
@Override
protected Parcelable onSaveInstanceState() {
    final SavedState state = new SavedState(super.onSaveInstanceState());
    // When the panel cannot slide, persist the last remembered expanded state
    // instead of querying the (meaningless) current position.
    state.isExpanded = isSlideable() ? isExpanded() : mPreservedExpandedState;
    return state;
}
@Override
protected void onRestoreInstanceState(Parcelable state) {
    final SavedState saved = (SavedState) state;
    super.onRestoreInstanceState(saved.getSuperState());
    // Re-apply the persisted panel position.
    if (saved.isExpanded) {
        expandPane();
    } else {
        collapsePane();
    }
    mPreservedExpandedState = saved.isExpanded;
}
/**
 * Bridges ViewDragHelper callbacks to panel behavior: decides which child may
 * be captured for dragging, forwards position changes, and settles the panel
 * to the expanded, anchored, or collapsed position on release.
 */
private class DragHelperCallback extends ViewDragHelper.Callback {
@Override
public boolean tryCaptureView(View child, int pointerId) {
// Dragging can be vetoed globally (mIsUnableToDrag).
if (mIsUnableToDrag) {
return false;
}
// Only the child flagged as slideable in its LayoutParams may be captured.
return ((LayoutParams) child.getLayoutParams()).slideable;
}
@Override
public void onViewDragStateChanged(int state) {
// Dispatch panel events only once the drag helper has come to rest.
if (mDragHelper.getViewDragState() == ViewDragHelper.STATE_IDLE) {
// A slide offset of 0 is treated as fully expanded.
if (mSlideOffset == 0) {
updateObscuredViewVisibility();
dispatchOnPanelExpanded(mSlideableView);
mPreservedExpandedState = true;
} else if (isAnchored()) {
updateObscuredViewVisibility();
dispatchOnPanelAnchored(mSlideableView);
mPreservedExpandedState = true;
} else {
// Any other resting offset counts as collapsed.
dispatchOnPanelCollapsed(mSlideableView);
mPreservedExpandedState = false;
}
}
}
@Override
public void onViewCaptured(View capturedChild, int activePointerId) {
// Make all child views visible in preparation for sliding things around
setAllChildrenVisible();
}
@Override
public void onViewPositionChanged(View changedView, int left, int top, int dx, int dy) {
// NOTE(review): assumes onPanelDragged derives the slide offset from the
// new 'top' position - confirm against its definition elsewhere in the file.
onPanelDragged(top);
invalidate();
}
@Override
public void onViewReleased(View releasedChild, float xvel, float yvel) {
// Choose the settle target based on fling velocity and current offset.
// 'top' starts at the fully expanded position (top padding).
int top = getPaddingTop();
if (mAnchorPoint != 0) {
// Anchored mode: thresholds are the midpoints between the anchor offset
// and the fully-settled offsets on either side of it.
int anchoredTop = (int)(mAnchorPoint*mSlideRange);
float anchorOffset = (float)anchoredTop/(float)mSlideRange;
if (yvel > 0 || (yvel == 0 && mSlideOffset >= (1f+anchorOffset)/2)) {
// Downward fling, or released past the lower midpoint: full range.
top += mSlideRange;
} else if (yvel == 0 && mSlideOffset < (1f+anchorOffset)/2
&& mSlideOffset >= anchorOffset/2) {
// Released between the two midpoints: settle at the anchor.
top += mSlideRange * mAnchorPoint;
}
} else if (yvel > 0 || (yvel == 0 && mSlideOffset > 0.5f)) {
// No anchor: downward fling, or released past halfway, settles at
// the far end of the slide range.
top += mSlideRange;
}
mDragHelper.settleCapturedViewAt(releasedChild.getLeft(), top);
invalidate();
}
@Override
public int getViewVerticalDragRange(View child) {
return mSlideRange;
}
@Override
public int clampViewPositionVertical(View child, int top, int dy) {
// Constrain dragging between the fully expanded (top padding) and fully
// settled (top padding + slide range) positions.
final int topBound = getPaddingTop();
final int bottomBound = topBound + mSlideRange;
// NOTE(review): this is a vertical position despite the 'newLeft' name.
final int newLeft = Math.min(Math.max(top, topBound), bottomBound);
return newLeft;
}
}
/**
 * Per-child layout parameters for the sliding layout. Extends
 * MarginLayoutParams so children keep their XML margins.
 */
public static class LayoutParams extends ViewGroup.MarginLayoutParams {
// Styleable attributes recognized in XML (currently only layout_weight).
private static final int[] ATTRS = new int[] {
android.R.attr.layout_weight
};
/**
 * True if this pane is the slideable pane in the layout.
 */
boolean slideable;
/**
 * True if this view should be drawn dimmed
 * when it's been offset from its default position.
 */
boolean dimWhenOffset;
// Paint used for the dim overlay; presumably managed by the enclosing
// layout (not visible in this class).
Paint dimPaint;
public LayoutParams() {
// Fill the parent in both dimensions by default.
super(MATCH_PARENT, MATCH_PARENT);
}
public LayoutParams(int width, int height) {
super(width, height);
}
public LayoutParams(android.view.ViewGroup.LayoutParams source) {
super(source);
}
public LayoutParams(MarginLayoutParams source) {
super(source);
}
public LayoutParams(LayoutParams source) {
super(source);
}
public LayoutParams(Context c, AttributeSet attrs) {
super(c, attrs);
// NOTE(review): the TypedArray is obtained but no value is read before
// recycle() - layout_weight is effectively ignored here. Confirm whether
// that is intentional.
final TypedArray a = c.obtainStyledAttributes(attrs, ATTRS);
a.recycle();
}
}
/**
 * Parcelable state holder that remembers whether the panel was expanded
 * when the layout's instance state was saved.
 */
static class SavedState extends BaseSavedState {
    // Whether the panel was expanded at save time.
    boolean isExpanded;

    SavedState(Parcelable superState) {
        super(superState);
    }

    private SavedState(Parcel in) {
        super(in);
        // Booleans are marshalled as ints in the parcel.
        isExpanded = (in.readInt() != 0);
    }

    @Override
    public void writeToParcel(Parcel out, int flags) {
        super.writeToParcel(out, flags);
        final int expandedFlag = isExpanded ? 1 : 0;
        out.writeInt(expandedFlag);
    }

    public static final Parcelable.Creator<SavedState> CREATOR =
            new Parcelable.Creator<SavedState>() {
                @Override
                public SavedState createFromParcel(Parcel in) {
                    return new SavedState(in);
                }

                @Override
                public SavedState[] newArray(int size) {
                    return new SavedState[size];
                }
            };
}
}
| |
package org.apache.mesos.elasticsearch.scheduler;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import org.apache.log4j.Logger;
import org.apache.mesos.Protos;
import org.apache.mesos.elasticsearch.common.Discovery;
import org.apache.mesos.elasticsearch.common.cli.ElasticsearchCLIParameter;
import org.apache.mesos.elasticsearch.common.cli.ZookeeperCLIParameter;
import org.apache.mesos.elasticsearch.common.cli.validators.CLIValidators;
import org.apache.mesos.elasticsearch.common.util.NetworkUtils;
import org.apache.mesos.elasticsearch.common.zookeeper.formatter.IpPortsListZKFormatter;
import org.apache.mesos.elasticsearch.common.zookeeper.formatter.MesosZKFormatter;
import org.apache.mesos.elasticsearch.common.zookeeper.formatter.ZKFormatter;
import org.apache.mesos.elasticsearch.common.zookeeper.parser.ZKAddressParser;
import org.apache.mesos.elasticsearch.scheduler.state.ClusterState;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.stream.Collectors;
import static java.util.Arrays.asList;
/**
 * Holder object for framework configuration.
 *
 * <p>Values are populated from command-line arguments via JCommander in the
 * constructor; the remaining fields are compile-time constants shared with the
 * executor. Getters expose the parsed values to the scheduler.</p>
 */
@SuppressWarnings("PMD.TooManyFields")
public class Configuration {
    // **** ELASTICSEARCH
    public static final String ELASTICSEARCH_CPU = "--elasticsearchCpu";
    public static final String ELASTICSEARCH_RAM = "--elasticsearchRam";
    public static final String ELASTICSEARCH_DISK = "--elasticsearchDisk";
    // **** WEB UI
    public static final String WEB_UI_PORT = "--webUiPort";
    public static final String FRAMEWORK_NAME = "--frameworkName";
    public static final String EXECUTOR_NAME = "--executorName";
    public static final String EXECUTOR_LABELS = "--executorLabels";
    public static final String DATA_DIR = "--dataDir";
    public static final String DEFAULT_HOST_DATA_DIR = "/var/lib/mesos/slave/elasticsearch";
    // DCOS Certification requirement 01
    public static final String FRAMEWORK_FAILOVER_TIMEOUT = "--frameworkFailoverTimeout";
    // DCOS Certification requirement 13
    public static final String FRAMEWORK_ROLE = "--frameworkRole";
    public static final String EXECUTOR_IMAGE = "--elasticsearchDockerImage";
    public static final String EXECUTOR_BINARY = "--elasticsearchBinaryUrl";
    public static final String DEFAULT_EXECUTOR_IMAGE = "elasticsearch:latest";
    public static final String EXECUTOR_FORCE_PULL_IMAGE = "--executorForcePullImage";
    public static final String FRAMEWORK_PRINCIPAL = "--frameworkPrincipal";
    public static final String FRAMEWORK_SECRET_PATH = "--frameworkSecretPath";
    public static final String ES_TAR = "public/elasticsearch.tar.gz";
    public static final String ES_BINARY = "./elasticsearch-*/bin/elasticsearch";
    private static final Logger LOGGER = Logger.getLogger(Configuration.class);
    public static final String FRAMEWORK_USE_DOCKER = "--frameworkUseDocker";
    public static final String JAVA_HOME = "--javaHome";
    public static final String USE_IP_ADDRESS = "--useIpAddress";
    public static final String ELASTICSEARCH_PORTS = "--elasticsearchPorts";
    public static final String CONTAINER_PATH_DATA = "/usr/share/elasticsearch/data";
    public static final String CONTAINER_PATH_CONF = "/usr/share/elasticsearch/config";
    public static final String CONTAINER_PATH_CONF_YML = CONTAINER_PATH_CONF + "/elasticsearch.yml";
    public static final String HOST_SANDBOX = "./."; // Due to some protobuf weirdness. Requires './.' Not just '.'
    public static final String HOST_PATH_HOME = HOST_SANDBOX + "/es_home";
    public static final String HOST_PATH_CONF = HOST_SANDBOX;
    // **** External Volumes
    public static final String EXTERNAL_VOLUME_DRIVER = "--externalVolumeDriver";
    public static final String EXTERNAL_VOLUME_OPTIONS = "--externalVolumeOptions";
    // **** ZOOKEEPER
    private final ZookeeperCLIParameter zookeeperCLI = new ZookeeperCLIParameter();
    private final ElasticsearchCLIParameter elasticsearchCLI = new ElasticsearchCLIParameter();
    @Parameter(names = {ELASTICSEARCH_CPU}, description = "The amount of CPU resource to allocate to the elasticsearch instance.", validateValueWith = CLIValidators.PositiveDouble.class)
    private double cpus = 1.0;
    @Parameter(names = {ELASTICSEARCH_RAM}, description = "The amount of ram resource to allocate to the elasticsearch instance (MB).", validateValueWith = CLIValidators.PositiveDouble.class)
    private double mem = 256;
    @Parameter(names = {ELASTICSEARCH_DISK}, description = "The amount of Disk resource to allocate to the elasticsearch instance (MB).", validateValueWith = CLIValidators.PositiveDouble.class)
    private double disk = 1024;
    @Parameter(names = {WEB_UI_PORT}, description = "TCP port for web ui interface.", validateValueWith = CLIValidators.PositiveInteger.class)
    private int webUiPort = 31100; // Default is more likely to work on a default Mesos installation
    @Parameter(names = {ELASTICSEARCH_PORTS}, description = "Override Mesos provided ES HTTP and transport ports. Format `HTTP_PORT,TRANSPORT_PORT` (comma delimited, both required).", validateWith = CLIValidators.NumericListOfSizeTwo.class)
    private String elasticsearchPorts = ""; // Defaults to Mesos specified ports.
    // **** FRAMEWORK
    private String version = "1.0.1";
    @Parameter(names = {FRAMEWORK_NAME}, description = "The name given to the framework.", validateWith = CLIValidators.NotEmptyString.class)
    private String frameworkName = "elasticsearch";
    @Parameter(names = {EXECUTOR_NAME}, description = "The name given to the executor task.", validateWith = CLIValidators.NotEmptyString.class)
    private String executorName = "elasticsearch-executor";
    @Parameter(names = {EXECUTOR_LABELS}, description = "One or more labels given to the executor task." +
            "E.g. 'environment=prod bananas=apples'", variableArity = true)
    private List<String> executorLabels = new ArrayList<>();
    @Parameter(names = {DATA_DIR}, description = "The host data directory used by Docker volumes in the executors. [DOCKER MODE ONLY]")
    private String dataDir = DEFAULT_HOST_DATA_DIR;
    @Parameter(names = {FRAMEWORK_FAILOVER_TIMEOUT}, description = "The time before Mesos kills a scheduler and tasks if it has not recovered (ms).", validateValueWith = CLIValidators.PositiveDouble.class)
    private double frameworkFailoverTimeout = 2592000; // Mesos will kill framework after 1 month if marathon does not restart.
    @Parameter(names = {FRAMEWORK_ROLE}, description = "Used to group frameworks for allocation decisions, depending on the allocation policy being used.", validateWith = CLIValidators.NotEmptyString.class)
    private String frameworkRole = "*"; // This is the default if none is passed to Mesos
    @Parameter(names = {EXECUTOR_IMAGE}, description = "The elasticsearch docker image to use. E.g. 'elasticsearch:latest' [DOCKER MODE ONLY]", validateWith = CLIValidators.NotEmptyString.class)
    private String executorImage = DEFAULT_EXECUTOR_IMAGE;
    @Parameter(names = {EXECUTOR_BINARY}, description = "The elasticsearch binary to use (Must be tar.gz format). " +
            "E.g. 'https://download.elasticsearch.org/elasticsearch/release/org/elasticsearch/distribution/tar/elasticsearch/2.2.0/elasticsearch-2.2.0.tar.gz' [JAR MODE ONLY]", validateWith = CLIValidators.NotEmptyString.class)
    private String executorBinary = "";
    @Parameter(names = {EXECUTOR_FORCE_PULL_IMAGE}, arity = 1, description = "Option to force pull the executor image. [DOCKER MODE ONLY]")
    private Boolean executorForcePullImage = false;
    @Parameter(names = {FRAMEWORK_PRINCIPAL}, description = "The principal to use when registering the framework (username).")
    private String frameworkPrincipal = "";
    @Parameter(names = {FRAMEWORK_SECRET_PATH}, description = "The path to the file which contains the secret for the principal (password). Password in file must not have a newline.")
    private String frameworkSecretPath = "";
    @Parameter(names = {FRAMEWORK_USE_DOCKER}, arity = 1, description = "The framework will use docker if true, or jar files if false. If false, the user must ensure that the scheduler jar is available to all slaves.")
    private Boolean isFrameworkUseDocker = true;
    private InetSocketAddress frameworkFileServerAddress;
    @Parameter(names = {JAVA_HOME}, description = "When starting in jar mode, if java is not on the path, you can specify the path here. [JAR MODE ONLY]", validateWith = CLIValidators.NotEmptyString.class)
    private String javaHome = "";
    @Parameter(names = {USE_IP_ADDRESS}, arity = 1, description = "If true, the framework will resolve the local ip address. If false, it uses the hostname.")
    private Boolean isUseIpAddress = false;
    // **** External Volumes
    @Parameter(names = {EXTERNAL_VOLUME_DRIVER}, description = "Use external volume storage driver. By default, nodes will use volumes on host.")
    private String externalVolumeDriver = "";
    @Parameter(names = {EXTERNAL_VOLUME_OPTIONS}, description = "External volume driver options.")
    private String externalVolumeOption = "";

    // ****************** Runtime configuration **********************

    /**
     * Parses the given command-line arguments into this configuration object
     * (and the nested zookeeper/elasticsearch CLI parameter objects).
     *
     * @param args raw command-line arguments
     * @throws com.beust.jcommander.ParameterException if any argument is
     *         invalid; usage is printed to stdout before rethrowing (this is a
     *         CLI entry path, so console output is presumably intentional).
     */
    public Configuration(String... args) {
        final JCommander jCommander = new JCommander();
        jCommander.addObject(zookeeperCLI);
        jCommander.addObject(elasticsearchCLI);
        jCommander.addObject(this);
        try {
            jCommander.parse(args); // Parse command line args into configuration class.
        } catch (com.beust.jcommander.ParameterException ex) {
            System.out.println(ex);
            jCommander.setProgramName("(Options preceded by an asterisk are required)");
            jCommander.usage();
            throw ex;
        }
    }

    /** CPU resource per elasticsearch instance. */
    public double getCpus() {
        return cpus;
    }

    /** RAM resource per elasticsearch instance (MB). */
    public double getMem() {
        return mem;
    }

    /** Disk resource per elasticsearch instance (MB). */
    public double getDisk() {
        return disk;
    }

    public int getElasticsearchNodes() {
        return elasticsearchCLI.getElasticsearchNodes();
    }

    public void setElasticsearchNodes(int numberOfNodes) throws IllegalArgumentException {
        elasticsearchCLI.setElasticsearchNodes(numberOfNodes);
    }

    public String getElasticsearchSettingsLocation() {
        return elasticsearchCLI.getElasticsearchSettingsLocation();
    }

    public String getElasticsearchClusterName() {
        return elasticsearchCLI.getElasticsearchClusterName();
    }

    public int getWebUiPort() {
        return webUiPort;
    }

    public String getVersion() {
        return version;
    }

    public String getFrameworkName() {
        return frameworkName;
    }

    public String getTaskName() {
        return executorName;
    }

    /**
     * Parses the executor labels ("key=value" strings) into a map. Entries
     * without an '=' separator are silently skipped.
     *
     * @return mutable map of label key to label value (possibly empty)
     */
    public Map<String, String> getTaskLabels() {
        final Map<String, String> labels = new HashMap<>();
        for (String keyValue : executorLabels) {
            // Limit 2 so values containing '=' are preserved intact.
            final String[] kvp = keyValue.split("=", 2);
            if (kvp.length == 2) {
                labels.put(kvp[0], kvp[1]);
            }
        }
        return labels;
    }

    public String getDataDir() {
        return dataDir;
    }

    public double getFailoverTimeout() {
        return frameworkFailoverTimeout;
    }

    public String getFrameworkRole() {
        return frameworkRole;
    }

    public String getExecutorImage() {
        return executorImage;
    }

    public Boolean getExecutorForcePullImage() {
        return executorForcePullImage;
    }

    public Boolean getIsUseIpAddress() {
        return isUseIpAddress;
    }

    public String getElasticsearchBinary() {
        return executorBinary;
    }

    // ******* Helper methods

    /** ZK URL (ip:port list format) for reading Mesos state. */
    public String getMesosStateZKURL() {
        ZKFormatter mesosStateZKFormatter = new IpPortsListZKFormatter(new ZKAddressParser());
        return mesosStateZKFormatter.format(zookeeperCLI.getZookeeperMesosUrl());
    }

    /** ZK URL (Mesos format) used when registering the framework. */
    public String getMesosZKURL() {
        ZKFormatter mesosZKFormatter = new MesosZKFormatter(new ZKAddressParser());
        return mesosZKFormatter.format(zookeeperCLI.getZookeeperMesosUrl());
    }

    public ZookeeperCLIParameter getZookeeperCLI() {
        return zookeeperCLI;
    }

    public ElasticsearchCLIParameter getElasticsearchCLI() {
        return elasticsearchCLI;
    }

    public String getFrameworkSecretPath() {
        return frameworkSecretPath;
    }

    public String getFrameworkPrincipal() {
        return frameworkPrincipal;
    }

    public Boolean isFrameworkUseDocker() {
        return isFrameworkUseDocker;
    }

    /**
     * Address of the scheduler's file server, or the empty string when the
     * server has not been started (address not yet set).
     */
    public String getFrameworkFileServerAddress() {
        if (frameworkFileServerAddress == null) {
            return "";
        }
        return NetworkUtils.addressToString(frameworkFileServerAddress, getIsUseIpAddress());
    }

    public String webUiAddress() {
        return NetworkUtils.addressToString(NetworkUtils.hostSocket(getWebUiPort()), getIsUseIpAddress());
    }

    public void setFrameworkFileServerAddress(InetSocketAddress addr) {
        if (addr != null) {
            frameworkFileServerAddress = addr;
        } else {
            LOGGER.error("Could not set webserver address. Was null.");
        }
    }

    /**
     * Directory containing the java executable, normalized to end with '/',
     * or the empty string when no --javaHome was supplied.
     */
    public String getJavaHome() {
        if (!javaHome.isEmpty()) {
            // Strip a trailing "java" binary name and any trailing slash, then
            // re-append exactly one slash.
            return javaHome.replaceAll("java$", "").replaceAll("/$", "") + "/";
        } else {
            return "";
        }
    }

    /**
     * User-overridden ES ports, or an empty list when Mesos-assigned ports
     * should be used.
     *
     * @throws NumberFormatException if a token is not numeric (the CLI
     *         validator normally prevents this)
     */
    public List<Integer> getElasticsearchPorts() {
        if (elasticsearchPorts.isEmpty()) {
            return Collections.emptyList();
        }
        final String[] portsRaw = elasticsearchPorts.replace(" ", "").split(",");
        final List<Integer> portsList = new ArrayList<>(portsRaw.length);
        for (String port : portsRaw) {
            portsList.add(Integer.parseInt(port));
        }
        return portsList;
    }

    public String getExternalVolumeDriver() {
        return externalVolumeDriver;
    }

    public String getExternalVolumeOption() {
        return externalVolumeOption;
    }

    /**
     * Builds the shell command used to launch elasticsearch natively (jar
     * mode): creates the data/sandbox directories, chowns them to 'nobody',
     * then runs the ES binary as that user.
     */
    public String nativeCommand(List<String> arguments) {
        String folders = getDataDir() + " " + HOST_SANDBOX;
        String mkdir = "mkdir -p " + folders + "; ";
        String chown = "chown -R nobody " + folders + "; ";
        return mkdir +
                chown +
                " su -s /bin/sh -c \""
                + Configuration.ES_BINARY
                + " "
                + arguments.stream().collect(Collectors.joining(" "))
                + "\" nobody";
    }

    /**
     * Builds the elasticsearch command-line arguments for a new node,
     * including discovery, ports, cluster name and path settings.
     */
    public List<String> esArguments(ClusterState clusterState, Protos.DiscoveryInfo discoveryInfo, Protos.SlaveID slaveID) {
        List<String> args = new ArrayList<>();
        List<Protos.TaskInfo> taskList = clusterState.getTaskList();
        String hostAddress = "";
        if (!taskList.isEmpty()) {
            // Seed unicast discovery with the first known task's transport address.
            Protos.TaskInfo taskInfo = taskList.get(0);
            String taskId = taskInfo.getTaskId().getValue();
            InetSocketAddress transportAddress = clusterState.getGuiTaskList().get(taskId).getTransportAddress();
            hostAddress = NetworkUtils.addressToString(transportAddress, getIsUseIpAddress()).replace("http://", "");
        }
        addIfNotEmpty(args, "--default.discovery.zen.ping.unicast.hosts", hostAddress);
        args.add("--default.http.port=" + discoveryInfo.getPorts().getPorts(Discovery.CLIENT_PORT_INDEX).getNumber());
        args.add("--default.transport.tcp.port=" + discoveryInfo.getPorts().getPorts(Discovery.TRANSPORT_PORT_INDEX).getNumber());
        args.add("--default.cluster.name=" + getElasticsearchClusterName());
        args.add("--default.node.master=true");
        args.add("--default.node.data=true");
        args.add("--default.node.local=false");
        args.add("--default.index.number_of_replicas=0");
        args.add("--default.index.auto_expand_replicas=0-all");
        if (!isFrameworkUseDocker()) {
            String taskSpecificDataDir = taskSpecificHostDir(slaveID);
            args.add("--path.home=" + HOST_PATH_HOME); // Cannot be overidden
            args.add("--default.path.data=" + taskSpecificDataDir);
            args.add("--path.conf=" + HOST_PATH_CONF); // Cannot be overidden
        } else {
            args.add("--path.data=" + CONTAINER_PATH_DATA); // Cannot be overidden
        }
        args.add("--default.bootstrap.mlockall=true");
        args.add("--default.network.bind_host=0.0.0.0");
        args.add("--default.network.publish_host=_non_loopback:ipv4_");
        args.add("--default.gateway.recover_after_nodes=1");
        args.add("--default.gateway.expected_nodes=1");
        args.add("--default.indices.recovery.max_bytes_per_sec=100mb");
        args.add("--default.discovery.type=zen");
        args.add("--default.discovery.zen.fd.ping_timeout=30s");
        args.add("--default.discovery.zen.fd.ping_interval=1s");
        args.add("--default.discovery.zen.fd.ping_retries=30");
        args.add("--default.discovery.zen.ping.multicast.enabled=false");
        return args;
    }

    /** Per-slave data directory: dataDir/clusterName/slaveId. */
    public String taskSpecificHostDir(Protos.SlaveID slaveID) {
        return getDataDir() + "/" + getElasticsearchClusterName() + "/" + slaveID.getValue();
    }

    // Appends "key value" to args only when value is non-empty.
    private void addIfNotEmpty(List<String> args, String key, String value) {
        if (!value.isEmpty()) {
            args.addAll(asList(key, value));
        }
    }

    /** Name of the external data volume for the given node id. */
    public String dataVolumeName(Long nodeId) {
        return getFrameworkName() + nodeId + "data";
    }
}
| |
/**
* <copyright>
* </copyright>
*
*
*/
package eu.hyvar.context.contextValidity.resource.hyvalidityformula.mopp;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IMarker;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.ISchedulingRule;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.impl.BasicEObjectImpl;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.util.EcoreValidator;
/**
* Helper class to add markers to text files based on EMF's
* <code>ResourceDiagnostic</code>. If a resource contains
* <code>eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityfor
* mulaTextDiagnostic</code>s it uses the more precise information of this
* extended diagnostic type.
*/
public class HyvalidityformulaMarkerHelper {
/**
* The extension id of the custom marker type that is used by this text resource.
*/
public static final String MARKER_TYPE = eu.hyvar.context.contextValidity.resource.hyvalidityformula.mopp.HyvalidityformulaPlugin.PLUGIN_ID + ".problem";
/**
* The total number of markers per file is restricted with this constant.
* Restriction is needed because the performance of Eclipse decreases drastically
* if large amounts of markers are added to files.
*/
public static int MAXIMUM_MARKERS = 500;
/**
* We use a queue to aggregate commands that create or remove markers. This is
* basically for performance reasons. Without the queue we would need to create a
* job for each marker creation/removal, which creates tons of threads and takes
* very long time.
*/
private final static MarkerCommandQueue COMMAND_QUEUE = new MarkerCommandQueue();
public static class MutexRule implements ISchedulingRule {
public boolean isConflicting(ISchedulingRule rule) {
return rule == this;
}
public boolean contains(ISchedulingRule rule) {
return rule == this;
}
}
private static class MarkerCommandQueue {
private List<eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaCommand<Object>> commands = new ArrayList<eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaCommand<Object>>();
private MutexRule schedulingRule = new MutexRule();
public void addCommand(eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaCommand<Object> command) {
synchronized(commands) {
commands.add(command);
// we only need to schedule a job, if the queue was empty
if (commands.size() == 1) {
scheduleRunCommandsJob();
}
}
}
private void scheduleRunCommandsJob() {
Job job = new Job(eu.hyvar.context.contextValidity.resource.hyvalidityformula.HyvalidityformulaResourceBundle.UPDATING_MARKERS_JOB_NAME) {
@Override
protected IStatus run(IProgressMonitor monitor) {
runCommands();
return Status.OK_STATUS;
}
};
job.setRule(schedulingRule);
job.schedule();
}
public void runCommands() {
List<eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaCommand<Object>> commandsToProcess = new ArrayList<eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaCommand<Object>>();
synchronized(commands) {
commandsToProcess.addAll(commands);
commands.clear();
}
for (eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaCommand<Object> command : commandsToProcess) {
command.execute(null);
}
}
}
/**
* <p>
* Creates a marker from the given diagnostics object and attaches the marker to
* the resource. Markers are created and removed asynchronously. Thus, they may
* not appear when calls to this method return. But, the order of marker additions
* and removals is preserved.
* </p>
*
* @param resource The resource that is the file to mark.
* @param diagnostic The diagnostic with information for the marker.
*/
public void mark(Resource resource, eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaTextDiagnostic diagnostic) {
final IFile file = getFile(resource);
if (file == null) {
return;
}
createMarkerFromDiagnostic(file, diagnostic);
}
protected void createMarkerFromDiagnostic(final IFile file, final eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaTextDiagnostic diagnostic) {
final eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaProblem problem = diagnostic.getProblem();
eu.hyvar.context.contextValidity.resource.hyvalidityformula.HyvalidityformulaEProblemType problemType = problem.getType();
final String markerID = getMarkerID(problemType);
COMMAND_QUEUE.addCommand(new eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaCommand<Object>() {
public boolean execute(Object context) {
try {
// if there are too many markers, we do not add new ones
if (file.findMarkers(markerID, false, IResource.DEPTH_ZERO).length >= MAXIMUM_MARKERS) {
return true;
}
IMarker marker = file.createMarker(markerID);
if (problem.getSeverity() == eu.hyvar.context.contextValidity.resource.hyvalidityformula.HyvalidityformulaEProblemSeverity.ERROR) {
marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR);
} else {
marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_WARNING);
}
marker.setAttribute(IMarker.MESSAGE, diagnostic.getMessage());
eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaTextDiagnostic textDiagnostic = (eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaTextDiagnostic) diagnostic;
marker.setAttribute(IMarker.LINE_NUMBER, textDiagnostic.getLine());
marker.setAttribute(IMarker.CHAR_START, textDiagnostic.getCharStart());
marker.setAttribute(IMarker.CHAR_END, textDiagnostic.getCharEnd() + 1);
if (diagnostic instanceof eu.hyvar.context.contextValidity.resource.hyvalidityformula.mopp.HyvalidityformulaResource.ElementBasedTextDiagnostic) {
EObject element = ((eu.hyvar.context.contextValidity.resource.hyvalidityformula.mopp.HyvalidityformulaResource.ElementBasedTextDiagnostic) diagnostic).getElement();
String elementURI = getObjectURI(element);
if (elementURI != null) {
marker.setAttribute(EcoreValidator.URI_ATTRIBUTE, elementURI);
}
}
Collection<eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaQuickFix> quickFixes = textDiagnostic.getProblem().getQuickFixes();
Collection<Object> sourceIDs = new ArrayList<Object>();
if (quickFixes != null) {
for (eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaQuickFix quickFix : quickFixes) {
if (quickFix != null) {
sourceIDs.add(quickFix.getContextAsString());
}
}
}
if (!sourceIDs.isEmpty()) {
marker.setAttribute(IMarker.SOURCE_ID, eu.hyvar.context.contextValidity.resource.hyvalidityformula.util.HyvalidityformulaStringUtil.explode(sourceIDs, "|"));
}
} catch (CoreException ce) {
handleException(ce);
}
return true;
}
});
}
/**
* <p>
* Removes all markers from the given resource regardless of their type. Markers
* are created and removed asynchronously. Thus, they may not appear when calls to
* this method return. But, the order of marker additions and removals is
* preserved.
* </p>
*
* @param resource The resource where to delete markers from
*/
public void unmark(Resource resource) {
for (eu.hyvar.context.contextValidity.resource.hyvalidityformula.HyvalidityformulaEProblemType nextType : eu.hyvar.context.contextValidity.resource.hyvalidityformula.HyvalidityformulaEProblemType.values()) {
unmark(resource, nextType);
}
}
/**
* <p>
* Removes all markers of the given type from the given resource. Markers are
* created and removed asynchronously. Thus, they may not appear when calls to
* this method return. But, the order of marker additions and removals is
* preserved.
* </p>
*
* @param resource The resource where to delete markers from
* @param problemType The type of problem to remove
*/
public void unmark(Resource resource, eu.hyvar.context.contextValidity.resource.hyvalidityformula.HyvalidityformulaEProblemType problemType) {
final IFile file = getFile(resource);
if (file == null) {
return;
}
final String markerType = getMarkerID(problemType);
COMMAND_QUEUE.addCommand(new eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaCommand<Object>() {
public boolean execute(Object context) {
try {
file.deleteMarkers(markerType, false, IResource.DEPTH_ZERO);
} catch (CoreException ce) {
handleException(ce);
}
return true;
}
});
}
/**
* <p>
* Removes all markers that were caused by the given object from the resource.
* Markers are created and removed asynchronously. Thus, they may not appear when
* calls to this method return. But, the order of marker additions and removals is
* preserved.
* </p>
*
* @param resource The resource where to delete markers from
* @param causingObject The cause of the problems to remove
*/
public void unmark(Resource resource, final EObject causingObject) {
final IFile file = getFile(resource);
if (file == null) {
return;
}
final String markerID = getMarkerID(eu.hyvar.context.contextValidity.resource.hyvalidityformula.HyvalidityformulaEProblemType.UNKNOWN);
final String causingObjectURI = getObjectURI(causingObject);
if (causingObjectURI == null) {
return;
}
COMMAND_QUEUE.addCommand(new eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaCommand<Object>() {
public boolean execute(Object context) {
try {
IMarker[] markers = file.findMarkers(markerID, true, IResource.DEPTH_ZERO);
for (IMarker marker : markers) {
if (causingObjectURI.equals(marker.getAttribute(EcoreValidator.URI_ATTRIBUTE))) {
marker.delete();
}
}
} catch (CoreException ce) {
handleException(ce);
}
return true;
}
});
}
/**
* Returns the ID of the marker type that is used to indicate problems of the
* given type.
*/
public String getMarkerID(eu.hyvar.context.contextValidity.resource.hyvalidityformula.HyvalidityformulaEProblemType problemType) {
String markerID = MARKER_TYPE;
String typeID = problemType.getID();
if (!"".equals(typeID)) {
markerID += "." + typeID;
}
return markerID;
}
/**
* Tries to determine the file for the given resource. If the platform is not
* running, the resource is not a platform resource, or the resource cannot be
* found in the workspace, this method returns <code>null</code>.
*/
protected IFile getFile(Resource resource) {
if (resource == null || !Platform.isRunning()) {
return null;
}
String platformString = resource.getURI().toPlatformString(true);
if (platformString == null) {
return null;
}
IFile file = (IFile) ResourcesPlugin.getWorkspace().getRoot().findMember(platformString);
return file;
}
/**
* Returns an URI that identifies the given object.
*/
protected String getObjectURI(EObject object) {
if (object == null) {
return null;
}
if (object.eIsProxy() && object instanceof BasicEObjectImpl) {
return ((BasicEObjectImpl) object).eProxyURI().toString();
}
Resource eResource = object.eResource();
if (eResource == null) {
return null;
}
return eResource.getURI().toString() + "#" + eResource.getURIFragment(object);
}
/**
 * Handles CoreExceptions raised while manipulating markers. Exceptions that
 * merely report a vanished marker or resource are expected during
 * asynchronous processing and are silently ignored; anything else is logged.
 */
protected void handleException(CoreException ce) {
	String message = ce.getMessage();
	boolean markerGone = message.matches("Marker.*not found.");
	boolean resourceGone = message.matches("Resource.*does not exist.");
	if (!markerGone && !resourceGone) {
		new eu.hyvar.context.contextValidity.resource.hyvalidityformula.util.HyvalidityformulaRuntimeUtil().logError("Error while removing markers from resource:", ce);
	}
}
/**
 * <p>
 * Removes all markers of the given type from the given resource. Markers are
 * created and removed asynchronously. Thus, they may not appear when calls to
 * this method return. But, the order of marker additions and removals is
 * preserved.
 * </p>
 * 
 * @param resource The resource where to delete markers from
 * @param markerId The id of the marker type to remove
 */
public void removeAllMarkers(final IResource resource, final String markerId) {
	if (resource == null) {
		return;
	}
	// Defer the removal to the shared command queue so marker additions and
	// removals are applied in request order.
	COMMAND_QUEUE.addCommand(new eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaCommand<Object>() {
		public boolean execute(Object context) {
			try {
				// Delete markers of exactly this type (no subtypes) on the
				// resource itself only (DEPTH_ZERO excludes children).
				resource.deleteMarkers(markerId, false, IResource.DEPTH_ZERO);
			} catch (CoreException ce) {
				// Vanished markers/resources are expected and ignored there.
				handleException(ce);
			}
			return true;
		}
	});
}
/**
 * Asynchronously attaches a new marker of the given type, carrying the given
 * attributes, to the given resource. The creation is queued on the shared
 * command queue, so the marker may not yet exist when this method returns,
 * but the order of marker additions and removals is preserved.
 * 
 * @param resource The resource to attach the marker to (ignored if null)
 * @param markerId The id of the marker type to create
 * @param markerAttributes The attributes to set on the new marker
 */
public void createMarker(final IResource resource, final String markerId, final Map<String, Object> markerAttributes) {
	if (resource == null) {
		return;
	}
	COMMAND_QUEUE.addCommand(new eu.hyvar.context.contextValidity.resource.hyvalidityformula.IHyvalidityformulaCommand<Object>() {
		public boolean execute(Object context) {
			try {
				IMarker marker = resource.createMarker(markerId);
				// Iterate entries instead of keySet() + get() to avoid one
				// extra map lookup per attribute.
				for (Map.Entry<String, Object> attribute : markerAttributes.entrySet()) {
					marker.setAttribute(attribute.getKey(), attribute.getValue());
				}
				return true;
			} catch (CoreException e) {
				eu.hyvar.context.contextValidity.resource.hyvalidityformula.mopp.HyvalidityformulaPlugin.logError("Can't create marker.", e);
				return false;
			}
		}
	});
}
/**
 * Does nothing in this implementation; marker updates are always queued.
 * Presumably part of the marker-helper interface contract — TODO confirm.
 */
public void beginDeferMarkerUpdates() {
}
/**
 * Does nothing in this implementation (counterpart to
 * beginDeferMarkerUpdates()).
 */
public void endDeferMarkerUpdates() {
}
/**
 * Executes all marker commands queued by the add/remove methods of this
 * class.
 */
public void runCommands() {
	COMMAND_QUEUE.runCommands();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.util.tostring;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.locks.ReadWriteLock;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.testframework.junits.common.GridCommonTest;
/**
 * Tests for {@link GridToStringBuilder}.
 */
@GridCommonTest(group = "Utils")
public class GridToStringBuilderSelfTest extends GridCommonAbstractTest {
    /**
     * Verifies that the automatically generated string equals a manually
     * constructed reference string for the same object.
     *
     * @throws Exception If failed.
     */
    public void testToString() throws Exception {
        TestClass1 obj = new TestClass1();

        IgniteLogger log = log();

        log.info(obj.toStringManual());
        log.info(obj.toStringAutomatic());

        assert obj.toStringManual().equals(obj.toStringAutomatic());
    }

    /**
     * Same check as {@link #testToString()}, but with extra name/value pairs
     * appended through the builder's "additional parameters" overload.
     *
     * @throws Exception If failed.
     */
    public void testToStringWithAdditions() throws Exception {
        TestClass1 obj = new TestClass1();

        IgniteLogger log = log();

        String manual = obj.toStringWithAdditionalManual();
        log.info(manual);

        String automatic = obj.toStringWithAdditionalAutomatic();
        log.info(automatic);

        assert manual.equals(automatic);
    }

    /**
     * Builds two lists that contain each other and verifies toString() does
     * not recurse infinitely (returning at all is the assertion).
     *
     * @throws Exception If failed.
     */
    public void testToStringCheckSimpleRecursionPrevention() throws Exception {
        ArrayList<Object> list1 = new ArrayList<>();
        ArrayList<Object> list2 = new ArrayList<>();

        list2.add(list1);
        list1.add(list2);

        GridToStringBuilder.toString(ArrayList.class, list1);
        GridToStringBuilder.toString(ArrayList.class, list2);
    }

    /**
     * Recursion prevention combined with additional parameters. Disabled via
     * fail() pending the linked issue; the code below documents the intended
     * scenario once it is fixed.
     *
     * @throws Exception If failed.
     */
    public void testToStringCheckAdvancedRecursionPrevention() throws Exception {
        fail("https://issues.apache.org/jira/browse/IGNITE-602");

        ArrayList<Object> list1 = new ArrayList<>();
        ArrayList<Object> list2 = new ArrayList<>();

        list2.add(list1);
        list1.add(list2);

        GridToStringBuilder.toString(ArrayList.class, list1, "name", list2);
        GridToStringBuilder.toString(ArrayList.class, list2, "name", list1);
    }

    /**
     * JUnit. Rough timing comparison of manual vs. automatic toString();
     * results are only logged, no assertion is made.
     */
    public void testToStringPerformance() {
        TestClass1 obj = new TestClass1();

        IgniteLogger log = log();

        // Warm up.
        obj.toStringAutomatic();

        long start = System.currentTimeMillis();

        for (int i = 0; i < 100000; i++)
            obj.toStringManual();

        log.info("Manual toString() took: " + (System.currentTimeMillis() - start) + "ms");

        start = System.currentTimeMillis();

        for (int i = 0; i < 100000; i++)
            obj.toStringAutomatic();

        log.info("Automatic toString() took: " + (System.currentTimeMillis() - start) + "ms");
    }

    /**
     * Test class. Field annotations and ordering are chosen so the manual
     * string built below mirrors the automatic GridToStringBuilder output.
     */
    private static class TestClass1 {
        /** Ordered first in the generated output via @GridToStringOrder(0). */
        @SuppressWarnings("unused")
        @GridToStringOrder(0)
        private String id = "1234567890";

        /** */
        @SuppressWarnings("unused")
        private int intVar;

        /** Sensitive: printed only when S.INCLUDE_SENSITIVE is enabled. */
        @SuppressWarnings("unused")
        @GridToStringInclude(sensitive = true)
        private long longVar;

        /** Ordered second in the generated output via @GridToStringOrder(1). */
        @SuppressWarnings("unused")
        @GridToStringOrder(1)
        private final UUID uuidVar = UUID.randomUUID();

        /** */
        @SuppressWarnings("unused")
        private boolean boolVar;

        /** */
        @SuppressWarnings("unused")
        private byte byteVar;

        /** */
        @SuppressWarnings("unused")
        private String name = "qwertyuiopasdfghjklzxcvbnm";

        /** */
        @SuppressWarnings("unused")
        private final Integer finalInt = 2;

        /** Omitted from the manual reference string below. */
        @SuppressWarnings("unused")
        private List<String> strList;

        /** Explicitly included via @GridToStringInclude. */
        @SuppressWarnings("unused")
        @GridToStringInclude
        private Map<String, String> strMap;

        /** Omitted from the manual reference string below. */
        @SuppressWarnings("unused")
        private final Object obj = new Object();

        /** Omitted from the manual reference string below. */
        @SuppressWarnings("unused")
        private ReadWriteLock lock;

        /**
         * Builds the reference string by hand; must stay in sync with the
         * automatic output (field order, separators, sensitive handling).
         *
         * @return Manual string.
         */
        String toStringManual() {
            StringBuilder buf = new StringBuilder();

            buf.append(getClass().getSimpleName()).append(" [");

            buf.append("id=").append(id).append(", ");
            buf.append("uuidVar=").append(uuidVar).append(", ");
            buf.append("intVar=").append(intVar).append(", ");

            // longVar is marked sensitive, so it appears conditionally.
            if (S.INCLUDE_SENSITIVE)
                buf.append("longVar=").append(longVar).append(", ");

            buf.append("boolVar=").append(boolVar).append(", ");
            buf.append("byteVar=").append(byteVar).append(", ");
            buf.append("name=").append(name).append(", ");
            buf.append("finalInt=").append(finalInt).append(", ");
            buf.append("strMap=").append(strMap);

            buf.append("]");

            return buf.toString();
        }

        /**
         * @return Automatic string.
         */
        String toStringAutomatic() {
            return S.toString(TestClass1.class, this);
        }

        /**
         * @return Automatic string with additional parameters.
         */
        String toStringWithAdditionalAutomatic() {
            return S.toString(TestClass1.class, this, "newParam1", 1, false, "newParam2", 2, true);
        }

        /**
         * Appends the additional parameters by hand; newParam2 is flagged
         * sensitive above, so it appears conditionally.
         *
         * @return Manual string with additional parameters.
         */
        String toStringWithAdditionalManual() {
            StringBuilder s = new StringBuilder(toStringManual());

            // Drop the trailing ']' so the extra parameters can be appended.
            s.setLength(s.length() - 1);

            s.append(", newParam1=").append(1);

            if (S.INCLUDE_SENSITIVE)
                s.append(", newParam2=").append(2);

            s.append(']');

            return s.toString();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.wan;
import io.micrometer.core.instrument.Counter;
import io.micrometer.core.instrument.MeterRegistry;
import org.apache.geode.StatisticDescriptor;
import org.apache.geode.StatisticsFactory;
import org.apache.geode.distributed.internal.DistributionStats;
import org.apache.geode.internal.cache.tier.sockets.CacheServerStats;
import org.apache.geode.internal.statistics.meters.LegacyStatCounter;
public class GatewayReceiverStats extends CacheServerStats {

  /** Name of the statistics type registered for gateway receivers. */
  private static final String typeName = "GatewayReceiverStatistics";

  // ////////////////// Statistic "Id" Fields ////////////////////

  // /** Name of the events queued statistic */
  // private static final String FAILOVER_BATCHES_RECEIVED = "failoverBatchesReceived";

  /**
   * Name of the duplicate batches received statistic.
   */
  private static final String DUPLICATE_BATCHES_RECEIVED = "duplicateBatchesReceived";

  /**
   * Name of the out-of-order batches received statistic.
   */
  private static final String OUT_OF_ORDER_BATCHES_RECEIVED = "outoforderBatchesReceived";

  /**
   * Name of the early acknowledgements statistic.
   */
  private static final String EARLY_ACKS = "earlyAcks";

  /**
   * Name of the events received statistic.
   */
  private static final String EVENTS_RECEIVED = "eventsReceived";

  /**
   * Name of the create requests statistic. (The identifier keeps its
   * historical "CREAT" spelling; the published stat name is correct.)
   */
  private static final String CREAT_REQUESTS = "createRequests";

  /**
   * Name of the update requests statistic.
   */
  private static final String UPDATE_REQUESTS = "updateRequest";

  /**
   * Name of the destroy requests statistic.
   */
  private static final String DESTROY_REQUESTS = "destroyRequest";

  /**
   * Name of the unknown operations received statistic. NOTE(review): the
   * published stat name "unknowsOperationsReceived" contains a typo; fixing it
   * would change the public statistic name seen by monitoring tools.
   */
  private static final String UNKNOWN_OPERATIONS_RECEIVED = "unknowsOperationsReceived";

  /**
   * Name of the exceptions occurred statistic.
   */
  private static final String EXCEPTIONS_OCCURRED = "exceptionsOccurred";

  /**
   * Name of the events retried statistic.
   */
  private static final String EVENTS_RETRIED = "eventsRetried";

  /** Registry that owns the micrometer meters created by this instance. */
  private final MeterRegistry meterRegistry;

  // /** Id of the events queued statistic */
  // private int failoverBatchesReceivedId;

  /**
   * Id of the duplicate batches received statistic.
   */
  private int duplicateBatchesReceivedId;

  /**
   * Id of the out-of-order batches received statistic.
   */
  private int outoforderBatchesReceivedId;

  /**
   * Id of the early acknowledgements statistic.
   */
  private int earlyAcksId;

  /**
   * Id of the events received statistic.
   */
  private int eventsReceivedId;

  /** Micrometer view of the events received stat (see constructor wiring). */
  private final Counter eventsReceivedCounter;

  private static final String EVENTS_RECEIVED_COUNTER_NAME =
      "geode.gateway.receiver.events";
  private static final String EVENTS_RECEIVED_COUNTER_DESCRIPTION =
      "total number events across the batched received by this GatewayReceiver";
  private static final String EVENTS_RECEIVED_COUNTER_UNITS = "operations";

  /**
   * Id of the create requests statistic.
   */
  private int createRequestId;

  /**
   * Id of the update requests statistic.
   */
  private int updateRequestId;

  /**
   * Id of the destroy requests statistic.
   */
  private int destroyRequestId;

  /**
   * Id of the unknown operations received statistic.
   */
  private int unknowsOperationsReceivedId;

  /**
   * Id of the exceptions occurred statistic.
   */
  private int exceptionsOccurredId;

  /**
   * Id of the events retried statistic.
   */
  private int eventsRetriedId;

  // ///////////////////// Constructors ///////////////////////

  /**
   * Builds the statistic descriptors for a gateway receiver and returns a new
   * {@link GatewayReceiverStats} backed by them.
   *
   * @param f factory used to create descriptors and the statistics instance
   * @param ownerName name of the owning gateway receiver
   * @param meterRegistry registry the micrometer meters are registered with
   */
  public static GatewayReceiverStats createGatewayReceiverStats(StatisticsFactory f,
      String ownerName, MeterRegistry meterRegistry) {
    StatisticDescriptor[] descriptors = new StatisticDescriptor[] {
        // NOTE(review): "nanoseconds" looks wrong as a unit for a batch count
        // (probably meant "operations"); changing it alters published stat
        // metadata, so it is only flagged here.
        f.createIntCounter(DUPLICATE_BATCHES_RECEIVED,
            "number of batches which have already been seen by this GatewayReceiver",
            "nanoseconds"),
        f.createIntCounter(OUT_OF_ORDER_BATCHES_RECEIVED,
            "number of batches which are out of order on this GatewayReceiver", "operations"),
        f.createIntCounter(EARLY_ACKS, "number of early acknowledgements sent to gatewaySenders",
            "operations"),
        f.createLongCounter(EVENTS_RECEIVED,
            EVENTS_RECEIVED_COUNTER_DESCRIPTION,
            EVENTS_RECEIVED_COUNTER_UNITS),
        f.createIntCounter(CREAT_REQUESTS,
            "total number of create operations received by this GatewayReceiver", "operations"),
        f.createIntCounter(UPDATE_REQUESTS,
            "total number of update operations received by this GatewayReceiver", "operations"),
        f.createIntCounter(DESTROY_REQUESTS,
            "total number of destroy operations received by this GatewayReceiver", "operations"),
        f.createIntCounter(UNKNOWN_OPERATIONS_RECEIVED,
            "total number of unknown operations received by this GatewayReceiver", "operations"),
        // NOTE(review): "porcessing" typo lives in the published description
        // string; left as-is to avoid changing visible stat metadata.
        f.createIntCounter(EXCEPTIONS_OCCURRED,
            "number of exceptions occurred while porcessing the batches", "operations"),
        f.createIntCounter(EVENTS_RETRIED,
            "total number events retried by this GatewayReceiver due to exceptions", "operations")};

    return new GatewayReceiverStats(f, ownerName, typeName, descriptors, meterRegistry);
  }

  /**
   * Creates the statistics instance, resolves each descriptor name to its
   * numeric stat id, and registers a micrometer counter that is backed by the
   * legacy events received statistic.
   */
  public GatewayReceiverStats(StatisticsFactory f, String ownerName, String typeName,
      StatisticDescriptor[] descriptiors, MeterRegistry meterRegistry) {
    super(f, ownerName, typeName, descriptiors);
    // Initialize id fields
    // failoverBatchesReceivedId = statType.nameToId(FAILOVER_BATCHES_RECEIVED);
    duplicateBatchesReceivedId = statType.nameToId(DUPLICATE_BATCHES_RECEIVED);
    outoforderBatchesReceivedId = statType.nameToId(OUT_OF_ORDER_BATCHES_RECEIVED);
    earlyAcksId = statType.nameToId(EARLY_ACKS);
    eventsReceivedId = statType.nameToId(EVENTS_RECEIVED);
    createRequestId = statType.nameToId(CREAT_REQUESTS);
    updateRequestId = statType.nameToId(UPDATE_REQUESTS);
    destroyRequestId = statType.nameToId(DESTROY_REQUESTS);
    unknowsOperationsReceivedId = statType.nameToId(UNKNOWN_OPERATIONS_RECEIVED);
    exceptionsOccurredId = statType.nameToId(EXCEPTIONS_OCCURRED);
    eventsRetriedId = statType.nameToId(EVENTS_RETRIED);

    this.meterRegistry = meterRegistry;

    // The micrometer counter writes through to the legacy long statistic, so
    // incEventsReceived() keeps both views consistent.
    eventsReceivedCounter = LegacyStatCounter.builder(EVENTS_RECEIVED_COUNTER_NAME)
        .longStatistic(stats, eventsReceivedId)
        .description(EVENTS_RECEIVED_COUNTER_DESCRIPTION)
        .baseUnit(EVENTS_RECEIVED_COUNTER_UNITS)
        .register(meterRegistry);
  }

  // /////////////////// Instance Methods /////////////////////

  // /**
  // * Increments the number of failover batches received by 1.
  // */
  // public void incFailoverBatchesReceived() {
  // this.stats.incInt(failoverBatchesReceivedId, 1);
  // }
  //
  // public int getFailoverBatchesReceived() {
  // return this.stats.getInt(failoverBatchesReceivedId);
  // }

  /**
   * Increments the number of duplicate batches received by 1.
   */
  public void incDuplicateBatchesReceived() {
    this.stats.incInt(duplicateBatchesReceivedId, 1);
  }

  /** @return the current number of duplicate batches received */
  public int getDuplicateBatchesReceived() {
    return this.stats.getInt(duplicateBatchesReceivedId);
  }

  /**
   * Increments the number of out of order batches received by 1.
   */
  public void incOutoforderBatchesReceived() {
    this.stats.incInt(outoforderBatchesReceivedId, 1);
  }

  /** @return the current number of out-of-order batches received */
  public int getOutoforderBatchesReceived() {
    return this.stats.getInt(outoforderBatchesReceivedId);
  }

  /**
   * Increments the number of early acks by 1.
   */
  public void incEarlyAcks() {
    this.stats.incInt(earlyAcksId, 1);
  }

  /** @return the current number of early acknowledgements sent */
  public int getEarlyAcks() {
    return this.stats.getInt(earlyAcksId);
  }

  /**
   * Increments the number of events received by the given delta. Goes through
   * the micrometer counter, which also updates the legacy statistic.
   */
  public void incEventsReceived(int delta) {
    eventsReceivedCounter.increment(delta);
  }

  /**
   * @return the number of events received, narrowed from the counter's double
   *         value to an int
   */
  public int getEventsReceived() {
    return (int) eventsReceivedCounter.count();
  }

  /**
   * Increments the number of create requests by 1.
   */
  public void incCreateRequest() {
    this.stats.incInt(createRequestId, 1);
  }

  /** @return the current number of create requests received */
  public int getCreateRequest() {
    return this.stats.getInt(createRequestId);
  }

  /**
   * Increments the number of update requests by 1.
   */
  public void incUpdateRequest() {
    this.stats.incInt(updateRequestId, 1);
  }

  /** @return the current number of update requests received */
  public int getUpdateRequest() {
    return this.stats.getInt(updateRequestId);
  }

  /**
   * Increments the number of destroy request received by 1.
   */
  public void incDestroyRequest() {
    this.stats.incInt(destroyRequestId, 1);
  }

  /** @return the current number of destroy requests received */
  public int getDestroyRequest() {
    return this.stats.getInt(destroyRequestId);
  }

  /**
   * Increments the number of unknown operations received by 1.
   */
  public void incUnknowsOperationsReceived() {
    this.stats.incInt(unknowsOperationsReceivedId, 1);
  }

  /** @return the current number of unknown operations received */
  public int getUnknowsOperationsReceived() {
    return this.stats.getInt(unknowsOperationsReceivedId);
  }

  /**
   * Increments the number of exceptions occurred by 1.
   */
  public void incExceptionsOccurred() {
    this.stats.incInt(exceptionsOccurredId, 1);
  }

  /** @return the current number of exceptions occurred */
  public int getExceptionsOccurred() {
    return this.stats.getInt(exceptionsOccurredId);
  }

  /**
   * Increments the number of events retried by 1.
   */
  public void incEventsRetried() {
    this.stats.incInt(eventsRetriedId, 1);
  }

  /** @return the current number of events retried */
  public int getEventsRetried() {
    return this.stats.getInt(eventsRetriedId);
  }

  /**
   * Returns the current time (ns).
   *
   * @return the current time (ns)
   */
  public long startTime() {
    return DistributionStats.getStatTime();
  }

  /**
   * Deregisters this receiver's meter from the registry, then closes the
   * underlying statistics.
   */
  @Override
  public void close() {
    meterRegistry.remove(eventsReceivedCounter);
    super.close();
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.