gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package org.sakaiproject.content.providers;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.api.app.scheduler.DelayedInvocation;
import org.sakaiproject.api.app.scheduler.ScheduledInvocationCommand;
import org.sakaiproject.api.app.scheduler.ScheduledInvocationManager;
import org.sakaiproject.authz.api.SecurityAdvisor;
import org.sakaiproject.authz.api.SecurityService;
import org.sakaiproject.db.api.SqlReader;
import org.sakaiproject.db.api.SqlReaderFinishedException;
import org.sakaiproject.db.api.SqlService;
import org.sakaiproject.event.api.Event;
import org.sakaiproject.event.api.EventDelayHandler;
import org.sakaiproject.event.api.EventTrackingService;
import org.sakaiproject.time.api.Time;
import org.sakaiproject.user.api.User;
import org.sakaiproject.user.api.UserDirectoryService;
import org.sakaiproject.user.api.UserNotDefinedException;
public class BaseEventDelayHandler implements EventDelayHandler, ScheduledInvocationCommand
{
    private static final Log LOG = LogFactory.getLog(BaseEventDelayHandler.class);

    /** When true, init() runs the DDL that creates the event delay table. */
    private boolean autoDdl;
    private SqlService sqlService;
    private ScheduledInvocationManager schedInvocMgr;
    private UserDirectoryService userDirectoryService;
    private EventTrackingService eventService;
    private SecurityService securityService;

    /** contains a map of the database dependent handlers, keyed by vendor name. */
    protected Map<String, BaseEventDelayHandlerSql> databaseBeans;

    /** contains database dependent code for the active vendor. */
    protected BaseEventDelayHandlerSql baseEventDelayHandlerSql;

    public void setDatabaseBeans(Map<String, BaseEventDelayHandlerSql> databaseBeans)
    {
        this.databaseBeans = databaseBeans;
    }

    public BaseEventDelayHandlerSql getBaseEventDelayHandlerSql()
    {
        return baseEventDelayHandlerSql;
    }

    /**
     * Sets which bean containing database dependent code should be used depending on the
     * database vendor. Falls back to the bean registered under "default" when the vendor
     * has no dedicated bean.
     */
    public void setBaseEventDelayHandlerSqlSql(String vendor)
    {
        this.baseEventDelayHandlerSql = (databaseBeans.containsKey(vendor) ? databaseBeans.get(vendor) : databaseBeans.get("default"));
    }

    public void setUserDirectoryService(UserDirectoryService userDirectoryService)
    {
        this.userDirectoryService = userDirectoryService;
    }

    public void setEventService(EventTrackingService eventService)
    {
        this.eventService = eventService;
    }

    public void setSqlService(SqlService sqlService)
    {
        this.sqlService = sqlService;
    }

    public void setSecurityService(SecurityService securityService)
    {
        this.securityService = securityService;
    }

    public void setSchedInvocMgr(ScheduledInvocationManager schedInvocMgr)
    {
        this.schedInvocMgr = schedInvocMgr;
    }

    public void setAutoDdl(boolean autoDdl)
    {
        this.autoDdl = autoDdl;
    }

    /**
     * Selects the vendor specific SQL bean, optionally runs the delay-table DDL, and
     * registers this instance as the event delay handler with the event service.
     */
    public void init()
    {
        setBaseEventDelayHandlerSqlSql(sqlService.getVendor());
        if (autoDdl)
        {
            // load the base ddl
            sqlService.ddl(this.getClass().getClassLoader(), "sakai_event_delay");
        }
        eventService.setEventDelayHandler(this);
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.providers.EventDelayHandler#readDelay(java.lang.String)
     */
    @SuppressWarnings("unchecked")
    public Event readDelay(String delayId)
    {
        List<Event> results = sqlService.dbRead(baseEventDelayHandlerSql.getDelayReadSql(), new Long[] { Long.parseLong(delayId) },
                new SqlReader()
                {
                    public Object readSqlResultRecord(ResultSet result)
                            throws SqlReaderFinishedException
                    {
                        Event e = null;
                        try
                        {
                            // Column order: EVENT_DELAY_ID, EVENT, EVENT_CODE, PRIORITY, REF, USER_ID
                            // EVENT_CODE "m" marks a modify event; anything else is an access event.
                            e = new ReEvent(result.getString(2), "m".equals(result.getString(3)), result
                                    .getInt(4), result.getString(5), result.getString(6));
                        }
                        catch (SQLException se)
                        {
                            LOG.error("Error trying to build event on read", se);
                        }
                        return e;
                    }
                });
        // At most one row matches the primary key; return it or null.
        Event e = null;
        if (!results.isEmpty())
            e = results.get(0);
        return e;
    }

    /**
     * Read an event delay and delete it from the db.
     *
     * @param delayId The id of the delay row to read and remove.
     * @return The event found, or null when no row matched.
     */
    public Event popEventDelay(String delayId)
    {
        Event e = readDelay(delayId);
        deleteDelayById(delayId);
        return e;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.providers.EventDelayHandler#findDelayIds(org.sakaiproject.event.api.Event, java.lang.String)
     */
    @SuppressWarnings("unchecked")
    public List<String> findDelayIds(Event event, String userId)
    {
        Object[] fields = new Object[] { event.getEvent(), event.getModify() ? "m" : "a", event.getPriority(),
                event.getResource(), userId };
        return sqlService.dbRead(baseEventDelayHandlerSql.getDelayFindFineSql(), fields, null);
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.providers.EventDelayHandler#findDelayIds(org.sakaiproject.event.api.Event)
     */
    @SuppressWarnings("unchecked")
    public List<String> findDelayIds(Event event)
    {
        Object[] fields = new Object[] { event.getEvent(), event.getModify() ? "m" : "a", event.getPriority(),
                event.getResource() };
        return sqlService.dbRead(baseEventDelayHandlerSql.getDelayFindEventSql(), fields, null);
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.providers.EventDelayHandler#findDelayIds(java.lang.String, java.lang.String)
     */
    @SuppressWarnings("unchecked")
    public List<String> findDelayIds(String resource, String event)
    {
        Object[] fields = new Object[] { resource, event };
        return sqlService.dbRead(baseEventDelayHandlerSql.getDelayFindByRefEventSql(), fields, null);
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.providers.EventDelayHandler#findDelayIds(java.lang.String)
     */
    @SuppressWarnings("unchecked")
    public List<String> findDelayIds(String resource)
    {
        Object[] fields = new Object[] { resource };
        return sqlService.dbRead(baseEventDelayHandlerSql.getDelayFindByRefSql(), fields, null);
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.providers.EventDelayHandler#createDelay(org.sakaiproject.event.api.Event)
     */
    public String createDelay(Event event, Time fireTime)
    {
        return createDelay(event, event.getUserId(), fireTime);
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.providers.EventDelayHandler#createDelay(org.sakaiproject.event.api.Event, java.lang.String)
     */
    @SuppressWarnings("unchecked")
    public String createDelay(Event event, String userId, Time fireTime)
    {
        // delete previous like delays so only the newest schedule survives
        deleteDelay(event);
        Object[] fields = new Object[] { event.getEvent(), event.getModify() ? "m" : "a", event.getPriority(),
                event.getResource(), userId };
        sqlService.dbWrite(baseEventDelayHandlerSql.getDelayWriteSql(), fields);
        // Re-read the row we just wrote to obtain its generated id.
        List<String> ids = sqlService.dbRead(baseEventDelayHandlerSql.getDelayFindFineSql(), fields, null);
        String id = null;
        if (!ids.isEmpty())
            id = (String) ids.get(0);
        if (id == null)
        {
            // BUGFIX: previously a null id was still handed to the scheduler, creating an
            // invocation that could never resolve to a delay row.
            LOG.warn("Unable to find newly created delay for event " + event.getEvent());
            return null;
        }
        // Schedule the new delayed invocation
        LOG.info("Creating new delayed event [" + id + "]");
        schedInvocMgr.createDelayedInvocation(fireTime, BaseEventDelayHandler.class.getName(), id);
        return id;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.providers.EventDelayHandler#deleteDelayById(java.lang.String)
     */
    public boolean deleteDelayById(String delayId)
    {
        // Remove any existing scheduled invocations for this delay
        DelayedInvocation[] prevInvocs = schedInvocMgr.findDelayedInvocations(
                BaseEventDelayHandler.class.getName(), delayId);
        if (prevInvocs != null && prevInvocs.length > 0)
        {
            for (DelayedInvocation invoc : prevInvocs)
            {
                LOG.debug("Deleting delayed event [" + invoc.contextId + "]");
                schedInvocMgr.deleteDelayedInvocation(invoc.uuid);
            }
        }
        // Then remove the delay row itself.
        return sqlService
                .dbWrite(baseEventDelayHandlerSql.getDelayDeleteSql(), new Object[] { Long.parseLong(delayId) });
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.providers.EventDelayHandler#deleteDelay(org.sakaiproject.event.api.Event)
     */
    public boolean deleteDelay(Event e)
    {
        boolean ret = true;
        List<String> ids = findDelayIds(e);
        for (String id : ids)
        {
            ret &= deleteDelayById(id);
        }
        return ret;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.providers.EventDelayHandler#deleteDelay(java.lang.String, java.lang.String)
     */
    public boolean deleteDelay(String resource, String event)
    {
        boolean ret = true;
        List<String> ids = findDelayIds(resource, event);
        for (String id : ids)
        {
            ret &= deleteDelayById(id);
        }
        return ret;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.providers.EventDelayHandler#deleteDelay(java.lang.String)
     */
    public boolean deleteDelay(String resource)
    {
        boolean ret = true;
        List<String> ids = findDelayIds(resource);
        for (String id : ids)
        {
            ret &= deleteDelayById(id);
        }
        return ret;
    }

    /**
     * Deserializes the context into an event and refires the event, impersonating the
     * original user when that user can still be resolved.
     */
    public void execute(String opaqueContext)
    {
        // need to instantiate components locally because this class is instantiated by the
        // scheduled invocation manager and not pulled from spring context.
        final Event event = popEventDelay(opaqueContext);
        if (event == null)
        {
            LOG.warn("Delayed event not found [" + opaqueContext + "]");
            return;
        }
        LOG.info("Refiring delayed event [" + opaqueContext + "]");
        User user = null;
        try
        {
            user = userDirectoryService.getUser(event.getUserId());
        }
        catch (UserNotDefinedException unde)
        {
            // can't find the user so refire the event without user impersonation
            eventService.post(event);
            return;
        }
        // Set up security advisor.
        // BUGFIX: only pop the advisor when it was actually pushed. The old code ran
        // popAdvisor() in a finally block that also covered getUser(), so a
        // UserNotDefinedException caused it to pop an advisor this method never pushed.
        securityService.pushAdvisor(new SecurityAdvisor() {
            public SecurityAdvice isAllowed(String userId, String function, String reference) {
                // Allow anything the original event's user could do; otherwise defer.
                if (securityService.unlock(event.getUserId(), function, reference)) {
                    return SecurityAdvice.ALLOWED;
                }
                return SecurityAdvice.PASS;
            }
        });
        try
        {
            eventService.post(event, user);
        }
        finally
        {
            // Clear security advisor
            securityService.popAdvisor();
        }
    }

    /**
     * Local implementation of Event to allow the setting of all fields when refiring an event after
     * it has been scheduled to run later than it was originally fired.
     */
    protected static class ReEvent implements Event
    {
        private String event;
        private boolean modify;
        private int priority;
        private String resource;
        // context and sessionId are not persisted with the delay; they stay null.
        private String context;
        private String sessionId;
        private String userId;

        public ReEvent(String event, boolean modify, int priority, String resource, String userId)
        {
            this.event = event;
            this.modify = modify;
            this.priority = priority;
            this.resource = resource;
            this.userId = userId;
        }

        public String getEvent()
        {
            return event;
        }

        public boolean getModify()
        {
            return modify;
        }

        public int getPriority()
        {
            return priority;
        }

        public String getResource()
        {
            return resource;
        }

        public String getSessionId()
        {
            return sessionId;
        }

        public String getUserId()
        {
            return userId;
        }

        public String getContext()
        {
            return context;
        }

        /** The original event time is not stored in the delay table; always null. */
        public Date getEventTime() {
            return null;
        }
    }
}
| |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.lib.jdbc;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import com.google.common.util.concurrent.UncheckedExecutionException;
import com.streamsets.pipeline.api.Batch;
import com.streamsets.pipeline.api.BatchContext;
import com.streamsets.pipeline.api.ErrorCode;
import com.streamsets.pipeline.api.Field;
import com.streamsets.pipeline.api.PushSource;
import com.streamsets.pipeline.api.Record;
import com.streamsets.pipeline.api.Stage;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.api.base.OnRecordErrorException;
import com.streamsets.pipeline.api.el.ELEval;
import com.streamsets.pipeline.api.el.ELVars;
import com.streamsets.pipeline.api.impl.Utils;
import com.streamsets.pipeline.lib.el.ELUtils;
import com.streamsets.pipeline.lib.event.NoMoreDataEvent;
import com.streamsets.pipeline.lib.jdbc.multithread.ConnectionManager;
import com.streamsets.pipeline.lib.jdbc.multithread.DatabaseVendor;
import com.streamsets.pipeline.lib.jdbc.multithread.TableContextUtil;
import com.streamsets.pipeline.lib.operation.OperationType;
import com.streamsets.pipeline.stage.common.ErrorRecordHandler;
import com.streamsets.pipeline.stage.common.HeaderAttributeConstants;
import com.streamsets.pipeline.stage.destination.jdbc.Groups;
import com.streamsets.pipeline.stage.origin.jdbc.CommonSourceConfigBean;
import com.streamsets.pipeline.stage.origin.jdbc.table.QuoteChar;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import microsoft.sql.DateTimeOffset;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLXML;
import java.sql.Statement;
import java.sql.Timestamp;
import java.sql.Types;
import java.time.Instant;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.time.OffsetTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static com.streamsets.pipeline.lib.jdbc.HikariPoolConfigBean.MILLISECONDS;
/**
* Utility classes for working with JDBC
*/
public class JdbcUtil {
private static final Logger LOG = LoggerFactory.getLogger(JdbcUtil.class);
/**
 * Position in ResultSet for column and primary key metadata of the column name.
 *
 * @see java.sql.DatabaseMetaData#getColumns
 * @see java.sql.DatabaseMetaData#getPrimaryKeys
 */
private static final int COLUMN_NAME = 4;
// Marker for the start of an EL expression in configuration values.
private static final String EL_PREFIX = "${";
// Configuration property name for custom column-to-field mappings.
private static final String CUSTOM_MAPPINGS = "columnNames";
/**
 * Column name for extracting table name for imported keys
 *
 * @see java.sql.DatabaseMetaData#getImportedKeys(String, String, String)
 */
private static final String PK_TABLE_NAME = "PKTABLE_NAME";
// Configuration property names shared by JDBC stages.
public static final String TABLE_NAME = "tableNameTemplate";
public static final String SCHEMA_NAME = "schema";
/**
 * List of RDBMs that do not differentiate between databases and schemas.
 * Use lower-case for any new value.
 */
private static final String[] RDBMS_WITHOUT_SCHEMAS = {"mysql", "mariadb", "memsql", "teradata"};
/**
 * Joiners used when assembling parameterized SQL statements: plain column lists,
 * "col = ?" SET fragments, "col = ? AND" WHERE fragments, and the same three with
 * double-quoted identifiers.
 *
 * @see java.sql.Connection#prepareStatement
 */
private static final Joiner joiner = Joiner.on(", ");
private static final Joiner joinerColumn = Joiner.on(" = ?, ");
private static final Joiner joinerWhereClause = Joiner.on(" = ? AND ");
private static final Joiner joinerWithQuote = Joiner.on("\", \"");
private static final Joiner joinerColumnWithQuote = Joiner.on("\" = ?, \"");
private static final Joiner joinerWhereClauseWitheQuote = Joiner.on("\" = ? AND \"");
// JDBC metadata table-type filters: tables only, or tables plus views.
private static final String[] METADATA_TABLE_TYPE = new String[]{"TABLE"};
private static final String[] METADATA_TABLE_VIEW_TYPE = new String[]{"TABLE", "VIEW"};
/**
 * The query to select the min value for a particular offset column
 */
public static final String MIN_OFFSET_VALUE_QUERY = "SELECT MIN(%s) FROM %s";
/**
 * The query to select the max value for a particular offset column
 */
public static final String MAX_OFFSET_VALUE_QUERY = "SELECT MAX(%s) FROM %s";
/**
 * The index within the result set for the column that contains the min or max offset value
 */
private static final int MIN_MAX_OFFSET_VALUE_QUERY_RESULT_SET_INDEX = 1;
// Conversion factor between nanoseconds and milliseconds.
public static final int NANOS_TO_MILLIS_ADJUSTMENT = 1_000_000;
// Field attribute used to carry sub-millisecond precision alongside a field value.
public static final String FIELD_ATTRIBUTE_NANOSECONDS = "nanoSeconds";
public JdbcUtil() {
}
/**
 * <p>Mapping of sqlStates that when encountered should determine that we will send a record to the
 * error pipeline. All other SQL states will result in a StageException.
 * </p>
 * <p>
 * Errors that result in the record to error pipeline should generally be due to invalid data.
 * Other exceptions are either an error in our system or the database, and should cause a StageException.
 * </p>
 * <p>
 * To minimize the initial size of this mapping, SqlState error classes are listed here and not the full error
 * codes as there are many.
 * </p>
 */
private static final Map<String, String> STANDARD_DATA_ERROR_SQLSTATES = ImmutableMap.of(
"21", "Cardinality violation",
"22", "Data exception",
"23", "Constraint violation",
"42", "Syntax error or access rule violation",
"44", "WITH CHECK OPTION violation"
);
/**
 * Oracle specific SQL States: https://docs.oracle.com/cd/E15817_01/appdev.111/b31228/appd.htm
 */
private static final Set<String> ORACLE_DATA_SQLSTATES = ImmutableSet.of(
"72000"
);
/**
 * MySQL does not use standard SQL States for some errors
 * handle those as a special case. See MySQL doc:
 * Server Error Codes and Messages
 */
private static final String MYSQL_GENERAL_ERROR = "HY000";
// Keys are MySQL error codes; values are MySQL's own message templates (the
// placeholders, including %ld, come from the MySQL documentation, not Java).
private static final Map<String, String> MYSQL_DATA_ERROR_ERROR_CODES = ImmutableMap.of(
"1364", "Field '%s' doesn't have a default value",
"1366", "Incorrect %s value: '%s' for column '%s' at row %ld",
"1391", "Key part '%s' length cannot be 0"
);
/**
 * Returns the JDBC metadata table types that match both tables and views.
 *
 * @return a defensive copy of the {"TABLE", "VIEW"} array; previously the internal
 *         constant itself was exposed, so a caller could mutate shared state
 */
public static String[] getMetadataTableViewType() {
  return METADATA_TABLE_VIEW_TYPE.clone();
}
/**
 * Decides whether a SQLException represents bad data (record goes to the error
 * pipeline) as opposed to a system/database failure (StageException).
 *
 * @param customDataSqlCodes user-configured SQL states to treat as data errors
 * @param connectionString JDBC URL, used to detect vendor-specific handling
 * @param ex the exception to classify
 * @return true when the exception should be treated as a data error
 */
public boolean isDataError(List<String> customDataSqlCodes, String connectionString, SQLException ex) {
  SQLException current = ex;
  String state = Strings.nullToEmpty(current.getSQLState());
  String code = String.valueOf(current.getErrorCode());
  // Walk the chained exceptions until one carries a non-empty SQL state.
  while (StringUtils.isEmpty(state) && current.getNextException() != null) {
    current = current.getNextException();
    state = Strings.nullToEmpty(current.getSQLState());
    code = String.valueOf(current.getErrorCode());
  }
  if (customDataSqlCodes.contains(state)) {
    return true;
  }
  // MySQL hides several data errors behind the generic HY000 state.
  if (state.equals(MYSQL_GENERAL_ERROR) && connectionString.contains(":mysql")) {
    return MYSQL_DATA_ERROR_ERROR_CODES.containsKey(code);
  }
  if (connectionString.contains(":oracle:") && ORACLE_DATA_SQLSTATES.contains(state)) {
    return true;
  }
  // Fall back to the standard two-character SQL state class.
  return state.length() >= 2 && STANDARD_DATA_ERROR_SQLSTATES.containsKey(state.substring(0, 2));
}
/**
 * Formats the error message of a {@link java.sql.SQLException} for human consumption.
 * Walks the exception chain (SQLException is Iterable over its chained exceptions)
 * and, for each distinct SQLException, appends its SQLState, error code, message,
 * and its cause chain; duplicate messages are suppressed.
 *
 * @param ex SQLException
 * @return Formatted string with database-specific error code, error message, and SQLState
 */
public String formatSqlException(SQLException ex) {
  StringBuilder sb = new StringBuilder();
  Set<String> messages = new HashSet<>();
  for (Throwable e : ex) {
    if (e instanceof SQLException) {
      String message = e.getMessage();
      if (!messages.add(message)) {
        continue;
      }
      sb.append("SQLState: " + ((SQLException) e).getSQLState() + "\n")
        .append("Error Code: " + ((SQLException) e).getErrorCode() + "\n")
        .append("Message: " + message + "\n");
      // BUGFIX: walk the cause chain of the *current* element (e), not of the
      // original argument (ex). The old code re-printed ex's causes for every
      // chained exception and never reported each element's own causes.
      Throwable t = e.getCause();
      while (t != null) {
        if (messages.add(t.getMessage())) {
          sb.append("Cause: " + t + "\n");
        }
        t = t.getCause();
      }
    }
  }
  return sb.toString();
}
/**
 * Wrapper for {@link Connection#getCatalog()} that handles RDBMSs for which catalog
 * and schema are the same concept. For those vendors the supplied schema name is
 * returned when it is non-null and non-empty; in every other case the connection's
 * catalog is returned.
 *
 * @param connection An open JDBC connection
 * @param schema The schema name we want to use
 * @return The current catalog or the schema name passed as argument
 * @throws SQLException if metadata cannot be read
 */
private String getCatalog(Connection connection, String schema) throws SQLException {
  if (!Strings.isNullOrEmpty(schema)) {
    String productName = connection.getMetaData().getDatabaseProductName().toLowerCase();
    for (String rdbms : RDBMS_WITHOUT_SCHEMAS) {
      if (productName.contains(rdbms)) {
        // This vendor conflates catalog and schema; use the schema directly.
        return schema;
      }
    }
  }
  return connection.getCatalog();
}
/**
 * Wrapper for {@link java.sql.DatabaseMetaData#getColumns(String, String, String, String)}
 * that resolves the catalog for vendors where catalog and schema coincide.
 *
 * @param connection An open JDBC connection
 * @param schema schema name, may be null
 * @param tableName table name that is optionally fully qualified with a schema in the form schema.tableName
 * @return ResultSet containing the column metadata
 * @throws SQLException if metadata cannot be read
 */
public ResultSet getColumnMetadata(Connection connection, String schema, String tableName) throws SQLException {
  // Fetch all columns for this table; null column pattern matches every column.
  return connection.getMetaData().getColumns(getCatalog(connection, schema), schema, tableName, null);
}
/**
 * Wrapper for {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])}
 * restricted to tables and views.
 *
 * @param connection open JDBC connection
 * @param schema schema name, can be null
 * @param tableName table name or pattern, optionally fully qualified in the form schema.tableName
 * @return ResultSet containing the table and view metadata
 * @throws SQLException if metadata cannot be read
 */
public ResultSet getTableAndViewMetadata(
    Connection connection,
    String schema,
    String tableName
) throws SQLException {
  DatabaseMetaData metadata = connection.getMetaData();
  String catalog = getCatalog(connection, schema);
  return metadata.getTables(catalog, schema, tableName, METADATA_TABLE_VIEW_TYPE);
}
/**
 * Wrapper for {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])}
 * restricted to tables only (no views).
 *
 * @param connection open JDBC connection
 * @param schema schema name, can be null
 * @param tableName table name or pattern, optionally fully qualified in the form schema.tableName
 * @return ResultSet containing the table metadata
 * @throws SQLException if metadata cannot be read
 */
public ResultSet getTableMetadata(Connection connection, String schema, String tableName) throws SQLException {
  return connection.getMetaData().getTables(getCatalog(connection, schema), schema, tableName, METADATA_TABLE_TYPE);
}
/**
 * Wrapper for {@link java.sql.DatabaseMetaData#getPrimaryKeys(String, String, String)}.
 *
 * @param connection An open JDBC connection
 * @param schema schema name, may be null
 * @param tableName table name that is optionally fully qualified with a schema in the form schema.tableName
 * @return List of primary key column names for a table
 * @throws SQLException if metadata cannot be read
 */
public List<String> getPrimaryKeys(Connection connection, String schema, String tableName) throws SQLException {
  List<String> primaryKeys = new ArrayList<>();
  DatabaseMetaData metadata = connection.getMetaData();
  try (ResultSet rs = metadata.getPrimaryKeys(getCatalog(connection, schema), schema, tableName)) {
    while (rs.next()) {
      primaryKeys.add(rs.getString(COLUMN_NAME));
    }
  }
  return primaryKeys;
}
/**
 * Returns the minimum value of each offset column for the given table,
 * keyed by column name.
 *
 * @throws SQLException if the query fails
 */
public static Map<String, String> getMinimumOffsetValues(
    DatabaseVendor vendor,
    Connection connection,
    String schema,
    String tableName,
    QuoteChar quoteChar,
    Collection<String> offsetColumnNames
) throws SQLException {
  // Delegate to the shared helper with the MIN(...) query template.
  return getMinMaxOffsetValueHelper(
      MIN_OFFSET_VALUE_QUERY, vendor, connection, schema, tableName, quoteChar, offsetColumnNames);
}
/**
 * Returns the maximum value of each offset column for the given table,
 * keyed by column name.
 *
 * @throws SQLException if the query fails
 */
public static Map<String, String> getMaximumOffsetValues(
    DatabaseVendor vendor,
    Connection connection,
    String schema,
    String tableName,
    QuoteChar quoteChar,
    Collection<String> offsetColumnNames
) throws SQLException {
  // Delegate to the shared helper with the MAX(...) query template.
  return getMinMaxOffsetValueHelper(
      MAX_OFFSET_VALUE_QUERY, vendor, connection, schema, tableName, quoteChar, offsetColumnNames);
}
/** Converts a SQL DATE to epoch milliseconds at UTC midnight of that date. */
private static long getEpochMillisFromSqlDate(java.sql.Date date) {
  LocalDate localDate = date.toLocalDate();
  return localDate.atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
}
/**
 * Converts a SQL TIME to milliseconds since midnight (whole seconds only —
 * java.sql.Time#toLocalTime carries no sub-second precision).
 */
private static long getEpochMillisFromSqlTime(java.sql.Time time) {
  long secondsIntoDay = time.toLocalTime().toSecondOfDay();
  return secondsIntoDay * 1000L;
}
/**
 * Runs the supplied MIN/MAX query template against each offset column of the given
 * table and returns the resulting values as strings keyed by column name.
 * Vendor-specific timestamp-with-timezone types (Oracle, SQL Server) are formatted
 * as ISO offset date-times; DATE/TIME/TIMESTAMP become epoch-millis strings; all
 * other types fall back to ResultSet.getString(). Columns whose query returns no
 * row or a null value are omitted from the map.
 */
private static Map<String, String> getMinMaxOffsetValueHelper(
String minMaxQuery,
DatabaseVendor vendor,
Connection connection,
String schema,
String tableName,
QuoteChar quoteChar,
Collection<String> offsetColumnNames
) throws SQLException {
Map<String, String> minMaxOffsetValues = new HashMap<>();
// Quote schema/table once; the same qualified name is reused for every column.
final String qualifiedTableName = TableContextUtil.getQuotedQualifiedTableName(
schema,
tableName,
quoteChar.getQuoteCharacter()
);
for (String offsetColumn : offsetColumnNames) {
final String qualifiedOffsetColumn = TableContextUtil.getQuotedObjectName(offsetColumn, quoteChar.getQuoteCharacter());
final String minMaxOffsetQuery = String.format(minMaxQuery, qualifiedOffsetColumn, qualifiedTableName);
LOG.debug("Issuing {} offset query: {}",
minMaxQuery.equals(MIN_OFFSET_VALUE_QUERY) ? "MINIMUM" : "MAXIMUM", minMaxOffsetQuery);
try (
Statement st = connection.createStatement();
ResultSet rs = st.executeQuery(minMaxOffsetQuery)
) {
if (rs.next()) {
String minMaxValue = null;
final int colType = rs.getMetaData().getColumnType(MIN_MAX_OFFSET_VALUE_QUERY_RESULT_SET_INDEX);
// First pass: vendor-specific types that plain JDBC getters cannot represent.
switch (vendor) {
case ORACLE:
if(TableContextUtil.VENDOR_PARTITIONABLE_TYPES.get(DatabaseVendor.ORACLE).contains(colType)) {
switch (colType) {
case TableContextUtil.TYPE_ORACLE_TIMESTAMP_WITH_LOCAL_TIME_ZONE:
case TableContextUtil.TYPE_ORACLE_TIMESTAMP_WITH_TIME_ZONE:
OffsetDateTime offsetDateTime = rs.getObject(MIN_MAX_OFFSET_VALUE_QUERY_RESULT_SET_INDEX, OffsetDateTime.class);
if(offsetDateTime != null) {
minMaxValue = offsetDateTime.toZonedDateTime().format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
}
break;
default:
// Partitionable per the table above but unhandled here — a programming error.
throw new IllegalStateException(Utils.format("Unexpected type: {}", colType));
}
}
break;
case SQL_SERVER:
if(TableContextUtil.VENDOR_PARTITIONABLE_TYPES.get(DatabaseVendor.SQL_SERVER).contains(colType)) {
if (colType == TableContextUtil.TYPE_SQL_SERVER_DATETIMEOFFSET) {
DateTimeOffset dateTimeOffset = rs.getObject(MIN_MAX_OFFSET_VALUE_QUERY_RESULT_SET_INDEX, DateTimeOffset.class);
if (dateTimeOffset != null) {
minMaxValue = dateTimeOffset.getOffsetDateTime().toZonedDateTime().format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
}
}
}
break;
}
// Second pass: standard JDBC types, used when no vendor branch produced a value.
if(minMaxValue == null) {
switch (colType) {
case Types.DATE:
java.sql.Date date = rs.getDate(MIN_MAX_OFFSET_VALUE_QUERY_RESULT_SET_INDEX);
if (date != null) {
minMaxValue = String.valueOf(
getEpochMillisFromSqlDate(date)
);
}
break;
case Types.TIME:
java.sql.Time time = rs.getTime(MIN_MAX_OFFSET_VALUE_QUERY_RESULT_SET_INDEX);
if (time != null) {
minMaxValue = String.valueOf(
getEpochMillisFromSqlTime(time)
);
}
break;
case Types.TIMESTAMP:
Timestamp timestamp = rs.getTimestamp(MIN_MAX_OFFSET_VALUE_QUERY_RESULT_SET_INDEX);
if (timestamp != null) {
final Instant instant = timestamp.toInstant();
minMaxValue = String.valueOf(instant.toEpochMilli());
}
break;
default:
// Everything else (numbers, strings, ...) is taken as its string form.
minMaxValue = rs.getString(MIN_MAX_OFFSET_VALUE_QUERY_RESULT_SET_INDEX);
break;
}
}
if (minMaxValue != null) {
minMaxOffsetValues.put(offsetColumn, minMaxValue);
}
} else {
LOG.warn("Unable to get minimum offset value using query {}; result set had no rows", minMaxOffsetQuery);
}
}
}
return minMaxOffsetValues;
}
/**
 * Wrapper for {@link java.sql.DatabaseMetaData#getImportedKeys(String, String, String)}.
 *
 * @param connection An open JDBC connection
 * @param schema schema name, may be null
 * @param tableName table name that is optionally fully qualified with a schema in the form schema.tableName
 * @return List of Table Names whose primary key are referred as foreign key by the table tableName
 * @throws SQLException if metadata cannot be read
 */
public Set<String> getReferredTables(Connection connection, String schema, String tableName) throws SQLException {
  DatabaseMetaData metadata = connection.getMetaData();
  Set<String> referredTables = new HashSet<>();
  // BUGFIX: close the ResultSet — the original leaked it; this now matches the
  // try-with-resources pattern used by getPrimaryKeys().
  try (ResultSet result = metadata.getImportedKeys(getCatalog(connection, schema), schema, tableName)) {
    while (result.next()) {
      referredTables.add(result.getString(PK_TABLE_NAME));
    }
  }
  return referredTables;
}
/**
 * Attaches per-column JDBC metadata to the record header: a ".jdbcType" attribute
 * for every column, ".scale"/".precision" for decimal/numeric columns, and a
 * comma-joined "tables" attribute listing the source tables (falling back to the
 * supplied known table names when the driver reports none).
 *
 * @throws SQLException if column metadata cannot be read
 */
public void setColumnSpecificHeaders(
    Record record,
    Set<String> knownTableNames,
    ResultSetMetaData metaData,
    String jdbcNameSpacePrefix
) throws SQLException {
  Record.Header header = record.getHeader();
  Set<String> tables = new HashSet<>();
  int columnCount = metaData.getColumnCount();
  for (int col = 1; col <= columnCount; col++) {
    String label = metaData.getColumnLabel(col);
    int jdbcType = metaData.getColumnType(col);
    header.setAttribute(jdbcNameSpacePrefix + label + ".jdbcType", String.valueOf(jdbcType));
    // Decimal-like columns additionally carry scale and precision.
    if (jdbcType == Types.DECIMAL || jdbcType == Types.NUMERIC) {
      header.setAttribute(jdbcNameSpacePrefix + label + ".scale", String.valueOf(metaData.getScale(col)));
      header.setAttribute(jdbcNameSpacePrefix + label + ".precision", String.valueOf(metaData.getPrecision(col)));
    }
    // Store the column's table name (if not empty)
    String sourceTable = metaData.getTableName(col);
    if (StringUtils.isNotEmpty(sourceTable)) {
      tables.add(sourceTable);
    }
  }
  if (tables.isEmpty()) {
    tables.addAll(knownTableNames);
  }
  header.setAttribute(jdbcNameSpacePrefix + "tables", Joiner.on(",").join(tables));
}
/**
 * Reads a CLOB into a String, truncating at maxClobSize characters.
 *
 * @param data the CLOB to read; null yields null
 * @param maxClobSize maximum number of characters to keep
 * @throws IOException if the character stream fails
 * @throws SQLException if the CLOB cannot be opened
 */
private String getClobString(Clob data, int maxClobSize) throws IOException, SQLException {
  if (data == null) {
    return null;
  }
  StringBuilder contents = new StringBuilder();
  char[] buffer = new char[1024];
  long remaining = maxClobSize;
  try (Reader reader = data.getCharacterStream()) {
    int read;
    while ((read = reader.read(buffer)) > -1 && remaining > 0) {
      // Never append more characters than the remaining budget allows.
      int toAppend = read > remaining ? (int) remaining : read;
      contents.append(buffer, 0, toAppend);
      remaining -= toAppend;
    }
  }
  return contents.toString();
}
/**
 * Reads a BLOB into a byte array, truncating at maxBlobSize bytes.
 *
 * @param data the BLOB to read; null yields null
 * @param maxBlobSize maximum number of bytes to keep
 * @throws IOException if the binary stream fails
 * @throws SQLException if the BLOB cannot be opened
 */
private byte[] getBlobBytes(Blob data, int maxBlobSize) throws IOException, SQLException {
  if (data == null) {
    return null;
  }
  ByteArrayOutputStream contents = new ByteArrayOutputStream();
  byte[] buffer = new byte[1024];
  long remaining = maxBlobSize;
  try (InputStream stream = data.getBinaryStream()) {
    int read;
    while ((read = stream.read(buffer)) > -1 && remaining > 0) {
      // Never write more bytes than the remaining budget allows.
      int toWrite = read > remaining ? (int) remaining : read;
      contents.write(buffer, 0, toWrite);
      remaining -= toWrite;
    }
  }
  return contents.toByteArray();
}
/**
 * Convenience overload of resultToField: the column's own JDBC type is used
 * (no user override), timestamps are kept as native date-time fields, and the
 * database vendor is treated as unknown.
 */
public Field resultToField(
    ResultSetMetaData md,
    ResultSet rs,
    int columnIndex,
    int maxClobSize,
    int maxBlobSize,
    UnknownTypeAction unknownTypeAction
) throws SQLException, IOException, StageException {
  return resultToField(
      md, rs, columnIndex, maxClobSize, maxBlobSize,
      DataType.USE_COLUMN_TYPE, unknownTypeAction, false, DatabaseVendor.UNKNOWN);
}
/**
 * Converts the value of one result-set column into an SDC {@link Field}.
 *
 * <p>Resolution order: an explicit user-specified type wins; otherwise
 * vendor-specific types (Oracle binary float/double, Oracle/SQL Server timestamps
 * with time zone, MySQL/MariaDB unsigned integers) are handled first; finally the
 * standard {@code java.sql.Types} mapping applies.
 *
 * @param md result-set metadata for type lookups
 * @param rs positioned result set to read from
 * @param columnIndex 1-based column index
 * @param maxClobSize maximum characters to read from CLOB/NCLOB columns
 * @param maxBlobSize maximum bytes to read from BLOB columns
 * @param userSpecifiedType overrides the column's declared type unless USE_COLUMN_TYPE
 * @param unknownTypeAction what to do for unsupported types; null returns null
 * @param timestampToString when true, timestamp-like values are emitted as strings
 * @param vendor database vendor, used for vendor-specific type handling
 * @return the converted field, or null for an unsupported type with a null action
 */
public Field resultToField(
    ResultSetMetaData md,
    ResultSet rs,
    int columnIndex,
    int maxClobSize,
    int maxBlobSize,
    DataType userSpecifiedType,
    UnknownTypeAction unknownTypeAction,
    boolean timestampToString,
    DatabaseVendor vendor
) throws SQLException, IOException, StageException {
  Field field;
  if (userSpecifiedType != DataType.USE_COLUMN_TYPE) {
    // If user specifies the data type, overwrite the column type returned by database.
    field = Field.create(Field.Type.valueOf(userSpecifiedType.getLabel()), rs.getObject(columnIndex));
  } else {
    // Firstly resolve some vendor specific types - we are careful in case that someone will be clashing
    if (vendor == DatabaseVendor.ORACLE) {
      switch (md.getColumnType(columnIndex)) {
        case TableContextUtil.TYPE_ORACLE_BINARY_FLOAT:
          float floatValue = rs.getFloat(columnIndex);
          return Field.create(Field.Type.FLOAT, rs.wasNull() ? null : floatValue);
        case TableContextUtil.TYPE_ORACLE_BINARY_DOUBLE:
          double doubleValue = rs.getDouble(columnIndex);
          return Field.create(Field.Type.DOUBLE, rs.wasNull() ? null : doubleValue);
        case TableContextUtil.TYPE_ORACLE_TIMESTAMP_WITH_TIME_ZONE:
        case TableContextUtil.TYPE_ORACLE_TIMESTAMP_WITH_LOCAL_TIME_ZONE:
          OffsetDateTime offsetDateTime = rs.getObject(columnIndex, OffsetDateTime.class);
          if (offsetDateTime == null) {
            return timestampToString ?
                Field.create(Field.Type.STRING, null) :
                Field.create(Field.Type.ZONED_DATETIME, null);
          }
          if (timestampToString) {
            return Field.create(Field.Type.STRING, offsetDateTime.toZonedDateTime().toString());
          }
          // Zoned Datetime can handle high precision
          return Field.create(Field.Type.ZONED_DATETIME, offsetDateTime.toZonedDateTime());
        case Types.SQLXML:
          SQLXML xml = rs.getSQLXML(columnIndex);
          return Field.create(Field.Type.STRING, xml == null ? null : xml.getString());
      }
    } else if (vendor == DatabaseVendor.SQL_SERVER) {
      if (md.getColumnType(columnIndex) == TableContextUtil.TYPE_SQL_SERVER_DATETIMEOFFSET) {
        DateTimeOffset dateTimeOffset = rs.getObject(columnIndex, DateTimeOffset.class);
        if (dateTimeOffset == null) {
          return timestampToString ?
              Field.create(Field.Type.STRING, null) :
              Field.create(Field.Type.ZONED_DATETIME, null);
        }
        if (timestampToString) {
          return Field.create(Field.Type.STRING, dateTimeOffset.toString());
        }
        return Field.create(Field.Type.ZONED_DATETIME, dateTimeOffset.getOffsetDateTime().toZonedDateTime());
      }
    } else if (vendor.isOneOf(DatabaseVendor.MYSQL, DatabaseVendor.MARIADB)) {
      // For MySQL we have to deal with unsigned types since the JDBC driver won't transfer them properly for us.
      // Tinyint and mediumint (unsigned) are alright since MySQL will auto expand them for purpose of JDBC.
      int columnType = md.getColumnType(columnIndex);
      String columnTypeName = md.getColumnTypeName(columnIndex);
      // Unsigned values need one size-larger SDC type to avoid overflow.
      if (columnType == Types.SMALLINT && columnTypeName.endsWith("UNSIGNED")) {
        int value = rs.getInt(columnIndex);
        return Field.create(Field.Type.INTEGER, rs.wasNull() ? null : value);
      }
      if (columnType == Types.INTEGER && columnTypeName.endsWith("UNSIGNED")) {
        long value = rs.getLong(columnIndex);
        return Field.create(Field.Type.LONG, rs.wasNull() ? null : value);
      }
      if (columnType == Types.BIGINT && columnTypeName.endsWith("UNSIGNED")) {
        BigDecimal value = rs.getBigDecimal(columnIndex);
        return Field.create(Field.Type.DECIMAL, rs.wasNull() ? null : value);
      }
    }
    // All types as of JDBC 2.0 are here:
    // https://docs.oracle.com/javase/8/docs/api/constant-values.html#java.sql.Types.ARRAY
    // Good source of recommended mappings is here:
    // http://www.cs.mun.ca/java-api-1.5/guide/jdbc/getstart/mapping.html
    switch (md.getColumnType(columnIndex)) {
      case Types.BIGINT:
        field = Field.create(Field.Type.LONG, rs.getObject(columnIndex));
        break;
      case Types.BINARY:
      case Types.LONGVARBINARY:
      case Types.VARBINARY:
        field = Field.create(Field.Type.BYTE_ARRAY, rs.getBytes(columnIndex));
        break;
      case Types.BIT:
      case Types.BOOLEAN:
        field = Field.create(Field.Type.BOOLEAN, rs.getObject(columnIndex));
        break;
      case Types.CHAR:
      case Types.LONGNVARCHAR:
      case Types.LONGVARCHAR:
      case Types.NCHAR:
      case Types.NVARCHAR:
      case Types.VARCHAR:
        field = Field.create(Field.Type.STRING, rs.getObject(columnIndex));
        break;
      case Types.CLOB:
      case Types.NCLOB:
        field = Field.create(Field.Type.STRING, getClobString(rs.getClob(columnIndex), maxClobSize));
        break;
      case Types.BLOB:
        field = Field.create(Field.Type.BYTE_ARRAY, getBlobBytes(rs.getBlob(columnIndex), maxBlobSize));
        break;
      case Types.DATE:
        field = Field.create(Field.Type.DATE, rs.getDate(columnIndex));
        break;
      case Types.DECIMAL:
      case Types.NUMERIC:
        field = Field.create(Field.Type.DECIMAL, rs.getBigDecimal(columnIndex));
        // Preserve scale/precision so downstream stages can reconstruct the exact numeric type.
        field.setAttribute(HeaderAttributeConstants.ATTR_SCALE, String.valueOf(rs.getMetaData().getScale(columnIndex)));
        field.setAttribute(HeaderAttributeConstants.ATTR_PRECISION, String.valueOf(rs.getMetaData().getPrecision(columnIndex)));
        break;
      case Types.DOUBLE:
        field = Field.create(Field.Type.DOUBLE, rs.getObject(columnIndex));
        break;
      case Types.FLOAT:
      case Types.REAL:
        field = Field.create(Field.Type.FLOAT, rs.getObject(columnIndex));
        break;
      case Types.INTEGER:
        field = Field.create(Field.Type.INTEGER, rs.getObject(columnIndex));
        break;
      case Types.ROWID:
        field = Field.create(Field.Type.STRING, rs.getRowId(columnIndex).toString());
        break;
      case Types.SMALLINT:
      case Types.TINYINT:
        field = Field.create(Field.Type.SHORT, rs.getObject(columnIndex));
        break;
      case Types.TIME:
        field = Field.create(Field.Type.TIME, rs.getObject(columnIndex));
        break;
      case Types.TIMESTAMP:
        final Timestamp timestamp = rs.getTimestamp(columnIndex);
        if (timestampToString) {
          field = Field.create(Field.Type.STRING, timestamp == null ? null : timestamp.toString());
        } else {
          field = Field.create(Field.Type.DATETIME, timestamp);
          if (timestamp != null) {
            // Sub-millisecond precision survives only as a field attribute.
            setNanosecondsinAttribute(timestamp.getNanos(), field);
          }
        }
        break;
      // Ugly hack until we can support LocalTime, LocalDate, LocalDateTime, etc.
      case Types.TIME_WITH_TIMEZONE:
        OffsetTime offsetTime = rs.getObject(columnIndex, OffsetTime.class);
        // Anchor the time-of-day on the epoch date (1970-01-01). LocalDate.MIN was used
        // previously, but its epoch-millis overflow a long, making Date.from() throw
        // ArithmeticException for every non-null value. Also guard against SQL NULL.
        field = Field.create(
            Field.Type.TIME,
            offsetTime == null ? null : Date.from(offsetTime.atDate(LocalDate.ofEpochDay(0)).toInstant())
        );
        break;
      case Types.TIMESTAMP_WITH_TIMEZONE:
        OffsetDateTime offsetDateTime = rs.getObject(columnIndex, OffsetDateTime.class);
        // Guard against SQL NULL (getObject returns null), matching the other branches.
        field = Field.create(
            Field.Type.ZONED_DATETIME,
            offsetDateTime == null ? null : offsetDateTime.toZonedDateTime()
        );
        break;
      //case Types.REF_CURSOR: // JDK8 only
      case Types.SQLXML:
      case Types.STRUCT:
      case Types.ARRAY:
      case Types.DATALINK:
      case Types.DISTINCT:
      case Types.JAVA_OBJECT:
      case Types.NULL:
      case Types.OTHER:
      case Types.REF:
      default:
        if (unknownTypeAction == null) {
          return null;
        }
        switch (unknownTypeAction) {
          case STOP_PIPELINE:
            throw new StageException(JdbcErrors.JDBC_37, md.getColumnType(columnIndex), md.getColumnLabel(columnIndex));
          case CONVERT_TO_STRING:
            Object value = rs.getObject(columnIndex);
            if (value != null) {
              // Reuse the already-fetched value instead of hitting the result set again.
              field = Field.create(Field.Type.STRING, value.toString());
            } else {
              field = Field.create(Field.Type.STRING, null);
            }
            break;
          default:
            throw new IllegalStateException("Unknown action: " + unknownTypeAction);
        }
    }
  }
  return field;
}
/**
 * Records the sub-millisecond portion of a timestamp's nanoseconds as a field
 * attribute, since the field's datetime value itself cannot carry it.
 *
 * @param nanoseconds nanosecond component of the source timestamp
 * @param field field that receives the attribute (only when there is a non-zero remainder)
 */
public static void setNanosecondsinAttribute(int nanoseconds, Field field) {
  final long subMillisNanos = nanoseconds % NANOS_TO_MILLIS_ADJUSTMENT;
  if (subMillisNanos <= 0) {
    return; // nothing below millisecond precision to preserve
  }
  field.setAttribute(FIELD_ATTRIBUTE_NANOSECONDS, String.valueOf(subMillisNanos));
}
/**
 * Converts the current result-set row to fields with explicit size limits and
 * per-column type overrides; no header-column filtering, timestamps kept as
 * date/time, no vendor-specific handling.
 */
public LinkedHashMap<String, Field> resultSetToFields(
    ResultSet rs,
    int maxClobSize,
    int maxBlobSize,
    Map<String, DataType> columnsToTypes,
    ErrorRecordHandler errorRecordHandler,
    UnknownTypeAction unknownTypeAction
) throws SQLException, StageException {
  // Delegate with neutral defaults for the remaining knobs.
  return resultSetToFields(rs, maxClobSize, maxBlobSize, columnsToTypes,
      errorRecordHandler, unknownTypeAction, null, false, DatabaseVendor.UNKNOWN);
}
/**
 * Converts the current result-set row to fields using the limits and
 * timestamp-as-string preference from the common source configuration bean.
 * No per-column type overrides and no header-column filtering.
 */
public LinkedHashMap<String, Field> resultSetToFields(
    ResultSet rs,
    CommonSourceConfigBean commonSourceBean,
    ErrorRecordHandler errorRecordHandler,
    UnknownTypeAction unknownTypeAction,
    DatabaseVendor vendor
) throws SQLException, StageException {
  return resultSetToFields(
      rs,
      commonSourceBean.maxClobSize,
      commonSourceBean.maxBlobSize,
      Collections.emptyMap(),   // no type overrides
      errorRecordHandler,
      unknownTypeAction,
      null,                     // no header columns to skip
      commonSourceBean.convertTimestampToString,
      vendor);
}
/**
 * Converts the current result-set row to fields using the common source
 * configuration, skipping any columns listed in {@code recordHeader} (those are
 * promoted to record header attributes elsewhere).
 */
public LinkedHashMap<String, Field> resultSetToFields(
    ResultSet rs,
    CommonSourceConfigBean commonSourceBean,
    ErrorRecordHandler errorRecordHandler,
    UnknownTypeAction unknownTypeAction,
    Set<String> recordHeader,
    DatabaseVendor vendor
) throws SQLException, StageException {
  return resultSetToFields(
      rs,
      commonSourceBean.maxClobSize,
      commonSourceBean.maxBlobSize,
      Collections.emptyMap(),   // no per-column type overrides
      errorRecordHandler,
      unknownTypeAction,
      recordHeader,
      commonSourceBean.convertTimestampToString,
      vendor);
}
/**
 * Converts every column of the current result-set row into a field, keyed by
 * column label and preserving column order.
 *
 * <p>Columns named in {@code recordHeader} are skipped. A per-column conversion
 * failure is routed to {@code errorRecordHandler} instead of aborting the row.
 *
 * @param rs positioned result set
 * @param maxClobSize CLOB character cap
 * @param maxBlobSize BLOB byte cap
 * @param columnsToTypes optional per-column type overrides, keyed by column name
 * @param errorRecordHandler receives per-column conversion errors
 * @param unknownTypeAction behavior for unsupported column types
 * @param recordHeader column names to exclude from the field map; may be null
 * @param timestampToString when true, timestamps become strings
 * @param vendor database vendor for vendor-specific conversions
 * @return ordered map of column label to converted field
 */
public LinkedHashMap<String, Field> resultSetToFields(
    ResultSet rs,
    int maxClobSize,
    int maxBlobSize,
    Map<String, DataType> columnsToTypes,
    ErrorRecordHandler errorRecordHandler,
    UnknownTypeAction unknownTypeAction,
    Set<String> recordHeader,
    boolean timestampToString,
    DatabaseVendor vendor
) throws SQLException, StageException {
  ResultSetMetaData metadata = rs.getMetaData();
  int columnCount = metadata.getColumnCount();
  LinkedHashMap<String, Field> fields = new LinkedHashMap<>(columnCount);
  for (int col = 1; col <= columnCount; col++) {
    try {
      String columnName = metadata.getColumnName(col);
      // Header-attribute columns are not emitted as record fields.
      if (recordHeader != null && recordHeader.contains(columnName)) {
        continue;
      }
      DataType overrideType = columnsToTypes.get(columnName);
      if (overrideType == null) {
        overrideType = DataType.USE_COLUMN_TYPE;
      }
      Field converted = resultToField(
          metadata,
          rs,
          col,
          maxClobSize,
          maxBlobSize,
          overrideType,
          unknownTypeAction,
          timestampToString,
          vendor
      );
      fields.put(metadata.getColumnLabel(col), converted);
    } catch (IOException | SQLException e) {
      LOG.debug("Can't read from JDBC: {}", e.getMessage(), e);
      errorRecordHandler.onError(JdbcErrors.JDBC_03, metadata.getColumnName(col), metadata.getColumnType(col), rs.getObject(col), e);
    }
  }
  return fields;
}
/**
 * Translates a {@link HikariPoolConfigBean} into a {@link HikariConfig}.
 *
 * @param hikariConfigBean user-facing pool configuration
 * @param autoCommit whether connections auto-commit
 * @param readOnly whether connections are opened read-only
 * @return a fully populated Hikari configuration
 */
private HikariConfig createDataSourceConfig(
    HikariPoolConfigBean hikariConfigBean,
    boolean autoCommit,
    boolean readOnly
) throws StageException {
  HikariConfig hikariConfig = new HikariConfig();
  hikariConfig.setJdbcUrl(hikariConfigBean.getConnectionString());
  if (hikariConfigBean.useCredentials()) {
    // Credentials are CredentialValue wrappers; resolve them now.
    hikariConfig.setUsername(hikariConfigBean.getUsername().get());
    hikariConfig.setPassword(hikariConfigBean.getPassword().get());
  }
  hikariConfig.setAutoCommit(autoCommit);
  hikariConfig.setReadOnly(readOnly);
  hikariConfig.setMaximumPoolSize(hikariConfigBean.maximumPoolSize);
  hikariConfig.setMinimumIdle(hikariConfigBean.minIdle);
  // Bean timeouts are in seconds; Hikari expects milliseconds.
  hikariConfig.setConnectionTimeout(hikariConfigBean.connectionTimeout * MILLISECONDS);
  hikariConfig.setIdleTimeout(hikariConfigBean.idleTimeout * MILLISECONDS);
  hikariConfig.setMaxLifetime(hikariConfigBean.maxLifetime * MILLISECONDS);
  if (!StringUtils.isEmpty(hikariConfigBean.driverClassName)) {
    hikariConfig.setDriverClassName(hikariConfigBean.driverClassName);
  }
  if (!StringUtils.isEmpty(hikariConfigBean.connectionTestQuery)) {
    hikariConfig.setConnectionTestQuery(hikariConfigBean.connectionTestQuery);
  }
  if (hikariConfigBean.transactionIsolation != TransactionIsolationLevel.DEFAULT) {
    // DEFAULT means "leave the driver's isolation level alone".
    hikariConfig.setTransactionIsolation(hikariConfigBean.transactionIsolation.name());
  }
  if (StringUtils.isNotEmpty(hikariConfigBean.initialQuery)) {
    hikariConfig.setConnectionInitSql(hikariConfigBean.initialQuery);
  }
  hikariConfig.setDataSourceProperties(hikariConfigBean.getDriverProperties());
  return hikariConfig;
}
/**
 * Creates a pooled data source for writing and, when possible, validates up
 * front that the target table exists and the custom column mappings reference
 * real columns.
 *
 * <p>Validation only runs when both schema and table names are plain constants
 * (no EL expressions) and table auto-creation is disabled; otherwise the names
 * cannot be resolved until runtime.
 *
 * @param issues collector for configuration problems found during validation
 * @return the created data source (returned even when issues were recorded)
 */
public HikariDataSource createDataSourceForWrite(
    HikariPoolConfigBean hikariConfigBean,
    String schemaNameTemplate,
    String tableNameTemplate,
    boolean caseSensitive,
    List<Stage.ConfigIssue> issues,
    List<JdbcFieldColumnParamMapping> customMappings,
    Stage.Context context,
    boolean tableAutoCreate
) throws SQLException, StageException {
  HikariDataSource dataSource = new HikariDataSource(
      createDataSourceConfig(hikariConfigBean, hikariConfigBean.isAutoCommit(), false));
  boolean validatable =
      isPlainString(schemaNameTemplate) && isPlainString(tableNameTemplate) && !tableAutoCreate;
  if (!validatable) {
    return dataSource;
  }
  try (
      Connection connection = dataSource.getConnection();
      ResultSet tables = getTableMetadata(connection, schemaNameTemplate, tableNameTemplate)
  ) {
    if (!tables.next()) {
      // Target table does not exist.
      issues.add(context.createConfigIssue(Groups.JDBC.name(), TABLE_NAME, JdbcErrors.JDBC_16, tableNameTemplate));
      return dataSource;
    }
    try (ResultSet columnMetadata = getColumnMetadata(connection, schemaNameTemplate, tableNameTemplate)) {
      Set<String> tableColumns = new HashSet<>();
      while (columnMetadata.next()) {
        // Column 4 of DatabaseMetaData.getColumns() is COLUMN_NAME.
        tableColumns.add(columnMetadata.getString(4));
      }
      for (JdbcFieldColumnParamMapping mapping : customMappings) {
        if (mapping.columnName.isEmpty()) {
          issues.add(context.createConfigIssue(Groups.JDBC.name(), CUSTOM_MAPPINGS, JdbcErrors.JDBC_59));
        }
        if (!tableColumns.contains(mapping.columnName)) {
          issues.add(context.createConfigIssue(
              Groups.JDBC.name(),
              CUSTOM_MAPPINGS,
              JdbcErrors.JDBC_07,
              mapping.field,
              mapping.columnName
          ));
        }
      }
    }
  }
  return dataSource;
}
/**
 * Creates a pooled data source for writing with table auto-creation disabled.
 * See the eight-argument overload for validation behavior.
 */
public HikariDataSource createDataSourceForWrite(
    HikariPoolConfigBean hikariConfigBean,
    String schemaNameTemplate,
    String tableNameTemplate,
    boolean caseSensitive,
    List<Stage.ConfigIssue> issues,
    List<JdbcFieldColumnParamMapping> customMappings,
    Stage.Context context
) throws SQLException, StageException {
  // tableAutoCreate defaults to false for this overload.
  return createDataSourceForWrite(hikariConfigBean, schemaNameTemplate, tableNameTemplate,
      caseSensitive, issues, customMappings, context, false);
}
/**
 * Creates a pooled data source for reading, honoring the bean's auto-commit and
 * read-only settings.
 *
 * @throws StageException wrapping any runtime failure from pool initialization
 *     (e.g. the pool cannot establish its first connection)
 */
public HikariDataSource createDataSourceForRead(
    HikariPoolConfigBean hikariConfigBean
) throws StageException {
  try {
    return new HikariDataSource(createDataSourceConfig(
        hikariConfigBean,
        hikariConfigBean.isAutoCommit(),
        hikariConfigBean.readOnly
    ));
  } catch (RuntimeException e) {
    // Hikari signals connection failures as runtime exceptions; surface them
    // as a stage configuration error with the cause preserved.
    LOG.error(JdbcErrors.JDBC_06.getMessage(), e);
    throw new StageException(JdbcErrors.JDBC_06, e.toString(), e);
  }
}
/**
 * Closes the given resource, swallowing any exception. Safe to call with null.
 *
 * @param c resource to close; may be null
 */
public void closeQuietly(AutoCloseable c) {
  if (c == null) {
    return;
  }
  try {
    c.close();
  } catch (Exception ignored) {
    // Best-effort cleanup: failures during close are deliberately ignored.
  }
}
/**
 * Write records to potentially different schemas and tables using EL expressions, and handle errors.
 * Tables are created on demand before their partition is written.
 *
 * @param batch batch of SDC records
 * @param schemaTableClassifier classifier to group records according to the schema and table names,
 *     resolving the EL expressions involved
 * @param recordWriters JDBC record writer cache
 * @param errorRecordHandler error record handler
 * @param perRecord indicate record or batch update
 * @param tableCreator handler which creates the table if it does not exist yet
 * @throws StageException if classification, table creation, or writing fails
 */
public void write(
    Batch batch,
    SchemaTableClassifier schemaTableClassifier,
    LoadingCache<SchemaAndTable, JdbcRecordWriter> recordWriters,
    ErrorRecordHandler errorRecordHandler,
    boolean perRecord,
    JdbcTableCreator tableCreator
) throws StageException {
  Multimap<SchemaAndTable, Record> partitions = schemaTableClassifier.classify(batch);
  for (SchemaAndTable target : partitions.keySet()) {
    // Ensure the destination table exists before writing its partition.
    tableCreator.create(target.getSchemaName(), target.getTableName());
    write(partitions.get(target).iterator(), target, recordWriters, errorRecordHandler, perRecord);
  }
}
/**
 * Write records to potentially different schemas and tables using EL expressions, and handle errors.
 *
 * @param batch batch of SDC records
 * @param schemaTableClassifier classifier to group records according to the schema and table names,
 *     resolving the EL expressions involved
 * @param recordWriters JDBC record writer cache
 * @param errorRecordHandler error record handler
 * @param perRecord indicate record or batch update
 * @throws StageException if classification or writing fails
 */
public void write(
    Batch batch,
    SchemaTableClassifier schemaTableClassifier,
    LoadingCache<SchemaAndTable, JdbcRecordWriter> recordWriters,
    ErrorRecordHandler errorRecordHandler,
    boolean perRecord
) throws StageException {
  Multimap<SchemaAndTable, Record> partitions = schemaTableClassifier.classify(batch);
  for (SchemaAndTable target : partitions.keySet()) {
    write(partitions.get(target).iterator(), target, recordWriters, errorRecordHandler, perRecord);
  }
}
/**
 * Write records to the evaluated tables and handle errors.
 *
 * @param batch batch of SDC records
 * @param tableNameEval table name EL eval
 * @param tableNameVars table name EL vars
 * @param tableNameTemplate table name template
 * @param recordWriters JDBC record writer cache
 * @param errorRecordHandler error record handler
 * @param perRecord indicate record or batch update
 * @throws StageException if EL evaluation or writing fails
 */
public void write(
    Batch batch,
    ELEval tableNameEval,
    ELVars tableNameVars,
    String tableNameTemplate,
    LoadingCache<String, JdbcRecordWriter> recordWriters,
    ErrorRecordHandler errorRecordHandler,
    boolean perRecord
) throws StageException {
  // Group the batch by the table name each record's EL evaluation resolves to.
  Multimap<String, Record> partitions = ELUtils.partitionBatchByExpression(
      tableNameEval,
      tableNameVars,
      tableNameTemplate,
      batch
  );
  for (String table : partitions.keySet()) {
    write(partitions.get(table).iterator(), table, recordWriters, errorRecordHandler, perRecord);
  }
}
/**
 * Write records to a JDBC destination using the recordWriter specified by key, and handle errors.
 *
 * <p>If the writer cannot be created at all, every record in the iterator is
 * routed to error handling with the creation failure as the cause.
 *
 * @param recordIterator iterator of SDC records
 * @param key key to select the recordWriter
 * @param recordWriters JDBC record writer cache
 * @param errorRecordHandler error record handler
 * @param perRecord indicate record or batch update
 * @throws StageException if error handling itself fails
 */
public <T> void write(
    Iterator<Record> recordIterator,
    T key,
    LoadingCache<T, JdbcRecordWriter> recordWriters,
    ErrorRecordHandler errorRecordHandler,
    boolean perRecord
) throws StageException {
  JdbcRecordWriter writer;
  try {
    writer = recordWriters.getUnchecked(key);
  } catch (UncheckedExecutionException ex) {
    final Throwable cause = ex.getCause();
    final ErrorCode errorCode;
    final Object[] messageParams;
    if (cause instanceof StageException) {
      // Propagate the original stage error code and parameters.
      StageException stageException = (StageException) cause;
      errorCode = stageException.getErrorCode();
      messageParams = stageException.getParams();
    } else {
      errorCode = JdbcErrors.JDBC_301;
      messageParams = new Object[] {ex.getMessage(), ex.getCause()};
    }
    // Failed to create RecordWriter, report all as error records.
    while (recordIterator.hasNext()) {
      errorRecordHandler.onError(new OnRecordErrorException(recordIterator.next(), errorCode, messageParams));
    }
    return;
  }
  List<OnRecordErrorException> errors = perRecord
      ? writer.writePerRecord(recordIterator)
      : writer.writeBatch(recordIterator);
  for (OnRecordErrorException error : errors) {
    errorRecordHandler.onError(error);
  }
}
/**
 * Determines whether the actualSqlType is one of the sqlTypes list.
 *
 * @param actualSqlType the actual sql type
 * @param sqlTypes arbitrary list of sql types
 * @return true if actual Sql Type is one of the sql Types else false
 */
public boolean isSqlTypeOneOf(int actualSqlType, int... sqlTypes) {
  for (int i = 0; i < sqlTypes.length; i++) {
    if (sqlTypes[i] == actualSqlType) {
      return true;
    }
  }
  return false;
}
/**
 * Builds the parameterized SQL statement for a CDC operation.
 *
 * <p>The two branches below differ only in identifier quoting (case-sensitive
 * targets wrap column names in double quotes); they are kept separate because
 * they rely on distinct pre-built joiners.
 *
 * @param opCode operation code: OperationType.INSERT_CODE, UPDATE_CODE or DELETE_CODE
 * @param tableName fully resolved target table name
 * @param primaryKeys primary key column names (required for UPDATE and DELETE)
 * @param primaryKeyParams value placeholder expressions for the primary keys
 * @param columns map of column name to value placeholder expression
 * @param numRecords number of records covered by a multi-row statement
 * @param caseSensitive when true, identifiers are double-quoted
 * @param multiRow when true, DELETE uses a multi-row IN clause
 * @param record record used for error reporting
 * @return the generated SQL string
 * @throws OnRecordErrorException when primary keys are missing for a non-INSERT
 *     operation or the operation code is unsupported
 */
public String generateQuery(
    int opCode,
    String tableName,
    List<String> primaryKeys,
    List<String> primaryKeyParams,
    Map<String, String> columns,
    int numRecords,
    boolean caseSensitive,
    boolean multiRow,
    Record record
) throws OnRecordErrorException {
  String query;
  String valuePlaceholder;
  String valuePlaceholders;
  // Every operation except INSERT addresses existing rows and needs primary keys.
  if (opCode != OperationType.INSERT_CODE && primaryKeys.isEmpty()) {
    LOG.error("Primary key columns are missing in records: {}", primaryKeys);
    throw new OnRecordErrorException(record, JdbcErrors.JDBC_62, tableName);
  }
  if (!caseSensitive) {
    switch (opCode) {
      case OperationType.INSERT_CODE:
        valuePlaceholder = String.format("(%s)", joiner.join(columns.values()));
        valuePlaceholders = org.apache.commons.lang3.StringUtils.repeat(valuePlaceholder, ", ", numRecords);
        query = String.format(
            "INSERT INTO %s (%s) VALUES %s",
            tableName,
            joiner.join(columns.keySet()),
            valuePlaceholders
        );
        break;
      case OperationType.DELETE_CODE:
        valuePlaceholder = String.format("(%s)", joiner.join(primaryKeyParams));
        valuePlaceholders = org.apache.commons.lang3.StringUtils.repeat(valuePlaceholder, ", ", numRecords);
        if (multiRow) {
          query = String.format(
              "DELETE FROM %s WHERE (%s) IN (%s)",
              tableName,
              joiner.join(primaryKeys),
              valuePlaceholders
          );
        } else {
          query = String.format(
              "DELETE FROM %s WHERE %s = ?",
              tableName,
              joinerWhereClause.join(primaryKeys)
          );
        }
        break;
      case OperationType.UPDATE_CODE:
        query = String.format(
            "UPDATE %s SET %s = ? WHERE %s = ?",
            tableName,
            joinerColumn.join(columns.keySet()),
            joinerWhereClause.join(primaryKeys)
        );
        break;
      default:
        // Should be checked earlier. Shouldn't reach here.
        // (Fixed log format: was "{}}", which printed a stray brace.)
        LOG.error("Unsupported Operation code: {}", opCode);
        throw new OnRecordErrorException(record, JdbcErrors.JDBC_70, opCode);
    }
  } else {
    switch (opCode) {
      case OperationType.INSERT_CODE:
        valuePlaceholder = String.format("(%s)", joiner.join(columns.values()));
        valuePlaceholders = org.apache.commons.lang3.StringUtils.repeat(valuePlaceholder, ", ", numRecords);
        query = String.format(
            "INSERT INTO %s (\"%s\") VALUES %s",
            tableName,
            joinerWithQuote.join(columns.keySet()),
            valuePlaceholders
        );
        break;
      case OperationType.DELETE_CODE:
        valuePlaceholder = String.format("(%s)", joiner.join(primaryKeyParams));
        valuePlaceholders = org.apache.commons.lang3.StringUtils.repeat(valuePlaceholder, ", ", numRecords);
        if (multiRow) {
          query = String.format(
              "DELETE FROM %s WHERE (\"%s\") IN (%s)",
              tableName,
              joinerWithQuote.join(primaryKeys),
              valuePlaceholders
          );
        } else {
          query = String.format(
              "DELETE FROM %s WHERE \"%s\" = ?",
              tableName,
              joinerWhereClauseWitheQuote.join(primaryKeys)
          );
        }
        break;
      case OperationType.UPDATE_CODE:
        query = String.format(
            "UPDATE %s SET \"%s\" = ? WHERE \"%s\" = ?",
            tableName,
            joinerColumnWithQuote.join(columns.keySet()),
            joinerWhereClauseWitheQuote.join(primaryKeys)
        );
        break;
      default:
        // Should be checked earlier. Shouldn't reach here.
        LOG.error("Unsupported Operation code: {}", opCode);
        throw new OnRecordErrorException(record, JdbcErrors.JDBC_70, opCode);
    }
  }
  return query;
}
/**
 * Prepares a statement for the given query, requesting generated-key retrieval
 * for the mapped columns when mappings are supplied.
 *
 * @param generatedColumnMappings generated-key column mappings; may be null
 * @param query SQL to prepare
 * @param connection connection to prepare against
 * @return the prepared statement
 */
public PreparedStatement getPreparedStatement(
    List<JdbcFieldColumnMapping> generatedColumnMappings,
    String query,
    Connection connection
) throws SQLException {
  if (generatedColumnMappings == null) {
    return connection.prepareStatement(query);
  }
  // Ask the driver to return the generated values for exactly these columns.
  int mappingCount = generatedColumnMappings.size();
  String[] generatedColumns = new String[mappingCount];
  for (int i = 0; i < mappingCount; i++) {
    generatedColumns[i] = generatedColumnMappings.get(i).columnName;
  }
  return connection.prepareStatement(query, generatedColumns);
}
/**
 * Formats and logs a SQL exception.
 *
 * @param e the exception to report
 * @return the formatted message that was logged
 */
public String logError(SQLException e) {
  final String message = formatSqlException(e);
  LOG.error(message, e);
  return message;
}
/**
 * Generates the no-more-data event on a dedicated batch and submits it.
 *
 * @param context push-source context used to start and process the event batch
 */
public void generateNoMoreDataEvent(PushSource.Context context) {
  LOG.info("No More data to process, Triggered No More Data Event");
  // The event travels on its own batch, separate from data records.
  BatchContext eventBatch = context.startBatch();
  NoMoreDataEvent.EVENT_CREATOR.create(context, eventBatch).createAndSend();
  context.processBatch(eventBatch);
}
/**
 * @return true if the value is an EL string (starts with the {@code ${} prefix)
 */
public boolean isElString(String value) {
  if (value == null) {
    return false;
  }
  return value.startsWith("${");
}
/**
 * @return true if the value is a non-null string with no EL expression embedded
 *     anywhere in it
 */
public boolean isPlainString(String value) {
  if (value == null) {
    return false;
  }
  return !value.contains(EL_PREFIX);
}
/**
 * Logs database product and JDBC driver identification for diagnostics.
 *
 * @param connectionManager source of the live connection whose metadata is logged
 * @throws SQLException if the metadata cannot be retrieved
 */
public void logDatabaseAndDriverInfo(ConnectionManager connectionManager) throws SQLException {
  DatabaseMetaData metadata = connectionManager.getConnection().getMetaData();
  LOG.info("Database Product name: {}", metadata.getDatabaseProductName());
  LOG.info("Database product version: {}", metadata.getDatabaseProductVersion());
  LOG.info("Driver name: {}", metadata.getDriverName());
  LOG.info("Driver version: {}", metadata.getDriverVersion());
}
}
| |
/**
* Copyright (C) 2012 KRM Associates, Inc. healtheme@krminc.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.krminc.phr.api.converter;
import com.krminc.phr.domain.Immunization;
import java.net.URI;
import java.util.Date;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.bind.annotation.XmlAttribute;
import javax.ws.rs.core.UriBuilder;
import javax.persistence.EntityManager;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.krminc.phr.api.converter.util.ConverterUtils;
import com.krminc.phr.domain.DataSource;
import com.krminc.phr.domain.HealthRecord;
/**
 * JAXB converter wrapping an {@link Immunization} entity for REST (de)serialization.
 *
 * <p>Error-flag protocol: setters never throw. Any failure during a set (including
 * a validation rejection, signalled internally by throwing and catching a plain
 * {@code Exception}) only flips the public {@code hasError} flag, which callers are
 * expected to inspect after unmarshalling.
 *
 * <p>Expansion protocol: every getter returns {@code null} when {@code expandLevel}
 * is 0, producing a collapsed representation that carries only the {@code uri}
 * attribute.
 *
 * @author dshaw
 */
@XmlRootElement(name = "immunization")
public class ImmunizationConverter {
    // NOTE(review): non-static logger field; one per converter instance.
    final Logger logger = LoggerFactory.getLogger(ImmunizationConverter.class);
    private Immunization entity;
    private URI uri;
    // Number of levels to expand; 0 means collapsed (getters return null).
    private int expandLevel;
    // Set to true by any setter that fails or rejects its input; never reset.
    public boolean hasError = false;
    /** Creates a new instance of ImmunizationConverter */
    public ImmunizationConverter() {
        entity = new Immunization();
    }
    /**
     * Creates a new instance of ImmunizationConverter.
     *
     * @param entity associated entity
     * @param uri associated uri
     * @param expandLevel indicates the number of levels the entity graph should be expanded
     * @param isUriExtendable indicates whether the uri can be extended with the entity id
     */
    public ImmunizationConverter(Immunization entity, URI uri, int expandLevel, boolean isUriExtendable) {
        this.entity = entity;
        this.uri = (isUriExtendable) ? UriBuilder.fromUri(uri).path(entity.getImmunizationId() + "/").build() : uri;
        this.expandLevel = expandLevel;
    }
    /**
     * Creates a new instance of ImmunizationConverter.
     *
     * @param entity associated entity
     * @param uri associated uri
     * @param expandLevel indicates the number of levels the entity graph should be expanded
     */
    public ImmunizationConverter(Immunization entity, URI uri, int expandLevel) {
        this(entity, uri, expandLevel, false);
    }
    /**
     * Getter for immunizationId.
     *
     * @return value for immunizationId, or null when the converter is collapsed
     */
    @XmlElement
    public Long getImmunizationId() {
        return (expandLevel > 0) ? entity.getImmunizationId() : null;
    }
    /**
     * Setter for immunizationId. Sets {@code hasError} on failure instead of throwing.
     *
     * @param value the value to set
     */
    public void setImmunizationId(Long value) {
        try {
            entity.setImmunizationId(value);
        }
        catch(Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for dataSourceId.
     *
     * @return value for dataSourceId, or null when the converter is collapsed
     */
    @XmlElement
    public Long getDataSourceId() {
        return (expandLevel > 0) ? entity.getDataSourceId() : null;
    }
    /**
     * Setter for dataSourceId. Only the value 1 is accepted; any other value
     * (including null, which fails the unboxing comparison) sets {@code hasError}.
     * NOTE(review): presumably 1 is the manually-entered data source — confirm.
     *
     * @param value the value to set
     */
    public void setDataSourceId(Long value) {
        try {
            if (value != 1) {
                throw new Exception();
            }
            entity.setDataSourceId(value);
        }
        catch(Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for DateReceived.
     *
     * @return value for DateReceived, or null when the converter is collapsed
     */
    @XmlElement
    @XmlJavaTypeAdapter(DateAdapter.class)
    public Date getDateReceived() {
        return (expandLevel > 0) ? entity.getDateReceived() : null;
    }
    /**
     * Setter for DateReceived. Sets {@code hasError} on failure instead of throwing.
     *
     * @param value the value to set
     */
    public void setDateReceived(Date value) {
        try {
            entity.setDateReceived(value);
        }
        catch(Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for addedDate. Read-only: there is deliberately no matching setter.
     *
     * @return value for addedDate, or null when the converter is collapsed
     */
    @XmlElement
    @XmlJavaTypeAdapter(DateAdapter.class)
    public Date getDateAdded() {
        return (expandLevel > 0) ? entity.getDateAdded() : null;
    }
    /**
     * Getter for immunizationType.
     *
     * @return value for immunizationType, or null when the converter is collapsed
     */
    @XmlElement
    public String getImmunizationType() {
        return (expandLevel > 0) ? entity.getImmunizationType() : null;
    }
    /**
     * Setter for immunizationType. Input is sanitized via
     * {@link ConverterUtils#prepareInput}; failures set {@code hasError}.
     *
     * @param value the value to set
     */
    public void setImmunizationType(String value) {
        try {
            value = ConverterUtils.prepareInput(value);
            entity.setImmunizationType(value);
        }
        catch(Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for method.
     *
     * @return value for method, or null when the converter is collapsed
     */
    @XmlElement
    public Integer getMethod() {
        return (expandLevel > 0) ? entity.getMethod() : null;
    }
    /**
     * Setter for method from its string form. Parses the sanitized input and
     * delegates to {@link #setMethod(Integer)}; non-numeric input sets
     * {@code hasError} via the parse failure.
     *
     * @param value the value to set
     */
    public void setMethod(String value) {
        try {
            value = ConverterUtils.prepareInput(value);
            this.setMethod(Integer.parseInt(value));
        }
        catch(Exception ex) {
            hasError = true;
        }
    }
    /**
     * Setter for method. Accepts only the range 0..4 inclusive; out-of-range or
     * null values (null fails unboxing in the comparison) set {@code hasError}.
     *
     * @param value the value to set
     */
    public void setMethod(Integer value) {
        try {
            if (value >=0 && value <=4) {
                entity.setMethod(value);
            } else {
                throw new Exception();
            }
        }
        catch(Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for the human-readable form of method. Read-only derived value.
     *
     * @return value for fullMethod, or null when the converter is collapsed
     */
    @XmlElement
    public String getFullMethod() {
        return (expandLevel > 0) ? entity.getFullMethod() : null;
    }
    /**
     * Getter for reaction.
     *
     * @return value for reaction, or null when the converter is collapsed
     */
    @XmlElement
    public String getReaction() {
        return (expandLevel > 0) ? entity.getReaction() : null;
    }
    /**
     * Setter for reaction. Input is sanitized; failures set {@code hasError}.
     *
     * @param value the value to set
     */
    public void setReaction(String value) {
        try {
            value = ConverterUtils.prepareInput(value);
            entity.setReaction(value);
        }
        catch(Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for comments.
     *
     * @return value for comments, or null when the converter is collapsed
     */
    @XmlElement
    public String getComments() {
        return (expandLevel > 0) ? entity.getComments() : null;
    }
    /**
     * Setter for comments. Input is sanitized; failures set {@code hasError}.
     *
     * @param value the value to set
     */
    public void setComments(String value) {
        try {
            value = ConverterUtils.prepareInput(value);
            entity.setComments(value);
        }
        catch(Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for HealthRecordId.
     *
     * @return value for HealthRecordId, or null when the converter is collapsed
     */
    @XmlElement
    public Long getHealthRecordId() {
        return (expandLevel > 0) ? entity.getHealthRecordId() : null;
    }
    /**
     * Setter for HealthRecordId. Sets {@code hasError} on failure instead of throwing.
     *
     * @param value the value to set
     */
    public void setHealthRecordId(Long value) {
        try {
            entity.setHealthRecordId(value);
        }
        catch(Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for sourceId.
     *
     * @return value for sourceId, or null when the converter is collapsed
     */
    @XmlElement
    public Long getSourceId() {
        return (expandLevel > 0) ? entity.getSourceId() : null;
    }
    /**
     * Setter for sourceId. Only the value 1 is accepted, mirroring
     * {@link #setDataSourceId}; anything else sets {@code hasError}.
     *
     * @param value the value to set
     */
    public void setSourceId(Long value) {
        try {
            if (value != 1) {
                throw new Exception();
            }
            entity.setSourceId(value);
        }
        catch(Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for mask.
     *
     * @return value for mask, or null when the converter is collapsed
     */
    @XmlElement
    public String getMask() {
        return (expandLevel > 0) ? entity.getMask() : null;
    }
    /**
     * Setter for mask. Accepts only values passing
     * {@link ConverterUtils#isValidMask}; the stored value is trimmed.
     * Invalid input sets {@code hasError}.
     *
     * @param value the value to set
     */
    public void setMask(String value) {
        try {
            if (ConverterUtils.isValidMask(value)) {
                entity.setMask(value.trim());
            } else {
                throw new Exception();
            }
        }
        catch(Exception ex) {
            hasError = true;
        }
    }
    /**
     * Returns the URI associated with this converter.
     *
     * @return the uri
     */
    @XmlAttribute
    public URI getUri() {
        return uri;
    }
    /**
     * Sets the URI for this reference converter.
     * NOTE(review): a plain field assignment cannot throw, so the catch here is
     * dead code kept for symmetry with the other setters.
     */
    public void setUri(URI uri) {
        try {
            this.uri = uri;
        }
        catch(Exception ex) {
            hasError = true;
        }
    }
    /**
     * Returns the Immunization entity, lazily resolving it from the URI when the
     * converter was unmarshalled as a bare reference (no id present).
     *
     * @return an entity
     */
    @XmlTransient
    public Immunization getEntity() {
        if (entity.getImmunizationId() == null) {
            ImmunizationConverter converter = UriResolver.getInstance().resolve(ImmunizationConverter.class, uri);
            if (converter != null) {
                entity = converter.getEntity();
            }
        }
        return entity;
    }
    /**
     * Returns the resolved Immunization entity, replacing the detached
     * HealthRecord association with a managed JPA reference.
     *
     * @return an resolved entity
     */
    public Immunization resolveEntity(EntityManager em) {
        HealthRecord healthRecord = entity.getHealthRecord();
        if (healthRecord != null) {
            entity.setHealthRecord(em.getReference(HealthRecord.class, healthRecord.getHealthRecordId()));
        }
        return entity;
    }
}
| |
/*
* Copyright (c) 2014-2015 VMware, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, without warranties or
* conditions of any kind, EITHER EXPRESS OR IMPLIED. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.vmware.xenon.common;
import java.net.URI;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.function.Function;
import org.junit.After;
import org.junit.Test;
import com.vmware.xenon.common.Service.Action;
import com.vmware.xenon.common.ServiceSubscriptionState.ServiceSubscriber;
import com.vmware.xenon.common.http.netty.NettyHttpServiceClient;
import com.vmware.xenon.common.test.MinimalTestServiceState;
import com.vmware.xenon.common.test.TestContext;
import com.vmware.xenon.common.test.VerificationHost;
import com.vmware.xenon.services.common.ExampleService;
import com.vmware.xenon.services.common.ExampleService.ExampleServiceState;
import com.vmware.xenon.services.common.MinimalTestService;
import com.vmware.xenon.services.common.NodeGroupService.NodeGroupConfig;
import com.vmware.xenon.services.common.ServiceUriPaths;
/**
 * Exercises the Xenon service subscription (notification) machinery:
 * remote and reliable subscriptions across a multi-node group, factory and
 * per-child subscriptions, state replay, delivery-failure handling,
 * expiration, notification limits, public-URI delivery, and subscriptions
 * under authorization.
 */
public class TestSubscriptions extends BasicTestCase {
// Number of in-process peer hosts used by the multi-node tests.
private final int NODE_COUNT = 2;
// Tunable test parameters; public so the test runner can override them.
public int serviceCount = 100;
public long updateCount = 10;
// Iterations for the stress-loop variant; defaults to 0, so the loop body
// is skipped unless explicitly overridden.
public long iterationCount = 0;
@Override
public void beforeHostStart(VerificationHost host) {
// Speed up host maintenance so subscription expiry/retry logic is
// exercised quickly during the test run.
host.setMaintenanceIntervalMicros(TimeUnit.MILLISECONDS
.toMicros(VerificationHost.FAST_MAINT_INTERVAL_MILLIS));
}
@After
public void tearDown() {
this.host.tearDown();
this.host.tearDownInProcessPeers();
}
// Starts NODE_COUNT peer hosts and waits for node-group convergence.
private void setUpPeers() throws Throwable {
this.host.setUpPeerHosts(this.NODE_COUNT);
this.host.joinNodesAndVerifyConvergence(this.NODE_COUNT);
}
// Stress variant: re-runs remoteAndReliableSubscriptions() with a fresh
// host per iteration. No-op unless iterationCount is overridden (> 0).
@Test
public void remoteAndReliableSubscriptionsLoop() throws Throwable {
for (int i = 0; i < this.iterationCount; i++) {
tearDown();
this.host = createHost();
initializeHost(this.host);
beforeHostStart(this.host);
this.host.start();
remoteAndReliableSubscriptions();
}
}
// Verifies remote (forceRemote) subscriptions deliver notifications with a
// public URI, can be removed, and that reliable subscriptions survive the
// owner node being stopped by re-subscribing on the new owner.
@Test
public void remoteAndReliableSubscriptions() throws Throwable {
setUpPeers();
// pick one host to post to
VerificationHost serviceHost = this.host.getPeerHost();
URI factoryUri = UriUtils.buildUri(serviceHost, ExampleService.FACTORY_LINK);
this.host.waitForReplicatedFactoryServiceAvailable(factoryUri);
// test host to receive notifications
VerificationHost localHost = this.host;
int serviceCount = 1;
// create example service documents across all nodes
List<URI> exampleURIs = serviceHost.createExampleServices(serviceHost, serviceCount, null);
TestContext oneUseNotificationCtx = this.host.testCreate(1);
// One-shot target: completes the context on the first PATCH notification,
// failing if the notification URI lacks a host (usePublicUri is set below).
StatelessService notificationTarget = new StatelessService() {
@Override
public void handleRequest(Operation update) {
update.complete();
if (update.getAction().equals(Action.PATCH)) {
if (update.getUri().getHost() == null) {
oneUseNotificationCtx.fail(new IllegalStateException(
"Notification URI does not have host specified"));
return;
}
oneUseNotificationCtx.complete();
}
}
};
String[] ownerHostId = new String[1];
URI uri = exampleURIs.get(0);
URI subUri = UriUtils.buildUri(serviceHost.getUri(), uri.getPath());
TestContext subscribeCtx = this.host.testCreate(1);
Operation subscribe = Operation.createPost(subUri)
.setCompletion(subscribeCtx.getCompletion());
subscribe.setReferer(localHost.getReferer());
subscribe.forceRemote();
// replay state
serviceHost.startSubscriptionService(subscribe, notificationTarget, ServiceSubscriber
.create(false).setUsePublicUri(true));
this.host.testWait(subscribeCtx);
// do an update to cause a notification
TestContext updateCtx = this.host.testCreate(1);
ExampleServiceState body = new ExampleServiceState();
body.name = UUID.randomUUID().toString();
this.host.send(Operation.createPatch(uri).setBody(body).setCompletion((o, e) -> {
if (e != null) {
updateCtx.fail(e);
return;
}
ExampleServiceState rsp = o.getBody(ExampleServiceState.class);
ownerHostId[0] = rsp.documentOwner;
updateCtx.complete();
}));
this.host.testWait(updateCtx);
this.host.testWait(oneUseNotificationCtx);
// remove subscription
TestContext unSubscribeCtx = this.host.testCreate(1);
Operation unSubscribe = subscribe.clone()
.setCompletion(unSubscribeCtx.getCompletion())
.setAction(Action.DELETE);
serviceHost.stopSubscriptionService(unSubscribe,
notificationTarget.getUri());
this.host.testWait(unSubscribeCtx);
this.verifySubscriberCount(new URI[] { uri }, 0);
VerificationHost ownerHost = null;
// find the host that owns the example service and make sure we subscribe from the OTHER
// host (since we will stop the current owner)
for (VerificationHost h : this.host.getInProcessHostMap().values()) {
if (!h.getId().equals(ownerHostId[0])) {
serviceHost = h;
} else {
ownerHost = h;
}
}
this.host.log("Owner node: %s, subscriber node: %s (%s)", ownerHostId[0],
serviceHost.getId(), serviceHost.getUri());
AtomicInteger reliableNotificationCount = new AtomicInteger();
TestContext subscribeCtxNonOwner = this.host.testCreate(1);
// subscribe using non owner host
subscribe.setCompletion(subscribeCtxNonOwner.getCompletion());
serviceHost.startReliableSubscriptionService(subscribe, (o) -> {
reliableNotificationCount.incrementAndGet();
o.complete();
});
localHost.testWait(subscribeCtxNonOwner);
// send explicit update to example service
body.name = UUID.randomUUID().toString();
this.host.send(Operation.createPatch(uri).setBody(body));
// NOTE(review): unbounded spin-wait — a missed notification would hang the
// test here; relies on the harness-level timeout, if any. TODO confirm.
while (reliableNotificationCount.get() < 1) {
Thread.sleep(100);
}
reliableNotificationCount.set(0);
this.verifySubscriberCount(new URI[] { uri }, 1);
// Check reliability: determine what host is owner for the example service we subscribed to.
// Then stop that host which should cause the remaining host(s) to pick up ownership.
// Subscriptions will not survive on their own, but we expect the ReliableSubscriptionService
// to notice the subscription is gone on the new owner, and re subscribe.
List<URI> exampleSubUris = new ArrayList<>();
for (URI hostUri : this.host.getNodeGroupMap().keySet()) {
exampleSubUris.add(UriUtils.buildUri(hostUri, uri.getPath(),
ServiceHost.SERVICE_URI_SUFFIX_SUBSCRIPTIONS));
}
// stop host that has ownership of example service
NodeGroupConfig cfg = new NodeGroupConfig();
cfg.nodeRemovalDelayMicros = TimeUnit.SECONDS.toMicros(2);
this.host.setNodeGroupConfig(cfg);
// relax quorum
this.host.setNodeGroupQuorum(1);
// stop host with subscription
this.host.stopHost(ownerHost);
factoryUri = UriUtils.buildUri(serviceHost, ExampleService.FACTORY_LINK);
this.host.waitForReplicatedFactoryServiceAvailable(factoryUri);
uri = UriUtils.buildUri(serviceHost.getUri(), uri.getPath());
// verify that we still have 1 subscription on the remaining host, which can only happen if the
// reliable subscription service notices the current owner failure and re subscribed
this.verifySubscriberCount(new URI[] { uri }, 1);
// and test once again that notifications flow.
this.host.log("Sending PATCH requests to %s", uri);
long c = this.updateCount;
for (int i = 0; i < c; i++) {
body.name = "post-stop-" + UUID.randomUUID().toString();
this.host.send(Operation.createPatch(uri).setBody(body));
}
Date exp = this.host.getTestExpiration();
while (reliableNotificationCount.get() < c) {
Thread.sleep(250);
this.host.log("Received %d notifications, expecting %d",
reliableNotificationCount.get(), c);
if (new Date().after(exp)) {
throw new TimeoutException();
}
}
}
// Single-host end-to-end pass over the factory/child subscription helpers:
// POST notifications from the factory, then replay, failure, limit/public-URI,
// expiration, and DELETE notification scenarios on the children.
@Test
public void subscriptionsToFactoryAndChildren() throws Throwable {
// Restart the host on an ephemeral port and give it an explicit public URI
// so notifications carry a resolvable host.
this.host.stop();
this.host.setPort(0);
this.host.start();
this.host.setPublicUri(UriUtils.buildUri("localhost", this.host.getPort(), "", null));
this.host.waitForServiceAvailable(ExampleService.FACTORY_LINK);
URI factoryUri = UriUtils.buildFactoryUri(this.host, ExampleService.class);
String prefix = "example-";
Long counterValue = Long.MAX_VALUE;
URI[] childUris = new URI[this.serviceCount];
doFactoryPostNotifications(factoryUri, this.serviceCount, prefix, counterValue, childUris);
doNotificationsWithReplayState(childUris);
doNotificationsWithFailure(childUris);
doNotificationsWithLimitAndPublicUri(childUris);
doNotificationsWithExpiration(childUris);
doDeleteNotifications(childUris, counterValue);
}
// Verifies replay notifications (PUT) are delivered to a subscriber when
// authorization is enabled and the subscription is made as a plain user.
@Test
public void subscriptionsWithAuth() throws Throwable {
VerificationHost hostWithAuth = null;
try {
String testUserEmail = "foo@vmware.com";
hostWithAuth = VerificationHost.create(0);
hostWithAuth.setAuthorizationEnabled(true);
hostWithAuth.start();
hostWithAuth.setSystemAuthorizationContext();
TestContext waitContext = hostWithAuth.testCreate(1);
AuthorizationSetupHelper.create()
.setHost(hostWithAuth)
.setDocumentKind(Utils.buildKind(MinimalTestServiceState.class))
.setUserEmail(testUserEmail)
.setUserSelfLink(testUserEmail)
.setUserPassword(testUserEmail)
.setCompletion(waitContext.getCompletion())
.start();
hostWithAuth.testWait(waitContext);
hostWithAuth.resetSystemAuthorizationContext();
hostWithAuth.assumeIdentity(UriUtils.buildUriPath(ServiceUriPaths.CORE_AUTHZ_USERS, testUserEmail));
MinimalTestService s = new MinimalTestService();
MinimalTestServiceState serviceState = new MinimalTestServiceState();
serviceState.id = UUID.randomUUID().toString();
String minimalServiceUUID = UUID.randomUUID().toString();
TestContext notifyContext = hostWithAuth.testCreate(1);
hostWithAuth.startServiceAndWait(s, minimalServiceUUID, serviceState);
// Replay delivers the current state as a PUT; that is the only action
// this consumer counts.
Consumer<Operation> notifyC = (nOp) -> {
nOp.complete();
switch (nOp.getAction()) {
case PUT:
notifyContext.completeIteration();
break;
default:
break;
}
};
Operation subscribe = Operation.createPost(UriUtils.buildUri(hostWithAuth, minimalServiceUUID));
subscribe.setReferer(hostWithAuth.getReferer());
ServiceSubscriber subscriber = new ServiceSubscriber();
subscriber.replayState = true;
hostWithAuth.startSubscriptionService(subscribe, notifyC, subscriber);
hostWithAuth.testWait(notifyContext);
} finally {
if (hostWithAuth != null) {
hostWithAuth.tearDown();
}
}
}
// Verifies an expiring subscription first delivers the replayed state (PUT)
// and then, once expired, the notification target receives a DELETE.
@Test
public void testSubscriptionsWithExpiry() throws Throwable {
MinimalTestService s = new MinimalTestService();
MinimalTestServiceState serviceState = new MinimalTestServiceState();
serviceState.id = UUID.randomUUID().toString();
String minimalServiceUUID = UUID.randomUUID().toString();
TestContext notifyContext = this.host.testCreate(1);
TestContext notifyDeleteContext = this.host.testCreate(1);
this.host.startServiceAndWait(s, minimalServiceUUID, serviceState);
Service notificationTarget = new StatelessService() {
@Override
public void authorizeRequest(Operation op) {
op.complete();
return;
}
@Override
public void handleRequest(Operation op) {
if (!op.isNotification()) {
// A direct DELETE to this target signals the publisher dropped
// the expired subscription.
if (op.getAction() == Action.DELETE && op.getUri().equals(getUri())) {
notifyDeleteContext.completeIteration();
}
super.handleRequest(op);
return;
}
if (op.getAction() == Action.PUT) {
notifyContext.completeIteration();
}
}
};
Operation subscribe = Operation.createPost(UriUtils.buildUri(host, minimalServiceUUID));
subscribe.setReferer(host.getReferer());
ServiceSubscriber subscriber = new ServiceSubscriber();
subscriber.replayState = true;
// Set a 500ms expiry
subscriber.documentExpirationTimeMicros = Utils
.fromNowMicrosUtc(TimeUnit.MILLISECONDS.toMicros(500));
host.startSubscriptionService(subscribe, notificationTarget, subscriber);
host.testWait(notifyContext);
host.testWait(notifyDeleteContext);
}
// Verifies subscriptions posted BEFORE the target services exist are queued
// (PRAGMA_DIRECTIVE_QUEUE_FOR_SERVICE_AVAILABILITY) and complete once the
// services start, after which notifications flow normally.
@Test
public void subscribeAndWaitForServiceAvailability() throws Throwable {
// until HTTP2 support is we must only subscribe to less than max connections!
// otherwise we deadlock: the connection for the queued subscribe is used up,
// no more connections can be created, to that owner.
this.serviceCount = NettyHttpServiceClient.DEFAULT_CONNECTIONS_PER_HOST / 2;
setUpPeers();
this.host.waitForReplicatedFactoryServiceAvailable(
this.host.getPeerServiceUri(ExampleService.FACTORY_LINK));
// Pick one host to post to
VerificationHost serviceHost = this.host.getPeerHost();
// Create example service states to subscribe to
List<ExampleServiceState> states = new ArrayList<>();
for (int i = 0; i < this.serviceCount; i++) {
ExampleServiceState state = new ExampleServiceState();
state.documentSelfLink = UriUtils.buildUriPath(
ExampleService.FACTORY_LINK,
UUID.randomUUID().toString());
state.name = UUID.randomUUID().toString();
states.add(state);
}
AtomicInteger notifications = new AtomicInteger();
// Subscription target
ServiceSubscriber sr = createAndStartNotificationTarget((update) -> {
if (update.getAction() != Action.PATCH) {
// because we start multiple nodes and we do not wait for factory start
// we will receive synchronization related PUT requests, on each service.
// Ignore everything but the PATCH we send from the test
return false;
}
this.host.completeIteration();
this.host.log("notification %d", notifications.incrementAndGet());
update.complete();
return true;
});
this.host.log("Subscribing to %d services", this.serviceCount);
// Subscribe to factory (will not complete until factory is started again)
for (ExampleServiceState state : states) {
URI uri = UriUtils.buildUri(serviceHost, state.documentSelfLink);
subscribeToService(uri, sr);
}
// First the subscription requests will be sent and will be queued.
// So N completions come from the subscribe requests.
// After that, the services will be POSTed and started. This is the second set
// of N completions.
this.host.testStart(2 * this.serviceCount);
this.host.log("Sending parallel POST for %d services", this.serviceCount);
AtomicInteger postCount = new AtomicInteger();
// Create example services, triggering subscriptions to complete
for (ExampleServiceState state : states) {
URI uri = UriUtils.buildFactoryUri(serviceHost, ExampleService.class);
Operation op = Operation.createPost(uri)
.setBody(state)
.setCompletion((o, e) -> {
if (e != null) {
this.host.failIteration(e);
return;
}
this.host.log("POST count %d", postCount.incrementAndGet());
this.host.completeIteration();
});
this.host.send(op);
}
this.host.testWait();
this.host.testStart(2 * this.serviceCount);
// now send N PATCH ops so we get notifications
for (ExampleServiceState state : states) {
// send a PATCH, to trigger notification
URI u = UriUtils.buildUri(serviceHost, state.documentSelfLink);
state.counter = Utils.getNowMicrosUtc();
Operation patch = Operation.createPatch(u)
.setBody(state)
.setCompletion(this.host.getCompletion());
this.host.send(patch);
}
this.host.testWait();
}
// Subscribes to the factory, creates childCount children (filling childUris),
// expects one POST notification per child, then removes the subscription.
private void doFactoryPostNotifications(URI factoryUri, int childCount, String prefix,
Long counterValue,
URI[] childUris) throws Throwable {
this.host.log("starting subscription to factory");
this.host.testStart(1);
// let the service host update the URI from the factory to its subscriptions
Operation subscribeOp = Operation.createPost(factoryUri)
.setReferer(this.host.getReferer())
.setCompletion(this.host.getCompletion());
URI notificationTarget = host.startSubscriptionService(subscribeOp, (o) -> {
if (o.getAction() == Action.POST) {
this.host.completeIteration();
} else {
this.host.failIteration(new IllegalStateException("Unexpected notification: "
+ o.toString()));
}
});
this.host.testWait();
// expect a POST notification per child, a POST completion per child
this.host.testStart(childCount * 2);
for (int i = 0; i < childCount; i++) {
ExampleServiceState initialState = new ExampleServiceState();
initialState.name = initialState.documentSelfLink = prefix + i;
initialState.counter = counterValue;
final int finalI = i;
// create an example service
this.host.send(Operation
.createPost(factoryUri)
.setBody(initialState).setCompletion((o, e) -> {
if (e != null) {
this.host.failIteration(e);
return;
}
ServiceDocument rsp = o.getBody(ServiceDocument.class);
childUris[finalI] = UriUtils.buildUri(this.host, rsp.documentSelfLink);
this.host.completeIteration();
}));
}
this.host.testWait();
this.host.testStart(1);
Operation delete = subscribeOp.clone().setUri(factoryUri).setAction(Action.DELETE);
this.host.stopSubscriptionService(delete, notificationTarget);
this.host.testWait();
this.verifySubscriberCount(new URI[]{factoryUri}, 0);
}
// Subscribes with replayState=true, patches children twice, then
// unsubscribes and removes the shared notification target.
private void doNotificationsWithReplayState(URI[] childUris)
throws Throwable {
this.host.log("starting subscription with replay");
final AtomicInteger deletesRemainingCount = new AtomicInteger();
ServiceSubscriber sr = createAndStartNotificationTarget(
UUID.randomUUID().toString(),
deletesRemainingCount);
sr.replayState = true;
// Subscribe to notifications from every example service; get notified with current state
subscribeToServices(childUris, sr);
verifySubscriberCount(childUris, 1);
patchChildren(childUris, false);
patchChildren(childUris, false);
// Finally un subscribe the notification handlers
unsubscribeFromChildren(childUris, sr.reference, false);
verifySubscriberCount(childUris, 0);
deleteNotificationTarget(deletesRemainingCount, sr);
}
// Subscribes with a short expiration and verifies the publisher drops the
// subscription after the expiry elapses and an update forces evaluation.
private void doNotificationsWithExpiration(URI[] childUris)
throws Throwable {
this.host.log("starting subscription with expiration");
final AtomicInteger deletesRemainingCount = new AtomicInteger();
// start a notification target that will not complete test iterations since expirations race
// with notifications, allowing for notifications to be processed after the next test starts
ServiceSubscriber sr = createAndStartNotificationTarget(UUID.randomUUID()
.toString(), deletesRemainingCount, false, false);
sr.documentExpirationTimeMicros = Utils.fromNowMicrosUtc(
this.host.getMaintenanceIntervalMicros() * 2);
// Subscribe to notifications from every example service; get notified with current state
subscribeToServices(childUris, sr);
verifySubscriberCount(childUris, 1);
Thread.sleep((this.host.getMaintenanceIntervalMicros() / 1000) * 2);
// do a patch which will cause the publisher to evaluate and expire subscriptions
patchChildren(childUris, true);
verifySubscriberCount(childUris, 0);
deleteNotificationTarget(deletesRemainingCount, sr);
}
// Stops the shared notification target service. deletesRemainingCount is
// reset so the target's handler treats this final DELETE as a real stop.
private void deleteNotificationTarget(AtomicInteger deletesRemainingCount,
ServiceSubscriber sr) throws Throwable {
deletesRemainingCount.set(1);
TestContext ctx = testCreate(1);
this.host.send(Operation.createDelete(sr.reference)
.setCompletion((o, e) -> ctx.completeIteration()));
testWait(ctx);
}
// Verifies failed-delivery accounting, the skipped-notifications pragma on
// the first notification after a target restart, and automatic subscription
// removal after repeated delivery failures.
private void doNotificationsWithFailure(URI[] childUris) throws Throwable, InterruptedException {
this.host.log("starting subscription with failure, stopping notification target");
final AtomicInteger deletesRemainingCount = new AtomicInteger();
ServiceSubscriber sr = createAndStartNotificationTarget(UUID.randomUUID()
.toString(), deletesRemainingCount);
// Re subscribe, but stop the notification target, causing automatic removal of the
// subscriptions
subscribeToServices(childUris, sr);
verifySubscriberCount(childUris, 1);
deleteNotificationTarget(deletesRemainingCount, sr);
// send updates and expect failure in delivering notifications
patchChildren(childUris, true);
// expect the publisher to note at least one failed notification attempt
verifySubscriberCount(true, childUris, 1, 1L);
// restart notification target service but expect a pragma in the notifications
// saying we missed some
boolean expectSkippedNotificationsPragma = true;
this.host.log("restarting notification target");
createAndStartNotificationTarget(sr.reference.getPath(),
deletesRemainingCount, expectSkippedNotificationsPragma, true);
// send some more updates, this time expect ZERO failures;
patchChildren(childUris, false);
verifySubscriberCount(true, childUris, 1, 0L);
this.host.log("stopping notification target, again");
deleteNotificationTarget(deletesRemainingCount, sr);
while (!verifySubscriberCount(false, childUris, 0, null)) {
Thread.sleep(VerificationHost.FAST_MAINT_INTERVAL_MILLIS);
patchChildren(childUris, true);
}
this.host.log("Verifying all subscriptions have been removed");
// because we sent more than K updates, causing K + 1 notification delivery failures,
// the subscriptions should all be automatically removed!
verifySubscriberCount(childUris, 0);
}
// Verifies notificationLimit removes subscriptions after the limit is hit
// and that the target receives a DELETE per removed subscription; also
// exercises usePublicUri delivery.
private void doNotificationsWithLimitAndPublicUri(URI[] childUris) throws Throwable,
InterruptedException, TimeoutException {
this.host.log("starting subscription with limit and public uri");
final AtomicInteger deletesRemainingCount = new AtomicInteger();
ServiceSubscriber sr = createAndStartNotificationTarget(UUID.randomUUID()
.toString(), deletesRemainingCount);
// Re subscribe, use public URI and limit notifications to one.
// After these notifications are sent, we should see all subscriptions removed
deletesRemainingCount.set(childUris.length + 1);
sr.usePublicUri = true;
sr.notificationLimit = this.updateCount;
subscribeToServices(childUris, sr);
verifySubscriberCount(childUris, 1);
// Issue another patch request on every example service instance
patchChildren(childUris, false);
// because we set notificationLimit, all subscriptions should be removed
verifySubscriberCount(childUris, 0);
Date exp = this.host.getTestExpiration();
// verify we received DELETEs on the notification target when a subscription was removed
while (deletesRemainingCount.get() != 1) {
Thread.sleep(250);
if (new Date().after(exp)) {
throw new TimeoutException("DELETEs not received at notification target:"
+ deletesRemainingCount.get());
}
}
deleteNotificationTarget(deletesRemainingCount, sr);
}
// Verifies subscribers are notified when the publisher services are DELETEd.
private void doDeleteNotifications(URI[] childUris, Long counterValue) throws Throwable {
this.host.log("starting subscription for DELETEs");
final AtomicInteger deletesRemainingCount = new AtomicInteger();
ServiceSubscriber sr = createAndStartNotificationTarget(UUID.randomUUID()
.toString(), deletesRemainingCount);
subscribeToServices(childUris, sr);
// Issue DELETEs and verify the subscription was notified
this.host.testStart(childUris.length * 2);
for (URI child : childUris) {
ExampleServiceState initialState = new ExampleServiceState();
initialState.counter = counterValue;
Operation delete = Operation
.createDelete(child)
.setBody(initialState)
.setCompletion(this.host.getCompletion());
this.host.send(delete);
}
this.host.testWait();
deleteNotificationTarget(deletesRemainingCount, sr);
}
// Convenience overload: no skipped-notification pragma expected, test
// iterations are completed per notification.
private ServiceSubscriber createAndStartNotificationTarget(String link,
final AtomicInteger deletesRemainingCount) throws Throwable {
return createAndStartNotificationTarget(link, deletesRemainingCount, false, true);
}
// Builds the standard notification-target handler: counts DELETEs toward
// deletesRemainingCount, optionally checks for the skipped-notifications
// pragma, and optionally completes a test iteration per notification.
private ServiceSubscriber createAndStartNotificationTarget(String link,
final AtomicInteger deletesRemainingCount,
boolean expectSkipNotificationsPragma,
boolean completeIterations) throws Throwable {
final AtomicBoolean seenSkippedNotificationPragma =
new AtomicBoolean(false);
return createAndStartNotificationTarget(link, (update) -> {
if (!update.isNotification()) {
if (update.getAction() == Action.DELETE) {
int r = deletesRemainingCount.decrementAndGet();
if (r != 0) {
update.complete();
return true;
}
}
// returning false lets the StatelessService default handling run
// (e.g. the final DELETE actually stops the target)
return false;
}
if (update.getAction() != Action.PATCH &&
update.getAction() != Action.PUT &&
update.getAction() != Action.DELETE) {
update.complete();
return true;
}
if (expectSkipNotificationsPragma) {
// Only the FIRST notification after restart must carry the pragma.
String pragma = update.getRequestHeader(Operation.PRAGMA_HEADER);
if (!seenSkippedNotificationPragma.get() && (pragma == null
|| !pragma.contains(Operation.PRAGMA_DIRECTIVE_SKIPPED_NOTIFICATIONS))) {
this.host.failIteration(new IllegalStateException(
"Missing skipped notification pragma"));
return true;
} else {
seenSkippedNotificationPragma.set(true);
}
}
if (completeIterations) {
this.host.completeIteration();
}
update.complete();
return true;
});
}
private ServiceSubscriber createAndStartNotificationTarget(
Function<Operation, Boolean> h) throws Throwable {
return createAndStartNotificationTarget(UUID.randomUUID().toString(), h);
}
// Starts a StatelessService at the given link whose requests are handled by
// h; returns a ServiceSubscriber referencing it (shared across services).
private ServiceSubscriber createAndStartNotificationTarget(
String link,
Function<Operation, Boolean> h) throws Throwable {
StatelessService notificationTarget = createNotificationTargetService(h);
// Start notification target (shared between subscriptions)
Operation startOp = Operation
.createPost(UriUtils.buildUri(this.host, link))
.setCompletion(this.host.getCompletion())
.setReferer(this.host.getReferer());
this.host.testStart(1);
this.host.startService(startOp, notificationTarget);
this.host.testWait();
ServiceSubscriber sr = new ServiceSubscriber();
sr.reference = notificationTarget.getUri();
return sr;
}
// Wraps h in a StatelessService: a false return defers to default handling.
private StatelessService createNotificationTargetService(Function<Operation, Boolean> h) {
return new StatelessService() {
@Override
public void handleRequest(Operation update) {
if (!h.apply(update)) {
super.handleRequest(update);
}
}
};
}
private void subscribeToServices(URI[] uris, ServiceSubscriber sr) throws Throwable {
// replayState doubles the expected completions: one for the subscribe
// POST plus one for the replayed-state notification.
int expectedCompletions = uris.length;
if (sr.replayState) {
expectedCompletions *= 2;
}
subscribeToServices(uris, sr, expectedCompletions);
}
private void subscribeToServices(URI[] uris, ServiceSubscriber sr, int expectedCompletions) throws Throwable {
this.host.testStart(expectedCompletions);
for (int i = 0; i < uris.length; i++) {
subscribeToService(uris[i], sr);
}
this.host.testWait();
}
// POSTs the subscriber body to the service's /subscriptions utility URI,
// queued until the service becomes available.
private void subscribeToService(URI uri, ServiceSubscriber sr) {
if (sr.usePublicUri) {
// clone before mutating so the caller's subscriber is untouched
sr = Utils.clone(sr);
sr.reference = UriUtils.buildPublicUri(this.host, sr.reference.getPath());
}
URI subUri = UriUtils.buildSubscriptionUri(uri);
this.host.send(Operation.createPost(subUri)
.setCompletion(this.host.getCompletion())
.setReferer(this.host.getReferer())
.setBody(sr)
.addPragmaDirective(Operation.PRAGMA_DIRECTIVE_QUEUE_FOR_SERVICE_AVAILABILITY));
}
// Removes the subscription from each service, either via the ServiceHost
// helper API or by DELETEing the /subscriptions utility URI directly.
private void unsubscribeFromChildren(URI[] uris, URI targetUri,
boolean useServiceHostStopSubscription) throws Throwable {
int count = uris.length;
TestContext ctx = testCreate(count);
for (int i = 0; i < count; i++) {
if (useServiceHostStopSubscription) {
// stop the subscriptions using the service host API
host.stopSubscriptionService(
Operation.createDelete(uris[i])
.setCompletion(ctx.getCompletion()),
targetUri);
continue;
}
ServiceSubscriber unsubscribeBody = new ServiceSubscriber();
unsubscribeBody.reference = targetUri;
URI subUri = UriUtils.buildSubscriptionUri(uris[i]);
this.host.send(Operation.createDelete(subUri)
.setCompletion(ctx.getCompletion())
.setBody(unsubscribeBody));
}
testWait(ctx);
}
private boolean verifySubscriberCount(URI[] uris, int subscriberCount) throws Throwable {
return verifySubscriberCount(true, uris, subscriberCount, null);
}
// Polls each service's /subscriptions state until every service shows
// exactly subscriberCount subscribers (and, when failedNotificationCount is
// non-null, matching failure counts). With wait=false, does a single check
// and returns whether it converged.
private boolean verifySubscriberCount(boolean wait, URI[] uris, int subscriberCount,
Long failedNotificationCount)
throws Throwable {
URI[] subUris = new URI[uris.length];
int i = 0;
for (URI u : uris) {
URI subUri = UriUtils.buildSubscriptionUri(u);
subUris[i++] = subUri;
}
AtomicBoolean isConverged = new AtomicBoolean();
this.host.waitFor("subscriber verification timed out", () -> {
isConverged.set(true);
Map<URI, ServiceSubscriptionState> subStates = new ConcurrentSkipListMap<>();
TestContext ctx = this.host.testCreate(uris.length);
for (URI u : subUris) {
this.host.send(Operation.createGet(u).setCompletion((o, e) -> {
ServiceSubscriptionState s = null;
if (e == null) {
s = o.getBody(ServiceSubscriptionState.class);
} else {
this.host.log("error response from %s: %s", o.getUri(), e.getMessage());
// because we stopped an owner node, if gossip is not updated a GET
// to subscriptions might fail because it was forward to a stale node
s = new ServiceSubscriptionState();
s.subscribers = new HashMap<>();
}
subStates.put(o.getUri(), s);
ctx.complete();
}));
}
ctx.await();
for (ServiceSubscriptionState state : subStates.values()) {
int expected = subscriberCount;
int actual = state.subscribers.size();
if (actual != expected) {
isConverged.set(false);
break;
}
if (failedNotificationCount == null) {
continue;
}
for (ServiceSubscriber sr : state.subscribers.values()) {
if (sr.failedNotificationCount == null && failedNotificationCount == 0) {
continue;
}
if (sr.failedNotificationCount == null
|| 0 != sr.failedNotificationCount.compareTo(failedNotificationCount)) {
isConverged.set(false);
break;
}
}
}
if (isConverged.get() || !wait) {
return true;
}
return false;
});
return isConverged.get();
}
// Sends PATCHes to every child: updateCount per child when success is
// expected (2 completions each: PATCH + notification), a single PATCH per
// child when delivery failure is expected (1 completion each).
private void patchChildren(URI[] uris, boolean expectFailure) throws Throwable {
int count = expectFailure ? uris.length : uris.length * 2;
long c = this.updateCount;
if (!expectFailure) {
count *= this.updateCount;
} else {
c = 1;
}
this.host.testStart(count);
for (int i = 0; i < uris.length; i++) {
for (int k = 0; k < c; k++) {
ExampleServiceState initialState = new ExampleServiceState();
initialState.counter = Long.MAX_VALUE;
Operation patch = Operation
.createPatch(uris[i])
.setBody(initialState)
.setCompletion(this.host.getCompletion());
this.host.send(patch);
}
}
this.host.testWait();
}
}
| |
/**
* Copyright 2007-2015, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.net.bbosh.internal;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assert.assertEquals;
import static org.junit.rules.RuleChain.outerRule;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URL;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.DisableOnDebug;
import org.junit.rules.TestRule;
import org.junit.rules.Timeout;
import org.kaazing.k3po.junit.annotation.Specification;
import org.kaazing.k3po.junit.rules.K3poRule;
import org.kaazing.netx.URLConnectionHelper;
import org.kaazing.netx.bbosh.BBoshStrategy.Polling;
import org.kaazing.netx.bbosh.BBoshStrategy.Streaming;
import org.kaazing.netx.bbosh.BBoshURLConnection;
public class BBoshURLConnectionHelperIT {
// Fail any test running longer than one second (disabled under a debugger).
private final TestRule timeout = new DisableOnDebug(new Timeout(1, SECONDS));
// K3po plays the scripted BBOSH peer; scripts live under this classpath root.
private final K3poRule k3po = new K3poRule().setScriptRoot("org/kaazing/robotic/bbosh");
// Apply k3po outside the timeout so script cleanup still runs on timeout.
@Rule
public TestRule chain = outerRule(k3po).around(timeout);
@Test
@Specification("polling/accept.echo.then.close")
public void shouldConnectEchoThenClosedViaPolling() throws Exception {
// Open a BBOSH connection via the URLConnectionHelper SPI, using the
// polling strategy (5 second interval), against the scripted peer.
URLConnectionHelper helper = URLConnectionHelper.newInstance();
URI location = URI.create("bbosh://localhost:8000/connections");
BBoshURLConnection connection = (BBoshURLConnection) helper.openConnection(location);
connection.setSupportedStrategies(new Polling(5, SECONDS));
connection.connect();
OutputStream out = connection.getOutputStream();
InputStream in = connection.getInputStream();
// Write the payload, close output, then read the echoed bytes back.
out.write("Hello, world".getBytes(UTF_8));
out.close();
byte[] buf = new byte[32];
int len = in.read(buf);
in.close();
// Wait for the K3po script to complete before asserting.
k3po.finish();
assertEquals(12, len);
assertEquals("Hello, world", new String(buf, 0, 12, UTF_8));
}
@Test
@Specification("polling/accept.echo.then.close")
public void shouldConnectEchoThenClosedViaPollingURL() throws Exception {
    // Same echo round-trip as the helper variant, but resolve the bbosh://
    // location to a java.net.URL first and connect through it.
    URLConnectionHelper urlHelper = URLConnectionHelper.newInstance();
    URI location = URI.create("bbosh://localhost:8000/connections");
    URL locationURL = urlHelper.toURL(location);
    BBoshURLConnection conn = (BBoshURLConnection) locationURL.openConnection();
    conn.setSupportedStrategies(new Polling(5, SECONDS));
    conn.connect();
    OutputStream out = conn.getOutputStream();
    InputStream in = conn.getInputStream();
    // Send the payload, close the output side, then read the echo back.
    out.write("Hello, world".getBytes(UTF_8));
    out.close();
    byte[] response = new byte[32];
    int bytesRead = in.read(response);
    in.close();
    // Let the scripted peer finish before asserting on the echoed data.
    k3po.finish();
    assertEquals(12, bytesRead);
    assertEquals("Hello, world", new String(response, 0, 12, UTF_8));
}
@Test
@Specification("polling/accept.echo.then.closed")
public void shouldConnectEchoThenCloseViaPolling() throws Exception {
// Echo round-trip where the client closes first (script "then.closed").
URLConnectionHelper helper = URLConnectionHelper.newInstance();
URI location = URI.create("bbosh://localhost:8000/connections");
BBoshURLConnection connection = (BBoshURLConnection) helper.openConnection(location);
connection.setSupportedStrategies(new Polling(5, SECONDS));
connection.connect();
OutputStream out = connection.getOutputStream();
InputStream in = connection.getInputStream();
out.write("Hello, world".getBytes(UTF_8));
out.close();
// Buffer sized exactly to the expected payload length.
byte[] buf = new byte[12];
int len = in.read(buf);
in.close();
k3po.finish();
assertEquals(12, len);
assertEquals("Hello, world", new String(buf, 0, 12, UTF_8));
}
@Test
@Specification("polling/accept.echo.then.closed")
public void shouldConnectEchoThenCloseViaPollingURL() throws Exception {
    // Same as the non-URL variant, but resolve the endpoint via java.net.URL.
    URLConnectionHelper connectionHelper = URLConnectionHelper.newInstance();
    URI endpoint = URI.create("bbosh://localhost:8000/connections");
    URL endpointURL = connectionHelper.toURL(endpoint);
    BBoshURLConnection connection = (BBoshURLConnection) endpointURL.openConnection();
    connection.setSupportedStrategies(new Polling(5, SECONDS));
    connection.connect();

    // Write the greeting, then read back the server's echo of it.
    byte[] greeting = "Hello, world".getBytes(UTF_8);
    OutputStream output = connection.getOutputStream();
    InputStream input = connection.getInputStream();
    output.write(greeting);
    output.close();

    // Buffer sized exactly to the expected echo.
    byte[] echoed = new byte[12];
    int bytesRead = input.read(echoed);
    input.close();

    // Let the k3po script run to completion before asserting.
    k3po.finish();

    assertEquals(12, bytesRead);
    assertEquals("Hello, world", new String(echoed, 0, 12, UTF_8));
}
@Test
@Specification("streaming/accept.echo.then.close")
public void shouldConnectEchoThenClosedViaStreaming() throws Exception {
    // Open the BBOSH connection directly through the helper, using the
    // streaming strategy instead of polling.
    URLConnectionHelper connectionHelper = URLConnectionHelper.newInstance();
    URI endpoint = URI.create("bbosh://localhost:8000/connections");
    BBoshURLConnection connection = (BBoshURLConnection) connectionHelper.openConnection(endpoint);
    connection.setSupportedStrategies(new Streaming());
    connection.connect();

    // Write the greeting, then read back the server's echo of it.
    byte[] greeting = "Hello, world".getBytes(UTF_8);
    OutputStream output = connection.getOutputStream();
    InputStream input = connection.getInputStream();
    output.write(greeting);
    output.close();

    byte[] echoed = new byte[32];
    int bytesRead = input.read(echoed);
    input.close();

    // Let the k3po script run to completion before asserting.
    k3po.finish();

    assertEquals(12, bytesRead);
    assertEquals("Hello, world", new String(echoed, 0, 12, UTF_8));
}
@Test
@Specification("streaming/accept.echo.then.close")
public void shouldConnectEchoThenClosedViaStreamingURL() throws Exception {
    // Same as the non-URL variant, but resolve the endpoint via java.net.URL.
    URLConnectionHelper connectionHelper = URLConnectionHelper.newInstance();
    URI endpoint = URI.create("bbosh://localhost:8000/connections");
    URL endpointURL = connectionHelper.toURL(endpoint);
    BBoshURLConnection connection = (BBoshURLConnection) endpointURL.openConnection();
    connection.setSupportedStrategies(new Streaming());
    connection.connect();

    // Write the greeting, then read back the server's echo of it.
    byte[] greeting = "Hello, world".getBytes(UTF_8);
    OutputStream output = connection.getOutputStream();
    InputStream input = connection.getInputStream();
    output.write(greeting);
    output.close();

    byte[] echoed = new byte[32];
    int bytesRead = input.read(echoed);
    input.close();

    // Let the k3po script run to completion before asserting.
    k3po.finish();

    assertEquals(12, bytesRead);
    assertEquals("Hello, world", new String(echoed, 0, 12, UTF_8));
}
@Test
@Specification("streaming/accept.echo.then.closed")
public void shouldConnectEchoThenCloseViaStreaming() throws Exception {
    // Open the BBOSH connection directly through the helper, using the
    // streaming strategy instead of polling.
    URLConnectionHelper connectionHelper = URLConnectionHelper.newInstance();
    URI endpoint = URI.create("bbosh://localhost:8000/connections");
    BBoshURLConnection connection = (BBoshURLConnection) connectionHelper.openConnection(endpoint);
    connection.setSupportedStrategies(new Streaming());
    connection.connect();

    // Write the greeting, then read back the server's echo of it.
    byte[] greeting = "Hello, world".getBytes(UTF_8);
    OutputStream output = connection.getOutputStream();
    InputStream input = connection.getInputStream();
    output.write(greeting);
    output.close();

    // Buffer sized exactly to the expected echo.
    byte[] echoed = new byte[12];
    int bytesRead = input.read(echoed);
    input.close();

    // Let the k3po script run to completion before asserting.
    k3po.finish();

    assertEquals(12, bytesRead);
    assertEquals("Hello, world", new String(echoed, 0, 12, UTF_8));
}
@Test
@Specification("streaming/accept.echo.then.closed")
public void shouldConnectEchoThenCloseViaStreamingURL() throws Exception {
    // Same as the non-URL variant, but resolve the endpoint via java.net.URL.
    URLConnectionHelper connectionHelper = URLConnectionHelper.newInstance();
    URI endpoint = URI.create("bbosh://localhost:8000/connections");
    URL endpointURL = connectionHelper.toURL(endpoint);
    BBoshURLConnection connection = (BBoshURLConnection) endpointURL.openConnection();
    connection.setSupportedStrategies(new Streaming());
    connection.connect();

    // Write the greeting, then read back the server's echo of it.
    byte[] greeting = "Hello, world".getBytes(UTF_8);
    OutputStream output = connection.getOutputStream();
    InputStream input = connection.getInputStream();
    output.write(greeting);
    output.close();

    // Buffer sized exactly to the expected echo.
    byte[] echoed = new byte[12];
    int bytesRead = input.read(echoed);
    input.close();

    // Let the k3po script run to completion before asserting.
    k3po.finish();

    assertEquals(12, bytesRead);
    assertEquals("Hello, world", new String(echoed, 0, 12, UTF_8));
}
}
| |
package com.thinkbiganalytics.metadata.jpa.sla;
/*-
* #%L
* thinkbig-operational-metadata-jpa
* %%
* Copyright (C) 2017 ThinkBig Analytics
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.google.common.collect.ComparisonChain;
import com.thinkbiganalytics.jpa.AbstractAuditedEntity;
import com.thinkbiganalytics.jpa.BaseJpaId;
import com.thinkbiganalytics.metadata.sla.api.AssessmentResult;
import com.thinkbiganalytics.metadata.sla.api.ObligationAssessment;
import com.thinkbiganalytics.metadata.sla.api.ServiceLevelAgreement;
import com.thinkbiganalytics.metadata.sla.api.ServiceLevelAssessment;
import org.joda.time.DateTime;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.EmbeddedId;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.persistence.Transient;
/**
* Entity representing service level assessment results for Service Level Agreement (SLA).
* SLA's are defined in Modeshape, but their assessments are stored here
* Service level assessments contain 1 ore more attached {@link JpaObligationAssessment}.
* Each Obligation assessment contains 1 or more {@link JpaMetricAssessment}
* the result of this service level assessment come from the results of the {@link JpaObligationAssessment}'s
*/
@Entity
@Table(name = "SLA_ASSESSMENT")
public class JpaServiceLevelAssessment extends AbstractAuditedEntity implements ServiceLevelAssessment {

    /** Primary key of the assessment row. */
    @EmbeddedId
    private SlaAssessmentId id;

    /** The assessed agreement; transient because SLAs live in Modeshape, not in this database. */
    @Transient
    private ServiceLevelAgreement agreement;

    /** String form of the SLA id this assessment belongs to (see {@link #setAgreement}). */
    @Column(name = "SLA_ID")
    private String slaId;

    /** Human-readable summary of the assessment outcome. */
    @Column(name = "MESSAGE")
    private String message;

    /** Overall result, derived from the attached obligation assessments. */
    @Enumerated(EnumType.STRING)
    @Column(name = "RESULT")
    private AssessmentResult result;

    /** Child obligation assessments; persisted and removed together with this entity. */
    @OneToMany(targetEntity = JpaObligationAssessment.class, mappedBy = "serviceLevelAssessment", fetch = FetchType.EAGER, cascade = CascadeType.ALL, orphanRemoval = true)
    private Set<ObligationAssessment> obligationAssessments = new HashSet<>();

    public JpaServiceLevelAssessment() {
    }

    public SlaAssessmentId getId() {
        return id;
    }

    public void setId(SlaAssessmentId id) {
        this.id = id;
    }

    @Override
    public String getServiceLevelAgreementId() {
        return slaId;
    }

    public ServiceLevelAgreement getAgreement() {
        return agreement;
    }

    /**
     * Sets the assessed agreement and mirrors its id into the persisted {@code slaId} column.
     */
    public void setAgreement(ServiceLevelAgreement agreement) {
        this.agreement = agreement;
        this.setSlaId(agreement.getId().toString());
    }

    public String getSlaId() {
        return slaId;
    }

    public void setSlaId(String slaId) {
        this.slaId = slaId;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public AssessmentResult getResult() {
        return result;
    }

    public void setResult(AssessmentResult result) {
        this.result = result;
    }

    /**
     * The assessment time is the audited creation time of this entity.
     */
    @Override
    public DateTime getTime() {
        return super.getCreatedTime();
    }

    @Override
    public Set<ObligationAssessment> getObligationAssessments() {
        return obligationAssessments;
    }

    public void setObligationAssessments(Set<ObligationAssessment> obligationAssessments) {
        this.obligationAssessments = obligationAssessments;
    }

    /**
     * Orders assessments by result, then agreement name, then by the child obligation
     * assessments (count first, then element-wise on the sorted lists).
     */
    @Override
    public int compareTo(ServiceLevelAssessment sla) {
        // NOTE(review): getAgreement() is @Transient and may be null for entities
        // loaded straight from the database — confirm callers always populate it.
        ComparisonChain chain = ComparisonChain
            .start()
            .compare(this.getResult(), sla.getResult())
            .compare(this.getAgreement().getName(), sla.getAgreement().getName());
        if (chain.result() != 0) {
            return chain.result();
        }
        List<ObligationAssessment> list1 = new ArrayList<>(this.getObligationAssessments());
        List<ObligationAssessment> list2 = new ArrayList<>(sla.getObligationAssessments());
        chain = chain.compare(list1.size(), list2.size());
        if (chain.result() != 0) {
            // Bug fix: previously the element-wise loop below still executed when the
            // sizes differed.  ComparisonChain ignores extra comparisons once a result
            // is known, but its arguments are evaluated eagerly, so list2.get(idx)
            // threw IndexOutOfBoundsException whenever this assessment had more
            // obligation assessments than the other one.
            return chain.result();
        }
        // Sizes are equal here, so element-wise comparison is safe.
        Collections.sort(list1);
        Collections.sort(list2);
        for (int idx = 0; idx < list1.size(); idx++) {
            chain = chain.compare(list1.get(idx), list2.get(idx));
        }
        return chain.result();
    }

    /**
     * Embedded UUID-based primary key for {@link JpaServiceLevelAssessment}.
     */
    @Embeddable
    public static class SlaAssessmentId extends BaseJpaId implements ServiceLevelAssessment.ID, Serializable {

        private static final long serialVersionUID = 6965221468619613881L;

        @Column(name = "id")
        private UUID uuid;

        public SlaAssessmentId() {
        }

        public SlaAssessmentId(Serializable ser) {
            super(ser);
        }

        /** Creates a new id backed by a random UUID. */
        public static SlaAssessmentId create() {
            return new SlaAssessmentId(UUID.randomUUID());
        }

        @Override
        public UUID getUuid() {
            return this.uuid;
        }

        @Override
        public void setUuid(UUID uuid) {
            this.uuid = uuid;
        }
    }
}
| |
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.cache.impl.operation;
import com.hazelcast.cache.impl.CacheDataSerializerHook;
import com.hazelcast.cache.impl.CachePartitionSegment;
import com.hazelcast.cache.impl.ICacheRecordStore;
import com.hazelcast.cache.impl.ICacheService;
import com.hazelcast.cache.impl.PreJoinCacheConfig;
import com.hazelcast.cache.impl.record.CacheRecord;
import com.hazelcast.config.CacheConfig;
import com.hazelcast.internal.cluster.Versions;
import com.hazelcast.internal.nio.IOUtil;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.internal.serialization.Data;
import com.hazelcast.nio.serialization.IdentifiedDataSerializable;
import com.hazelcast.internal.services.ObjectNamespace;
import com.hazelcast.spi.impl.operationservice.Operation;
import com.hazelcast.internal.services.ServiceNamespace;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static com.hazelcast.internal.util.MapUtil.createHashMap;
import com.hazelcast.nio.serialization.impl.Versioned;
/**
* Replication operation is the data migration operation of {@link com.hazelcast.cache.impl.CacheRecordStore}.
* <p>
* <p>Cache record store's records and configurations will be migrated into their new nodes.
* <p>
* Steps;
* <ul>
* <li>Serialize all non expired data.</li>
* <li>Deserialize the data and config.</li>
* <li>Create the configuration in the new node service.</li>
* <li>Insert each record into {@link ICacheRecordStore}.</li>
* </ul>
* <p><b>Note:</b> This operation is a per partition operation.</p>
*/
public class CacheReplicationOperation extends Operation implements IdentifiedDataSerializable, Versioned {

    /** Cache configurations to recreate on the destination member before records are applied. */
    private final List<CacheConfig> configs = new ArrayList<CacheConfig>();

    /** Records to migrate, keyed by cache name; each value maps record key to record. */
    private final Map<String, Map<Data, CacheRecord>> data = new HashMap<String, Map<Data, CacheRecord>>();

    /** Near Cache invalidation metadata migrated alongside the records. */
    private CacheNearCacheStateHolder nearCacheStateHolder;

    // Captured on the sending side from the tenant control factory; transient
    // because it only affects HOW configs are serialized, not the payload itself.
    private transient boolean classesAlwaysAvailable = true;

    public CacheReplicationOperation() {
        nearCacheStateHolder = new CacheNearCacheStateHolder();
        nearCacheStateHolder.setCacheReplicationOperation(this);
    }

    /**
     * Collects the configs, records, and Near Cache state for the given namespaces
     * of one partition segment, to be shipped to the replica at {@code replicaIndex}.
     */
    public final void prepare(CachePartitionSegment segment, Collection<ServiceNamespace> namespaces,
                              int replicaIndex) {
        for (ServiceNamespace namespace : namespaces) {
            ObjectNamespace ns = (ObjectNamespace) namespace;
            ICacheRecordStore recordStore = segment.getRecordStore(ns.getObjectName());
            if (recordStore == null) {
                continue;
            }
            CacheConfig cacheConfig = recordStore.getConfig();
            // Only replicate records for caches whose backup count covers this replica.
            if (cacheConfig.getTotalBackupCount() >= replicaIndex) {
                storeRecordsToReplicate(recordStore);
            }
        }
        configs.addAll(segment.getCacheConfigs());
        nearCacheStateHolder.prepare(segment, namespaces);
        classesAlwaysAvailable = segment.getCacheService().getNodeEngine()
                .getTenantControlService()
                .getTenantControlFactory()
                .isClassesAlwaysAvailable();
    }

    /** Snapshots the record store's read-only record view under its cache name. */
    protected void storeRecordsToReplicate(ICacheRecordStore recordStore) {
        data.put(recordStore.getName(), recordStore.getReadOnlyRecords());
    }

    @Override
    public void beforeRun() throws Exception {
        // Migrate CacheConfigs first
        ICacheService service = getService();
        for (CacheConfig config : configs) {
            service.putCacheConfigIfAbsent(config);
        }
    }

    @Override
    public void run() throws Exception {
        ICacheService service = getService();
        for (Map.Entry<String, Map<Data, CacheRecord>> entry : data.entrySet()) {
            ICacheRecordStore cache;
            cache = service.getOrCreateRecordStore(entry.getKey(), getPartitionId());
            // Replace any existing contents with the replicated snapshot.
            cache.reset();
            Map<Data, CacheRecord> map = entry.getValue();
            Iterator<Map.Entry<Data, CacheRecord>> iterator = map.entrySet().iterator();
            while (iterator.hasNext()) {
                if (cache.evictIfRequired()) {
                    // No need to continue replicating records anymore.
                    // We are already over eviction threshold, each put record will cause another eviction.
                    break;
                }
                Map.Entry<Data, CacheRecord> next = iterator.next();
                Data key = next.getKey();
                CacheRecord record = next.getValue();
                // Remove as we go so already-applied records are freed eagerly.
                iterator.remove();
                cache.putRecord(key, record, false);
            }
        }
        data.clear();
        // Near Cache state is only meaningful on the primary replica.
        if (getReplicaIndex() == 0) {
            nearCacheStateHolder.applyState();
        }
    }

    @Override
    public String getServiceName() {
        return ICacheService.SERVICE_NAME;
    }

    // Wire format (must stay in lockstep with readInternal):
    //   configCount, configs..., cacheCount,
    //   per cache: recordCount, cacheName, (key, record)*, null-Data sentinel,
    //   nearCacheStateHolder.
    // NOTE(review): readInternal calls super.readInternal(in) but there is no
    // matching super.writeInternal(out) here — confirm the Operation base class
    // writes nothing, otherwise the streams would be misaligned.
    @Override
    protected void writeInternal(ObjectDataOutput out)
            throws IOException {
        int confSize = configs.size();
        out.writeInt(confSize);
        for (CacheConfig config : configs) {
            // RU_COMPAT_4_1
            if (out.getVersion().isGreaterOrEqual(Versions.V4_2) && !classesAlwaysAvailable) {
                out.writeObject(PreJoinCacheConfig.of(config));
            } else {
                out.writeObject(config);
            }
        }
        int count = data.size();
        out.writeInt(count);
        for (Map.Entry<String, Map<Data, CacheRecord>> entry : data.entrySet()) {
            Map<Data, CacheRecord> cacheMap = entry.getValue();
            int subCount = cacheMap.size();
            out.writeInt(subCount);
            out.writeString(entry.getKey());
            for (Map.Entry<Data, CacheRecord> e : cacheMap.entrySet()) {
                final Data key = e.getKey();
                final CacheRecord record = e.getValue();
                IOUtil.writeData(out, key);
                out.writeObject(record);
            }
            // Empty data will terminate the iteration for read in case
            // expired entries were found while serializing, since the
            // real subCount will then be different from the one written
            // before
            IOUtil.writeData(out, null);
        }
        out.writeObject(nearCacheStateHolder);
    }

    @Override
    protected void readInternal(ObjectDataInput in)
            throws IOException {
        super.readInternal(in);
        int confSize = in.readInt();
        for (int i = 0; i < confSize; i++) {
            final CacheConfig config = in.readObject();
            // RU_COMPAT_4_1
            if (in.getVersion().isGreaterOrEqual(Versions.V4_2) && !classesAlwaysAvailable) {
                configs.add(PreJoinCacheConfig.asCacheConfig(config));
            } else {
                configs.add(config);
            }
        }
        int count = in.readInt();
        for (int i = 0; i < count; i++) {
            int subCount = in.readInt();
            String name = in.readString();
            Map<Data, CacheRecord> m = createHashMap(subCount);
            data.put(name, m);
            // subCount + 1 because of the DefaultData written as the last entry
            // which adds another Data entry at the end of the stream!
            for (int j = 0; j < subCount + 1; j++) {
                Data key = IOUtil.readData(in);
                // Empty data received so reading can be stopped here since
                // since the real object subCount might be different from
                // the number on the stream due to found expired entries
                if (key == null || key.dataSize() == 0) {
                    break;
                }
                CacheRecord record = in.readObject();
                m.put(key, record);
            }
        }
        nearCacheStateHolder = in.readObject();
        nearCacheStateHolder.setCacheReplicationOperation(this);
    }

    /** True when there is nothing to replicate (no configs and no records). */
    public boolean isEmpty() {
        return configs.isEmpty() && data.isEmpty();
    }

    Collection<CacheConfig> getConfigs() {
        return Collections.unmodifiableCollection(configs);
    }

    @Override
    public int getFactoryId() {
        return CacheDataSerializerHook.F_ID;
    }

    @Override
    public int getClassId() {
        return CacheDataSerializerHook.CACHE_REPLICATION;
    }

    @Override
    public boolean requiresTenantContext() {
        return true;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.node;
import org.elasticsearch.Build;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionModule;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.node.NodeClientModule;
import org.elasticsearch.cluster.ClusterModule;
import org.elasticsearch.cluster.ClusterNameModule;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.routing.RoutingService;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoveryModule;
import org.elasticsearch.discovery.DiscoveryService;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.NodeEnvironmentModule;
import org.elasticsearch.gateway.GatewayAllocator;
import org.elasticsearch.gateway.GatewayModule;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.http.HttpServer;
import org.elasticsearch.http.HttpServerModule;
import org.elasticsearch.index.search.shape.ShapeModule;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.breaker.CircuitBreakerModule;
import org.elasticsearch.indices.cache.query.IndicesQueryCache;
import org.elasticsearch.indices.cluster.IndicesClusterStateService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.memory.IndexingMemoryController;
import org.elasticsearch.indices.store.IndicesStore;
import org.elasticsearch.indices.ttl.IndicesTTLService;
import org.elasticsearch.monitor.MonitorModule;
import org.elasticsearch.monitor.MonitorService;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.percolator.PercolatorModule;
import org.elasticsearch.percolator.PercolatorService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.PluginsModule;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.repositories.RepositoriesModule;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestModule;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.snapshots.SnapshotShardsService;
import org.elasticsearch.snapshots.SnapshotsService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPoolModule;
import org.elasticsearch.transport.TransportModule;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.tribe.TribeModule;
import org.elasticsearch.tribe.TribeService;
import org.elasticsearch.watcher.ResourceWatcherModule;
import org.elasticsearch.watcher.ResourceWatcherService;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
/**
* A node represent a node within a cluster (<tt>cluster.name</tt>). The {@link #client()} can be used
* in order to use a {@link Client} to perform actions/operations against the cluster.
* <p>In order to create a node, the {@link NodeBuilder} can be used. When done with it, make sure to
* call {@link #close()} on it.
*/
public class Node implements Releasable {
private static final String CLIENT_TYPE = "node";
public static final String HTTP_ENABLED = "http.enabled";
private final Lifecycle lifecycle = new Lifecycle();
private final Injector injector;
private final Settings settings;
private final Environment environment;
private final PluginsService pluginsService;
private final Client client;
/**
* Constructs a node with the given settings.
*
* @param preparedSettings Base settings to configure the node with
*/
public Node(Settings preparedSettings) {
this(preparedSettings, Version.CURRENT, Collections.<Class<? extends Plugin>>emptyList());
}
Node(Settings preparedSettings, Version version, Collection<Class<? extends Plugin>> classpathPlugins) {
final Settings pSettings = settingsBuilder().put(preparedSettings)
.put(Client.CLIENT_TYPE_SETTING, CLIENT_TYPE).build();
Environment tmpEnv = InternalSettingsPreparer.prepareEnvironment(pSettings, null);
Settings tmpSettings = TribeService.processSettings(tmpEnv.settings());
ESLogger logger = Loggers.getLogger(Node.class, tmpSettings.get("name"));
logger.info("version[{}], pid[{}], build[{}/{}]", version, JvmInfo.jvmInfo().pid(), Build.CURRENT.hashShort(), Build.CURRENT.timestamp());
logger.info("initializing ...");
if (logger.isDebugEnabled()) {
logger.debug("using config [{}], data [{}], logs [{}], plugins [{}]",
tmpEnv.configFile(), Arrays.toString(tmpEnv.dataFiles()), tmpEnv.logsFile(), tmpEnv.pluginsFile());
}
this.pluginsService = new PluginsService(tmpSettings, tmpEnv.pluginsFile(), classpathPlugins);
this.settings = pluginsService.updatedSettings();
// create the environment based on the finalized (processed) view of the settings
this.environment = new Environment(this.settings());
final NodeEnvironment nodeEnvironment;
try {
nodeEnvironment = new NodeEnvironment(this.settings, this.environment);
} catch (IOException ex) {
throw new IllegalStateException("Failed to created node environment", ex);
}
final ThreadPool threadPool = new ThreadPool(settings);
boolean success = false;
try {
ModulesBuilder modules = new ModulesBuilder();
modules.add(new Version.Module(version));
modules.add(new CircuitBreakerModule(settings));
// plugin modules must be added here, before others or we can get crazy injection errors...
for (Module pluginModule : pluginsService.nodeModules()) {
modules.add(pluginModule);
}
modules.add(new PluginsModule(pluginsService));
modules.add(new SettingsModule(this.settings));
modules.add(new NodeModule(this));
modules.add(new NetworkModule());
modules.add(new ScriptModule(this.settings));
modules.add(new EnvironmentModule(environment));
modules.add(new NodeEnvironmentModule(nodeEnvironment));
modules.add(new ClusterNameModule(this.settings));
modules.add(new ThreadPoolModule(threadPool));
modules.add(new DiscoveryModule(this.settings));
modules.add(new ClusterModule(this.settings));
modules.add(new RestModule(this.settings));
modules.add(new TransportModule(settings));
if (settings.getAsBoolean(HTTP_ENABLED, true)) {
modules.add(new HttpServerModule(settings));
}
modules.add(new IndicesModule(settings));
modules.add(new SearchModule(settings));
modules.add(new ActionModule(false));
modules.add(new MonitorModule(settings));
modules.add(new GatewayModule(settings));
modules.add(new NodeClientModule());
modules.add(new ShapeModule());
modules.add(new PercolatorModule());
modules.add(new ResourceWatcherModule());
modules.add(new RepositoriesModule());
modules.add(new TribeModule());
pluginsService.processModules(modules);
injector = modules.createInjector();
client = injector.getInstance(Client.class);
threadPool.setNodeSettingsService(injector.getInstance(NodeSettingsService.class));
success = true;
} finally {
if (!success) {
nodeEnvironment.close();
ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS);
}
}
logger.info("initialized");
}
/**
* The settings that were used to create the node.
*/
public Settings settings() {
return this.settings;
}
/**
* A client that can be used to execute actions (operations) against the cluster.
*/
public Client client() {
return client;
}
/**
* Start the node. If the node is already started, this method is no-op.
*/
public Node start() {
if (!lifecycle.moveToStarted()) {
return this;
}
ESLogger logger = Loggers.getLogger(Node.class, settings.get("name"));
logger.info("starting ...");
// hack around dependency injection problem (for now...)
injector.getInstance(Discovery.class).setRoutingService(injector.getInstance(RoutingService.class));
for (Class<? extends LifecycleComponent> plugin : pluginsService.nodeServices()) {
injector.getInstance(plugin).start();
}
injector.getInstance(MappingUpdatedAction.class).setClient(client);
injector.getInstance(IndicesService.class).start();
injector.getInstance(IndexingMemoryController.class).start();
injector.getInstance(IndicesClusterStateService.class).start();
injector.getInstance(IndicesTTLService.class).start();
injector.getInstance(SnapshotsService.class).start();
injector.getInstance(SnapshotShardsService.class).start();
injector.getInstance(TransportService.class).start();
injector.getInstance(ClusterService.class).start();
injector.getInstance(RoutingService.class).start();
injector.getInstance(SearchService.class).start();
injector.getInstance(MonitorService.class).start();
injector.getInstance(RestController.class).start();
// TODO hack around circular dependencies problems
injector.getInstance(GatewayAllocator.class).setReallocation(injector.getInstance(ClusterService.class), injector.getInstance(RoutingService.class));
DiscoveryService discoService = injector.getInstance(DiscoveryService.class).start();
discoService.waitForInitialState();
// gateway should start after disco, so it can try and recovery from gateway on "start"
injector.getInstance(GatewayService.class).start();
if (settings.getAsBoolean("http.enabled", true)) {
injector.getInstance(HttpServer.class).start();
}
injector.getInstance(ResourceWatcherService.class).start();
injector.getInstance(TribeService.class).start();
logger.info("started");
return this;
}
private Node stop() {
if (!lifecycle.moveToStopped()) {
return this;
}
ESLogger logger = Loggers.getLogger(Node.class, settings.get("name"));
logger.info("stopping ...");
injector.getInstance(TribeService.class).stop();
injector.getInstance(ResourceWatcherService.class).stop();
if (settings.getAsBoolean("http.enabled", true)) {
injector.getInstance(HttpServer.class).stop();
}
injector.getInstance(SnapshotsService.class).stop();
injector.getInstance(SnapshotShardsService.class).stop();
// stop any changes happening as a result of cluster state changes
injector.getInstance(IndicesClusterStateService.class).stop();
// we close indices first, so operations won't be allowed on it
injector.getInstance(IndexingMemoryController.class).stop();
injector.getInstance(IndicesTTLService.class).stop();
injector.getInstance(RoutingService.class).stop();
injector.getInstance(ClusterService.class).stop();
injector.getInstance(DiscoveryService.class).stop();
injector.getInstance(MonitorService.class).stop();
injector.getInstance(GatewayService.class).stop();
injector.getInstance(SearchService.class).stop();
injector.getInstance(RestController.class).stop();
injector.getInstance(TransportService.class).stop();
for (Class<? extends LifecycleComponent> plugin : pluginsService.nodeServices()) {
injector.getInstance(plugin).stop();
}
// we should stop this last since it waits for resources to get released
// if we had scroll searchers etc or recovery going on we wait for to finish.
injector.getInstance(IndicesService.class).stop();
logger.info("stopped");
return this;
}
// During concurrent close() calls we want to make sure that all of them return after the node has completed it's shutdown cycle.
// If not, the hook that is added in Bootstrap#setup() will be useless: close() might not be executed, in case another (for example api) call
// to close() has already set some lifecycles to stopped. In this case the process will be terminated even if the first call to close() has not finished yet.
@Override
public synchronized void close() {
if (lifecycle.started()) {
stop();
}
if (!lifecycle.moveToClosed()) {
return;
}
ESLogger logger = Loggers.getLogger(Node.class, settings.get("name"));
logger.info("closing ...");
StopWatch stopWatch = new StopWatch("node_close");
stopWatch.start("tribe");
injector.getInstance(TribeService.class).close();
stopWatch.stop().start("http");
if (settings.getAsBoolean("http.enabled", true)) {
injector.getInstance(HttpServer.class).close();
}
stopWatch.stop().start("snapshot_service");
injector.getInstance(SnapshotsService.class).close();
injector.getInstance(SnapshotShardsService.class).close();
stopWatch.stop().start("client");
Releasables.close(injector.getInstance(Client.class));
stopWatch.stop().start("indices_cluster");
injector.getInstance(IndicesClusterStateService.class).close();
stopWatch.stop().start("indices");
injector.getInstance(IndexingMemoryController.class).close();
injector.getInstance(IndicesTTLService.class).close();
injector.getInstance(IndicesService.class).close();
// close filter/fielddata caches after indices
injector.getInstance(IndicesQueryCache.class).close();
injector.getInstance(IndicesFieldDataCache.class).close();
injector.getInstance(IndicesStore.class).close();
stopWatch.stop().start("routing");
injector.getInstance(RoutingService.class).close();
stopWatch.stop().start("cluster");
injector.getInstance(ClusterService.class).close();
stopWatch.stop().start("discovery");
injector.getInstance(DiscoveryService.class).close();
stopWatch.stop().start("monitor");
injector.getInstance(MonitorService.class).close();
stopWatch.stop().start("gateway");
injector.getInstance(GatewayService.class).close();
stopWatch.stop().start("search");
injector.getInstance(SearchService.class).close();
stopWatch.stop().start("rest");
injector.getInstance(RestController.class).close();
stopWatch.stop().start("transport");
injector.getInstance(TransportService.class).close();
stopWatch.stop().start("percolator_service");
injector.getInstance(PercolatorService.class).close();
for (Class<? extends LifecycleComponent> plugin : pluginsService.nodeServices()) {
stopWatch.stop().start("plugin(" + plugin.getName() + ")");
injector.getInstance(plugin).close();
}
stopWatch.stop().start("script");
try {
injector.getInstance(ScriptService.class).close();
} catch(IOException e) {
logger.warn("ScriptService close failed", e);
}
stopWatch.stop().start("thread_pool");
// TODO this should really use ThreadPool.terminate()
injector.getInstance(ThreadPool.class).shutdown();
try {
injector.getInstance(ThreadPool.class).awaitTermination(10, TimeUnit.SECONDS);
} catch (InterruptedException e) {
// ignore
}
stopWatch.stop().start("thread_pool_force_shutdown");
try {
injector.getInstance(ThreadPool.class).shutdownNow();
} catch (Exception e) {
// ignore
}
stopWatch.stop();
if (logger.isTraceEnabled()) {
logger.trace("Close times for each service:\n{}", stopWatch.prettyPrint());
}
injector.getInstance(NodeEnvironment.class).close();
injector.getInstance(PageCacheRecycler.class).close();
logger.info("closed");
}
/**
* Returns <tt>true</tt> if the node is closed.
*/
public boolean isClosed() {
    // Delegates to the lifecycle state machine; true once it has reached its closed state.
    return lifecycle.closed();
}
/**
 * Exposes the node's dependency-injection container so callers can look up bound services
 * directly. NOTE(review): handing out the raw injector bypasses lifecycle management —
 * callers must not close services obtained through it.
 */
public Injector injector() {
    return this.injector;
}
}
| |
/*
* Copyright (c) 2016, Oracle and/or its affiliates.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.oracle.truffle.llvm.option.processor;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.processing.ProcessingEnvironment;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.ArrayType;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.ExecutableType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.type.WildcardType;
/**
* Methods of this class are copied from com.oracle.truffle.dsl.processor.java.ElementUtils.
*/
final class Utils {

    /**
     * Renders a throwable's message followed by its full stack trace, for embedding an
     * exception in an annotation-processor diagnostic message.
     */
    public static String printException(Throwable e) {
        StringWriter string = new StringWriter();
        PrintWriter writer = new PrintWriter(string);
        e.printStackTrace(writer);
        writer.flush();
        string.flush();
        return e.getMessage() + "\r\n" + string.toString();
    }

    /** Name of the class generated for {@code innerClass}: {@code "<SimpleName>Gen"}. */
    static String getSimpleSubClassName(TypeElement innerClass) {
        return getSimpleName(innerClass) + "Gen";
    }

    static String getSimpleName(Element element) {
        return getSimpleName(element.asType());
    }

    /** Resolves a reflective {@link Class} to the corresponding {@link TypeMirror}. */
    static TypeMirror getTypeMirror(ProcessingEnvironment env, Class<?> clazz) {
        String name = clazz.getCanonicalName();
        TypeElement elem = env.getElementUtils().getTypeElement(name);
        return elem.asType();
    }

    /**
     * Returns the simple (unqualified) source name of a type, e.g. {@code "int"},
     * {@code "String[]"}, {@code "List<String>"}, {@code "? extends Number"}.
     *
     * @throws RuntimeException for {@link TypeKind#ERROR} or unhandled kinds
     */
    static String getSimpleName(TypeMirror mirror) {
        switch (mirror.getKind()) {
            case BOOLEAN:
                return "boolean";
            case BYTE:
                return "byte";
            case CHAR:
                return "char";
            case DOUBLE:
                return "double";
            case FLOAT:
                return "float";
            case SHORT:
                return "short";
            case INT:
                return "int";
            case LONG:
                return "long";
            case DECLARED:
                return getDeclaredName((DeclaredType) mirror, true);
            case ARRAY:
                return getSimpleName(((ArrayType) mirror).getComponentType()) + "[]";
            case VOID:
                return "void";
            case NULL:
                return "null";
            case WILDCARD:
                return getWildcardName((WildcardType) mirror);
            case TYPEVAR:
                return "?";
            case ERROR:
                throw new RuntimeException("Type error " + mirror);
            default:
                throw new RuntimeException("Unknown type specified " + mirror.getKind() + " mirror: " + mirror);
        }
    }

    /**
     * Returns the fully qualified name of a type. Note that for arrays only the component
     * type's qualified name is returned; {@link #getUniqueIdentifier} appends the {@code []}.
     *
     * @throws RuntimeException for {@link TypeKind#ERROR} or unhandled kinds
     */
    static String getQualifiedName(TypeMirror mirror) {
        switch (mirror.getKind()) {
            case BOOLEAN:
                return "boolean";
            case BYTE:
                return "byte";
            case CHAR:
                return "char";
            case DOUBLE:
                return "double";
            case SHORT:
                return "short";
            case FLOAT:
                return "float";
            case INT:
                return "int";
            case LONG:
                return "long";
            case DECLARED:
                return getQualifiedName(fromTypeMirror(mirror));
            case ARRAY:
                return getQualifiedName(((ArrayType) mirror).getComponentType());
            case VOID:
                return "void";
            case NULL:
                return "null";
            case TYPEVAR:
                return getSimpleName(mirror);
            case ERROR:
                throw new RuntimeException("Type error " + mirror);
            case EXECUTABLE:
                return ((ExecutableType) mirror).toString();
            case NONE:
                return "$none";
            default:
                throw new RuntimeException("Unknown type specified " + mirror + " mirror: " + mirror);
        }
    }

    static String getQualifiedName(TypeElement element) {
        String qualifiedName = element.getQualifiedName().toString();
        if (qualifiedName.contains("$")) {
            /*
             * If a class gets loaded in its binary form by the ECJ compiler it fails to produce the
             * proper canonical class name. It leaves the $ in the qualified name of the class. So
             * one instance of a TypeElement may be loaded in binary and one in source form. The
             * current type comparison in #typeEquals compares by the qualified name so the
             * qualified name must match. This is basically a hack to fix the returned qualified
             * name of eclipse.
             */
            qualifiedName = qualifiedName.replace('$', '.');
        }
        return qualifiedName;
    }

    /**
     * Extracts the {@link TypeElement} from a declared type (unwrapping arrays), or
     * {@code null} for any other kind of mirror.
     */
    static TypeElement fromTypeMirror(TypeMirror mirror) {
        switch (mirror.getKind()) {
            case DECLARED:
                return (TypeElement) ((DeclaredType) mirror).asElement();
            case ARRAY:
                return fromTypeMirror(((ArrayType) mirror).getComponentType());
            default:
                return null;
        }
    }

    /**
     * Returns the simple name of a declared type, optionally including its type arguments,
     * e.g. {@code "Map<String, Integer>"}.
     */
    static String getDeclaredName(DeclaredType element, boolean includeTypeVariables) {
        String simpleName = fixECJBinaryNameIssue(element.asElement().getSimpleName().toString());

        if (!includeTypeVariables || element.getTypeArguments().isEmpty()) {
            return simpleName;
        }

        StringBuilder b = new StringBuilder(simpleName);
        b.append("<");
        for (int i = 0; i < element.getTypeArguments().size(); i++) {
            b.append(getSimpleName(element.getTypeArguments().get(i)));
            if (i < element.getTypeArguments().size() - 1) {
                b.append(", ");
            }
        }
        b.append(">");
        return b.toString();
    }

    /** Strips any ECJ binary-name prefix, keeping only the text after the last {@code $}. */
    static String fixECJBinaryNameIssue(String name) {
        if (name.contains("$")) {
            int lastIndex = name.lastIndexOf('$');
            return name.substring(lastIndex + 1, name.length());
        }
        return name;
    }

    private static String getWildcardName(WildcardType type) {
        StringBuilder b = new StringBuilder();
        if (type.getExtendsBound() != null) {
            b.append("? extends ").append(getSimpleName(type.getExtendsBound()));
        } else if (type.getSuperBound() != null) {
            // Bug fix: this branch previously rendered getExtendsBound(), which is null
            // here, so any "? super T" wildcard caused a NullPointerException.
            b.append("? super ").append(getSimpleName(type.getSuperBound()));
        }
        return b.toString();
    }

    static String getPackageName(TypeElement element) {
        return findPackageElement(element).getQualifiedName().toString();
    }

    /** Walks up the enclosing-element chain and returns the package, or {@code null}. */
    static PackageElement findPackageElement(Element type) {
        List<Element> hierarchy = getElementHierarchy(type);
        for (Element element : hierarchy) {
            if (element.getKind() == ElementKind.PACKAGE) {
                return (PackageElement) element;
            }
        }
        return null;
    }

    /**
     * Returns {@code e} followed by its enclosing elements, innermost first, ending with the
     * package element when one exists.
     */
    static List<Element> getElementHierarchy(Element e) {
        List<Element> elements = new ArrayList<>();
        elements.add(e);

        Element enclosing = e.getEnclosingElement();
        while (enclosing != null && enclosing.getKind() != ElementKind.PACKAGE) {
            elements.add(enclosing);
            enclosing = enclosing.getEnclosingElement();
        }
        if (enclosing != null) {
            elements.add(enclosing);
        }
        return elements;
    }

    /** Fully qualified name of the generated options class for {@code clazz}. */
    static String getFullOptionsClassName(TypeElement clazz) {
        return getPackageName(clazz) + "." + getSimpleSubClassName(clazz);
    }

    /**
     * Compares two mirrors by kind and unique identifier (qualified name, plus {@code []}
     * for arrays). Two {@code null} references compare equal.
     */
    static boolean typeEquals(TypeMirror type1, TypeMirror type2) {
        if (type1 == type2) {
            return true;
        } else if (type1 == null || type2 == null) {
            return false;
        } else {
            if (type1.getKind() == type2.getKind()) {
                return getUniqueIdentifier(type1).equals(getUniqueIdentifier(type2));
            } else {
                return false;
            }
        }
    }

    static String getUniqueIdentifier(TypeMirror typeMirror) {
        if (typeMirror.getKind() == TypeKind.ARRAY) {
            return getUniqueIdentifier(((ArrayType) typeMirror).getComponentType()) + "[]";
        } else {
            return getQualifiedName(typeMirror);
        }
    }

    static boolean isInt(ProcessingEnvironment processingEnv, TypeMirror e) {
        return Utils.typeEquals(e, Utils.getTypeMirror(processingEnv, Integer.class));
    }

    static boolean isBoolean(ProcessingEnvironment processingEnv, TypeMirror e) {
        return Utils.typeEquals(e, Utils.getTypeMirror(processingEnv, Boolean.class));
    }

    static boolean isString(ProcessingEnvironment processingEnv, TypeMirror e) {
        return Utils.typeEquals(e, Utils.getTypeMirror(processingEnv, String.class));
    }

    static boolean isStringArr(TypeMirror e) {
        return e.toString().equals("java.lang.String[]");
    }
}
| |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.json;
import org.openqa.selenium.internal.Require;
import org.openqa.selenium.logging.LogLevelMapping;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URI;
import java.net.URL;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.Deque;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Stream;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
/**
 * Streams a JSON representation of arbitrary Java values to an underlying {@link Appendable},
 * converting common types (strings, numbers, dates, files, collections, maps, arrays, beans)
 * along the way. Instances are stateful and not thread-safe; call {@link #close()} when done.
 */
public class JsonOutput implements Closeable {
  private static final Logger LOG = Logger.getLogger(JsonOutput.class.getName());
  // Guards against unbounded recursion when reflectively serialising object graphs.
  private static final int MAX_DEPTH = 10;

  private static final Predicate<Class<?>> GSON_ELEMENT;

  static {
    // Detect GSON's JsonElement reflectively so there is no hard dependency on GSON.
    Predicate<Class<?>> gsonElement;
    try {
      Class<?> elementClass = Class.forName("com.google.gson.JsonElement");
      gsonElement = elementClass::isAssignableFrom;
    } catch (ReflectiveOperationException e) {
      gsonElement = clazz -> false;
    }
    GSON_ELEMENT = gsonElement;
  }

  // https://www.json.org has some helpful comments on characters to escape
  // See also https://tools.ietf.org/html/rfc8259#section-7 and
  // https://github.com/google/gson/issues/341 so we escape those as well.
  // It's legal to escape any character, so to be nice to HTML parsers,
  // we'll also escape "<" and "&"
  private static final Map<Integer, String> ESCAPES;

  static {
    Map<Integer, String> builder = new LinkedHashMap<>();
    for (int i = 0; i <= 0x1f; i++) {
      // We want nice looking escapes for these, which are called out
      // by json.org
      if (!(i == '\b' || i == '\f' || i == '\n' || i == '\r' || i == '\t')) {
        builder.put(i, String.format("\\u%04x", i));
      }
    }
    builder.put((int) '"', "\\\"");
    builder.put((int) '\\', "\\\\");
    builder.put((int) '/', "\\u002f");
    builder.put((int) '\b', "\\b");
    builder.put((int) '\f', "\\f");
    builder.put((int) '\n', "\\n");
    builder.put((int) '\r', "\\r");
    builder.put((int) '\t', "\\t");
    builder.put((int) '\u2028', "\\u2028");
    builder.put((int) '<', String.format("\\u%04x", (int) '<'));
    builder.put((int) '&', String.format("\\u%04x", (int) '&'));
    ESCAPES = Collections.unmodifiableMap(builder);
  }

  private final Map<Predicate<Class<?>>, SafeBiConsumer<Object, Integer>> converters;
  private final Appendable appendable;
  private final Consumer<String> appender;
  private final Deque<Node> stack;
  private String indent = "";
  private String lineSeparator = "\n";
  private String indentBy = " ";
  private boolean writeClassName = true;

  JsonOutput(Appendable appendable) {
    this.appendable = Require.nonNull("Underlying appendable", appendable);

    // Wrap the appendable so converters never have to deal with checked IOExceptions.
    this.appender =
        str -> {
          try {
            appendable.append(str);
          } catch (IOException e) {
            throw new JsonException("Unable to write to underlying appendable", e);
          }
        };

    this.stack = new ArrayDeque<>();
    this.stack.addFirst(new Empty());

    // Order matters, since we want to handle null values first to avoid exceptions, and then the
    // common kinds of inputs next.
    Map<Predicate<Class<?>>, SafeBiConsumer<Object, Integer>> builder = new LinkedHashMap<>();
    builder.put(Objects::isNull, (obj, depth) -> append("null"));
    builder.put(CharSequence.class::isAssignableFrom, (obj, depth) -> append(asString(obj)));
    builder.put(Number.class::isAssignableFrom, (obj, depth) -> append(obj.toString()));
    builder.put(Boolean.class::isAssignableFrom, (obj, depth) -> append((Boolean) obj ? "true" : "false"));
    // Dates are written as seconds since the epoch.
    builder.put(Date.class::isAssignableFrom, (obj, depth) -> append(String.valueOf(MILLISECONDS.toSeconds(((Date) obj).getTime()))));
    builder.put(Instant.class::isAssignableFrom, (obj, depth) -> append(asString(DateTimeFormatter.ISO_INSTANT.format((Instant) obj))));
    builder.put(Enum.class::isAssignableFrom, (obj, depth) -> append(asString(obj)));
    // Bug fix: the path must go through asString() to be quoted and escaped; writing it
    // raw produced an unquoted token, i.e. invalid JSON (and broke on Windows paths).
    builder.put(File.class::isAssignableFrom, (obj, depth) -> append(asString(((File) obj).getAbsolutePath())));
    builder.put(URI.class::isAssignableFrom, (obj, depth) -> append(asString((obj).toString())));
    builder.put(URL.class::isAssignableFrom, (obj, depth) -> append(asString(((URL) obj).toExternalForm())));
    builder.put(UUID.class::isAssignableFrom, (obj, depth) -> append(asString(obj.toString())));
    builder.put(Level.class::isAssignableFrom, (obj, depth) -> append(asString(LogLevelMapping.getName((Level) obj))));
    builder.put(
        GSON_ELEMENT,
        (obj, depth) -> {
          LOG.log(
              Level.WARNING,
              "Attempt to convert JsonElement from GSON. This functionality is deprecated. "
                  + "Diagnostic stacktrace follows",
              new JsonException("Stack trace to determine cause of warning"));
          append(obj.toString());
        });

    // Special handling of asMap and toJson
    builder.put(
        cls -> getMethod(cls, "toJson") != null,
        (obj, depth) -> convertUsingMethod("toJson", obj, depth));
    builder.put(
        cls -> getMethod(cls, "asMap") != null,
        (obj, depth) -> convertUsingMethod("asMap", obj, depth));
    builder.put(
        cls -> getMethod(cls, "toMap") != null,
        (obj, depth) -> convertUsingMethod("toMap", obj, depth));

    // And then the collection types
    builder.put(
        Collection.class::isAssignableFrom,
        (obj, depth) -> {
          beginArray();
          ((Collection<?>) obj).stream()
              .filter(o -> (!(o instanceof Optional) || ((Optional<?>) o).isPresent()))
              .forEach(o -> write(o, depth - 1));
          endArray();
        });

    builder.put(
        Map.class::isAssignableFrom,
        (obj, depth) -> {
          beginObject();
          ((Map<?, ?>) obj).forEach(
              (key, value) -> {
                // Absent optionals are omitted entirely rather than written as null.
                if (value instanceof Optional && !((Optional<?>) value).isPresent()) {
                  return;
                }
                name(String.valueOf(key)).write(value, depth - 1);
              });
          endObject();
        });

    builder.put(
        Class::isArray,
        (obj, depth) -> {
          beginArray();
          Stream.of((Object[]) obj)
              .filter(o -> (!(o instanceof Optional) || ((Optional<?>) o).isPresent()))
              .forEach(o -> write(o, depth - 1));
          endArray();
        });

    builder.put(Optional.class::isAssignableFrom, (obj, depth) -> {
      Optional<?> optional = (Optional<?>) obj;
      if (!optional.isPresent()) {
        append("null");
        return;
      }

      write(optional.get(), depth);
    });

    // Finally, attempt to convert as an object
    builder.put(cls -> true, (obj, depth) -> mapObject(obj, depth - 1));

    this.converters = Collections.unmodifiableMap(builder);
  }

  /** Enables or disables pretty printing (line breaks and indentation). */
  public JsonOutput setPrettyPrint(boolean enablePrettyPrinting) {
    this.lineSeparator = enablePrettyPrinting ? "\n" : "";
    this.indentBy = enablePrettyPrinting ? " " : "";
    return this;
  }

  /** Controls whether the synthetic "class" bean property is emitted when mapping objects. */
  public JsonOutput writeClassName(boolean writeClassName) {
    this.writeClassName = writeClassName;
    return this;
  }

  /** Starts a JSON object; pair with {@link #endObject()}. */
  public JsonOutput beginObject() {
    stack.getFirst().write("{" + lineSeparator);
    indent += indentBy;
    stack.addFirst(new JsonObject());
    return this;
  }

  /**
   * Writes the name of the next object property.
   *
   * @throws JsonException if a JSON object is not currently being written
   */
  public JsonOutput name(String name) {
    if (!(stack.getFirst() instanceof JsonObject)) {
      throw new JsonException("Attempt to write name, but not writing a json object: " + name);
    }
    ((JsonObject) stack.getFirst()).name(name);
    return this;
  }

  /**
   * Closes the current JSON object.
   *
   * @throws JsonException if a JSON object is not currently being written
   */
  public JsonOutput endObject() {
    Node topOfStack = stack.getFirst();
    if (!(topOfStack instanceof JsonObject)) {
      throw new JsonException("Attempt to close a json object, but not writing a json object");
    }
    stack.removeFirst();
    indent = indent.substring(0, indent.length() - indentBy.length());

    if (topOfStack.isEmpty) {
      appender.accept(indent + "}");
    } else {
      appender.accept(lineSeparator + indent + "}");
    }
    return this;
  }

  /** Starts a JSON array; pair with {@link #endArray()}. */
  public JsonOutput beginArray() {
    append("[" + lineSeparator);
    // Bug fix: grow the indent by indentBy, not a hard-coded space. endArray() shrinks
    // by indentBy.length(), so the two must match (they diverged when pretty printing
    // was disabled, leaving the indent permanently polluted).
    indent += indentBy;
    stack.addFirst(new JsonCollection());
    return this;
  }

  /**
   * Closes the current JSON array.
   *
   * @throws JsonException if a JSON array is not currently being written
   */
  public JsonOutput endArray() {
    Node topOfStack = stack.getFirst();
    if (!(topOfStack instanceof JsonCollection)) {
      throw new JsonException("Attempt to close a json array, but not writing a json array");
    }
    stack.removeFirst();
    indent = indent.substring(0, indent.length() - indentBy.length());

    if (topOfStack.isEmpty) {
      appender.accept(indent + "]");
    } else {
      appender.accept(lineSeparator + indent + "]");
    }
    return this;
  }

  /** Serialises {@code value} with the default recursion limit. */
  public JsonOutput write(Object value) {
    return write(value, MAX_DEPTH);
  }

  /**
   * Serialises {@code input}, using the first registered converter whose predicate matches
   * the input's class. {@code depthRemaining} bounds reflective recursion.
   */
  public JsonOutput write(Object input, int depthRemaining) {
    converters.entrySet().stream()
        .filter(entry -> entry.getKey().test(input == null ? null : input.getClass()))
        .findFirst()
        .map(Map.Entry::getValue)
        .orElseThrow(() -> new JsonException("Unable to write " + input))
        .consume(input, depthRemaining);

    return this;
  }

  /**
   * Closes the underlying appendable (if closeable).
   *
   * @throws JsonException if the stream does not contain exactly one complete value
   */
  @Override
  public void close() {
    if (appendable instanceof Closeable) {
      try {
        ((Closeable) appendable).close();
      } catch (IOException e) {
        throw new JsonException(e);
      }
    }

    if (!(stack.getFirst() instanceof Empty)) {
      throw new JsonException("Attempting to close incomplete json stream");
    }
  }

  private JsonOutput append(String text) {
    stack.getFirst().write(text);
    return this;
  }

  // Quotes and escapes a value per RFC 8259, also escaping "<" and "&" for HTML safety.
  private String asString(Object obj) {
    StringBuilder toReturn = new StringBuilder("\"");

    String.valueOf(obj)
        .chars()
        .forEach(i -> {
          String escaped = ESCAPES.get(i);
          if (escaped != null) {
            toReturn.append(escaped);
          } else {
            toReturn.append((char) i);
          }
        });

    toReturn.append('"');

    return toReturn.toString();
  }

  // Finds a zero-arg method by name anywhere up the class hierarchy (stopping before
  // Object), or returns null if absent.
  private Method getMethod(Class<?> clazz, String methodName) {
    if (Object.class.equals(clazz)) {
      return null;
    }

    try {
      Method method = clazz.getDeclaredMethod(methodName);
      method.setAccessible(true);
      return method;
    } catch (NoSuchMethodException e) {
      return getMethod(clazz.getSuperclass(), methodName);
    } catch (SecurityException e) {
      throw new JsonException(
          "Unable to find the method because of a security constraint: " + methodName,
          e);
    }
  }

  // Invokes obj's named zero-arg conversion method (toJson/asMap/toMap) and writes the result.
  private JsonOutput convertUsingMethod(String methodName, Object toConvert, int depth) {
    try {
      Method method = getMethod(toConvert.getClass(), methodName);
      if (method == null) {
        throw new JsonException(String.format(
            "Unable to read object %s using method %s",
            toConvert,
            methodName));
      }
      Object value = method.invoke(toConvert);

      return write(value, depth);
    } catch (ReflectiveOperationException e) {
      throw new JsonException(e);
    }
  }

  // Fallback converter: maps a bean's readable properties to a JSON object, writing
  // "null" once the depth budget is exhausted.
  private void mapObject(Object toConvert, int maxDepth) {
    if (maxDepth < 1) {
      append("null");
      return;
    }

    if (toConvert instanceof Class) {
      write(((Class<?>) toConvert).getName());
      return;
    }

    // Raw object via reflection? Nope, not needed
    beginObject();
    for (SimplePropertyDescriptor pd :
        SimplePropertyDescriptor.getPropertyDescriptors(toConvert.getClass())) {
      // Only include methods not on java.lang.Object to stop things being super-noisy
      Function<Object, Object> readMethod = pd.getReadMethod();
      if (readMethod == null) {
        continue;
      }

      if (!writeClassName && "class".equals(pd.getName())) {
        continue;
      }

      Object value = pd.getReadMethod().apply(toConvert);
      if (!Optional.empty().equals(value)) {
        name(pd.getName());
        write(value, maxDepth - 1);
      }
    }
    endObject();
  }

  // Stack frame for the value currently being written; handles comma and indent insertion.
  private class Node {
    protected boolean isEmpty = true;

    public void write(String text) {
      if (isEmpty) {
        isEmpty = false;
      } else {
        appender.accept("," + lineSeparator);
      }

      appender.accept(indent);
      appender.accept(text);
    }
  }

  // Root frame: permits exactly one top-level value per stream.
  private class Empty extends Node {
    @Override
    public void write(String text) {
      if (!isEmpty) {
        throw new JsonException("Only allowed to write one value to a json stream");
      }
      super.write(text);
    }
  }

  private class JsonCollection extends Node {
  }

  // Object frame: enforces strict name/value alternation.
  private class JsonObject extends Node {
    private boolean isNameNext = true;

    public void name(String name) {
      if (!isNameNext) {
        throw new JsonException("Unexpected attempt to set name of json object: " + name);
      }
      isNameNext = false;
      super.write(asString(name));
      appender.accept(": ");
    }

    @Override
    public void write(String text) {
      if (isNameNext) {
        throw new JsonException("Unexpected attempt to write value before name: " + text);
      }
      isNameNext = true;
      appender.accept(text);
    }
  }

  // Like BiConsumer, but named to signal that implementations may throw JsonException.
  @FunctionalInterface
  private interface SafeBiConsumer<T, U> {
    void consume(T t, U u);
  }
}
| |
/*
Copyright 2014-2016 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apple.metalperformanceshaders;
import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSCoder;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.ProtocolClassMethod;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
/**
* MPSImageLanczosScale
* <p>
* Resize an image and / or change its aspect ratio
* <p>
* The MPSImageLanczosScale filter can be used to resample an existing image
* using a different sampling frequency in each dimension. This can be
* used to enlarge or reduce the size of an image, or change the aspect
* ratio of an image. The filter uses a Lanczos resampling algorithm
* which typically produces better quality for photographs, but is slower
* than linear sampling using the GPU texture units. Lanczos downsampling
* does not require a low pass filter to be applied before it is used.
* Because the resampling function has negative lobes, Lanczos can result
* in ringing near sharp edges, making it less suitable for vector art.
*/
// MOE (Multi-OS Engine) NatJ binding for the Objective-C class MPSImageLanczosScale.
// This class is generated; hand edits will be lost if the bindings are regenerated.
@Generated
@Library("MetalPerformanceShaders")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class MPSImageLanczosScale extends MPSImageScale {
    static {
        // Registers this binding class with the NatJ runtime so native selectors resolve.
        NatJ.register();
    }

    // Wraps an existing native object pointer; used by the runtime, not by client code.
    @Generated
    protected MPSImageLanczosScale(Pointer peer) {
        super(peer);
    }

    // ---- Standard NSObject class-level methods bridged from Objective-C ----

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native MPSImageLanczosScale alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native MPSImageLanczosScale allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    @Generated
    @Owned
    @Selector("new")
    public static native MPSImageLanczosScale new_objc();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();

    // ---- Instance initialisers ----

    @Generated
    @Selector("init")
    public native MPSImageLanczosScale init();

    @Generated
    @Selector("initWithDevice:")
    public native MPSImageLanczosScale initWithDevice(@Mapped(ObjCObjectMapper.class) Object device);

    @Generated
    @Selector("initWithCoder:")
    public native MPSImageLanczosScale initWithCoder(NSCoder aDecoder);

    /**
     * NSSecureCoding compatability
     * <p>
     * While the standard NSSecureCoding/NSCoding method
     * -initWithCoder: should work, since the file can't
     * know which device your data is allocated on, we
     * have to guess and may guess incorrectly. To avoid
     * that problem, use initWithCoder:device instead.
     *
     * @param aDecoder The NSCoder subclass with your serialized MPSKernel
     * @param device The MTLDevice on which to make the MPSKernel
     * @return A new MPSKernel object, or nil if failure.
     */
    @Generated
    @Selector("initWithCoder:device:")
    public native MPSImageLanczosScale initWithCoderDevice(NSCoder aDecoder,
            @Mapped(ObjCObjectMapper.class) Object device);

    @Generated
    @Selector("supportsSecureCoding")
    public static native boolean supportsSecureCoding();

    // Protocol class method mirrored as an instance-callable convenience.
    @Generated
    @ProtocolClassMethod("supportsSecureCoding")
    public boolean _supportsSecureCoding() {
        return supportsSecureCoding();
    }
}
| |
/**
*/
package org.mobadsl.semantic.model.moba.provider;
import java.util.Collection;
import java.util.List;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.ResourceLocator;
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.ItemProviderAdapter;
import org.eclipse.emf.edit.provider.ViewerNotification;
import org.mobadsl.semantic.model.moba.MobaEntityIndex;
import org.mobadsl.semantic.model.moba.MobaPackage;
/**
* This is the item provider adapter for a {@link org.mobadsl.semantic.model.moba.MobaEntityIndex} object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public class MobaEntityIndexItemProvider
extends ItemProviderAdapter
implements
IEditingDomainItemProvider,
IStructuredItemContentProvider,
ITreeItemContentProvider,
IItemLabelProvider,
IItemPropertySource {
/**
* This constructs an instance from a factory and a notifier.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public MobaEntityIndexItemProvider(AdapterFactory adapterFactory) {
    // EMF-generated constructor; the factory is used to locate sibling item providers.
    super(adapterFactory);
}
/**
* This returns the property descriptors for the adapted class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
    // Descriptors are built lazily on first request and cached for this adapter's lifetime.
    if (itemPropertyDescriptors == null) {
        super.getPropertyDescriptors(object);

        // One descriptor per structural feature of MobaEntityIndex.
        addNamePropertyDescriptor(object);
        addUniquePropertyDescriptor(object);
        addAttributesPropertyDescriptor(object);
    }
    return itemPropertyDescriptors;
}
/**
* This adds a property descriptor for the Name feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addNamePropertyDescriptor(Object object) {
    // Positional booleans follow ItemProviderAdapter.createItemPropertyDescriptor:
    // isSettable, multiLine, sortChoices — TODO confirm against the EMF edit javadoc.
    itemPropertyDescriptors.add
        (createItemPropertyDescriptor
            (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
             getResourceLocator(),
             getString("_UI_MobaEntityIndex_name_feature"),
             getString("_UI_PropertyDescriptor_description", "_UI_MobaEntityIndex_name_feature", "_UI_MobaEntityIndex_type"),
             MobaPackage.Literals.MOBA_ENTITY_INDEX__NAME,
             true,
             false,
             false,
             ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
             null,
             null));
}
/**
* This adds a property descriptor for the Unique feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addUniquePropertyDescriptor(Object object) {
    // Editable boolean feature rendered with the standard boolean-value image.
    itemPropertyDescriptors.add
        (createItemPropertyDescriptor
            (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
             getResourceLocator(),
             getString("_UI_MobaEntityIndex_unique_feature"),
             getString("_UI_PropertyDescriptor_description", "_UI_MobaEntityIndex_unique_feature", "_UI_MobaEntityIndex_type"),
             MobaPackage.Literals.MOBA_ENTITY_INDEX__UNIQUE,
             true,
             false,
             false,
             ItemPropertyDescriptor.BOOLEAN_VALUE_IMAGE,
             null,
             null));
}
/**
 * This adds a property descriptor for the Attributes feature.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected void addAttributesPropertyDescriptor(Object object) {
    itemPropertyDescriptors.add
        (createItemPropertyDescriptor
            (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
             getResourceLocator(),
             getString("_UI_MobaEntityIndex_attributes_feature"),
             getString("_UI_PropertyDescriptor_description", "_UI_MobaEntityIndex_attributes_feature", "_UI_MobaEntityIndex_type"),
             MobaPackage.Literals.MOBA_ENTITY_INDEX__ATTRIBUTES,
             true,    // isSettable
             false,   // multiLine
             true,    // sortChoices — multi-valued reference, so choices are sorted
             null,    // no static image
             null,    // category
             null));  // filter flags
}
/**
 * This returns MobaEntityIndex.gif.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Object getImage(Object object) {
    // Applies any decorator overlays to the base icon resolved from the edit
    // plugin's image registry (full/obj16/MobaEntityIndex).
    return overlayImage(object, getResourceLocator().getImage("full/obj16/MobaEntityIndex"));
}
/**
 * Returns the label text for the adapted class: the localized type name,
 * followed by the index's name when one is set.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated NOT
 */
@Override
public String getText(Object object) {
    String typeLabel = getString("_UI_MobaEntityIndex_type");
    String name = ((MobaEntityIndex)object).getName();
    if (name == null || name.length() == 0) {
        return typeLabel;
    }
    return typeLabel + " " + name;
}
/**
 * This handles model notifications by calling {@link #updateChildren} to update any cached
 * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void notifyChanged(Notification notification) {
    updateChildren(notification);
    switch (notification.getFeatureID(MobaEntityIndex.class)) {
        // Name and Unique only affect how this element is displayed, so a
        // label refresh (contentRefresh=false, labelUpdate=true) suffices.
        case MobaPackage.MOBA_ENTITY_INDEX__NAME:
        case MobaPackage.MOBA_ENTITY_INDEX__UNIQUE:
            fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
            return;
    }
    // All other features fall through to the default handling.
    super.notifyChanged(notification);
}
/**
 * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
 * that can be created under this object.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
    // No additional child descriptors are contributed here; only those added
    // by the superclass apply.
    super.collectNewChildDescriptors(newChildDescriptors, object);
}
/**
 * Return the resource locator for this item provider's resources.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public ResourceLocator getResourceLocator() {
    // All strings and images for this provider come from the edit plugin's bundle.
    return MobaEditPlugin.INSTANCE;
}
}
| |
package io.github.omn0mn0m.tortilla;
import io.github.omn0mn0m.tortilla.entity.Hero;
import io.github.omn0mn0m.tortilla.list.AttackList;
import io.github.omn0mn0m.tortilla.list.HostileList;
import io.github.omn0mn0m.tortilla.list.ItemList;
import io.github.omn0mn0m.tortilla.location.LocationMap;
import io.github.omn0mn0m.util.Input;
import java.util.NoSuchElementException;
import java.util.Random;
/**
 * Core game-loop state machine for a single "dream" (play session): owns the
 * hero, the procedurally generated location map, and the shared data lists,
 * and dispatches typed player commands.
 */
public class Dream {
    /** Number of rooms the hero must clear to win the dream. */
    private static final int ROOMS_TO_WIN = 10;

    // Shared game data, loaded once in the constructor and read globally.
    public static HostileList hostileList;
    public static ItemList itemList;
    public static AttackList attackList;
    public static Random random = new Random();

    private Input input = new Input();
    private boolean paused = false;
    private boolean isDreaming = true;
    private Hero hero;
    // Map is one row/column larger than the win count so the hero can always move.
    private LocationMap locationMap = new LocationMap(ROOMS_TO_WIN + 1, ROOMS_TO_WIN + 1);

    /**
     * Loads all game data (hostiles, items, attacks), creates the hero, and
     * generates and enters the starting room.
     */
    public Dream() {
        hostileList = new HostileList();
        Main.print("Hostiles list successfully loaded!");
        itemList = new ItemList();
        Main.print("Items list successfully loaded!");
        attackList = new AttackList(); // List of attacks the player can do
        Main.print("Attacks list successfully loaded!");
        hero = new Hero();
        Main.print("Player successfully loaded!");
        Main.print("Swag out. \n");
        locationMap.generateRoomAtPlayer(0, random.nextInt(itemList.getTotalItems()));
        locationMap.enterCurrentMapLocation(hero);
    }

    /**
     * Reads one command from the player and dispatches it. When paused, only
     * the meta commands (quit/restart/reroll/pause/unpause/help) are accepted.
     */
    public void runInputCommand() {
        try {
            if (!paused) {
                switch (input.splitAndGetInput(0)) {
                    case "go":
                        switch (input.getInputWord(1)) {
                            case "north":
                                checkForWin();
                                locationMap.moveTo(-1, 0, hero);
                                break;
                            case "east":
                                checkForWin();
                                locationMap.moveTo(0, 1, hero);
                                break;
                            case "south":
                                checkForWin();
                                locationMap.moveTo(1, 0, hero);
                                break;
                            case "west":
                                checkForWin();
                                locationMap.moveTo(0, -1, hero);
                                break;
                            default:
                                Main.print("You can't go that way...");
                                break;
                        }
                        break;
                    case "look":
                        if (input.isSplitWordTarget(1, "around")) {
                            if (locationMap.getCurrentLocation() != null) {
                                locationMap.printAllCurrentLocationInformation();
                            } else {
                                Main.print("There is nothing to see...");
                            }
                        } else if (input.isSplitWordTarget(1, "at")) {
                            locationMap.printTargetHostileStats(input.getInputWord(2));
                        }
                        break;
                    case "attack":
                        // Guard against "attack" with no target word.
                        if (input.getSplitLength() >= 2) {
                            hero.attack(locationMap.getHostileAtCurrentLocation(input.getInputWord(1)));
                        } else {
                            Main.print("You did not choose anything to attack...");
                        }
                        break;
                    case "quit":
                        this.quit();
                        break;
                    case "restart":
                        this.restart();
                        break;
                    case "reroll":
                        hero.rerollCharacter();
                        break;
                    case "check":
                        switch (input.getInputWord(1)) {
                            case "stats":
                                hero.printStats();
                                break;
                            case "inventory":
                                hero.checkInventory();
                                break;
                            case "equipped":
                                hero.checkEquipped();
                                break;
                            default:
                                Main.print("That is not something valid to check...");
                                break;
                        }
                        break;
                    case "drop":
                        hero.removeItem(locationMap.getCurrentLocationItems(), input.getInputWord(1));
                        break;
                    case "take":
                        hero.addItem(locationMap.getCurrentLocationItems(), input.getInputWord(1));
                        break;
                    case "equip":
                        hero.equipItem(input.getInputWord(1));
                        break;
                    case "unequip":
                        hero.unequipItem(input.getInputWord(1));
                        break;
                    case "consume":
                        hero.consumeItem(input.getInputWord(1));
                        break;
                    case "pause":
                        pause();
                        break;
                    case "unpause":
                        unpause();
                        break;
                    case "help":
                        printHelp();
                        break;
                    default:
                        Main.print("That is not a valid command");
                        break;
                }
            } else {
                // Paused: only meta commands are honored.
                switch (input.splitAndGetInput(0)) {
                    case "quit":
                        this.quit();
                        break;
                    case "restart":
                        this.restart();
                        break;
                    case "reroll":
                        hero.rerollCharacter();
                        break;
                    case "pause":
                        pause();
                        break;
                    case "unpause":
                        unpause();
                        break;
                    case "help":
                        printHelp();
                        break;
                    default:
                        Main.print("That is not a valid command");
                        break;
                }
            }
        } catch (NoSuchElementException ignored) {
            // Thrown by Input when an expected word is missing (e.g. "go" with
            // no direction); an incomplete command is simply a no-op.
        }
    }

    /**
     * Runs one combat tick: each living hostile in the current location checks
     * its own life, is removed if dead, and otherwise damages the hero. The
     * hero's life is checked last.
     */
    public void runGame() {
        for (int i = 0; i < locationMap.getHostilesInCurrentLocation(); i++) {
            if (locationMap.getHostileAtCurrentLocation(i) != null) {
                locationMap.getHostileAtCurrentLocation(i).checkIfAlive(hero);
                locationMap.checkIfHostileDead(i, hero);
                // Re-check: the hostile may have just been removed above.
                if (locationMap.getHostileAtCurrentLocation(i) != null) {
                    hero.takeDamage(locationMap.getHostileAtCurrentLocation(i), 0);
                }
            }
        }
        hero.checkIfAlive();
    }

    /** Prompts the player to pick the hero's class. */
    public void heroClassSelect() {
        hero.selectClass();
    }

    /**
     * Ends the dream with a victory message once enough rooms are cleared.
     * NOTE(review): this is invoked *before* each move, so the win triggers on
     * the move after the final room is cleared, and the pending moveTo still
     * executes afterwards — confirm this ordering is intended.
     */
    public void checkForWin() {
        if (locationMap.getRoomsCleared() == ROOMS_TO_WIN) {
            Main.print("You walk through into the next room, but there is no more dungeon. You have reached the end. Congratulations!");
            isDreaming = false;
            locationMap.resetMap();
            locationMap.resetPlayerLocation(hero);
        }
    }

    /** Asks for confirmation, then ends the game loop if the player agrees. */
    public void quit() {
        Main.print("Are you sure you want to quit? ");
        if (input.getSimpleInput().equalsIgnoreCase("yes")) {
            isDreaming = false;
        } else {
            Main.print("Resuming game then...");
        }
    }

    /** Asks for confirmation, then wipes the map and re-runs class selection. */
    public void restart() {
        Main.print("Are you sure you want to restart?");
        if (input.getSimpleInput().equalsIgnoreCase("Yes")) {
            locationMap.resetMap();
            locationMap.resetPlayerLocation(hero);
            this.heroClassSelect();
        }
    }

    /** Suspends normal gameplay commands until {@link #unpause()}. */
    public void pause() {
        paused = true;
        Main.print("The game is now paused.");
    }

    /** Resumes normal gameplay commands. */
    public void unpause() {
        paused = false;
        Main.print("The game is resuming.");
    }

    /** @return whether the game is currently paused */
    public boolean isPaused() {
        return paused;
    }

    /**
     * Prints help text: with no argument, echoes the bundled help file line by
     * line; with a command argument, prints that command's syntax and purpose.
     */
    public void printHelp() {
        if (input.getSplitLength() == 1) {
            while (Main.fileScanner.hasNextLine()) {
                String fileStr = Main.fileScanner.useDelimiter("[\\r\\n]+").next();
                Main.print(fileStr);
            }
        } else {
            switch (input.getInputWord(1)) {
                case "go":
                    Main.print("Syntax: go <direction>");
                    Main.print("You may go north, south, east, or west.");
                    Main.print("You are not imaginative enough to even think of going other directions.");
                    break;
                case "look":
                    Main.print("Syntax: look <arguments> <object>");
                    Main.print("You can look around anywhere, but you can only look at objects.");
                    break;
                case "attack":
                    Main.print("Syntax: attack <enemy>");
                    Main.print("Just be sure you're attacking what is actually there!");
                    break;
                case "quit":
                    Main.print("Syntax: quit");
                    Main.print("Quits the game and shouts ''I'm a quitter'' to the cosmos.");
                    break;
                case "restart":
                    Main.print("Syntax: restart");
                    Main.print("Restarts the game. In case it wasn't already clear, this wipes your progress.");
                    break;
                case "reroll":
                    Main.print("Syntax: reroll");
                    Main.print("Resets your character's stats, so you can change them.");
                    break;
                case "check":
                    Main.print("Syntax: check <vitals>");
                    Main.print("You can check your stats, inventory, and equipped.");
                    Main.print("You tried checking some other stuff a while ago, but you found it too difficult and gave up.");
                    break;
                case "drop":
                    Main.print("Syntax: drop <item>");
                    Main.print("Drops the item that you specify. Be careful what you do with basses.");
                    break;
                case "take":
                    Main.print("Syntax: take <item>");
                    Main.print("Takes an item from the surroundings and places it in your inventory.");
                    break;
                case "equip":
                    Main.print("Syntax: equip <item>");
                    Main.print("Equips the item you specify. Just be sure you actually have the item...");
                    break;
                case "unequip":
                    Main.print("Syntax: unequip <item>");
                    Main.print("Removes the item from your equipment and places it in your inventory.");
                    break;
                case "consume":
                    Main.print("Syntax: consume <item>");
                    Main.print("Consumes an item from your inventory and removes it from your inventory.");
                    break;
                case "pause":
                    Main.print("Syntax: pause");
                    Main.print("Pauses the game, like stopping the world, only possible");
                    break;
                case "unpause":
                    Main.print("Syntax: unpause");
                    Main.print("Unpauses the game, like unstopping the world but...");
                    Main.print("You know, now that I think about it, this is a really bad analogy");
                    break;
                case "help":
                    Main.print("Syntax: help <command>");
                    Main.print("You ask for help, receiving a list of commands if you do not specify one.");
                    Main.print("Or you ask for help about a specific command, getting the syntax and purpose of it");
                    break;
            }
        }
    }

    /** @return whether the game loop should keep running */
    public boolean isDreaming() {
        return isDreaming;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.atmosphere.websocket;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringReader;
import java.util.List;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Integration tests for the Camel atmosphere-websocket component: routes echo
 * back "Hola " + the incoming text/bytes, covering single-client, streaming
 * (Reader/InputStream), broadcast, and disabled-event-resending scenarios.
 */
public class WebsocketRouteTest extends WebsocketCamelRouterTestSupport {
    private static final String RESPONSE_GREETING = "Hola ";
    // "Hola " as raw UTF-8 bytes, used to build byte-array responses.
    private static final byte[] RESPONSE_GREETING_BYTES = { 0x48, 0x6f, 0x6c, 0x61, 0x20 };

    /** Single client sends a text frame and receives the greeted echo. */
    @Test
    void testWebsocketSingleClient() throws Exception {
        TestClient wsclient = new TestClient("ws://localhost:" + PORT + "/hola");
        wsclient.connect();
        wsclient.sendTextMessage("Cerveza");
        assertTrue(wsclient.await(10));
        List<String> received = wsclient.getReceived(String.class);
        assertEquals(1, received.size());
        assertEquals("Hola Cerveza", received.get(0));
        wsclient.close();
    }

    /** Single client sends a binary frame and receives the greeted echo. */
    @Test
    void testWebsocketSingleClientForBytes() throws Exception {
        TestClient wsclient = new TestClient("ws://localhost:" + PORT + "/hola");
        wsclient.connect();
        wsclient.sendBytesMessage("Cerveza".getBytes("UTF-8"));
        assertTrue(wsclient.await(10));
        List<String> received = wsclient.getReceived(String.class);
        assertEquals(1, received.size());
        assertEquals("Hola Cerveza", received.get(0));
        wsclient.close();
    }

    /** Streaming route (/hola3) delivers text as a Reader to the processor. */
    @Test
    void testWebsocketSingleClientForReader() throws Exception {
        TestClient wsclient = new TestClient("ws://localhost:" + PORT + "/hola3");
        wsclient.connect();
        wsclient.sendTextMessage("Cerveza");
        assertTrue(wsclient.await(10));
        List<String> received = wsclient.getReceived(String.class);
        assertEquals(1, received.size());
        assertEquals("Hola Cerveza", received.get(0));
        wsclient.close();
    }

    /** Streaming route (/hola3) delivers bytes as an InputStream to the processor. */
    @Test
    void testWebsocketSingleClientForInputStream() throws Exception {
        TestClient wsclient = new TestClient("ws://localhost:" + PORT + "/hola3");
        wsclient.connect();
        wsclient.sendBytesMessage("Cerveza".getBytes("UTF-8"));
        assertTrue(wsclient.await(10));
        List<String> received = wsclient.getReceived(String.class);
        assertEquals(1, received.size());
        assertEquals("Hola Cerveza", received.get(0));
        wsclient.close();
    }

    /** Broadcast route (/hola2, sendToAll=true): both clients see both replies. */
    @Test
    void testWebsocketBroadcastClient() throws Exception {
        TestClient wsclient1 = new TestClient("ws://localhost:" + PORT + "/hola2", 2);
        TestClient wsclient2 = new TestClient("ws://localhost:" + PORT + "/hola2", 2);
        wsclient1.connect();
        wsclient2.connect();
        wsclient1.sendTextMessage("Gambas");
        wsclient2.sendTextMessage("Calamares");
        assertTrue(wsclient1.await(10));
        assertTrue(wsclient2.await(10));
        List<String> received1 = wsclient1.getReceived(String.class);
        assertEquals(2, received1.size());
        assertTrue(received1.contains("Hola Gambas"));
        assertTrue(received1.contains("Hola Calamares"));
        List<String> received2 = wsclient2.getReceived(String.class);
        assertEquals(2, received2.size());
        assertTrue(received2.contains("Hola Gambas"));
        assertTrue(received2.contains("Hola Calamares"));
        wsclient1.close();
        wsclient2.close();
    }

    /** With events resending disabled, connecting alone must produce no message. */
    @Test
    void testWebsocketEventsResendingDisabled() throws Exception {
        TestClient wsclient = new TestClient("ws://localhost:" + PORT + "/hola4");
        wsclient.connect();
        assertFalse(wsclient.await(10));
        wsclient.close();
    }

    // START SNIPPET: payload
    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            public void configure() {
                // route for a single line
                from("atmosphere-websocket:///hola").to("log:info").process(new Processor() {
                    public void process(final Exchange exchange) {
                        createResponse(exchange, false);
                    }
                }).to("atmosphere-websocket:///hola");
                // route for a broadcast line
                from("atmosphere-websocket:///hola2").to("log:info").process(new Processor() {
                    public void process(final Exchange exchange) {
                        createResponse(exchange, false);
                    }
                }).to("atmosphere-websocket:///hola2?sendToAll=true");
                // route for a single stream line
                from("atmosphere-websocket:///hola3?useStreaming=true").to("log:info").process(new Processor() {
                    public void process(final Exchange exchange) {
                        createResponse(exchange, true);
                    }
                }).to("atmosphere-websocket:///hola3");
                // route for events resending disabled
                from("atmosphere-websocket:///hola4").to("log:info").process(new Processor() {
                    public void process(final Exchange exchange) {
                        checkEventsResendingDisabled(exchange);
                    }
                }).to("atmosphere-websocket:///hola4");
            }
        };
    }

    /**
     * Prepends the greeting to the incoming body, preserving the body kind
     * (String/byte[] for buffered routes, Reader/InputStream for streaming).
     */
    private static void createResponse(Exchange exchange, boolean streaming) {
        Object msg = exchange.getIn().getBody();
        if (streaming) {
            assertTrue(msg instanceof Reader || msg instanceof InputStream, "Expects Reader or InputStream");
        } else {
            assertTrue(msg instanceof String || msg instanceof byte[], "Expects String or byte[]");
        }
        if (msg instanceof String) {
            exchange.getIn().setBody(RESPONSE_GREETING + msg);
        } else if (msg instanceof byte[]) {
            exchange.getIn().setBody(createByteResponse((byte[]) msg));
        } else if (msg instanceof Reader) {
            exchange.getIn().setBody(new StringReader(RESPONSE_GREETING + readAll((Reader) msg)));
        } else if (msg instanceof InputStream) {
            exchange.getIn().setBody(createByteResponse(readAll((InputStream) msg)));
        }
    }

    /**
     * Sets an error body if a connection lifecycle event (open/close/error)
     * reaches the route — it must not when event resending is disabled.
     */
    private static void checkEventsResendingDisabled(Exchange exchange) {
        Object eventType = exchange.getIn().getHeader(WebsocketConstants.EVENT_TYPE);
        if (eventType instanceof Integer) {
            if (eventType.equals(WebsocketConstants.ONOPEN_EVENT_TYPE)
                    || eventType.equals(WebsocketConstants.ONCLOSE_EVENT_TYPE)
                    || eventType.equals(WebsocketConstants.ONERROR_EVENT_TYPE)) {
                exchange.getIn().setBody("Error. This place should never be reached.");
            }
        }
    }

    /** Returns greeting bytes followed by the request bytes. */
    private static byte[] createByteResponse(byte[] req) {
        byte[] resp = new byte[req.length + RESPONSE_GREETING_BYTES.length];
        System.arraycopy(RESPONSE_GREETING_BYTES, 0, resp, 0, RESPONSE_GREETING_BYTES.length);
        System.arraycopy(req, 0, resp, RESPONSE_GREETING_BYTES.length, req.length);
        return resp;
    }

    /** Drains and closes the reader, returning its full contents ("" on I/O error). */
    private static String readAll(Reader reader) {
        // StringBuilder: local, single-threaded buffer — no need for the
        // synchronized StringBuffer.
        StringBuilder strbuf = new StringBuilder();
        try {
            char[] buf = new char[4024];
            int n;
            while ((n = reader.read(buf, 0, buf.length)) > 0) {
                strbuf.append(buf, 0, n);
            }
        } catch (IOException e) {
            // ignore — best-effort read in a test helper
        } finally {
            try {
                reader.close();
            } catch (IOException e) {
                // ignore
            }
        }
        return strbuf.toString();
    }

    /** Drains and closes the stream, returning its full contents (empty on I/O error). */
    private static byte[] readAll(InputStream is) {
        ByteArrayOutputStream bytebuf = new ByteArrayOutputStream();
        try {
            byte[] buf = new byte[4024];
            int n;
            while ((n = is.read(buf, 0, buf.length)) > 0) {
                bytebuf.write(buf, 0, n);
            }
        } catch (IOException e) {
            // ignore — best-effort read in a test helper
        } finally {
            try {
                is.close();
            } catch (IOException e) {
                // ignore
            }
        }
        return bytebuf.toByteArray();
    }
    // END SNIPPET: payload
}
| |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*******************************************************************************/
package org.apache.wink.server.internal.registry;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Member;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Cookie;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.PathSegment;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.ext.MessageBodyReader;
import javax.ws.rs.ext.Providers;
import org.apache.wink.common.RuntimeContext;
import org.apache.wink.common.internal.PathSegmentImpl;
import org.apache.wink.common.internal.i18n.Messages;
import org.apache.wink.common.internal.registry.BoundInjectable;
import org.apache.wink.common.internal.registry.ContextAccessor;
import org.apache.wink.common.internal.registry.Injectable;
import org.apache.wink.common.internal.registry.InjectableFactory;
import org.apache.wink.common.internal.registry.ValueConvertor.ConversionException;
import org.apache.wink.common.internal.runtime.RuntimeContextTLS;
import org.apache.wink.common.internal.uri.UriEncoder;
import org.apache.wink.common.internal.utils.MediaTypeUtils;
import org.apache.wink.common.internal.utils.StringUtils;
import org.apache.wink.common.utils.ProviderUtils;
import org.apache.wink.common.utils.ProviderUtils.PROVIDER_EXCEPTION_ORIGINATOR;
import org.apache.wink.server.internal.handlers.SearchResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ServerInjectableFactory extends InjectableFactory {
private final static Logger logger = LoggerFactory.getLogger(ServerInjectableFactory.class);
// Factory hook: @Context injections get a server-side context accessor.
@Override
public Injectable createContextParam(Class<?> classType, Annotation[] annotations, Member member) {
    return new ServerContextParam(classType, annotations, member);
}
// Factory hook: @CookieParam injections.
@Override
public Injectable createCookieParam(String value,
                                    Class<?> classType,
                                    Type genericType,
                                    Annotation[] annotations,
                                    Member member) {
    return new CookieParamBinding(value, classType, genericType, annotations, member);
}
// Factory hook: the unannotated (request entity) parameter.
@Override
public Injectable createEntityParam(Class<?> classType,
                                    Type genericType,
                                    Annotation[] annotations,
                                    Member member) {
    return new EntityParam(classType, genericType, annotations, member);
}
// Factory hook: @FormParam injections.
@Override
public Injectable createFormParam(String value,
                                  Class<?> classType,
                                  Type genericType,
                                  Annotation[] annotations,
                                  Member member) {
    return new FormParamBinding(value, classType, genericType, annotations, member);
}
// Factory hook: @HeaderParam injections.
@Override
public Injectable createHeaderParam(String value,
                                    Class<?> classType,
                                    Type genericType,
                                    Annotation[] annotations,
                                    Member member) {
    return new HeaderParamBinding(value, classType, genericType, annotations, member);
}
// Factory hook: @MatrixParam injections.
@Override
public Injectable createMatrixParam(String value,
                                    Class<?> classType,
                                    Type genericType,
                                    Annotation[] annotations,
                                    Member member) {
    return new MatrixParamBinding(value, classType, genericType, annotations, member);
}
// Factory hook: @PathParam injections.
@Override
public Injectable createPathParam(String value,
                                  Class<?> classType,
                                  Type genericType,
                                  Annotation[] annotations,
                                  Member member) {
    return new PathParamBinding(value, classType, genericType, annotations, member);
}
// Factory hook: @QueryParam injections.
@Override
public Injectable createQueryParam(String value,
                                   Class<?> classType,
                                   Type genericType,
                                   Annotation[] annotations,
                                   Member member) {
    return new QueryParamBinding(value, classType, genericType, annotations, member);
}
/**
 * Used for injecting a field or parameter of JAX-RS resource with a
 * context, as defined by the JAX-RS spec. First searches for a
 * ContextResolver to get the context to inject, and if none is found, then
 * tries one of the built-in types of context
 */
public static class ServerContextParam extends Injectable {

    // Strategy chosen once at construction, based on the injected type.
    private ContextAccessor contextAccessor;

    public ServerContextParam(Class<?> type, Annotation[] annotations, Member member) {
        super(ParamType.CONTEXT, type, type, annotations, member);
        if (type != HttpServletRequest.class && type != HttpServletResponse.class) {
            contextAccessor = new ContextAccessor();
        } else {
            // due to strict checking of HttpServletRequest and
            // HttpServletResponse
            // injections, a special injector must be used
            contextAccessor = new ServletContextAccessor();
        }
    }

    @Override
    public Object getValue(RuntimeContext runtimeContext) {
        // Resolve the context for the declared type from the current request.
        return contextAccessor.getContext(getType(), runtimeContext);
    }
}
/**
 * Used for injecting a field or parameter of JAX-RS resource that has no
 * annotation on it - represents the request entity.
 */
public static class EntityParam extends Injectable {

    public EntityParam(Class<?> type, Type genericType, Annotation[] annotations, Member member) {
        super(ParamType.ENTITY, type, genericType, annotations, member);
    }

    /**
     * Reads the request entity via a matching {@link MessageBodyReader}.
     * Throws 415 (Unsupported Media Type) when no reader accepts the
     * parameter type / media type combination.
     */
    @SuppressWarnings("unchecked")
    public Object getValue(RuntimeContext runtimeContext) throws IOException {
        if (runtimeContext == null) {
            return null;
        }
        Class<?> paramType = getType();
        // check if there is a provider that can handle this parameter
        Providers providers = runtimeContext.getProviders();
        if (providers != null) {
            MediaType mediaType = runtimeContext.getHttpHeaders().getMediaType();
            if (mediaType == null) {
                // No Content-Type header: treat the body as opaque bytes.
                mediaType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
            }
            MessageBodyReader mbr =
                providers.getMessageBodyReader(paramType,
                                               getGenericType(),
                                               getAnnotations(),
                                               mediaType);
            if (mbr != null) {
                Object read;
                try {
                    read = mbr.readFrom(paramType,
                                        getGenericType(),
                                        getAnnotations(),
                                        mediaType,
                                        runtimeContext.getHttpHeaders().getRequestHeaders(),
                                        runtimeContext.getInputStream());
                } catch (RuntimeException e) {
                    // Log which user provider failed and with which arguments,
                    // then rethrow so normal exception mapping applies.
                    ProviderUtils.logUserProviderException(e, mbr, PROVIDER_EXCEPTION_ORIGINATOR.readFrom, new Object[]{paramType, getGenericType(), getAnnotations(), mediaType, runtimeContext.getHttpHeaders().getRequestHeaders(),
                        runtimeContext.getInputStream()}, runtimeContext);
                    throw e;
                }
                return read;
            }
        }
        throw new WebApplicationException(Response.Status.UNSUPPORTED_MEDIA_TYPE);
    }
}
/**
 * Used for injecting a field or parameter of JAX-RS resource with the value
 * of a matrix parameter
 */
public static class MatrixParamBinding extends BoundInjectable {

    public MatrixParamBinding(String variableName,
                              Class<?> type,
                              Type genericType,
                              Annotation[] annotations,
                              Member member) {
        super(ParamType.MATRIX, variableName, type, genericType, annotations, member);
    }

    /**
     * Collects the matrix parameter values from the last path segment of the
     * first matched URI, applies the default value when none are present, and
     * converts to the target type. Conversion failure maps to 404.
     */
    @Override
    public Object getValue(RuntimeContext runtimeContext) throws IOException {
        if (runtimeContext == null) {
            return null;
        }
        List<String> allValues = new ArrayList<String>();
        List<PathSegment> segments =
            runtimeContext.getAttribute(SearchResult.class).getData().getMatchedURIs().get(0);
        // get the matrix parameter only from the last segment
        PathSegment segment = segments.get(segments.size() - 1);
        MultivaluedMap<String, String> matrixParameters = segment.getMatrixParameters();
        List<String> values = matrixParameters.get(getName());
        if (values != null) {
            allValues.addAll(values);
        }
        if (allValues.size() == 0 && hasDefaultValue()) {
            allValues.add(getDefaultValue());
        }
        decodeValues(allValues);
        // we found matrix parameters with the specified name
        try {
            return getConvertor().convert(allValues);
        } catch (ConversionException e) {
            throw new WebApplicationException(e.getCause(), Response.Status.NOT_FOUND);
        }
    }
}
/**
 * Injects a JAX-RS resource field or parameter with the value of a query
 * parameter (@QueryParam).
 */
public static class QueryParamBinding extends BoundInjectable {

    public QueryParamBinding(String variableName,
                             Class<?> type,
                             Type genericType,
                             Annotation[] annotations,
                             Member member) {
        super(ParamType.QUERY, variableName, type, genericType, annotations, member);
    }

    /**
     * Looks up the (still-encoded) query values for this binding's name,
     * falls back to the declared default when none are present, decodes,
     * and converts to the target type. Conversion failure maps to 404.
     */
    @Override
    public Object getValue(RuntimeContext runtimeContext) throws IOException {
        if (runtimeContext == null) {
            return null;
        }
        List<String> raw =
            runtimeContext.getUriInfo().getQueryParameters(false).get(getName());
        List<String> values = (raw != null) ? raw : new LinkedList<String>();
        if (values.isEmpty() && hasDefaultValue()) {
            values.add(getDefaultValue());
        }
        decodeValues(values);
        try {
            return getConvertor().convert(values);
        } catch (ConversionException e) {
            throw new WebApplicationException(e.getCause(), Response.Status.NOT_FOUND);
        }
    }

    @Override
    protected String decodeValue(String value) {
        // Query-style decoding also turns '+' signs into spaces.
        return UriEncoder.decodeQuery(value);
    }
}
/**
 * Injects a JAX-RS resource field or parameter with the value of a form
 * parameter (@FormParam). The parsed form is cached as a request attribute so
 * multiple @FormParam parameters on one method share a single body read.
 */
public static class FormParamBinding extends BoundInjectable {

    /** Request attribute key under which the parsed form parameters are cached. */
    static final String FORM_PARAMATERS =
        "wink.formParameters"; //$NON-NLS-1$

    /** Dummy field used only to capture the MultivaluedMap&lt;String, String&gt; generic Type. */
    public final static MultivaluedMap<String, String> dummyMultivaluedMap = null;

    private static final Type MULTIVALUED_MAP_STRING_TYPE;
    static {
        try {
            MULTIVALUED_MAP_STRING_TYPE =
                FormParamBinding.class.getField("dummyMultivaluedMap").getGenericType(); //$NON-NLS-1$
        } catch (SecurityException e) {
            throw new WebApplicationException(e);
        } catch (NoSuchFieldException e) {
            throw new WebApplicationException(e);
        }
    }

    public FormParamBinding(String variableName,
                            Class<?> type,
                            Type genericType,
                            Annotation[] annotations,
                            Member member) {
        super(ParamType.FORM, variableName, type, genericType, annotations, member);
    }

    /**
     * Resolves this binding's form parameter values and converts them to the
     * target type. Returns null for non-form-urlencoded requests; conversion
     * failure maps to 400 per JSR-311 errata E010.
     */
    @SuppressWarnings("unchecked")
    @Override
    public Object getValue(RuntimeContext runtimeContext) throws IOException {
        if (runtimeContext == null) {
            return null;
        }
        // request must be application/x-www-form-urlencoded
        MediaType mediaType = runtimeContext.getHttpHeaders().getMediaType();
        if (!MediaTypeUtils.equalsIgnoreParameters(mediaType,
                                                   MediaType.APPLICATION_FORM_URLENCODED_TYPE)) {
            return null;
        }
        // Reuse form parameters parsed by an earlier @FormParam on the same
        // method — the request body can only be consumed once.
        MultivaluedMap<String, String> formParameters =
            (MultivaluedMap<String, String>)runtimeContext.getAttributes().get(FORM_PARAMATERS);
        if (formParameters == null) {
            // read the request body as an entity parameter to get the form
            // parameters
            EntityParam entityParam =
                new EntityParam(MultivaluedMap.class, MULTIVALUED_MAP_STRING_TYPE,
                                getAnnotations(), null);
            formParameters =
                (MultivaluedMap<String, String>)entityParam.getValue(runtimeContext);
            if (formParameters.isEmpty()) {
                // see E011 at
                // http://jcp.org/aboutJava/communityprocess/maintenance/jsr311/311ChangeLog.html
                // Perhaps the message body was already consumed by a
                // servlet filter. Let's try the servlet request parameters
                // instead.
                Map<?, ?> map =
                    RuntimeContextTLS.getRuntimeContext()
                        .getAttribute(HttpServletRequest.class).getParameterMap();
                // Servlet parameter values are String[]; copy each entry into
                // the MultivaluedMap as a List<String>.
                for (Map.Entry<?, ?> entry : map.entrySet()) {
                    String key = (String)entry.getKey();
                    String[] value = (String[])entry.getValue();
                    formParameters.put(key, Arrays.asList(value));
                }
            }
            runtimeContext.getAttributes().put(FORM_PARAMATERS, formParameters);
        }
        // get the values of the parameter
        List<String> values = formParameters.get(getName());
        if (values == null) {
            values = new LinkedList<String>();
        }
        // TODO: do we add also all the query parameters???
        if (values.size() == 0 && hasDefaultValue()) {
            values.add(getDefaultValue());
        }
        // decode all values
        decodeValues(values);
        try {
            return getConvertor().convert(values);
        } catch (ConversionException e) {
            // See E010
            // http://jcp.org/aboutJava/communityprocess/maintenance/jsr311/311ChangeLog.html:
            // "400 status code should be returned if an exception is
            // raised during @FormParam-annotated parameter construction"
            logger.error(Messages.getMessage("conversionError", this, values), e);
            throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST);
        }
    }

    @Override
    protected String decodeValue(String value) {
        // also decodes the '+' signs into spaces
        return UriEncoder.decodeQuery(value);
    }
}
/**
 * Used for injecting a field or parameter of JAX-RS resource with the value
 * of a path template variable
 */
public static class PathParamBinding extends BoundInjectable {

    public PathParamBinding(String variableName,
                            Class<?> type,
                            Type genericType,
                            Annotation[] annotations,
                            Member member) {
        super(ParamType.PATH, variableName, type, genericType, annotations, member);
    }

    /**
     * Resolves the bound path variable. PathSegment (and collections of
     * PathSegment) targets are served from the matched-segments map; all
     * other types are converted from the string path parameters.
     * Conversion failure maps to 404.
     */
    @Override
    public Object getValue(RuntimeContext runtimeContext) throws IOException {
        if (runtimeContext == null) {
            return null;
        }
        MultivaluedMap<String, List<PathSegment>> pathSegmentsMap =
            runtimeContext.getAttribute(SearchResult.class).getData()
                .getMatchedVariablesPathSegments();
        List<PathSegment> segments = null;
        List<List<PathSegment>> listOfListPathSegments = pathSegmentsMap.get(getName());
        if (listOfListPathSegments != null && listOfListPathSegments.size() > 0) {
            // Take the most recently matched occurrence of the variable.
            segments = listOfListPathSegments.get(listOfListPathSegments.size() - 1);
        }
        if (segments != null && segments.size() > 0) {
            // special handling for PathSegment
            if (isTypeOf(PathSegment.class)) {
                // return only the last segment
                PathSegment segment = segments.get(segments.size() - 1);
                if (!isEncoded()) {
                    segment = PathSegmentImpl.decode(segment);
                }
                return segment;
            }
            // special handling for collection of PathSegment
            if (isTypeCollectionOf(PathSegment.class)) {
                // return all segments
                List<PathSegment> list = segments;
                if (!isEncoded()) {
                    // decode all path segments
                    list = new ArrayList<PathSegment>(segments.size());
                    for (PathSegment segment : segments) {
                        list.add(PathSegmentImpl.decode(segment));
                    }
                }
                return asTypeCollection(list, null);
            }
        }
        // for all other types and for cases where the default value should
        // be used
        UriInfo uriInfo = runtimeContext.getUriInfo();
        MultivaluedMap<String, String> variables = uriInfo.getPathParameters(false);
        List<String> values = variables.get(getName());
        if (values == null) {
            values = new LinkedList<String>();
        }
        // use default value
        if (values.size() == 0 && hasDefaultValue()) {
            String defaultValue = getDefaultValue();
            // if the injected type is a PathSegment or some collection of
            // PathSegment then
            // split the default value
            // into separate segments, otherwise, pass the default value
            // as-is.
            if (isTypeOf(PathSegment.class) || isTypeCollectionOf(PathSegment.class)) {
                String[] segmentsArray = StringUtils.fastSplit(defaultValue, "/", true); //$NON-NLS-1$
                values.addAll(Arrays.asList(segmentsArray));
            } else {
                values.add(defaultValue);
            }
            decodeValues(values);
            try {
                return getConvertor().convert(values);
            } catch (ConversionException e) {
                throw new WebApplicationException(e.getCause(), Response.Status.NOT_FOUND);
            }
        }
        decodeValues(values);
        try {
            // NOTE(review): values are reversed here (but not in the
            // default-value branch above) — presumably so a single-value
            // convertor picks the most recently matched value first; confirm
            // against the convertor implementation.
            Collections.reverse(values);
            return getConvertor().convert(values);
        } catch (ConversionException e) {
            throw new WebApplicationException(e.getCause(), Response.Status.NOT_FOUND);
        }
    }
}
/**
* Used for injecting a field or parameter of JAX-RS resource with the value
* of a request header
*/
public static class HeaderParamBinding extends BoundInjectable {
public HeaderParamBinding(String variableName,
Class<?> type,
Type genericType,
Annotation[] annotations,
Member member) {
super(ParamType.HEADER, variableName, type, genericType, annotations, member);
}
@Override
public Object getValue(RuntimeContext runtimeContext) throws IOException {
if (runtimeContext == null) {
return null;
}
// for all headers
HttpHeaders httpHeaders = runtimeContext.getHttpHeaders();
List<String> values = httpHeaders.getRequestHeader(getName());
if (values == null) {
values = new LinkedList<String>();
}
if (values.size() == 0 && hasDefaultValue()) {
values.add(getDefaultValue());
}
try {
return getConvertor().convert(values);
} catch (ConversionException e) {
logger.error(Messages.getMessage("conversionError", this, values), e);
throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST);
}
}
}
/**
* Used for injecting a field or parameter of JAX-RS resource with the value
* of a request cookie
*/
/**
 * Used for injecting a field or parameter of JAX-RS resource with the value
 * of a request cookie.
 */
public static class CookieParamBinding extends BoundInjectable {

    public CookieParamBinding(String variableName,
                              Class<?> type,
                              Type genericType,
                              Annotation[] annotations,
                              Member member) {
        super(ParamType.COOKIE, variableName, type, genericType, annotations, member);
    }

    /**
     * Resolves the injected value from the request cookie named by this binding.
     * Returns null outside of request processing. Conversion failures map to a
     * 400 (Bad Request) response per JSR-311.
     */
    @Override
    public Object getValue(RuntimeContext runtimeContext) throws IOException {
        if (runtimeContext == null) {
            return null;
        }
        String value = null;
        HttpHeaders httpHeaders = runtimeContext.getHttpHeaders();
        Map<String, Cookie> values = httpHeaders.getCookies();
        Cookie cookie = null;
        if (values.size() > 0) {
            cookie = values.get(getName());
        }
        // Fall back to @DefaultValue when the request carries no matching cookie.
        if (cookie == null && hasDefaultValue()) {
            cookie = new Cookie(getName(), getDefaultValue());
        }
        if (cookie != null) {
            // special handling for collections of Cookie
            if (isTypeCollectionOf(Cookie.class)) {
                return elementAsTypeCollection(cookie, new CookieComparator());
            }
            // special handling for a plain Cookie
            if (isTypeOf(Cookie.class)) {
                return cookie;
            }
            // for all other types, convert from the raw cookie value
            value = cookie.getValue();
        }
        try {
            return getConvertor().convert(value);
        } catch (ConversionException e) {
            logger.error(Messages.getMessage("conversionError", this, value), e);
            throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST);
        }
    }

    /**
     * Orders cookies by name, then value, then path, then domain, then version.
     * Fixed: the original implementation skipped a field entirely whenever the
     * left-hand cookie's field was null (asymmetric ordering) and threw a
     * NullPointerException when only the right-hand field was null. This version
     * is a null-safe total order with nulls sorted first, as the Comparator
     * contract requires.
     */
    public static class CookieComparator implements Comparator<Cookie> {
        public int compare(Cookie o1, Cookie o2) {
            int val = compareNullable(o1.getName(), o2.getName());
            if (val != 0) {
                return val;
            }
            val = compareNullable(o1.getValue(), o2.getValue());
            if (val != 0) {
                return val;
            }
            val = compareNullable(o1.getPath(), o2.getPath());
            if (val != 0) {
                return val;
            }
            val = compareNullable(o1.getDomain(), o2.getDomain());
            if (val != 0) {
                return val;
            }
            // Integer.compare avoids the (theoretical) overflow of subtraction.
            return Integer.compare(o1.getVersion(), o2.getVersion());
        }

        // Null-safe natural ordering; null sorts before any non-null string.
        private static int compareNullable(String left, String right) {
            if (left == right) {
                return 0;
            }
            if (left == null) {
                return -1;
            }
            if (right == null) {
                return 1;
            }
            return left.compareTo(right);
        }
    }
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.testing;
import com.google.common.base.Preconditions;
import java.math.BigDecimal;
import java.math.MathContext;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;
/**
* Materialize all values in a row
* Special handling is added for Double types for approximate comparisons
*/
/**
 * Materialize all values in a row.
 * Special handling is added for Double and Float types for approximate comparisons:
 * two rows compare equal when their floating point fields round to the same value
 * at the configured number of significant digits. byte[] fields are compared by
 * content (wrapped in ByteBuffer internally), and lists/maps are handled recursively.
 */
public class MaterializedRow
{
    private final int precision;
    private final List<Object> values;

    /**
     * Creates a row from varargs values.
     *
     * @param precision number of significant digits for floating point comparisons; must be positive
     * @param values row values; the array must not be null, elements may be null
     */
    public MaterializedRow(int precision, Object... values)
    {
        this(precision, Arrays.asList(requireNonNull(values, "values is null")));
    }

    /**
     * Creates a row from a list of values.
     *
     * @param precision number of significant digits for floating point comparisons; must be positive
     * @param values row values; must not be null
     * @throws IllegalArgumentException if precision is not positive
     */
    @SuppressWarnings("unchecked")
    public MaterializedRow(int precision, List<Object> values)
    {
        // Plain JDK check (same exception type and message as the previous
        // Guava checkArgument), so this class depends only on the JDK.
        if (precision <= 0) {
            throw new IllegalArgumentException("Need at least one digit of precision");
        }
        this.precision = precision;
        // processValue maps a List input to a List result, so this cast is safe.
        this.values = (List<Object>) processValue(precision, requireNonNull(values, "values is null"));
    }

    /**
     * Recursively wraps values for storage: Double/Float become approximate
     * wrappers (equals/hashCode compare to {@code precision} digits), byte[]
     * becomes a ByteBuffer (content-based equality), and lists/maps are
     * processed element-wise. All other values are stored as-is.
     */
    private static Object processValue(int precision, Object value)
    {
        if (value instanceof Double) {
            return new ApproximateDouble((Double) value, precision);
        }
        if (value instanceof Float) {
            return new ApproximateFloat((Float) value, precision);
        }
        if (value instanceof List) {
            return ((List<?>) value).stream()
                    .map(element -> processValue(precision, element))
                    .collect(toList());
        }
        if (value instanceof Map) {
            Map<Object, Object> map = new HashMap<>();
            for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
                map.put(processValue(precision, entry.getKey()), processValue(precision, entry.getValue()));
            }
            return map;
        }
        if (value instanceof byte[]) {
            return ByteBuffer.wrap((byte[]) value);
        }
        return value;
    }

    public int getPrecision()
    {
        return precision;
    }

    public int getFieldCount()
    {
        return values.size();
    }

    /**
     * Returns all fields with the internal storage wrappers unwrapped back to
     * the caller-visible representation.
     */
    public List<Object> getFields()
    {
        return values.stream()
                .map(MaterializedRow::processField)
                .collect(toList());
    }

    /**
     * Returns the unwrapped value of one field.
     *
     * @throws IndexOutOfBoundsException if {@code field} is not a valid index
     */
    public Object getField(int field)
    {
        // Plain JDK bounds check (same exception type as Guava's checkElementIndex).
        if (field < 0 || field >= values.size()) {
            throw new IndexOutOfBoundsException(
                    "field index (" + field + ") must be less than size (" + values.size() + ")");
        }
        return processField(values.get(field));
    }

    // Inverse of processValue: unwraps approximate numerics and ByteBuffers.
    private static Object processField(Object value)
    {
        if (value instanceof ApproximateNumeric) {
            return ((ApproximateNumeric) value).getValue();
        }
        if (value instanceof List) {
            return ((List<?>) value).stream()
                    .map(MaterializedRow::processField)
                    .collect(toList());
        }
        if (value instanceof Map) {
            Map<Object, Object> map = new HashMap<>();
            for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
                map.put(processField(entry.getKey()), processField(entry.getValue()));
            }
            return map;
        }
        if (value instanceof ByteBuffer) {
            return ((ByteBuffer) value).array();
        }
        return value;
    }

    @Override
    public String toString()
    {
        return values.toString();
    }

    // Equality uses the wrapped values, so floating point fields compare approximately.
    @Override
    public boolean equals(Object obj)
    {
        if (obj == this) {
            return true;
        }
        if ((obj == null) || (getClass() != obj.getClass())) {
            return false;
        }
        MaterializedRow o = (MaterializedRow) obj;
        return Objects.equals(values, o.values);
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(values);
    }

    /** Base class for numerics that compare equal when rounded values match. */
    private abstract static class ApproximateNumeric
    {
        /** The original, unrounded value. */
        public abstract Number getValue();

        /** The value rounded to the configured precision; basis for equals/hashCode. */
        protected abstract Number getNormalizedValue();

        @Override
        public String toString()
        {
            return getValue().toString();
        }

        @Override
        public boolean equals(Object obj)
        {
            if (obj == this) {
                return true;
            }
            if ((obj == null) || (getClass() != obj.getClass())) {
                return false;
            }
            ApproximateNumeric o = (ApproximateNumeric) obj;
            return Objects.equals(getNormalizedValue(), o.getNormalizedValue());
        }

        @Override
        public int hashCode()
        {
            return Objects.hash(getNormalizedValue());
        }
    }

    private static class ApproximateDouble
            extends ApproximateNumeric
    {
        private final Double value;
        private final int precision;

        private ApproximateDouble(Double value, int precision)
        {
            this.value = requireNonNull(value, "value is null");
            this.precision = precision;
        }

        @Override
        public Number getValue()
        {
            return value;
        }

        @Override
        protected Number getNormalizedValue()
        {
            // NaN and infinities cannot be represented as BigDecimal; compare exactly.
            if (value.isNaN() || value.isInfinite()) {
                return value;
            }
            return new BigDecimal(getValue().doubleValue()).round(new MathContext(precision)).doubleValue();
        }
    }

    private static class ApproximateFloat
            extends ApproximateNumeric
    {
        private final Float value;
        private final int precision;

        private ApproximateFloat(Float value, int precision)
        {
            this.value = requireNonNull(value, "value is null");
            this.precision = precision;
        }

        @Override
        public Number getValue()
        {
            return value;
        }

        @Override
        protected Number getNormalizedValue()
        {
            if (value.isNaN() || value.isInfinite()) {
                return value;
            }
            // The float widens to double for the BigDecimal constructor.
            return new BigDecimal(getValue().floatValue()).round(new MathContext(precision)).floatValue();
        }
    }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver11;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
// OpenFlow 1.1 wire representation of the BSN "set tunnel destination" experimenter
// action (experimenter 0x5c16c7, subtype 0x2). Generated by LoxiGen — do not hand-edit
// logic; fix the generator/template instead.
class OFActionBsnSetTunnelDstVer11 implements OFActionBsnSetTunnelDst {
private static final Logger logger = LoggerFactory.getLogger(OFActionBsnSetTunnelDstVer11.class);
// version: 1.1
final static byte WIRE_VERSION = 2;
final static int LENGTH = 16;
private final static long DEFAULT_DST = 0x0L;
// OF message fields
private final long dst;
//
// Immutable default instance
final static OFActionBsnSetTunnelDstVer11 DEFAULT = new OFActionBsnSetTunnelDstVer11(
DEFAULT_DST
);
// package private constructor - used by readers, builders, and factory
OFActionBsnSetTunnelDstVer11(long dst) {
this.dst = dst;
}
// Accessors for OF message fields
@Override
public OFActionType getType() {
return OFActionType.EXPERIMENTER;
}
@Override
public long getExperimenter() {
return 0x5c16c7L;
}
@Override
public long getSubtype() {
return 0x2L;
}
@Override
public long getDst() {
return dst;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_11;
}
public OFActionBsnSetTunnelDst.Builder createBuilder() {
return new BuilderWithParent(this);
}
// Builder seeded from an existing message; unset fields fall back to the parent's values.
static class BuilderWithParent implements OFActionBsnSetTunnelDst.Builder {
final OFActionBsnSetTunnelDstVer11 parentMessage;
// OF message fields
private boolean dstSet;
private long dst;
BuilderWithParent(OFActionBsnSetTunnelDstVer11 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public OFActionType getType() {
return OFActionType.EXPERIMENTER;
}
@Override
public long getExperimenter() {
return 0x5c16c7L;
}
@Override
public long getSubtype() {
return 0x2L;
}
@Override
public long getDst() {
return dst;
}
@Override
public OFActionBsnSetTunnelDst.Builder setDst(long dst) {
this.dst = dst;
this.dstSet = true;
return this;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_11;
}
@Override
public OFActionBsnSetTunnelDst build() {
long dst = this.dstSet ? this.dst : parentMessage.dst;
//
return new OFActionBsnSetTunnelDstVer11(
dst
);
}
}
// Stand-alone builder; unset fields fall back to the declared defaults.
static class Builder implements OFActionBsnSetTunnelDst.Builder {
// OF message fields
private boolean dstSet;
private long dst;
@Override
public OFActionType getType() {
return OFActionType.EXPERIMENTER;
}
@Override
public long getExperimenter() {
return 0x5c16c7L;
}
@Override
public long getSubtype() {
return 0x2L;
}
@Override
public long getDst() {
return dst;
}
@Override
public OFActionBsnSetTunnelDst.Builder setDst(long dst) {
this.dst = dst;
this.dstSet = true;
return this;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_11;
}
//
@Override
public OFActionBsnSetTunnelDst build() {
long dst = this.dstSet ? this.dst : DEFAULT_DST;
return new OFActionBsnSetTunnelDstVer11(
dst
);
}
}
final static Reader READER = new Reader();
// Deserializes the 16-byte wire form; returns null (and rewinds) if the buffer
// does not yet contain the full action.
static class Reader implements OFMessageReader<OFActionBsnSetTunnelDst> {
@Override
public OFActionBsnSetTunnelDst readFrom(ByteBuf bb) throws OFParseError {
int start = bb.readerIndex();
// fixed value property type == 65535
short type = bb.readShort();
if(type != (short) 0xffff)
throw new OFParseError("Wrong type: Expected=OFActionType.EXPERIMENTER(65535), got="+type);
int length = U16.f(bb.readShort());
if(length != 16)
throw new OFParseError("Wrong length: Expected=16(16), got="+length);
if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
// Buffer does not have all data yet
bb.readerIndex(start);
return null;
}
if(logger.isTraceEnabled())
logger.trace("readFrom - length={}", length);
// fixed value property experimenter == 0x5c16c7L
int experimenter = bb.readInt();
if(experimenter != 0x5c16c7)
throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
// fixed value property subtype == 0x2L
int subtype = bb.readInt();
if(subtype != 0x2)
throw new OFParseError("Wrong subtype: Expected=0x2L(0x2L), got="+subtype);
long dst = U32.f(bb.readInt());
OFActionBsnSetTunnelDstVer11 actionBsnSetTunnelDstVer11 = new OFActionBsnSetTunnelDstVer11(
dst
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", actionBsnSetTunnelDstVer11);
return actionBsnSetTunnelDstVer11;
}
}
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFActionBsnSetTunnelDstVer11Funnel FUNNEL = new OFActionBsnSetTunnelDstVer11Funnel();
// Feeds the message's identity (fixed header fields + dst) into a Guava hasher.
static class OFActionBsnSetTunnelDstVer11Funnel implements Funnel<OFActionBsnSetTunnelDstVer11> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFActionBsnSetTunnelDstVer11 message, PrimitiveSink sink) {
// fixed value property type = 65535
sink.putShort((short) 0xffff);
// fixed value property length = 16
sink.putShort((short) 0x10);
// fixed value property experimenter = 0x5c16c7L
sink.putInt(0x5c16c7);
// fixed value property subtype = 0x2L
sink.putInt(0x2);
sink.putLong(message.dst);
}
}
public void writeTo(ByteBuf bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
// Serializes the action to its fixed 16-byte wire form (dst is written as a u32).
static class Writer implements OFMessageWriter<OFActionBsnSetTunnelDstVer11> {
@Override
public void write(ByteBuf bb, OFActionBsnSetTunnelDstVer11 message) {
// fixed value property type = 65535
bb.writeShort((short) 0xffff);
// fixed value property length = 16
bb.writeShort((short) 0x10);
// fixed value property experimenter = 0x5c16c7L
bb.writeInt(0x5c16c7);
// fixed value property subtype = 0x2L
bb.writeInt(0x2);
bb.writeInt(U32.t(message.dst));
}
}
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFActionBsnSetTunnelDstVer11(");
b.append("dst=").append(dst);
b.append(")");
return b.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFActionBsnSetTunnelDstVer11 other = (OFActionBsnSetTunnelDstVer11) obj;
if( dst != other.dst)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
// NOTE(review): the usual generated pattern is
// "result = prime * result + (int) (dst ^ (dst >>> 32));" — this line drops
// "result +". It still satisfies the hashCode contract (consistent with
// equals), but diverges from the LoxiGen template; confirm against the
// generator and regenerate rather than editing by hand.
result = prime * (int) (dst ^ (dst >>> 32));
return result;
}
}
| |
/*
* Copyright 2015 JP Ventura
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jpventura.xyzreader.ui;
import android.app.Fragment;
import android.app.LoaderManager;
import android.content.Intent;
import android.content.Loader;
import android.content.res.ColorStateList;
import android.database.Cursor;
import android.graphics.Color;
import android.graphics.Rect;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.v4.app.ShareCompat;
import android.support.v7.graphics.Palette;
import android.support.v7.graphics.Palette.PaletteAsyncListener;
import android.text.Html;
import android.text.format.DateUtils;
import android.text.method.LinkMovementMethod;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import com.jpventura.xyzreader.R;
import com.jpventura.xyzreader.data.ArticleLoader;
import com.squareup.picasso.Picasso;
import com.squareup.picasso.Callback;
/**
* A fragment representing a single Article detail screen. This fragment is
* either contained in a {@link ArticleListActivity} in two-pane mode (on
* tablets) or a {@link ArticleDetailActivity} on handsets.
*/
public class ArticleDetailFragment extends Fragment
        implements Callback, LoaderManager.LoaderCallbacks<Cursor>, PaletteAsyncListener {
    private static final String TAG = "ArticleDetailFragment";

    /** Fragment argument holding the article row id to display. */
    public static final String ARG_ITEM_ID = "item_id";
    /** The photo scrolls at 1/PARALLAX_FACTOR of the body's scroll speed. */
    private static final float PARALLAX_FACTOR = 1.25f;

    private Cursor mCursor;
    private long mItemId;
    private View mRootView;
    // Fallback status-bar tint until a palette is generated from the photo.
    private int mMutedColor = 0xFF333333;
    private ObservableScrollView mScrollView;
    private DrawInsetsFrameLayout mDrawInsetsFrameLayout;
    private ColorDrawable mStatusBarColorDrawable;
    private int mTopInset;
    private View mPhotoContainerView;
    private ImageView mPhotoView;
    private int mScrollY;
    private boolean mIsCard = false;
    private int mStatusBarFullOpacityBottom;
    private TextView mTitleView;
    private TextView mSubtitleView; // renamed from mSubitleView (typo); private, no external callers
    private TextView mBodyView;

    /**
     * Mandatory empty constructor for the fragment manager to instantiate the
     * fragment (e.g. upon screen orientation changes).
     */
    public ArticleDetailFragment() {
    }

    /** Factory method that packs the article id into the fragment arguments. */
    public static ArticleDetailFragment newInstance(long itemId) {
        Bundle arguments = new Bundle();
        arguments.putLong(ARG_ITEM_ID, itemId);
        ArticleDetailFragment fragment = new ArticleDetailFragment();
        fragment.setArguments(arguments);
        return fragment;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (getArguments().containsKey(ARG_ITEM_ID)) {
            mItemId = getArguments().getLong(ARG_ITEM_ID);
        }
        mIsCard = getResources().getBoolean(R.bool.detail_is_card);
        mStatusBarFullOpacityBottom = getResources().getDimensionPixelSize(
                R.dimen.detail_card_top_margin);
        setHasOptionsMenu(true);
    }

    public ArticleDetailActivity getActivityCast() {
        return (ArticleDetailActivity) getActivity();
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // In support library r8, calling initLoader for a fragment in a FragmentPagerAdapter in
        // the fragment's onCreate may cause the same LoaderManager to be dealt to multiple
        // fragments because their mIndex is -1 (haven't been added to the activity yet). Thus,
        // we do this in onActivityCreated.
        getLoaderManager().initLoader(0, null, this);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        mRootView = inflater.inflate(R.layout.fragment_article_detail, container, false);
        mDrawInsetsFrameLayout = (DrawInsetsFrameLayout)
                mRootView.findViewById(R.id.draw_insets_frame_layout);
        mDrawInsetsFrameLayout.setOnInsetsCallback(new DrawInsetsFrameLayout.OnInsetsCallback() {
            @Override
            public void onInsetsChanged(Rect insets) {
                mTopInset = insets.top;
            }
        });
        mScrollView = (ObservableScrollView) mRootView.findViewById(R.id.scrollview);
        mScrollView.setCallbacks(new ObservableScrollView.Callbacks() {
            @Override
            public void onScrollChanged() {
                mScrollY = mScrollView.getScrollY();
                getActivityCast().onUpButtonFloorChanged(mItemId, ArticleDetailFragment.this);
                // Parallax: the photo container moves slower than the scrolled content.
                mPhotoContainerView.setTranslationY((int) (mScrollY - mScrollY / PARALLAX_FACTOR));
                updateStatusBar();
            }
        });
        mPhotoView = (ImageView) mRootView.findViewById(R.id.photo);
        mPhotoContainerView = mRootView.findViewById(R.id.photo_container);
        mStatusBarColorDrawable = new ColorDrawable(0);
        mRootView.findViewById(R.id.share_fab).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                startActivity(Intent.createChooser(ShareCompat.IntentBuilder.from(getActivity())
                        .setType("text/plain")
                        .setText("Some sample text")
                        .getIntent(), getString(R.string.action_share)));
            }
        });
        bindViews();
        updateStatusBar();
        return mRootView;
    }

    /** Fades the status-bar scrim in as content scrolls underneath it. */
    private void updateStatusBar() {
        int color = 0;
        if (mPhotoView != null && mTopInset != 0 && mScrollY > 0) {
            float f = progress(mScrollY,
                    mStatusBarFullOpacityBottom - mTopInset * 3,
                    mStatusBarFullOpacityBottom - mTopInset);
            color = Color.argb((int) (255 * f),
                    (int) (Color.red(mMutedColor) * 0.9),
                    (int) (Color.green(mMutedColor) * 0.9),
                    (int) (Color.blue(mMutedColor) * 0.9));
        }
        mStatusBarColorDrawable.setColor(color);
        mDrawInsetsFrameLayout.setInsetBackground(mStatusBarColorDrawable);
    }

    /** Maps v from [min, max] onto [0, 1], clamped at both ends. */
    static float progress(float v, float min, float max) {
        return constrain((v - min) / (max - min), 0, 1);
    }

    static float constrain(float val, float min, float max) {
        if (val < min) {
            return min;
        } else if (val > max) {
            return max;
        } else {
            return val;
        }
    }

    /** Populates the views from mCursor, or shows placeholders when no data is loaded. */
    private void bindViews() {
        if (mRootView == null) {
            return;
        }
        mTitleView = (TextView) mRootView.findViewById(R.id.article_title);
        mSubtitleView = (TextView) mRootView.findViewById(R.id.article_byline);
        mSubtitleView.setMovementMethod(new LinkMovementMethod());
        mBodyView = (TextView) mRootView.findViewById(R.id.article_body);
        if (null == mCursor) {
            mRootView.setVisibility(View.GONE);
            mTitleView.setText("N/A");
            mSubtitleView.setText("N/A");
            mBodyView.setText("N/A");
            return;
        }
        mRootView.setAlpha(0);
        mRootView.setVisibility(View.VISIBLE);
        mRootView.animate().alpha(1);
        mTitleView.setText(mCursor.getString(ArticleLoader.Query.TITLE));
        mSubtitleView.setText(Html.fromHtml(
                DateUtils.getRelativeTimeSpanString(
                        mCursor.getLong(ArticleLoader.Query.PUBLISHED_DATE),
                        System.currentTimeMillis(), DateUtils.HOUR_IN_MILLIS,
                        DateUtils.FORMAT_ABBREV_ALL).toString()
                        + " by <font color='#ffffff'>"
                        + mCursor.getString(ArticleLoader.Query.AUTHOR)
                        + "</font>"));
        mBodyView.setText(Html.fromHtml(mCursor.getString(ArticleLoader.Query.BODY)));
        // This fragment is the Picasso Callback: onSuccess/onError fire when the load ends.
        Picasso.with(getActivity())
                .load(mCursor.getString(ArticleLoader.Query.PHOTO_URL))
                .into(mPhotoView, this);
    }

    @Override
    public Loader<Cursor> onCreateLoader(int i, Bundle bundle) {
        return ArticleLoader.newInstanceForItemId(getActivity(), mItemId);
    }

    @Override
    public void onLoadFinished(Loader<Cursor> cursorLoader, Cursor cursor) {
        // Fix: always bail out when the fragment is detached. The previous guard
        // (!isAdded() && cursor != null) let a detached fragment with a null
        // cursor fall through to bindViews().
        if (!isAdded()) {
            if (cursor != null) {
                cursor.close();
            }
            return;
        }
        mCursor = cursor;
        if (mCursor != null && !mCursor.moveToFirst()) {
            Log.e(TAG, "Error reading item detail cursor");
            mCursor.close();
            mCursor = null;
        }
        bindViews();
    }

    @Override
    public void onLoaderReset(Loader<Cursor> cursorLoader) {
        mCursor = null;
        bindViews();
    }

    /** Picasso callback: photo failed to load; keep the fallback colors. */
    @Override
    public void onError() {
    }

    /** Palette callback: tints the UI with colors extracted from the article photo. */
    @Override
    public void onGenerated(Palette palette) {
        int primaryDarkColor = mRootView.getResources().getColor(R.color.primary_dark);
        int darkMutedColor = palette.getDarkMutedColor(primaryDarkColor);
        FloatingActionButton fab = (FloatingActionButton) mRootView.findViewById(R.id.share_fab);
        int lightVibrantColor = palette.getLightVibrantColor(getResources().getColor(android.R.color.white));
        // NOTE(review): despite its name, this is populated via getLightVibrantColor();
        // if a light *muted* swatch was intended, use getLightMutedColor() — confirm design.
        int lightMutedColor = palette.getLightVibrantColor(getResources().getColor(R.color.text_primary));
        int vibrantColor = palette.getVibrantColor(getResources().getColor(R.color.accent));
        fab.setRippleColor(lightVibrantColor);
        fab.setBackgroundTintList(ColorStateList.valueOf(vibrantColor));
        mRootView.findViewById(R.id.meta_bar).setBackgroundColor(darkMutedColor);
        mTitleView.setTextColor(lightMutedColor);
        mBodyView.setLinkTextColor(vibrantColor);
        mMutedColor = palette.getDarkMutedColor(0xff333333);
        updateStatusBar();
    }

    /** Picasso callback: photo loaded; kick off asynchronous palette extraction. */
    @Override
    public void onSuccess() {
        Palette.from(((BitmapDrawable) mPhotoView.getDrawable()).getBitmap()).generate(this);
    }

    /** Screen Y of the photo's bottom edge; the activity positions the Up button at it. */
    public int getUpButtonFloor() {
        if (mPhotoContainerView == null || mPhotoView.getHeight() == 0) {
            return Integer.MAX_VALUE;
        }
        // account for parallax
        return mIsCard
                ? (int) mPhotoContainerView.getTranslationY() + mPhotoView.getHeight() - mScrollY
                : mPhotoView.getHeight() - mScrollY;
    }
}
| |
/* Name: Cut_Both_Canthi.java
* Project: Laser microdissection of dorsal closure
* Version: 2.1
* Author: Shane Hutson
* Maintained by: Albert Mao
* Date: 11/19/2004
* Description: This plugin uses the microbeam makes an incision in each canthus of the amnioserosa.
* The user specifies the position of the amnioserosa by using the straight line selection tool to draw a line from canthus to canthus, and specifies the experimental parameters in a dialog box.
* This plugin will then command the microbeam to make the appropriate incisions.
*/
import ij.*;
import ij.process.*;
import ij.gui.*;
import java.awt.*;
import java.awt.event.*;
import ij.plugin.filter.*;
import java.io.*;
import java.util.*;
import javax.comm.*;
import plugins.LaserMicrosurgery.laserj.*;
public class Cut_Both_Canthi implements PlugInFilter {
String ls;
ImagePlus imp;
ImageWindow win;
ImageCanvas canvas;
double[] xpath;
double[] ypath;
int zoom =1;
double ROBJ;
public int setup(String arg, ImagePlus imp) {
    // Cache the platform line separator for later log formatting.
    ls = System.getProperty("line.separator");
    this.imp = imp;
    if (imp != null) {
        win = imp.getWindow();
        // Mark the window as running so the user can abort mid-protocol.
        win.running = true;
    }
    // Accept any image type, require a selection, and never modify pixels.
    return DOES_ALL + ROI_REQUIRED + NO_CHANGES;
}
// Runs the two-canthi cutting protocol: validates the user's line selection,
// prompts for experimental parameters, then drives the microbeam through
// repeated pairs of incisions, annotating the image and logging timing.
public void run(ImageProcessor ip) {
// Mirror-drive velocity limits; units are whatever Microbeam.setMirrorVelocity
// expects (not visible here) — confirm against the Microbeam class.
double maxVelocity = 0.2;
double minVelocity = 0.01;
IJ.setColumnHeadings("");
ip.setColor(Color.white);
ip.setLineWidth(3);
// Initializes xpath and ypath with the four endpoints of the two line cuts
try {
getLineCoordinates(imp);
} catch (IllegalArgumentException e) {
IJ.showMessage("Invalid selection", e.getMessage());
return;
}
// Displays a dialog for the user to configure the experimental parameters
// NOTE(review): 'sep' is never read in this method.
double sep = 40;
int ncuts = 1;
double period = 30;
boolean resetPosition = true;
double lcuts = 8;
double velocity = 0.1;
int mag = 40;
GenericDialog gd = new GenericDialog("Experimental Parameters");
gd.addCheckbox("Cut Parallel to Line (unchecked is perpendicular)?",true);
gd.addNumericField("Number of cuts: ", ncuts, 0);
gd.addNumericField("Time Between Cuts (s): ", period, 1);
gd.addCheckbox("Reset position between repeated incisions", resetPosition);
gd.addNumericField("Length of Cuts (microns): ", lcuts, 1);
gd.addMessage("");
gd.addNumericField("Vectorial Velocity of Mirror Drive during Cuts: ",velocity, 3);
gd.addMessage("");
gd.addNumericField("Magnification Factor of Objective: ",mag, 0);
gd.addNumericField("Zoom Factor of Image: ",zoom, 0);
gd.addMessage("");
gd.addMessage("After pressing OK,\n shutter will open 0.5 s after audible warning.");
gd.showDialog();
if (gd.wasCanceled()) {
IJ.error("No incision made. PlugIn canceled!");
return;
}
// Read dialog fields back in the same order they were added.
boolean parallel = gd.getNextBoolean();
ncuts = (int) gd.getNextNumber();
period = gd.getNextNumber();
resetPosition = gd.getNextBoolean();
lcuts = gd.getNextNumber();
velocity = gd.getNextNumber();
// Clamp the requested velocity to the hardware-safe range.
if (velocity > maxVelocity) {
velocity = maxVelocity;
IJ.write("Mirror Vectorial Velocity too High. Setting to Max Velocity = "+IJ.d2s(maxVelocity));
}
if (velocity < minVelocity) {
velocity = minVelocity;
IJ.write("Mirror Vectorial Velocity too Low. Setting to Min Velocity = "+IJ.d2s(minVelocity));
}
mag = (int) gd.getNextNumber();
zoom = (int) gd.getNextNumber();
// Initializes the microbeam
Microbeam microbeam = new Microbeam(Microbeam.CONFIG_FILENAME);
if(!microbeam.isSetupOK()) return;
ROBJ = microbeam.get_microns_per_pixel();
// Checks if the two incisions would overlap
if(pathLength()<2*lcuts) {
IJ.error("Defined Path ("+IJ.d2s(pathLength(),1) +" microns) must be >"+IJ.d2s(2*lcuts,1)+" microns. No incision made. PlugIn canceled!");
return;
}
cutBothEndsPath(lcuts, parallel);
/* This loop performs the repeated incisions.
* The implementation of the body of the loop could be made more elegant by using a for loop instead of repeating a lot of code.
* I have left it in its original form to minimize the change from Shane's code.
*/
boolean cut_again = true;
int counter = 1;
while(cut_again) {
long starttime = System.currentTimeMillis();
ip.snapshot();
microbeam.setMirrorVelocity(maxVelocity);
IJ.wait(500);
// The user can abort at any checkpoint by closing/stopping the window.
if (!win.running) break;
// Start at point 0
int j = 0;
// When not resetting, even-numbered passes traverse the points in reverse
// order (j -> 3-j) so each pass starts where the previous one ended.
if (!resetPosition && counter % 2 == 0) j = 3-j;
microbeam.moveToPIXELS(xpath[j], ypath[j], ip, zoom);
ip.moveTo((int)xpath[j], (int)ypath[j]);
microbeam.setMirrorVelocity(velocity);
IJ.wait(500);
// Audible warning: three beeps, then the shutter opens.
for(int i=0; i<3; i++) {
IJ.beep();
IJ.wait(500);
}
long t1 = System.currentTimeMillis();
microbeam.openShutter();
IJ.wait(100);
// Cut to point 1
j = 1;
if (!resetPosition && counter % 2 == 0) j = 3-j;
microbeam.moveToPIXELS(xpath[j], ypath[j], ip, zoom);
if (!win.running) break;
IJ.wait(300);
ip.lineTo((int)xpath[j], (int)ypath[j]);
imp.updateAndDraw();
ip.moveTo((int)xpath[j], (int)ypath[j]);
microbeam.closeShutter();
t1 = System.currentTimeMillis()- t1;
long t2 = System.currentTimeMillis();
IJ.wait(500);
// Travel between the two cuts at full speed with the shutter closed.
microbeam.setMirrorVelocity(maxVelocity);
IJ.wait(500);
// Move to point 2
j = 2;
if (!resetPosition && counter % 2 == 0) j = 3-j;
microbeam.moveToPIXELS(xpath[j], ypath[j], ip, zoom);
if (!win.running) break;
ip.moveTo((int)xpath[j], (int)ypath[j]);
IJ.wait(500);
microbeam.setMirrorVelocity(velocity);
IJ.wait(500);
t2 = System.currentTimeMillis()- t2;
long t3 = System.currentTimeMillis();
microbeam.openShutter();
IJ.wait(100);
// Cut to point 3
j = 3;
if (!resetPosition && counter % 2 == 0) j = 3-j;
microbeam.moveToPIXELS(xpath[j], ypath[j], ip, zoom);
if (!win.running) break;
IJ.wait(100);
ip.lineTo((int)xpath[j], (int)ypath[j]);
imp.updateAndDraw();
ip.moveTo((int)xpath[j], (int)ypath[j]);
microbeam.closeShutter();
t3 = System.currentTimeMillis()- t3;
// Display timing information
double exposure1 = ((double)(t1))/1000.0;
double exposure2 = ((double)(t3))/1000.0;
double traveltime = ((double)(t2))/1000.0;
IJ.write("\n\nIncision Pair #"+counter);
IJ.write("Exposure Time (Cut #1) = "+IJ.d2s(exposure1,2)+" s");
IJ.write("Exposure Time (Cut #2) = "+IJ.d2s(exposure2,2)+" s");
IJ.write("Travel Time Between Cuts = "+IJ.d2s(traveltime,2)+" s\n\n");
if (!win.running) break;
// Busy-wait (with status flashing) until the configured period has elapsed.
while((System.currentTimeMillis()- starttime)<(period*1000)) {
IJ.showStatus("PAUSE BETWEEN CUTS - WAITING");
IJ.wait(800);
IJ.showStatus("");
IJ.wait(200);
}
// After the requested number of cuts, ask the user whether to continue.
if (counter >= ncuts) {
IJ.beep();
GenericDialog cut = new GenericDialog("CONTINUE?");
cut.addMessage("OK will cut the sample one more time with the same trajectory.");
cut.addMessage("CANCEL to end cutting and return mirrors to 0, 0.");
cut.showDialog();
cut_again = !cut.wasCanceled();
}
counter++;
if (cut_again) {
// Restore the pre-cut image so annotations don't accumulate across passes.
ip.reset();
imp.updateAndDraw();
}
}
// Resets and releases the microbeam and restores ImageJ's line width setting
microbeam.setMirrorVelocity(maxVelocity);
IJ.wait(500);
microbeam.moveToMM(0, 0);
IJ.wait(500);
microbeam.off();
ip.setLineWidth(1);
IJ.write("\nDONE\n");
}
/**
 * Reads the endpoints of the image's current straight-line selection into the
 * xpath/ypath instance arrays (two points each), ordered left-to-right by x.
 *
 * @param imp image whose current ROI supplies the line endpoints
 * @throws IllegalArgumentException if there is no ROI, or it is not a straight line
 */
public void getLineCoordinates(ImagePlus imp) {
    Roi selection = imp.getRoi();
    if (selection == null) {
        throw new IllegalArgumentException("ROI required");
    }
    if (!(selection instanceof Line)) {
        throw new IllegalArgumentException("Straight line selection required.");
    }
    Line line = (Line) selection;
    // Endpoint coordinates widen from int to double on assignment.
    xpath = new double[] { line.x1, line.x2 };
    ypath = new double[] { line.y1, line.y2 };
    // Ensure point 0 is the leftmost endpoint (smaller x).
    if (xpath[1] < xpath[0]) {
        double swap = xpath[0];
        xpath[0] = xpath[1];
        xpath[1] = swap;
        swap = ypath[0];
        ypath[0] = ypath[1];
        ypath[1] = swap;
    }
}
/**
 * Length of the polyline stored in xpath/ypath, converted from pixels to
 * physical units via the ROBJ objective scale and the current zoom factor.
 *
 * @return total path length in physical units
 */
public double pathLength() {
    double total = 0.0;
    // Sum the Euclidean length of each consecutive segment.
    for (int i = 1; i < xpath.length; i++) {
        double dx = xpath[i] - xpath[i - 1];
        double dy = ypath[i] - ypath[i - 1];
        total += Math.sqrt(dx * dx + dy * dy);
    }
    return total * ROBJ / zoom;
}
/**
 * Replaces the current two-point line (xpath/ypath) with four endpoints
 * describing two short incision segments of length dL, one at each end of
 * the line. The caller's cut sequence is: cut 0 -> 1, travel 1 -> 2, cut 2 -> 3.
 *
 * @param dL length of each incision, in the same physical units as ROBJ
 * @param parallel if true the two segments lie along the original line at its
 *        ends; if false they are perpendicular to it, centered on the
 *        original endpoints
 */
public void cutBothEndsPath(double dL, boolean parallel) {
dL = dL*zoom/ROBJ; //convert to pixels
// (dx, dy): vector of length dL pointing along the selected line.
double dx;
double dy;
if (xpath[0] != xpath[1]) {
// Non-vertical line: derive the unit direction from the slope m.
double m = (ypath[1]-ypath[0])/(xpath[1]-xpath[0]);
dx = dL/(Math.sqrt(1 + m*m));
dy = dx*m;
} else {
// Vertical line: slope is undefined, step straight along y.
dx = 0;
dy = dL;
}
double[] xtemp = new double[4];
double[] ytemp = new double[4];
if(parallel) {
// Segments collinear with the line: [p0, p0+d] and [p1-d, p1].
xtemp[0] = xpath[0] ; ytemp[0] = ypath[0];
xtemp[1] = xpath[0] + dx ; ytemp[1] = ypath[0]+ dy;
xtemp[2] = xpath[1] - dx ; ytemp[2] = ypath[1] - dy;
xtemp[3] = xpath[1] ; ytemp[3] = ypath[1];
} else {
// Rotate the half-length vector by 90 degrees so each segment is
// perpendicular to the line and centered on an original endpoint.
double dxtemp = dx/2; double dytemp=dy/2;
dx = -dytemp;
dy = dxtemp;
xtemp[0] = xpath[0] - dx; ytemp[0] = ypath[0] - dy;
xtemp[1] = xpath[0] + dx ; ytemp[1] = ypath[0]+ dy;
xtemp[2] = xpath[1] - dx ; ytemp[2] = ypath[1] - dy;
xtemp[3] = xpath[1] + dx; ytemp[3] = ypath[1]+ dy;
}
IJ.write("ENDPOINTS OF TWO LINES TO BE CUT\n");
// Publish the four computed points back into the instance path arrays,
// logging each one to the ImageJ window as it is stored.
xpath = new double[4];
ypath = new double[4];
for (int i=0; i<xpath.length; i++) {
xpath[i]=xtemp[i]; ypath[i]=ytemp[i];
IJ.write(IJ.d2s(xpath[i],2)+" "+IJ.d2s(ypath[i],2));
}
IJ.write("");
}
}
| |
package com.game.server;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URL;
import java.security.PrivateKey;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.Queue;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.mina.core.service.IoHandler;
import org.apache.mina.core.session.IdleStatus;
import org.apache.mina.core.session.IoSession;
import org.apache.mina.filter.codec.ProtocolCodecFilter;
import org.apache.mina.transport.socket.nio.NioSocketAcceptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.game.common.codec.Packet;
import com.game.common.codec.PacketCodecFactory;
import com.game.common.model.Item;
import com.game.common.util.PersistenceManager;
import com.game.server.db.Database;
import com.game.server.handlers.PacketHandler;
import com.game.server.model.Player;
/**
 * Main game server: accepts client connections through Apache MINA, decodes
 * packets, queues them, and dispatches them to the registered
 * {@link PacketHandler}s from a single game-loop thread.
 *
 * <p>Networking callbacks (the {@link IoHandler} methods) run on MINA I/O
 * threads; all game logic runs on the loop thread started by {@link #start()},
 * which drains the shared packet queue roughly every {@link #LOOP_DELAY} ms.
 */
public class Server implements IoHandler, Runnable {
    private static final Logger log = LoggerFactory.getLogger(Server.class);

    /** Port used when no -p/--port option is supplied. */
    public static final int DEFAULT_PORT = 36954;

    /** Target duration of one game-loop iteration, in milliseconds. */
    public static final int LOOP_DELAY = 100;

    /** Supported command line options. */
    protected static final Options options;
    static {
        options = new Options();
        options.addOption("h", "help", false, "print this help.");
        options.addOption("p", "port", true, "port number to listen on, default " + DEFAULT_PORT + ".");
        options.addOption("b", "bind", true, "IP address to bind to, default all.");
    }

    /**
     * Entry point: parses the command line, prints usage when -h/--help is
     * given, otherwise constructs and starts the server.
     *
     * @param args command line arguments, see {@link #options}
     */
    public static void main(String[] args) {
        try {
            CommandLineParser parser = new PosixParser();
            CommandLine config = parser.parse(options, args);
            if (config.hasOption("h")) {
                HelpFormatter help = new HelpFormatter();
                help.printHelp("java " + Server.class.getSimpleName(), options);
                return;
            }
            Server server = new Server(config);
            server.start();
        }
        catch (ParseException e) {
            // Pass the exception to the logger so the stack trace is kept.
            log.error("Error parsing command line options", e);
        }
        catch (RuntimeException e) {
            // Startup failures are raised as RuntimeExceptions; log with cause.
            log.error(e.getMessage(), e);
        }
    }

    /** Parsed command line options. */
    protected final CommandLine config;
    /** NIO acceptor listening for client connections. */
    protected final NioSocketAcceptor acceptor;
    /** Packet type -> handler dispatch table. */
    protected final Map<Packet.Type, PacketHandler> packetHandlers;
    /** Connection to the MySQL database. */
    protected final Database db;
    /** Game world state manager. */
    protected final WorldManager world;
    /** Key used to decrypt incoming login requests. */
    protected final PrivateKey privateKey;
    /** Packets received but not yet processed; guarded by its own monitor. */
    protected final Queue<Packet> packets;
    /** Whether the game loop should keep running. */
    protected boolean running;
    protected long lastPacketUpdate;

    /**
     * Builds the server from parsed options. Loads the packet handler table,
     * the private key and the item definitions, opens the database connection
     * and configures (but does not yet bind) the network acceptor.
     *
     * @param config parsed command line options
     * @throws RuntimeException if a required configuration resource is missing
     */
    private Server(CommandLine config) {
        this.config = config;
        packetHandlers = this.loadPacketHandlers();
        // Connection to the MySQL database
        File dbConfig = new File("database.conf.xml");
        if (!dbConfig.exists()) {
            // fatal error
            throw new RuntimeException("Unable to load database config file: " + dbConfig.getAbsolutePath());
        }
        privateKey = (PrivateKey) PersistenceManager.load(Server.class.getResource("privatekey.xml"));
        // Pre-load the item definitions
        Item.load();
        db = new Database(dbConfig, this);
        acceptor = new NioSocketAcceptor();
        acceptor.setReuseAddress(true);
        acceptor.getFilterChain().addLast("codec", new ProtocolCodecFilter(new PacketCodecFactory()));
        // Set the idle timeout to 5 seconds - once a client has logged in this gets increased
        acceptor.getSessionConfig().setIdleTime(IdleStatus.READER_IDLE, 5);
        acceptor.setHandler(this);
        world = new WorldManager(this);
        packets = new LinkedList<Packet>();
        running = false;
        lastPacketUpdate = 0;
    }

    /**
     * Loads the packet handler definitions from packethandlers.xml and
     * instantiates one handler per definition, registering it for every packet
     * type the definition declares.
     *
     * @return the populated dispatch table
     * @throws RuntimeException if the resource is missing or a handler cannot
     *         be instantiated
     */
    private Map<Packet.Type, PacketHandler> loadPacketHandlers() {
        Map<Packet.Type, PacketHandler> handlers = new HashMap<Packet.Type, PacketHandler>();
        URL path = PacketHandler.class.getResource("packethandlers.xml");
        if (path == null) {
            // fatal error
            throw new RuntimeException("Unable to find packethandlers.xml resource");
        }
        PersistenceManager.PacketHandler[] definitions = (PersistenceManager.PacketHandler[]) PersistenceManager.load(path);
        for (PersistenceManager.PacketHandler definition : definitions) {
            try {
                PacketHandler handler = (PacketHandler) definition.handler.newInstance();
                for (Packet.Type type : definition.types)
                    handlers.put(type, handler);
            }
            catch (Exception e) {
                // fatal error - chain the original exception as the cause
                // instead of discarding everything but its message.
                throw new RuntimeException("Error loading packet handlers: " + e.getMessage(), e);
            }
        }
        if (log.isDebugEnabled())
            log.debug("Loaded " + handlers.size() + " packet handlers");
        return handlers;
    }

    public Database getDatabase() {
        return db;
    }

    public WorldManager getWorldManager() {
        return world;
    }

    /**
     * Game loop: updates the world once per iteration and sleeps off any
     * left-over time so iterations take about {@link #LOOP_DELAY} ms.
     */
    @Override
    public void run() {
        while (running) {
            long start = System.currentTimeMillis();
            this.update(start);
            int duration = (int) (System.currentTimeMillis() - start);
            if (duration < LOOP_DELAY) {
                try {
                    Thread.sleep(LOOP_DELAY - duration);
                }
                catch (InterruptedException e) {
                    // Restore the interrupt flag instead of swallowing it so
                    // callers can observe the interruption.
                    Thread.currentThread().interrupt();
                }
            }
        }
        // TODO: Shut down the server
    }

    /**
     * Drains and processes the queued packets, then ticks the world.
     *
     * @param now timestamp (ms) of the start of this loop iteration
     */
    private void update(long now) {
        // Process the queued packets
        synchronized (packets) {
            for (Packet message : packets)
                this.processPacket(message);
            packets.clear();
        }
        // Update the world
        world.update(now);
    }

    /**
     * Dispatches a single packet to its registered handler.
     *
     * @param message packet to dispatch
     * @return true if the packet was handled; false if it was dropped (its
     *         session may also have been closed)
     */
    private boolean processPacket(Packet message) {
        IoSession session = message.getSession();
        // Confirm the client is still connected and valid
        if (!session.isConnected() || (!session.containsAttribute("client") && !session.containsAttribute("pending")))
            return false;
        Player client = (Player) session.getAttribute("client");
        PacketHandler handler = packetHandlers.get(message.getType());
        // If there's no handler then close the session (forcefully)
        if (handler == null) {
            log.warn("Unhandled packet from: " + client);
            session.close(true);
            return false;
        }
        try {
            handler.handlePacket(this, world, client, message);
            return true;
        }
        // Something went wrong (malformed packet?), close the session (forcefully)
        catch (Exception e) {
            // Log with the stack trace instead of printing to stderr.
            log.warn("Error decoding packet from: " + client, e);
            session.close(true);
            return false;
        }
    }

    /**
     * Binds the acceptor to the configured address/port and starts the game
     * loop thread. Does nothing if the server is already running.
     *
     * @throws RuntimeException if the port option is invalid or binding fails
     */
    private void start() {
        if (running)
            return;
        int port = DEFAULT_PORT;
        if (config.hasOption("p")) {
            try {
                port = Integer.parseInt(config.getOptionValue("p"));
            }
            catch (NumberFormatException e) {
                // fatal error - keep the parse failure as the cause
                throw new RuntimeException("Invalid port number: " + config.getOptionValue("p"), e);
            }
        }
        InetSocketAddress listen = null;
        if (config.hasOption("b"))
            listen = new InetSocketAddress(config.getOptionValue("b"), port);
        else
            listen = new InetSocketAddress(port);
        running = true;
        new Thread(this).start();
        try {
            acceptor.bind(listen);
            System.out.println("Server listening on: " + listen.getHostName() + ":" + listen.getPort());
        }
        catch (IOException e) {
            // fatal error - keep the bind failure as the cause
            throw new RuntimeException("Unable to bind to: " + listen.getHostName() + ":" + listen.getPort(), e);
        }
    }

    /** Signals the game loop to stop after its current iteration. */
    public void stop() {
        running = false;
    }

    @Override
    public void exceptionCaught(IoSession session, Throwable cause) throws Exception {
        log.warn("Error from " + (session.containsAttribute("client") ? session.getAttribute("client") : "new") + " connection: " + cause.getMessage());
        // Close the session (forcefully)
        session.close(true);
    }

    /**
     * Queues an incoming packet for the game loop. Login requests arriving on
     * sessions with no attached client are decrypted and the session marked
     * pending; any other packet from an unauthenticated session closes the
     * connection.
     */
    @Override
    public void messageReceived(IoSession session, Object o) throws Exception {
        Packet message = (Packet) o;
        // We should only process 1 packet from a session at a time
        synchronized (session) {
            // If there is a client, queue the packet for processing
            if (session.containsAttribute("client") || session.containsAttribute("pending")) {
                synchronized (packets) {
                    packets.add(message);
                }
            }
            // If there isn't a client attached then this must be the login request
            else if (message.getType() == Packet.Type.LOGIN_SEND) {
                // Decrypt the login request
                message.decrypt(privateKey);
                // Mark this session as pending login
                session.setAttribute("pending");
                // Queue the packet
                synchronized (packets) {
                    packets.add(message);
                }
            }
            // Otherwise this packet shouldn't be here!
            else {
                log.warn("Client isn't logged in, but sent a packet");
                session.close(true);
            }
        }
    }

    @Override
    public void messageSent(IoSession session, Object o) throws Exception { }

    /** Removes the session's player from the world, if one was attached. */
    @Override
    public void sessionClosed(IoSession session) throws Exception {
        synchronized (packets) {
            // If the session has a client attached, call the session closed method
            if (session.containsAttribute("client")) {
                Player client = (Player) session.getAttribute("client");
                world.removePlayer(client);
            }
            // Otherwise they haven't logged in yet, so who cares just drop them
        }
    }

    @Override
    public void sessionCreated(IoSession session) throws Exception { }

    @Override
    public void sessionIdle(IoSession session, IdleStatus status) throws Exception {
        // When a session becomes idle, close it (gracefully)
        session.close(false);
    }

    @Override
    public void sessionOpened(IoSession session) throws Exception { }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.balancer;
import java.util.Collections;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience;
@InterfaceAudience.Private
final class BalancerParameters {
  /** Balancing policy in effect for this run. */
  private final BalancingPolicy policy;
  /** Utilization threshold, as a percentage. */
  private final double threshold;
  /** Maximum consecutive idle iterations before the balancer exits. */
  private final int maxIdleIteration;
  private final long hotBlockTimeInterval;
  /** Exclude the nodes in this set. */
  private final Set<String> excludedNodes;
  /** If empty, include any node; otherwise, include only these nodes. */
  private final Set<String> includedNodes;
  /**
   * If empty, any node can be a source; otherwise, use only these nodes as
   * source nodes.
   */
  private final Set<String> sourceNodes;
  /** A set of block pools to run the balancer on. */
  private final Set<String> blockpools;
  /** Whether to run the balancer during upgrade. */
  private final boolean runDuringUpgrade;
  private final boolean runAsService;
  private final boolean sortTopNodes;

  /** Parameters with all values at their defaults. */
  static final BalancerParameters DEFAULT = new BalancerParameters();

  private BalancerParameters() {
    this(new Builder());
  }

  private BalancerParameters(Builder b) {
    policy = b.policy;
    threshold = b.threshold;
    maxIdleIteration = b.maxIdleIteration;
    excludedNodes = b.excludedNodes;
    includedNodes = b.includedNodes;
    sourceNodes = b.sourceNodes;
    blockpools = b.blockpools;
    runDuringUpgrade = b.runDuringUpgrade;
    runAsService = b.runAsService;
    sortTopNodes = b.sortTopNodes;
    hotBlockTimeInterval = b.hotBlockTimeInterval;
  }

  BalancingPolicy getBalancingPolicy() { return policy; }

  double getThreshold() { return threshold; }

  int getMaxIdleIteration() { return maxIdleIteration; }

  Set<String> getExcludedNodes() { return excludedNodes; }

  Set<String> getIncludedNodes() { return includedNodes; }

  Set<String> getSourceNodes() { return sourceNodes; }

  Set<String> getBlockPools() { return blockpools; }

  boolean getRunDuringUpgrade() { return runDuringUpgrade; }

  boolean getRunAsService() { return runAsService; }

  boolean getSortTopNodes() { return sortTopNodes; }

  @Override
  public String toString() {
    return String.format(
        "%s.%s [%s, threshold = %s, max idle iteration = %s,"
            + " #excluded nodes = %s, #included nodes = %s, #source nodes = %s,"
            + " #blockpools = %s, run during upgrade = %s, sort top nodes = %s,"
            + " hot block time interval = %s]",
        Balancer.class.getSimpleName(), getClass().getSimpleName(), policy,
        threshold, maxIdleIteration, excludedNodes.size(),
        includedNodes.size(), sourceNodes.size(), blockpools.size(),
        runDuringUpgrade, sortTopNodes, hotBlockTimeInterval);
  }

  /** Fluent builder; all fields start at the balancer's defaults. */
  static class Builder {
    // Defaults
    private BalancingPolicy policy = BalancingPolicy.Node.INSTANCE;
    private double threshold = 10.0;
    private int maxIdleIteration =
        NameNodeConnector.DEFAULT_MAX_IDLE_ITERATIONS;
    private Set<String> excludedNodes = Collections.<String> emptySet();
    private Set<String> includedNodes = Collections.<String> emptySet();
    private Set<String> sourceNodes = Collections.<String> emptySet();
    private Set<String> blockpools = Collections.<String> emptySet();
    private boolean runDuringUpgrade = false;
    private boolean runAsService = false;
    private boolean sortTopNodes = false;
    private long hotBlockTimeInterval = 0;

    Builder() {
    }

    Builder setBalancingPolicy(BalancingPolicy p) {
      policy = p;
      return this;
    }

    Builder setThreshold(double t) {
      threshold = t;
      return this;
    }

    Builder setMaxIdleIteration(int m) {
      maxIdleIteration = m;
      return this;
    }

    Builder setHotBlockTimeInterval(long t) {
      hotBlockTimeInterval = t;
      return this;
    }

    Builder setExcludedNodes(Set<String> nodes) {
      excludedNodes = nodes;
      return this;
    }

    Builder setIncludedNodes(Set<String> nodes) {
      includedNodes = nodes;
      return this;
    }

    Builder setSourceNodes(Set<String> nodes) {
      sourceNodes = nodes;
      return this;
    }

    Builder setBlockpools(Set<String> pools) {
      blockpools = pools;
      return this;
    }

    Builder setRunDuringUpgrade(boolean run) {
      runDuringUpgrade = run;
      return this;
    }

    Builder setRunAsService(boolean asService) {
      runAsService = asService;
      return this;
    }

    Builder setSortTopNodes(boolean shouldSortTopNodes) {
      sortTopNodes = shouldSortTopNodes;
      return this;
    }

    BalancerParameters build() {
      return new BalancerParameters(this);
    }
  }
}
| |
/*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.bitcoin.protocols.channels;
import com.google.bitcoin.core.*;
import com.google.bitcoin.utils.Threading;
import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.ByteString;
import net.jcip.annotations.GuardedBy;
import org.slf4j.LoggerFactory;
import java.math.BigInteger;
import java.util.*;
import java.util.concurrent.locks.ReentrantLock;
import static com.google.common.base.Preconditions.*;
/**
 * Keeps track of a set of {@link StoredServerChannel}s and expires them 2 hours before their refund transactions
 * unlock.
 */
public class StoredPaymentChannelServerStates implements WalletExtension {
    private static final org.slf4j.Logger log = LoggerFactory.getLogger(StoredPaymentChannelServerStates.class);

    static final String EXTENSION_ID = StoredPaymentChannelServerStates.class.getName();

    /** Channels keyed by contract transaction hash; all access guarded by {@link #lock}. */
    @GuardedBy("lock") @VisibleForTesting final Map<Sha256Hash, StoredServerChannel> mapChannels = new HashMap<Sha256Hash, StoredServerChannel>();
    private final Wallet wallet;
    private final TransactionBroadcaster broadcaster;
    // Daemon timer so pending auto-close tasks do not keep the JVM alive.
    private final Timer channelTimeoutHandler = new Timer(true);

    private final ReentrantLock lock = Threading.lock("StoredPaymentChannelServerStates");

    /**
     * The offset between the refund transaction's lock time and the time channels will be automatically closed.
     * This defines a window during which we must get the last payment transaction verified, ie it should allow time for
     * network propagation and for the payment transaction to be included in a block. Note that the channel expire time
     * is measured in terms of our local clock, and the refund transaction's lock time is measured in terms of Bitcoin
     * block header timestamps, which are allowed to drift up to two hours in the future, as measured by relaying nodes.
     */
    public static final long CHANNEL_EXPIRE_OFFSET = -2*60*60;

    /**
     * Creates a new PaymentChannelServerStateManager and associates it with the given {@link Wallet} and
     * {@link TransactionBroadcaster} which are used to complete and announce payment transactions.
     */
    public StoredPaymentChannelServerStates(Wallet wallet, TransactionBroadcaster broadcaster) {
        this.wallet = checkNotNull(wallet);
        this.broadcaster = checkNotNull(broadcaster);
    }

    /**
     * <p>Closes the given channel using {@link ServerConnectionEventHandler#closeChannel()} and
     * {@link PaymentChannelServerState#close()} to notify any connected client of channel closure and to complete and
     * broadcast the latest payment transaction.</p>
     *
     * <p>Removes the given channel from this set of {@link StoredServerChannel}s and notifies the wallet of a change to
     * this wallet extension.</p>
     */
    public void closeChannel(StoredServerChannel channel) {
        lock.lock();
        try {
            // Already closed (or never known) - nothing to do.
            if (mapChannels.remove(channel.contract.getHash()) == null)
                return;
        } finally {
            lock.unlock();
        }
        synchronized (channel) {
            channel.closeConnectedHandler();
            try {
                channel.getOrCreateState(wallet, broadcaster).close();
            } catch (ValueOutOfRangeException e) {
                // Log through SLF4J instead of printing to stderr; the channel
                // has already been removed from the map at this point.
                log.error("Failed to close channel: " + channel, e);
            } catch (VerificationException e) {
                log.error("Failed to close channel: " + channel, e);
            }
            channel.state = null;
        }
        wallet.addOrUpdateExtension(this);
    }

    /**
     * Gets the {@link StoredServerChannel} with the given channel id (ie contract transaction hash).
     */
    public StoredServerChannel getChannel(Sha256Hash id) {
        lock.lock();
        try {
            return mapChannels.get(id);
        } finally {
            lock.unlock();
        }
    }

    /**
     * <p>Puts the given channel in the channels map and automatically closes it 2 hours before its refund transaction
     * becomes spendable.</p>
     *
     * <p>Because there must be only one, canonical {@link StoredServerChannel} per channel, this method throws if the
     * channel is already present in the set of channels.</p>
     */
    public void putChannel(final StoredServerChannel channel) {
        lock.lock();
        try {
            checkArgument(mapChannels.put(channel.contract.getHash(), checkNotNull(channel)) == null);
            // Add the difference between real time and Utils.now() so that test-cases can use a mock clock.
            Date autocloseTime = new Date((channel.refundTransactionUnlockTimeSecs + CHANNEL_EXPIRE_OFFSET) * 1000L
                    + (System.currentTimeMillis() - Utils.now().getTime()));
            log.info("Scheduling channel for automatic closure at {}: {}", autocloseTime, channel);
            channelTimeoutHandler.schedule(new TimerTask() {
                @Override
                public void run() {
                    log.info("Auto-closing channel: {}", channel);
                    closeChannel(channel);
                }
            }, autocloseTime);
        } finally {
            lock.unlock();
        }
    }

    @Override
    public String getWalletExtensionID() {
        return EXTENSION_ID;
    }

    @Override
    public boolean isWalletExtensionMandatory() {
        return false;
    }

    /**
     * Serializes every stored channel to the protobuf wire format, asserting
     * basic invariants (value range, unlock time, key presence) first.
     */
    @Override
    public byte[] serializeWalletExtension() {
        lock.lock();
        try {
            ServerState.StoredServerPaymentChannels.Builder builder = ServerState.StoredServerPaymentChannels.newBuilder();
            for (StoredServerChannel channel : mapChannels.values()) {
                // First a few asserts to make sure things won't break
                checkState(channel.bestValueToMe.compareTo(BigInteger.ZERO) >= 0 && channel.bestValueToMe.compareTo(NetworkParameters.MAX_MONEY) < 0);
                checkState(channel.refundTransactionUnlockTimeSecs > 0);
                checkNotNull(channel.myKey.getPrivKeyBytes());
                ServerState.StoredServerPaymentChannel.Builder channelBuilder = ServerState.StoredServerPaymentChannel.newBuilder()
                        .setBestValueToMe(channel.bestValueToMe.longValue())
                        .setRefundTransactionUnlockTimeSecs(channel.refundTransactionUnlockTimeSecs)
                        .setContractTransaction(ByteString.copyFrom(channel.contract.bitcoinSerialize()))
                        .setClientOutput(ByteString.copyFrom(channel.clientOutput.bitcoinSerialize()))
                        .setMyKey(ByteString.copyFrom(channel.myKey.getPrivKeyBytes()));
                if (channel.bestValueSignature != null)
                    channelBuilder.setBestValueSignature(ByteString.copyFrom(channel.bestValueSignature));
                builder.addChannels(channelBuilder);
            }
            return builder.build().toByteArray();
        } finally {
            lock.unlock();
        }
    }

    /**
     * Restores channels from serialized protobuf data and re-registers each
     * one (including its auto-close timer) via {@link #putChannel}.
     */
    @Override
    public void deserializeWalletExtension(Wallet containingWallet, byte[] data) throws Exception {
        lock.lock();
        try {
            checkArgument(containingWallet == wallet);
            ServerState.StoredServerPaymentChannels states = ServerState.StoredServerPaymentChannels.parseFrom(data);
            NetworkParameters params = containingWallet.getParams();
            for (ServerState.StoredServerPaymentChannel storedState : states.getChannelsList()) {
                StoredServerChannel channel = new StoredServerChannel(null,
                        new Transaction(params, storedState.getContractTransaction().toByteArray()),
                        new TransactionOutput(params, null, storedState.getClientOutput().toByteArray(), 0),
                        storedState.getRefundTransactionUnlockTimeSecs(),
                        new ECKey(storedState.getMyKey().toByteArray(), null),
                        BigInteger.valueOf(storedState.getBestValueToMe()),
                        storedState.hasBestValueSignature() ? storedState.getBestValueSignature().toByteArray() : null);
                putChannel(channel);
            }
        } finally {
            lock.unlock();
        }
    }

    @Override
    public String toString() {
        lock.lock();
        try {
            StringBuilder buf = new StringBuilder();
            for (StoredServerChannel stored : mapChannels.values()) {
                buf.append(stored);
            }
            return buf.toString();
        } finally {
            lock.unlock();
        }
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search.sort;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.json.JsonXContent;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import static java.util.Collections.emptyList;
public class SortBuilderTests extends ESTestCase {
private static final int NUMBER_OF_RUNS = 20;
private static NamedXContentRegistry xContentRegistry;
@BeforeClass
public static void init() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, emptyList());
xContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents());
}
@AfterClass
public static void afterClass() throws Exception {
xContentRegistry = null;
}
/**
* test two syntax variations:
* - "sort" : "fieldname"
* - "sort" : { "fieldname" : "asc" }
*/
public void testSingleFieldSort() throws IOException {
SortOrder order = randomBoolean() ? SortOrder.ASC : SortOrder.DESC;
String json = "{ \"sort\" : { \"field1\" : \"" + order + "\" }}";
List<SortBuilder<?>> result = parseSort(json);
assertEquals(1, result.size());
SortBuilder<?> sortBuilder = result.get(0);
assertEquals(new FieldSortBuilder("field1").order(order), sortBuilder);
json = "{ \"sort\" : \"field1\" }";
result = parseSort(json);
assertEquals(1, result.size());
sortBuilder = result.get(0);
assertEquals(new FieldSortBuilder("field1"), sortBuilder);
// one element array, see https://github.com/elastic/elasticsearch/issues/17257
json = "{ \"sort\" : [\"field1\"] }";
result = parseSort(json);
assertEquals(1, result.size());
sortBuilder = result.get(0);
assertEquals(new FieldSortBuilder("field1"), sortBuilder);
json = "{ \"sort\" : { \"_doc\" : \"" + order + "\" }}";
result = parseSort(json);
assertEquals(1, result.size());
sortBuilder = result.get(0);
assertEquals(new FieldSortBuilder("_doc").order(order), sortBuilder);
json = "{ \"sort\" : \"_doc\" }";
result = parseSort(json);
assertEquals(1, result.size());
sortBuilder = result.get(0);
assertEquals(new FieldSortBuilder("_doc"), sortBuilder);
json = "{ \"sort\" : { \"_score\" : \"" + order + "\" }}";
result = parseSort(json);
assertEquals(1, result.size());
sortBuilder = result.get(0);
assertEquals(new ScoreSortBuilder().order(order), sortBuilder);
json = "{ \"sort\" : \"_score\" }";
result = parseSort(json);
assertEquals(1, result.size());
sortBuilder = result.get(0);
assertEquals(new ScoreSortBuilder(), sortBuilder);
// test two spellings for _geo_disctance
json = "{ \"sort\" : [" + "{\"_geoDistance\" : {" + "\"pin.location\" : \"40,-70\" } }" + "] }";
result = parseSort(json);
assertEquals(1, result.size());
sortBuilder = result.get(0);
assertEquals(new GeoDistanceSortBuilder("pin.location", 40, -70), sortBuilder);
json = "{ \"sort\" : [" + "{\"_geo_distance\" : {" + "\"pin.location\" : \"40,-70\" } }" + "] }";
result = parseSort(json);
assertEquals(1, result.size());
sortBuilder = result.get(0);
assertEquals(new GeoDistanceSortBuilder("pin.location", 40, -70), sortBuilder);
}
/**
* test parsing random syntax variations
*/
public void testRandomSortBuilders() throws IOException {
for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) {
Set<String> expectedWarningHeaders = new HashSet<>();
List<SortBuilder<?>> testBuilders = randomSortBuilderList();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder();
xContentBuilder.startObject();
if (testBuilders.size() > 1) {
xContentBuilder.startArray("sort");
} else {
xContentBuilder.field("sort");
}
for (SortBuilder<?> builder : testBuilders) {
if (builder instanceof ScoreSortBuilder || builder instanceof FieldSortBuilder) {
switch (randomIntBetween(0, 2)) {
case 0:
if (builder instanceof ScoreSortBuilder) {
xContentBuilder.value("_score");
} else {
xContentBuilder.value(((FieldSortBuilder) builder).getFieldName());
}
break;
case 1:
xContentBuilder.startObject();
if (builder instanceof ScoreSortBuilder) {
xContentBuilder.field("_score");
} else {
xContentBuilder.field(((FieldSortBuilder) builder).getFieldName());
}
xContentBuilder.value(builder.order());
xContentBuilder.endObject();
break;
case 2:
builder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
break;
}
} else {
builder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
}
}
if (testBuilders.size() > 1) {
xContentBuilder.endArray();
}
xContentBuilder.endObject();
List<SortBuilder<?>> parsedSort = parseSort(Strings.toString(xContentBuilder));
assertEquals(testBuilders.size(), parsedSort.size());
Iterator<SortBuilder<?>> iterator = testBuilders.iterator();
for (SortBuilder<?> parsedBuilder : parsedSort) {
assertEquals(iterator.next(), parsedBuilder);
}
if (expectedWarningHeaders.size() > 0) {
assertWarnings(expectedWarningHeaders.toArray(new String[expectedWarningHeaders.size()]));
}
}
}
public static List<SortBuilder<?>> randomSortBuilderList() {
int size = randomIntBetween(1, 5);
List<SortBuilder<?>> list = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
switch (randomIntBetween(0, 5)) {
case 0:
list.add(new ScoreSortBuilder());
break;
case 1:
list.add(new FieldSortBuilder(randomAlphaOfLengthBetween(1, 10)));
break;
case 2:
list.add(SortBuilders.fieldSort(FieldSortBuilder.DOC_FIELD_NAME));
break;
case 3:
list.add(GeoDistanceSortBuilderTests.randomGeoDistanceSortBuilder());
break;
case 4:
list.add(ScriptSortBuilderTests.randomScriptSortBuilder());
break;
case 5:
list.add(SortBuilders.pitTiebreaker());
break;
default:
throw new IllegalStateException("unexpected randomization in tests");
}
}
return list;
}
/**
* test array syntax variations:
* - "sort" : [ "fieldname", { "fieldname2" : "asc" }, ...]
*/
public void testMultiFieldSort() throws IOException {
String json = "{ \"sort\" : ["
+ "{ \"post_date\" : {\"order\" : \"asc\"}},"
+ "\"user\","
+ "{ \"name\" : \"desc\" },"
+ "{ \"age\" : \"desc\" },"
+ "{"
+ "\"_geo_distance\" : {"
+ "\"pin.location\" : \"40,-70\" } },"
+ "\"_score\""
+ "] }";
List<SortBuilder<?>> result = parseSort(json);
assertEquals(6, result.size());
assertEquals(new FieldSortBuilder("post_date").order(SortOrder.ASC), result.get(0));
assertEquals(new FieldSortBuilder("user").order(SortOrder.ASC), result.get(1));
assertEquals(new FieldSortBuilder("name").order(SortOrder.DESC), result.get(2));
assertEquals(new FieldSortBuilder("age").order(SortOrder.DESC), result.get(3));
assertEquals(new GeoDistanceSortBuilder("pin.location", new GeoPoint(40, -70)), result.get(4));
assertEquals(new ScoreSortBuilder(), result.get(5));
}
@Override
protected NamedXContentRegistry xContentRegistry() {
return xContentRegistry;
}
/**
 * Parses the given JSON string, which must be a single object with one
 * top-level "sort" field, and returns the sort builders produced by
 * {@code SortBuilder.fromXContent}.
 */
private List<SortBuilder<?>> parseSort(String jsonString) throws IOException {
    try (XContentParser parser = createParser(JsonXContent.jsonXContent, jsonString)) {
        // Walk into the object and verify the single "sort" field before
        // handing the positioned parser to the production code.
        assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
        assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
        assertEquals("sort", parser.currentName());
        parser.nextToken();
        return SortBuilder.fromXContent(parser);
    }
}
}
| |
package nl.hanze.t23i.gamemodule.extern.reversi;
import java.awt.Component;
import java.util.HashMap;
import java.util.Map;
import nl.hanze.t23i.gamemodule.extern.AbstractGameModule;
/**
 * Reversi (Othello) game module on an 8x8 board.
 *
 * Board cells hold {@code PlayerType} ordinals (0 = empty). A move is passed
 * in as a string holding the linear board position 0-63, row-major:
 * row = pos / 8, column = pos % 8.
 */
public class ReversiGame extends AbstractGameModule {
    public static final String GAME_TYPE = "Reversi";
    // Details of the last executed move; null until the first move was done.
    private String moveDetails;
    // Comment describing how the match ended (set on illegal moves).
    private String matchResult = "";
    // Match outcome per player name: 1 = win, 0 = draw, -1 = loss.
    private Map<String, Integer> playerResults;
    // Final tile count per player name, filled in when the match ends.
    private Map<String, Integer> playerScores;
    // 8x8 board; cell values are PlayerType ordinals.
    private int[][] grid;
    private ReversiView view;
    // Name of the player whose turn it currently is.
    private String playerToMove;

    public ReversiGame(String playerOne, String playerTwo) {
        super(playerOne, playerTwo);
        this.moveDetails = null;
        // Use diamond generics instead of raw HashMap (avoids unchecked warnings).
        this.playerResults = new HashMap<>();
        this.playerScores = new HashMap<>();
        this.grid = new int[8][8];
        this.view = new ReversiView(playerOne, playerTwo);
        this.playerToMove = playerOne;
    }

    /**
     * Starts the match: places the four standard opening discs in the board
     * center and hands the first turn to player one.
     */
    public void start() throws IllegalStateException {
        super.start();
        this.grid[3][3] = PlayerType.PLAYER_TWO.ordinal();
        this.grid[3][4] = PlayerType.PLAYER_ONE.ordinal();
        this.grid[4][3] = PlayerType.PLAYER_ONE.ordinal();
        this.grid[4][4] = PlayerType.PLAYER_TWO.ordinal();
        this.view.update(this.grid);
        this.view.setPlayerToMove(PlayerType.PLAYER_ONE);
    }

    public Component getView() {
        return this.view;
    }

    /**
     * Executes a move for the given player.
     *
     * Unparsable, out-of-range, occupied or non-flipping positions end the
     * match as an illegal move. After a legal move the turn passes to the
     * opponent if the opponent can move; if neither player can move (or the
     * board is full) the match ends.
     *
     * @param player name of the moving player
     * @param move   linear board position (0-63) as a string
     * @throws IllegalStateException if it is not this player's turn
     */
    public void doPlayerMove(String player, String move)
            throws IllegalStateException {
        super.doPlayerMove(player, move);
        if (!this.playerToMove.equals(player)) {
            throw new IllegalStateException("It is not " + player + "'s turn");
        }
        int pos;
        try {
            pos = Integer.parseInt(move);
        } catch (NumberFormatException e) {
            illegalPlayerMove(player);
            return;
        }
        // Reject out-of-range positions, occupied cells and moves that flip nothing.
        if ((pos < 0) || (pos >= 64) || (this.grid[(pos / 8)][(pos % 8)] != 0) || (!isMoveValid(pos, getPlayerType(player)))) {
            illegalPlayerMove(player);
            return;
        }
        this.grid[(pos / 8)][(pos % 8)] = getPlayerType(player).ordinal();
        this.moveDetails = "";
        updateSurroundings(pos, getPlayerType(player));
        this.view.update(this.grid);
        if (isGridFull()) {
            endMatch(player);
            return;
        }
        // Pass the turn only if the opponent has a legal move; if the current
        // player cannot move either, the game is over.
        if (moveAvailable(getPlayerType(this.playerToMove).getOpponent())) {
            nextPlayer();
        } else if (!moveAvailable(getPlayerType(this.playerToMove))) {
            endMatch(player);
        }
    }

    /**
     * Ends the match and records results/scores. tileCounts[0] belongs to the
     * player currently to move (who is {@code player} when called from
     * doPlayerMove), tileCounts[1] to the other player.
     */
    private void endMatch(String player) {
        this.matchStatus = 1;
        int[] tileCounts = getTileCounts();
        if (tileCounts[0] > tileCounts[1]) {
            this.playerResults.put(player, Integer.valueOf(1));
            this.playerResults.put(otherPlayer(player), Integer.valueOf(-1));
        } else if (tileCounts[1] > tileCounts[0]) {
            this.playerResults.put(player, Integer.valueOf(-1));
            this.playerResults.put(otherPlayer(player), Integer.valueOf(1));
        } else {
            this.playerResults.put(player, Integer.valueOf(0));
            this.playerResults.put(otherPlayer(player), Integer.valueOf(0));
        }
        this.playerScores.put(player, Integer.valueOf(tileCounts[0]));
        this.playerScores.put(otherPlayer(player), Integer.valueOf(tileCounts[1]));
    }

    /** Returns true if the given player type has at least one legal move. */
    private boolean moveAvailable(PlayerType opponent) {
        for (int y = 0; y < this.grid.length; y++) {
            for (int x = 0; x < this.grid[y].length; x++) {
                if ((getTile(x, y) == PlayerType.NONE.ordinal()) && (willFlip(opponent, x, y))) {
                    return true;
                }
            }
        }
        return false;
    }

    /** A move is valid when it flips at least one opponent tile. */
    private boolean isMoveValid(int pos, PlayerType type) {
        int checkX = pos % 8;
        int checkY = pos / 8;
        return willFlip(type, checkX, checkY);
    }

    /** Returns true if playing at (checkX, checkY) flips tiles in any direction. */
    private boolean willFlip(PlayerType type, int checkX, int checkY) {
        Direction[] directions = Direction.values();
        for (Direction direction : directions) {
            if (getTileLength(type, direction, checkX, checkY) > 0) {
                return true;
            }
        }
        return false;
    }

    /** Flips the captured opponent tiles in every direction around pos. */
    private void updateSurroundings(int pos, PlayerType type) {
        int checkX = pos % 8;
        int checkY = pos / 8;
        Direction[] directions = Direction.values();
        for (Direction direction : directions) {
            int length = getTileLength(type, direction, checkX, checkY);
            if (length > 0) {
                flipTiles(type, direction, checkX, checkY, length);
            }
        }
    }

    /** Flips {@code length} tiles starting one step from (x, y) in the given direction. */
    private void flipTiles(PlayerType type, Direction direction, int x, int y, int length) {
        for (int i = 0; i < length; i++) {
            x += direction.offsetX;
            y += direction.offsetY;
            if (!tileInBounds(x, y)) {
                return;
            }
            setTile(x, y, type.ordinal());
        }
    }

    private boolean tileInBounds(int checkX, int checkY) {
        return (checkX >= 0) && (checkX < 8) && (checkY >= 0) && (checkY < 8);
    }

    /**
     * Counts the run of opponent tiles in the given direction from
     * (checkX, checkY). Returns the run length only when the run is
     * terminated by one of {@code type}'s own tiles, otherwise 0.
     */
    private int getTileLength(PlayerType type, Direction direction, int checkX, int checkY) {
        int tileLength = 0;
        checkX += direction.offsetX;
        checkY += direction.offsetY;
        if (!tileInBounds(checkX, checkY)) {
            return 0;
        }
        while ((tileInBounds(checkX, checkY)) && (getTile(checkX, checkY) == type.getOpponent().ordinal())) {
            tileLength++;
            checkX += direction.offsetX;
            checkY += direction.offsetY;
        }
        // BUGFIX: the bounds check in the original return was inverted — it
        // returned 0 for every in-bounds terminating tile and read outside
        // the grid (ArrayIndexOutOfBoundsException) when the run walked off
        // the board. A run counts only if it stays on the board and ends on
        // one of our own tiles.
        if (!tileInBounds(checkX, checkY)) {
            return 0;
        }
        return getTile(checkX, checkY) == type.ordinal() ? tileLength : 0;
    }

    private int getTile(int checkX, int checkY) {
        return this.grid[checkY][checkX];
    }

    private void setTile(int x, int y, int tile) {
        this.grid[y][x] = tile;
    }

    public String getMatchResultComment() throws IllegalStateException {
        super.getMatchResultComment();
        return this.matchResult;
    }

    public String getMoveDetails() throws IllegalStateException {
        super.getMoveDetails();
        if (this.moveDetails == null) {
            throw new IllegalStateException("No moves have been done yet");
        }
        return this.moveDetails;
    }

    public String getPlayerToMove() throws IllegalStateException {
        super.getPlayerToMove();
        return this.playerToMove;
    }

    public int getPlayerResult(String player) throws IllegalStateException {
        super.getPlayerResult(player);
        return ((Integer) this.playerResults.get(player)).intValue();
    }

    public int getPlayerScore(String player) throws IllegalStateException {
        super.getPlayerScore(player);
        return ((Integer) this.playerScores.get(player)).intValue();
    }

    public String getTurnMessage() throws IllegalStateException {
        super.getTurnMessage();
        return "";
    }

    /** Passes the turn to the other player and updates the view. */
    private void nextPlayer() {
        this.playerToMove = otherPlayer(this.playerToMove);
        this.view.setPlayerToMove(getPlayerType(this.playerToMove));
    }

    private String otherPlayer(String player) {
        return player.equals(this.playerOne) ? this.playerTwo : this.playerOne;
    }

    private PlayerType getPlayerType(String player) {
        return player.equals(this.playerOne) ? PlayerType.PLAYER_ONE : PlayerType.PLAYER_TWO;
    }

    /**
     * Ends the match as an illegal move: the offender loses regardless of the
     * tile counts recorded by endMatch.
     */
    private void illegalPlayerMove(String player) {
        endMatch(player);
        this.matchResult = (this.moveDetails = "Illegal move");
        this.playerResults.put(player, Integer.valueOf(-1));
        this.playerResults.put(otherPlayer(player), Integer.valueOf(1));
    }

    /**
     * Returns {counts for the player to move, counts for the other player}.
     */
    public int[] getTileCounts() {
        int playerOne = getPlayerType(this.playerToMove).ordinal();
        int playerTwo = getPlayerType(otherPlayer(this.playerToMove)).ordinal();
        int[] counts = {0, 0};
        for (int[] i : this.grid) {
            for (int j : i) {
                if (j == playerOne) {
                    counts[0] += 1;
                } else if (j == playerTwo) {
                    counts[1] += 1;
                }
            }
        }
        return counts;
    }

    /** Returns true when no empty (0) cell remains on the board. */
    private boolean isGridFull() {
        for (int[] i : this.grid) {
            for (int j : i) {
                if (j == 0) {
                    return false;
                }
            }
        }
        return true;
    }

    /** The eight compass directions as (x, y) step offsets. */
    private enum Direction {
        UP(0, -1), LEFT_UP(-1, -1), LEFT(-1, 0), LEFT_DOWN(-1, 1), DOWN(0, 1), RIGHT_DOWN(1, 1),
        RIGHT(1, 0), RIGHT_UP(1, -1);

        public final int offsetX;
        public final int offsetY;

        Direction(int offsetX, int offsetY) {
            this.offsetX = offsetX;
            this.offsetY = offsetY;
        }
    }
}
| |
/*
* Copyright 2017 Axway Software
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.axway.ats.agent.core.action;
import java.lang.annotation.Annotation;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import com.axway.ats.agent.core.exceptions.ActionExecutionException;
import com.axway.ats.agent.core.exceptions.InternalComponentException;
import com.axway.ats.agent.core.model.Action;
import com.axway.ats.agent.core.model.Parameter;
import com.axway.ats.core.utils.StringUtils;
import com.axway.ats.core.validation.ValidationType;
import com.axway.ats.core.validation.exceptions.InvalidInputArgumentsException;
import com.axway.ats.core.validation.exceptions.TypeException;
import com.axway.ats.core.validation.types.BaseType;
import com.axway.ats.core.validation.types.TypeFactory;
/**
 * This class represents a single action-implementing method relation.
 * It can be used to invoke the method which implements the current action.
 */
public class ActionMethod {

    private static final Logger log = Logger.getLogger(ActionMethod.class);

    protected String componentName;
    protected String actionName; // as defined by the user in the name attribute of the Action annotation

    // whether the execution of this action should be registered
    private boolean registerActionExecution = true;
    private boolean registerActionExecutionInQueueExecutionTime = true;

    // transfer unit for throughput-measuring actions; empty string when not set
    private String transferUnit;

    private Method method;
    // the concrete class with the implementation; may differ from the
    // declaring class when abstract action methods are used
    private Class<?> actualClass;

    // action parameter names, in declaration order
    private List<String> parameterNames;

    private boolean isDeprecated;
    // true when at least one parameter is an enum (or an enum array); in that
    // case String arguments are converted to enums before invocation
    protected boolean hasEnumParameter;

    /**
     * @param componentName name of the component
     * @param actionName name of the action
     * @param method the java method
     * @param actualClass the class with the java method implementation
     */
    public ActionMethod( String componentName, String actionName, Method method, Class<?> actualClass ) {

        this.componentName = componentName;
        this.actionName = actionName;
        this.method = method;
        this.actualClass = actualClass;
        this.isDeprecated = false;
        this.hasEnumParameter = false;
        this.transferUnit = "";

        //get the annotation attributes
        Action actionAnnotation = method.getAnnotation(Action.class);
        if (actionAnnotation != null) {
            this.transferUnit = actionAnnotation.transferUnit();
            this.registerActionExecution = actionAnnotation.registerActionExecution();
            if (registerActionExecution) {
                this.registerActionExecutionInQueueExecutionTime = actionAnnotation.registerActionExecutionInQueueExecutionTime();
            }
        }

        //get the parameter names in their order
        //generate a map of the parameters
        this.parameterNames = new ArrayList<String>();
        Annotation[][] methodParameterAnnotations = method.getParameterAnnotations();
        for (int i = 0; i < methodParameterAnnotations.length; i++) {
            Annotation[] paramAnnotations = methodParameterAnnotations[i];
            for (Annotation paramAnnotation : paramAnnotations) {
                if (paramAnnotation instanceof Parameter) {
                    parameterNames.add( ((Parameter) paramAnnotation).name());
                }
            }
        }

        //check if this method is deprecated
        Annotation deprecatedAnnotation = method.getAnnotation(Deprecated.class);
        if (deprecatedAnnotation != null) {
            this.isDeprecated = true;
        }

        //check if this method has an Enumeration parameter
        for (Class<?> paramType : method.getParameterTypes()) {
            if (paramType.isEnum() || (paramType.isArray() && paramType.getComponentType().isEnum())) {
                this.hasEnumParameter = true;
                break;
            }
        }
    }

    /**
     * Invoke the given method
     *
     * @param instance an instance on which to invoke the method
     * @param parameterValues arguments
     * @param validateArguments whether to validate the arguments against the Parameter annotations
     * @return result of the method invocation
     * @throws ActionExecutionException if there was an error while executing the action
     * @throws InternalComponentException if an exception was thrown while executing the method
     */
    public Object invoke( Object instance, Object[] parameterValues,
                          boolean validateArguments ) throws ActionExecutionException,
                                                      InternalComponentException {

        try {
            if (isDeprecated()) {
                log.warn("Method '" + this.toString() + "' is deprecated");
            }

            //convert string to Enumerations if necessary
            if (hasEnumParameter) {
                parameterValues = convertToEnums(parameterValues);
            }

            //validate the arguments
            if (validateArguments) {
                validateArguments(parameterValues);
            }

            //invoke the action
            return doInvoke(instance, this.parameterNames, parameterValues);
        } catch (IllegalArgumentException ae) {
            throw new ActionExecutionException("Illegal arguments passed to action '" + actionName + "'",
                                               ae);
        } catch (IllegalAccessException iae) {
            throw new ActionExecutionException("Could not access action '" + actionName + "'", iae);
        } catch (InvocationTargetException ite) {
            // unwrap the real exception thrown by the action implementation
            throw new InternalComponentException(componentName, actionName, ite.getTargetException());
        }
    }

    protected Object doInvoke( Object instance, List<String> parameterNames,
                               Object[] parameterValues ) throws IllegalArgumentException,
                                                          IllegalAccessException, InvocationTargetException,
                                                          ActionExecutionException {

        /*
         * Here we log the action we are going to execute.
         *
         * ATS has some actions for internal usage and users should not see them.
         * Currently we do not have some good way to distinguish these actions from the regular ones, for
         * example we could use a new attribute in the Action annotation.
         * For now we can filter these ATS internal actions by expecting their names match the next regular
         * expression.
         */
        if (log.isInfoEnabled()) {
            if (!actionName.matches("Internal.*Operations.*")
                && !actionName.startsWith("InternalProcessTalker")) {
                log.info("Executing '" + actionName + "' with arguments "
                         + StringUtils.methodInputArgumentsToString(parameterValues));
            } else {
                // internal action, log only at debug level
                if (log.isDebugEnabled())
                    log.debug("Executing '" + actionName + "' with arguments "
                              + StringUtils.methodInputArgumentsToString(parameterValues));
            }
        }

        return method.invoke(instance, parameterValues);
    }

    /**
     * Has this action method been deprecated
     *
     * @return true if the method is deprecated, false if not
     */
    public boolean isDeprecated() {

        return isDeprecated;
    }

    public boolean isRegisterActionExecution() {

        return registerActionExecution;
    }

    public boolean isRegisterActionExecutionInQueueExecutionTime() {

        return registerActionExecutionInQueueExecutionTime;
    }

    /**
     * Get the transfer unit associated with this action
     *
     * @return the transfer unit, empty string if not set
     */
    public String getTransferUnit() {

        if (transferUnit.length() > 0) {
            return transferUnit + "/sec";
        } else {
            return transferUnit; // default value is empty string
        }
    }

    /**
     * @return the method which implements the action
     */
    public Method getMethod() {

        return method;
    }

    /**
     * In case of using abstract action methods, we need to make an
     * instance of the child class where the implementation is, not the
     * abstract parent class
     *
     * @return the concrete implementing class, or the declaring class when no
     *         concrete class was provided
     */
    public Class<?> getTheActualClass() {

        if (actualClass != null) {
            return actualClass;
        } else {
            return method.getDeclaringClass();
        }
    }

    /**
     * @return get the names of all parameters that this method accepts
     */
    public List<String> getParameterNames() {

        return parameterNames;
    }

    /* (non-Javadoc)
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {

        return method.toString();
    }

    /**
     * Convert any String arguments to proper Enumerations if
     * necessary
     *
     * @param args the arguments
     * @return arguments with Strings converted to Enums
     * @throws ActionExecutionException if a given String cannot be converted to the proper Enum
     */
    @SuppressWarnings( { "rawtypes", "unchecked" })
    protected Object[] convertToEnums( Object[] args ) throws ActionExecutionException {

        Object[] processedArgs = new Object[args.length];

        //try to convert all strings to enums
        Class<?>[] parameterTypes = method.getParameterTypes();
        for (int i = 0; i < parameterTypes.length; i++) {
            if (args[i] == null) {
                processedArgs[i] = null;
                continue;
            }

            boolean isParamArray = parameterTypes[i].isArray();
            Class<?> paramType;
            Class<?> argType;
            if (isParamArray) {
                paramType = parameterTypes[i].getComponentType();
                argType = args[i].getClass().getComponentType();
            } else {
                paramType = parameterTypes[i];
                argType = args[i].getClass();
            }

            if (argType == String.class && paramType.isEnum()) {
                try {
                    if (isParamArray) {
                        Object convertedEnums = Array.newInstance(paramType, Array.getLength(args[i]));

                        //convert all array elements to enums
                        for (int j = 0; j < Array.getLength(args[i]); j++) {
                            String currentValue = (String) Array.get(args[i], j);
                            if (currentValue != null) {
                                Array.set(convertedEnums, j,
                                          Enum.valueOf((Class<? extends Enum>) paramType,
                                                       currentValue));
                            }
                        }
                        processedArgs[i] = convertedEnums;
                    } else {
                        processedArgs[i] = Enum.valueOf((Class<? extends Enum>) paramType,
                                                        (String) args[i]);
                    }
                } catch (IllegalArgumentException iae) {
                    // BUGFIX: preserve the original exception as cause instead of dropping it
                    throw new ActionExecutionException("Could not convert string " + args[i]
                                                       + " to enumeration of type " + paramType.getName(),
                                                       iae);
                }
            } else {
                processedArgs[i] = args[i];
            }
        }

        return processedArgs;
    }

    /**
     * Validate the arguments according to the rules specified in the action
     * using the Parameter annotations
     *
     * @param args the arguments to validate
     * @throws ActionExecutionException if exception occurs during arguments validation
     */
    protected void validateArguments( Object[] args ) throws ActionExecutionException {

        Annotation[][] annotations = method.getParameterAnnotations();
        for (int i = 0; i < annotations.length; i++) {
            Annotation[] paramAnnotations = annotations[i];

            for (Annotation paramAnnotation : paramAnnotations) {
                if (paramAnnotation instanceof Parameter) {
                    Parameter paramDescriptionAnnotation = (Parameter) paramAnnotation;
                    ValidationType validationType = paramDescriptionAnnotation.validation();

                    String[] validationArgs;

                    // if we are checking for valid constants, then the
                    // args array should contain
                    // the name of the array holding the valid constants
                    if (validationType == ValidationType.STRING_CONSTANT
                        || validationType == ValidationType.NUMBER_CONSTANT) {
                        try {
                            String arrayName = paramDescriptionAnnotation.args()[0];

                            // get the field and set access level if
                            // necessary
                            Field arrayField = method.getDeclaringClass().getDeclaredField(arrayName);
                            if (!arrayField.isAccessible()) {
                                arrayField.setAccessible(true);
                            }
                            Object arrayValidConstants = arrayField.get(null);

                            // convert the object array to string array
                            String[] arrayValidConstatnsStr = new String[Array.getLength(arrayValidConstants)];
                            for (int j = 0; j < Array.getLength(arrayValidConstants); j++) {
                                arrayValidConstatnsStr[j] = Array.get(arrayValidConstants, j).toString();
                            }
                            validationArgs = arrayValidConstatnsStr;
                        } catch (IndexOutOfBoundsException iobe) {
                            // this is a fatal error
                            // BUGFIX: pass the original exception as cause
                            throw new ActionExecutionException("You need to specify the name of the array with valid constants in the 'args' field of the Parameter annotation",
                                                               iobe);
                        } catch (Exception e) {
                            // this is a fatal error
                            // BUGFIX: pass the original exception as cause
                            throw new ActionExecutionException("Could not get array with valid constants - action annotations are incorrect",
                                                               e);
                        }
                    } else {
                        validationArgs = paramDescriptionAnnotation.args();
                    }

                    List<BaseType> typeValidators = createBaseTypes(paramDescriptionAnnotation.validation(),
                                                                    paramDescriptionAnnotation.name(),
                                                                    args[i], validationArgs);
                    //perform validation
                    for (BaseType baseType : typeValidators) {
                        if (baseType != null) {
                            try {
                                baseType.validate();
                            } catch (TypeException e) {
                                throw new InvalidInputArgumentsException("Validation failed while validating argument "
                                                                         + paramDescriptionAnnotation.name()
                                                                         + e.getMessage());
                            }
                        } else {
                            log.warn("Could not perform validation on argument "
                                     + paramDescriptionAnnotation.name());
                        }
                    }
                }
            }
        }
    }

    /** Creates as much validation types as needed to validate the input data */
    private List<BaseType> createBaseTypes( ValidationType type, String paramName, Object values,
                                            Object[] args ) {

        List<BaseType> typeValidators = new ArrayList<BaseType>();

        // if this is an array of types to be validated, then add each
        // of them separately to the list
        if ( (values != null) && values.getClass().isArray()) {
            for (int i = 0; i < Array.getLength(values); i++) {
                Object value = Array.get(values, i);
                TypeFactory factory = TypeFactory.getInstance();
                BaseType baseType = factory.createValidationType(type, paramName, value, args);
                typeValidators.add(baseType);
            }
            // otherwise just add the single validation type
        } else {
            TypeFactory factory = TypeFactory.getInstance();
            BaseType baseType = factory.createValidationType(type, paramName, values, args);
            typeValidators.add(baseType);
        }

        return typeValidators;
    }

    /** @return a fresh copy of this action method description */
    public ActionMethod getNewCopy() {

        return new ActionMethod(componentName, actionName, method, actualClass);
    }

    /**
     * Builds the action method name. It is the method class name + method name split by Camel-Case words<br>
     * For example: getDescription -> MethodClassName get Description; getSMSCount -> MethodClassName get S M S Count
     *
     * @param actionMethod action method
     * @return action method name
     */
    public static String buildActionMethodName( Method actionMethod ) {

        String methodName = actionMethod.getName();

        StringBuilder actionMethodName = new StringBuilder();
        int charIndex;
        for (charIndex = 0; charIndex < methodName.length(); charIndex++) {
            char ch = methodName.charAt(charIndex);
            if (Character.isUpperCase(ch)) {
                actionMethodName.append(' ');
            }
            actionMethodName.append(ch);
        }

        return actionMethod.getDeclaringClass().getSimpleName() + " " + actionMethodName.toString().trim();
    }
}
| |
package io.cattle.platform.ha.monitor.impl;
import static io.cattle.platform.core.constants.ContainerEventConstants.*;
import static io.cattle.platform.core.constants.HostConstants.*;
import static io.cattle.platform.core.constants.InstanceConstants.*;
import static io.cattle.platform.process.instance.InstanceProcessOptions.*;
import io.cattle.platform.agent.AgentLocator;
import io.cattle.platform.agent.RemoteAgent;
import io.cattle.platform.archaius.util.ArchaiusUtil;
import io.cattle.platform.core.constants.CommonStatesConstants;
import io.cattle.platform.core.constants.InstanceConstants;
import io.cattle.platform.core.dao.AgentDao;
import io.cattle.platform.core.model.Agent;
import io.cattle.platform.core.model.ContainerEvent;
import io.cattle.platform.core.model.Host;
import io.cattle.platform.core.model.Instance;
import io.cattle.platform.engine.process.Predicate;
import io.cattle.platform.engine.process.ProcessDefinition;
import io.cattle.platform.engine.process.ProcessInstance;
import io.cattle.platform.engine.process.ProcessState;
import io.cattle.platform.eventing.model.Event;
import io.cattle.platform.framework.event.Ping;
import io.cattle.platform.framework.event.data.PingData;
import io.cattle.platform.ha.monitor.PingInstancesMonitor;
import io.cattle.platform.ha.monitor.dao.PingInstancesMonitorDao;
import io.cattle.platform.ha.monitor.event.InstanceForceStop;
import io.cattle.platform.ha.monitor.model.KnownInstance;
import io.cattle.platform.lock.LockDelegator;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.object.meta.ObjectMetaDataManager;
import io.cattle.platform.object.process.ObjectProcessManager;
import io.cattle.platform.object.process.StandardProcess;
import io.cattle.platform.object.util.DataAccessor;
import io.cattle.platform.object.util.DataUtils;
import io.cattle.platform.process.instance.InstanceProcessOptions;
import io.cattle.platform.util.type.CollectionUtils;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.util.concurrent.UncheckedExecutionException;
import com.netflix.config.DynamicLongProperty;
public class PingInstancesMonitorImpl implements PingInstancesMonitor {
private static final DynamicLongProperty CACHE_TIME = ArchaiusUtil.getLong("ha.instance.state.cache.millis");
private static final DynamicLongProperty HOST_ID_CACHE_TIME = ArchaiusUtil.getLong("ha.host.id.cache.millis");
private static final Logger log = LoggerFactory.getLogger(PingInstancesMonitorImpl.class);
private static final String UKNOWN_OUT_OF_SYNC_WARNING = "Instance out of sync and can't determine action to take. Uuid [{}]. Docker id [{}]. "
+ "State in rancher [{}]. State on host [{}]";
@Inject
AgentDao agentDao;
@Inject
ObjectMetaDataManager objectMetaDataManager;
@Inject
AgentLocator agentLocator;
@Inject
LockDelegator lockDelegator;
@Inject
PingInstancesMonitorDao monitorDao;
@Inject
ObjectManager objectManager;
@Inject
ObjectProcessManager processManager;
LoadingCache<Long, Map<String, KnownInstance>> instanceCache = CacheBuilder.newBuilder().expireAfterWrite(CACHE_TIME.get(), TimeUnit.MILLISECONDS)
.build(new CacheLoader<Long, Map<String, KnownInstance>>() {
@Override
public Map<String, KnownInstance> load(Long key) throws Exception {
return PingInstancesMonitorImpl.this.load(key);
}
});
LoadingCache<ImmutablePair<Long, String>, AgentAndHost> hostCache = CacheBuilder.newBuilder()
.expireAfterWrite(HOST_ID_CACHE_TIME.get(), TimeUnit.MILLISECONDS).build(new CacheLoader<ImmutablePair<Long, String>, AgentAndHost>() {
@Override
public AgentAndHost load(ImmutablePair<Long, String> key) throws Exception {
return PingInstancesMonitorImpl.this.loadAgentAndHostData(key);
}
});
@Override
public void pingReply(Ping ping) {
    // Extract the instances the agent reported in its ping; nothing to do
    // when the ping carries no host uuid.
    ReportedInstances reportedInstances = getInstances(ping);
    if (reportedInstances == null || StringUtils.isEmpty(reportedInstances.hostUuid))
        return;
    long agentId = Long.parseLong(ping.getResourceId());
    // Cached view of the instances Rancher believes are on this agent.
    Map<String, KnownInstance> knownInstances = instanceCache.getUnchecked(agentId);
    AgentAndHost agentAndHost = null;
    try {
        agentAndHost = hostCache.getUnchecked(new ImmutablePair<Long, String>(agentId, reportedInstances.hostUuid));
    } catch (UncheckedExecutionException e) {
        // CantFindAgentAndHostException can be ignored because the host may not exist yet. Rethrow all other exceptions.
        if (!(e.getCause() instanceof CantFindAgentAndHostException)) {
            throw e;
        }
    }
    if (agentAndHost == null) {
        log.info("Couldn't find host with uuid [{}] for agent [{}]", reportedInstances.hostUuid, agentId);
        return;
    }
    try {
        // First pass runs with checkOnly=true: it only detects differences.
        syncContainers(knownInstances, reportedInstances, agentAndHost.agentAccountId, agentId, agentAndHost.hostId, true);
    } catch (ContainersOutOfSync e) {
        // On a mismatch, refresh the cached state and sync for real
        // (checkOnly=false) against fresh data.
        knownInstances = load(agentId);
        instanceCache.put(agentId, knownInstances);
        syncContainers(knownInstances, reportedInstances, agentAndHost.agentAccountId, agentId, agentAndHost.hostId, false);
    }
}
@Override
public void computeInstanceActivateReply(Event event) {
    // Instance state changed on a host: drop the cached instance map for the
    // owning agent so the next ping reloads fresh state.
    Long agent = monitorDao.getAgentIdForInstanceHostMap(event.getResourceId());
    if (agent == null) {
        return;
    }
    instanceCache.invalidate(agent);
}
/*
 * If checkOnly is true, will raise a ContainersOutOfSync exception, indicating this should be re-run with checkOnly set to false.
 */
void syncContainers(Map<String, KnownInstance> knownInstances, ReportedInstances reportedInstances, long agentAccountId, long agentId, long hostId,
        boolean checkOnly) {
    Map<String, ReportedInstance> needsSynced = new HashMap<String, ReportedInstance>();
    Map<String, String> syncActions = new HashMap<String, String>();
    Set<String> needsHaRestart = new HashSet<String>();
    // Diff Rancher's known state against the host-reported state and collect
    // the actions needed to reconcile them.
    determineSyncActions(knownInstances, reportedInstances, needsSynced, syncActions, needsHaRestart, checkOnly);
    // Restarts that could be decided without a ReportedInstance.
    for (String uuid : needsHaRestart) {
        restart(uuid);
    }
    // Dispatch the remaining per-container actions.
    for (Map.Entry<String, ReportedInstance> syncEntry : needsSynced.entrySet()) {
        ReportedInstance ri = syncEntry.getValue();
        String syncAction = syncActions.get(syncEntry.getKey());
        if (EVENT_INSTANCE_FORCE_STOP.equals(syncAction)) {
            forceStop(ri.getExternalId(), agentId);
        } else if (HA_RESTART.equals(syncAction)) {
            String uuid = ri.getInstance().getUuid();
            restart(uuid);
        } else {
            // Remaining actions are delivered as scheduled container events.
            scheduleContainerEvent(agentAccountId, hostId, ri, syncAction);
        }
    }
}
/**
 * Diffs Rancher's known instances against the instances reported by the host
 * and fills in the reconciliation actions. Matching is attempted first by
 * uuid, then by external (docker) id. Containers left only on the host are
 * started or force-stopped; instances left only in Rancher are restarted or
 * destroyed. When {@code checkOnly} is true any needed action raises
 * {@link ContainersOutOfSync} instead of being recorded.
 */
void determineSyncActions(Map<String, KnownInstance> knownInstances, ReportedInstances reportedInstances, Map<String, ReportedInstance> needsSynced,
        Map<String, String> syncActions, Set<String> needsHaRestart, boolean checkOnly) {
    // Working copies; matched entries are removed as we go.
    Map<String, KnownInstance> inRancher = new HashMap<String, KnownInstance>(knownInstances);
    Map<String, ReportedInstance> onHost = new HashMap<String, ReportedInstance>(reportedInstances.byExternalId);
    // Pass 1: match by uuid.
    for (Map.Entry<String, ReportedInstance> reported : reportedInstances.byUuid.entrySet()) {
        KnownInstance ki = knownInstances.get(reported.getKey());
        if (ki != null) {
            removeAndDetermineSyncAction(needsSynced, syncActions, checkOnly, inRancher, onHost, reported.getValue(), ki,
                    reported.getValue().getExternalId(), reported.getKey());
        }
    }
    // Pass 2: match leftovers by external (docker) id.
    if (!onHost.isEmpty() || !inRancher.isEmpty()) {
        Map<String, KnownInstance> knownByExternalId = new HashMap<String, KnownInstance>();
        for (KnownInstance ki : knownInstances.values()) {
            if (StringUtils.isNotEmpty(ki.getExternalId()))
                knownByExternalId.put(ki.getExternalId(), ki);
        }
        for (Map.Entry<String, ReportedInstance> reported : reportedInstances.byExternalId.entrySet()) {
            KnownInstance ki = knownByExternalId.get(reported.getKey());
            if (ki != null) {
                removeAndDetermineSyncAction(needsSynced, syncActions, checkOnly, inRancher, onHost, reported.getValue(), ki,
                        reported.getKey(), ki.getUuid());
            }
        }
    }
    // Anything left in onHost is on the host, but not in rancher.
    for (Map.Entry<String, ReportedInstance> create : onHost.entrySet()) {
        ReportedInstance ri = create.getValue();
        if (StringUtils.isNotEmpty(ri.getSystemContainer())) {
            // Unknown system container. Force stop.
            if (!STATE_STOPPED.equals(ri.getState())) {
                addSyncAction(needsSynced, syncActions, ri, EVENT_INSTANCE_FORCE_STOP, checkOnly);
            }
            continue;
        }
        addSyncAction(needsSynced, syncActions, ri, EVENT_START, checkOnly);
    }
    // States in which a missing container should be left alone.
    // PERF: hoisted out of the loop below — the list is loop-invariant and
    // was previously rebuilt on every iteration.
    List<String> forRemove = Arrays.asList(CommonStatesConstants.REMOVING, InstanceConstants.STATE_ERROR,
            InstanceConstants.STATE_ERRORING);
    // Anything left in inRancher is in rancher, but not on the host.
    for (KnownInstance ki : inRancher.values()) {
        if (objectMetaDataManager.isTransitioningState(Instance.class, ki.getState()) || ki.getRemoved() != null
                || forRemove.contains(ki.getState())
                || (STATE_STOPPED.equals(ki.getState()) && StringUtils.isEmpty(ki.getExternalId())))
            continue;
        if (StringUtils.isNotEmpty(ki.getSystemContainer()) || StringUtils.isEmpty(ki.getExternalId()) || hasInstanceTriggeredStopConfigured(ki)) {
            // System container, not enough info to perform no-op action, or has an instance triggered stop policy. Schedule potential restart.
            // This is the one place we can't use addSyncAction, since we don't have (and can't construct) a ReportedInstance.
            if (!STATE_STOPPED.equals(ki.getState())) {
                if (checkOnly) {
                    throw new ContainersOutOfSync();
                }
                needsHaRestart.add(ki.getUuid());
            }
        } else {
            // User container missing from the host: emit a destroy via a
            // synthetic ReportedInstance built from the known state.
            ReportedInstance ri = new ReportedInstance();
            ri.setExternalId(ki.getExternalId());
            ri.setUuid(ki.getUuid());
            Object imageUuid = CollectionUtils.getNestedValue(ki.getData(), DataUtils.FIELDS, FIELD_IMAGE_UUID);
            String image = imageUuid != null ? imageUuid.toString() : null;
            ri.setImage(image);
            addSyncAction(needsSynced, syncActions, ri, EVENT_DESTROY, checkOnly);
        }
    }
}
// True when the instance has a non-empty instanceTriggeredStop policy other
// than the plain "stop" policy.
boolean hasInstanceTriggeredStopConfigured(KnownInstance ki) {
    String policy = ki.getInstanceTriggeredStop();
    if (StringUtils.isEmpty(policy)) {
        return false;
    }
    return !ON_STOP_STOP.equals(policy);
}
// Marks a matched pair as handled (removing it from both working maps),
// links the reported container to its known instance, then decides whether a
// sync action is required for the pair.
void removeAndDetermineSyncAction(Map<String, ReportedInstance> needsSynced, Map<String, String> syncActions, boolean checkOnly,
        Map<String, KnownInstance> inRancher, Map<String, ReportedInstance> onHost, ReportedInstance reportedInstance, KnownInstance instance,
        String onHostKey, String inRancherKey) {
    inRancher.remove(inRancherKey);
    onHost.remove(onHostKey);
    reportedInstance.setInstance(instance);
    determineSyncAction(instance, reportedInstance, needsSynced, syncActions, checkOnly);
}
/**
 * Decides which corrective action, if any, reconciles a container whose reported
 * host state differs from its Rancher state, and records it via addSyncAction.
 * No-op when the instance is mid-transition or the two states already agree.
 */
void determineSyncAction(KnownInstance ki, ReportedInstance ri, Map<String, ReportedInstance> needsSynced, Map<String, String> syncActions,
boolean checkOnly) {
if (objectMetaDataManager.isTransitioningState(Instance.class, ki.getState()) || StringUtils.equals(ki.getState(), ri.getState()))
return;
// Treat the container as a system container if either side flags it as one.
boolean sysCon = StringUtils.isNotEmpty(ki.getSystemContainer()) || StringUtils.isNotEmpty(ri.getSystemContainer());
if (STATE_RUNNING.equals(ri.getState())) {
// Container is running on host but not in Rancher. Take action
if (ki.getRemoved() != null) {
// If rancher thinks it's removed, send an explicit stop down to host.
addSyncAction(needsSynced, syncActions, ri, EVENT_INSTANCE_FORCE_STOP, checkOnly);
} else if (STATE_STOPPED.equals(ki.getState())) {
// For system containers, rancher is source of truth, stop it. For user containers, do a no-op start to sync state.
String doAction = sysCon ? EVENT_INSTANCE_FORCE_STOP : EVENT_START;
addSyncAction(needsSynced, syncActions, ri, doAction, checkOnly);
} else {
// Unrecognized state combination: log it rather than guessing at an action.
log.warn(UKNOWN_OUT_OF_SYNC_WARNING, ki.getUuid(), ri.getExternalId(), ki.getState(), ri.getState());
}
} else if (STATE_RUNNING.equals(ki.getState())) {
if (STATE_STOPPED.equals(ri.getState())) {
// Container is running in Rancher, but is not running on host.
// For system containers or containers with a instance triggered stop action, schedule HA_RESTART (stop and possible start based
// on triggeredInstanceStop field). For user containers, a no-op stop to sync up.
String doAction = sysCon || hasInstanceTriggeredStopConfigured(ki) ? HA_RESTART : EVENT_STOP;
addSyncAction(needsSynced, syncActions, ri, doAction, checkOnly);
} else {
// Unrecognized state combination: log it rather than guessing at an action.
log.warn(UKNOWN_OUT_OF_SYNC_WARNING, ki.getUuid(), ri.getExternalId(), ki.getState(), ri.getState());
}
}
}
/**
 * Records a sync action for the reported instance, keyed by its external id.
 * In check-only mode no action is recorded; ContainersOutOfSync is thrown instead
 * to signal that a difference was detected.
 */
void addSyncAction(Map<String, ReportedInstance> needsSynced, Map<String, String> syncActions, ReportedInstance ri, String action, boolean checkOnly) {
    if (checkOnly) {
        // Caller only wants to know whether anything is out of sync.
        throw new ContainersOutOfSync();
    }
    String externalId = ri.getExternalId();
    needsSynced.put(externalId, ri);
    syncActions.put(externalId, action);
}
/**
 * Persists a ContainerEvent for the given reported instance and kicks off the
 * standard CREATE process for it. Skips (with an error log) when the reported
 * instance lacks the image, external id, or uuid needed to build the event.
 */
void scheduleContainerEvent(Long agentId, Long hostId, ReportedInstance ri, String event) {
    boolean missingInfo = StringUtils.isEmpty(ri.getImage()) || StringUtils.isEmpty(ri.getExternalId()) || StringUtils.isEmpty(ri.getUuid());
    if (missingInfo) {
        log.error("Not enough information to schedule container event: [" + ri.toString() + "].");
        return;
    }
    ContainerEvent containerEvent = objectManager.newRecord(ContainerEvent.class);
    containerEvent.setKind(CONTAINER_EVENT_KIND);
    // NOTE(review): account id is populated from the agent id here — confirm intentional.
    containerEvent.setAccountId(agentId);
    containerEvent.setHostId(hostId);
    containerEvent.setExternalFrom(ri.getImage());
    containerEvent.setExternalId(ri.getExternalId());
    containerEvent.setExternalStatus(event);
    containerEvent.setExternalTimestamp(ri.getCreated());
    Map<String, Object> processData = new HashMap<String, Object>();
    processData.put(CONTAINER_EVENT_SYNC_NAME, ri.getUuid());
    processData.put(CONTAINER_EVENT_SYNC_LABELS, ri.getLabels());
    containerEvent = objectManager.create(containerEvent);
    processManager.scheduleStandardProcess(StandardProcess.CREATE, containerEvent, processData);
}
/** Publishes an explicit force-stop event for the container directly to its agent. */
protected void forceStop(final String containerId, Long agentId) {
    RemoteAgent remoteAgent = agentLocator.lookupAgent(agentId);
    remoteAgent.publish(new InstanceForceStop(containerId));
}
/**
 * Schedules an HA-flagged restart process for the instance identified by uuid.
 * The predicate re-loads the instance when the process actually runs and only lets
 * the restart proceed while the instance is still in the RUNNING state.
 */
protected void restart(final String uuid) {
Instance instance = objectManager.findOne(Instance.class, ObjectMetaDataManager.UUID_FIELD, uuid);
Map<String, Object> data = new HashMap<String, Object>();
// Flag the process data so downstream handling knows this restart came from HA reconciliation.
DataAccessor.fromMap(data).withScope(InstanceProcessOptions.class).withKey(HA_RESTART).set(true);
processManager.scheduleProcessInstance(PROCESS_RESTART, instance, data, new Predicate() {
@Override
public boolean evaluate(ProcessState state, ProcessInstance processInstance, ProcessDefinition definition) {
// Re-check current state at execution time; skip the restart if no longer running.
Instance instance = objectManager.findOne(Instance.class, ObjectMetaDataManager.UUID_FIELD, uuid);
return STATE_RUNNING.equals(instance.getState());
}
});
}
/**
 * Extracts the instances reported in a ping's resource list.
 * Returns null when the ping carries no data, no resource id, no resources,
 * or was not sent with the INSTANCES option.
 */
protected ReportedInstances getInstances(Ping ping) {
    PingData pingData = ping.getData();
    if (pingData == null || ping.getResourceId() == null) {
        return null;
    }
    List<Map<String, Object>> resources = pingData.getResources();
    if (resources == null || !ping.getOption(Ping.INSTANCES)) {
        return null;
    }
    ReportedInstances result = new ReportedInstances();
    for (Map<String, Object> resource : resources) {
        Object type = DataAccessor.fromMap(resource).withKey(ObjectMetaDataManager.TYPE_FIELD).as(String.class);
        if (FIELD_HOST_UUID.equals(type)) {
            // The host resource carries the host uuid; remember it alongside the instances.
            result.hostUuid = DataAccessor.fromMap(resource).withKey(ObjectMetaDataManager.UUID_FIELD).as(String.class);
        }
        if (!InstanceConstants.TYPE.equals(type)) {
            continue;
        }
        ReportedInstance reported = new ReportedInstance(resource);
        // The rancher-agent container itself is never synced.
        if (StringUtils.equals("rancher-agent", reported.getSystemContainer())) {
            continue;
        }
        result.byUuid.put(reported.getUuid(), reported);
        result.byExternalId.put(reported.getExternalId(), reported);
    }
    return result;
}
/** Control-flow signal thrown during check-only passes when host and Rancher state differ. */
protected class ContainersOutOfSync extends RuntimeException {
private static final long serialVersionUID = 1L;
}
/**
 * Resolves the agent's account id and the host id for the given (agentId, hostUuid) pair.
 *
 * @throws CantFindAgentAndHostException when either the agent or the host cannot be found
 */
protected AgentAndHost loadAgentAndHostData(ImmutablePair<Long, String> agentIdAndHostUuid) {
    Agent agent = objectManager.loadResource(Agent.class, agentIdAndHostUuid.left);
    if (agent == null) {
        throw new CantFindAgentAndHostException();
    }
    Host host = agentDao.getHosts(agent.getId()).get(agentIdAndHostUuid.right);
    if (host == null) {
        throw new CantFindAgentAndHostException();
    }
    return new AgentAndHost(agent.getAccountId(), host.getId());
}
/** Loads the known instances for the agent; a null agent id yields an empty map. */
protected Map<String, KnownInstance> load(Long agentId) {
    return agentId == null
            ? new HashMap<String, KnownInstance>()
            : monitorDao.getInstances(agentId.longValue());
}
/** Simple value holder pairing an agent's account id with a host id. */
private class AgentAndHost {
// Account id owning the agent.
Long agentAccountId;
// Database id of the matched host.
Long hostId;
AgentAndHost(Long agentAccountId, Long hostId) {
this.agentAccountId = agentAccountId;
this.hostId = hostId;
}
}
/** Thrown by loadAgentAndHostData when the agent or its host cannot be resolved. */
private class CantFindAgentAndHostException extends IllegalArgumentException {
private static final long serialVersionUID = 1L;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.server.security.jwt;
import com.google.common.io.Resources;
import io.airlift.security.pem.PemReader;
import io.jsonwebtoken.Claims;
import io.jsonwebtoken.Jws;
import io.jsonwebtoken.JwsHeader;
import io.jsonwebtoken.Jwts;
import io.jsonwebtoken.SignatureAlgorithm;
import io.jsonwebtoken.SigningKeyResolver;
import io.prestosql.server.security.jwt.JwkDecoder.JwkEcPublicKey;
import io.prestosql.server.security.jwt.JwkDecoder.JwkRsaPublicKey;
import org.testng.annotations.Test;
import java.io.File;
import java.security.Key;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.interfaces.ECPublicKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.ECParameterSpec;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.Map;
import java.util.Optional;
import static io.prestosql.server.security.jwt.JwkDecoder.decodeKeys;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertSame;
import static org.testng.Assert.assertTrue;
/**
 * Tests for {@link JwkDecoder#decodeKeys}: valid RSA/EC JWK sets are decoded into
 * public keys keyed by "kid", while entries with missing or malformed required
 * fields are silently dropped (result map simply omits them). Also verifies
 * end-to-end JWT sign/verify round trips against PEM reference key material.
 */
public class TestJwkDecoder
{
// A well-formed set with one RSA and one EC key decodes both, keyed by kid.
@Test
public void testReadRsaKeys()
{
Map<String, PublicKey> keys = decodeKeys("" +
"{\n" +
"  \"keys\": [\n" +
"    {\n" +
"      \"e\": \"AQAB\",\n" +
"      \"n\": \"mvj-0waJ2owQlFWrlC06goLs9PcNehIzCF0QrkdsYZJXOsipcHCFlXBsgQIdTdLvlCzNI07jSYA-zggycYi96lfDX-FYv_CqC8dRLf9TBOPvUgCyFMCFNUTC69hsrEYMR_J79Wj0MIOffiVr6eX-AaCG3KhBMZMh15KCdn3uVrl9coQivy7bk2Uw-aUJ_b26C0gWYj1DnpO4UEEKBk1X-lpeUMh0B_XorqWeq0NYK2pN6CoEIh0UrzYKlGfdnMU1pJJCsNxMiha-Vw3qqxez6oytOV_AswlWvQc7TkSX6cHfqepNskQb7pGxpgQpy9sA34oIxB_S-O7VS7_h0Qh4vQ\",\n" +
"      \"alg\": \"RS256\",\n" +
"      \"use\": \"sig\",\n" +
"      \"kty\": \"RSA\",\n" +
"      \"kid\": \"example-rsa\"\n" +
"    },\n" +
"    {\n" +
"      \"kty\": \"EC\",\n" +
"      \"use\": \"sig\",\n" +
"      \"crv\": \"P-256\",\n" +
"      \"kid\": \"example-ec\",\n" +
"      \"x\": \"W9pnAHwUz81LldKjL3BzxO1iHe1Pc0fO6rHkrybVy6Y\",\n" +
"      \"y\": \"XKSNmn_xajgOvWuAiJnWx5I46IwPVJJYPaEpsX3NPZg\",\n" +
"      \"alg\": \"ES256\"\n" +
"    }\n" +
"  ]\n" +
"}");
assertEquals(keys.size(), 2);
assertTrue(keys.get("example-rsa") instanceof JwkRsaPublicKey);
assertTrue(keys.get("example-ec") instanceof JwkEcPublicKey);
}
// Keys without a "kid" cannot be addressed by a JWT header and are dropped entirely.
@Test
public void testNoKeyId()
{
Map<String, PublicKey> keys = decodeKeys("" +
"{\n" +
"  \"keys\": [\n" +
"    {\n" +
"      \"e\": \"AQAB\",\n" +
"      \"n\": \"mvj-0waJ2owQlFWrlC06goLs9PcNehIzCF0QrkdsYZJXOsipcHCFlXBsgQIdTdLvlCzNI07jSYA-zggycYi96lfDX-FYv_CqC8dRLf9TBOPvUgCyFMCFNUTC69hsrEYMR_J79Wj0MIOffiVr6eX-AaCG3KhBMZMh15KCdn3uVrl9coQivy7bk2Uw-aUJ_b26C0gWYj1DnpO4UEEKBk1X-lpeUMh0B_XorqWeq0NYK2pN6CoEIh0UrzYKlGfdnMU1pJJCsNxMiha-Vw3qqxez6oytOV_AswlWvQc7TkSX6cHfqepNskQb7pGxpgQpy9sA34oIxB_S-O7VS7_h0Qh4vQ\",\n" +
"      \"alg\": \"RS256\",\n" +
"      \"use\": \"sig\",\n" +
"      \"kty\": \"RSA\"\n" +
"    },\n" +
"    {\n" +
"      \"kty\": \"EC\",\n" +
"      \"use\": \"sig\",\n" +
"      \"crv\": \"P-256\",\n" +
"      \"x\": \"W9pnAHwUz81LldKjL3BzxO1iHe1Pc0fO6rHkrybVy6Y\",\n" +
"      \"y\": \"XKSNmn_xajgOvWuAiJnWx5I46IwPVJJYPaEpsX3NPZg\",\n" +
"      \"alg\": \"ES256\"\n" +
"    }\n" +
"  ]\n" +
"}");
assertEquals(keys.size(), 0);
}
// An RSA key missing the modulus ("n") is rejected.
@Test
public void testRsaNoModulus()
{
Map<String, PublicKey> keys = decodeKeys("" +
"{\n" +
"  \"keys\": [\n" +
"    {\n" +
"      \"e\": \"AQAB\",\n" +
"      \"alg\": \"RS256\",\n" +
"      \"use\": \"sig\",\n" +
"      \"kty\": \"RSA\",\n" +
"      \"kid\": \"2c6fa6f5950a7ce465fcf247aa0b094828ac952c\"\n" +
"    }\n" +
"  ]\n" +
"}");
assertEquals(keys.size(), 0);
}
// An RSA key missing the public exponent ("e") is rejected.
@Test
public void testRsaNoExponent()
{
Map<String, PublicKey> keys = decodeKeys("" +
"{\n" +
"  \"keys\": [\n" +
"    {\n" +
"      \"n\": \"mvj-0waJ2owQlFWrlC06goLs9PcNehIzCF0QrkdsYZJXOsipcHCFlXBsgQIdTdLvlCzNI07jSYA-zggycYi96lfDX-FYv_CqC8dRLf9TBOPvUgCyFMCFNUTC69hsrEYMR_J79Wj0MIOffiVr6eX-AaCG3KhBMZMh15KCdn3uVrl9coQivy7bk2Uw-aUJ_b26C0gWYj1DnpO4UEEKBk1X-lpeUMh0B_XorqWeq0NYK2pN6CoEIh0UrzYKlGfdnMU1pJJCsNxMiha-Vw3qqxez6oytOV_AswlWvQc7TkSX6cHfqepNskQb7pGxpgQpy9sA34oIxB_S-O7VS7_h0Qh4vQ\",\n" +
"      \"alg\": \"RS256\",\n" +
"      \"use\": \"sig\",\n" +
"      \"kty\": \"RSA\",\n" +
"      \"kid\": \"2c6fa6f5950a7ce465fcf247aa0b094828ac952c\"\n" +
"    }\n" +
"  ]\n" +
"}");
assertEquals(keys.size(), 0);
}
// A modulus that is not valid base64url is rejected.
@Test
public void testRsaInvalidModulus()
{
Map<String, PublicKey> keys = decodeKeys("" +
"{\n" +
"  \"keys\": [\n" +
"    {\n" +
"      \"e\": \"AQAB\",\n" +
"      \"n\": \"!!INVALID!!\",\n" +
"      \"alg\": \"RS256\",\n" +
"      \"use\": \"sig\",\n" +
"      \"kty\": \"RSA\",\n" +
"      \"kid\": \"2c6fa6f5950a7ce465fcf247aa0b094828ac952c\"\n" +
"    }\n" +
"  ]\n" +
"}");
assertEquals(keys.size(), 0);
}
// An exponent that is not valid base64url is rejected.
@Test
public void testRsaInvalidExponent()
{
Map<String, PublicKey> keys = decodeKeys("" +
"{\n" +
"  \"keys\": [\n" +
"    {\n" +
"      \"e\": \"!!INVALID!!\",\n" +
"      \"n\": \"mvj-0waJ2owQlFWrlC06goLs9PcNehIzCF0QrkdsYZJXOsipcHCFlXBsgQIdTdLvlCzNI07jSYA-zggycYi96lfDX-FYv_CqC8dRLf9TBOPvUgCyFMCFNUTC69hsrEYMR_J79Wj0MIOffiVr6eX-AaCG3KhBMZMh15KCdn3uVrl9coQivy7bk2Uw-aUJ_b26C0gWYj1DnpO4UEEKBk1X-lpeUMh0B_XorqWeq0NYK2pN6CoEIh0UrzYKlGfdnMU1pJJCsNxMiha-Vw3qqxez6oytOV_AswlWvQc7TkSX6cHfqepNskQb7pGxpgQpy9sA34oIxB_S-O7VS7_h0Qh4vQ\",\n" +
"      \"alg\": \"RS256\",\n" +
"      \"use\": \"sig\",\n" +
"      \"kty\": \"RSA\",\n" +
"      \"kid\": \"2c6fa6f5950a7ce465fcf247aa0b094828ac952c\"\n" +
"    }\n" +
"  ]\n" +
"}");
assertEquals(keys.size(), 0);
}
// End-to-end: decode JWK RSA key, check it against its PEM twin, then verify a
// freshly signed RS256 JWT using the decoded key via a SigningKeyResolver.
@Test
public void testJwtRsa()
throws Exception
{
String jwkKeys = Resources.toString(Resources.getResource("jwk/jwk-public.json"), UTF_8);
Map<String, PublicKey> keys = decodeKeys(jwkKeys);
RSAPublicKey publicKey = (RSAPublicKey) keys.get("test-rsa");
assertNotNull(publicKey);
// The decoded JWK key must match the reference PEM public key component-wise.
RSAPublicKey expectedPublicKey = (RSAPublicKey) PemReader.loadPublicKey(new File(Resources.getResource("jwk/jwk-rsa-public.pem").getPath()));
assertEquals(publicKey.getPublicExponent(), expectedPublicKey.getPublicExponent());
assertEquals(publicKey.getModulus(), expectedPublicKey.getModulus());
PrivateKey privateKey = PemReader.loadPrivateKey(new File(Resources.getResource("jwk/jwk-rsa-private.pem").getPath()), Optional.empty());
String jwt = Jwts.builder()
.signWith(SignatureAlgorithm.RS256, privateKey)
.setHeaderParam(JwsHeader.KEY_ID, "test-rsa")
.setSubject("test-user")
.setExpiration(Date.from(ZonedDateTime.now().plusMinutes(5).toInstant()))
.compact();
Jws<Claims> claimsJws = Jwts.parser()
.setSigningKeyResolver(new SigningKeyResolver() {
@Override
public Key resolveSigningKey(JwsHeader header, Claims claims)
{
return getKey(header);
}
@Override
public Key resolveSigningKey(JwsHeader header, String plaintext)
{
return getKey(header);
}
// Resolver must be asked for exactly the kid we put in the JWT header.
private Key getKey(JwsHeader<?> header)
{
String keyId = header.getKeyId();
assertEquals(keyId, "test-rsa");
return publicKey;
}
})
.parseClaimsJws(jwt);
assertEquals(claimsJws.getBody().getSubject(), "test-user");
}
// A minimal valid EC key (kid, kty, crv, x, y) decodes successfully.
@Test
public void testEcKey()
{
Map<String, PublicKey> keys = decodeKeys("" +
"{\n" +
"  \"keys\": [\n" +
"    {\n" +
"      \"kid\": \"test-ec\",\n" +
"      \"kty\": \"EC\",\n" +
"      \"crv\": \"P-256\",\n" +
"      \"x\": \"W9pnAHwUz81LldKjL3BzxO1iHe1Pc0fO6rHkrybVy6Y\",\n" +
"      \"y\": \"XKSNmn_xajgOvWuAiJnWx5I46IwPVJJYPaEpsX3NPZg\"\n" +
"    }\n" +
"  ]\n" +
"}");
assertEquals(keys.size(), 1);
assertTrue(keys.get("test-ec") instanceof JwkEcPublicKey);
}
// An unsupported curve name is rejected.
@Test
public void testEcInvalidCurve()
{
Map<String, PublicKey> keys = decodeKeys("" +
"{\n" +
"  \"keys\": [\n" +
"    {\n" +
"      \"kid\": \"test-ec\",\n" +
"      \"kty\": \"EC\",\n" +
"      \"crv\": \"taco\",\n" +
"      \"x\": \"W9pnAHwUz81LldKjL3BzxO1iHe1Pc0fO6rHkrybVy6Y\",\n" +
"      \"y\": \"XKSNmn_xajgOvWuAiJnWx5I46IwPVJJYPaEpsX3NPZg\"\n" +
"    }\n" +
"  ]\n" +
"}");
assertEquals(keys.size(), 0);
}
// A non-base64url "x" coordinate is rejected.
@Test
public void testEcInvalidX()
{
Map<String, PublicKey> keys = decodeKeys("" +
"{\n" +
"  \"keys\": [\n" +
"    {\n" +
"      \"kid\": \"test-ec\",\n" +
"      \"kty\": \"EC\",\n" +
"      \"crv\": \"P-256\",\n" +
"      \"x\": \"!!INVALID!!\",\n" +
"      \"y\": \"XKSNmn_xajgOvWuAiJnWx5I46IwPVJJYPaEpsX3NPZg\"\n" +
"    }\n" +
"  ]\n" +
"}");
assertEquals(keys.size(), 0);
}
// A non-base64url "y" coordinate is rejected.
@Test
public void testEcInvalidY()
{
Map<String, PublicKey> keys = decodeKeys("" +
"{\n" +
"  \"keys\": [\n" +
"    {\n" +
"      \"kid\": \"test-ec\",\n" +
"      \"kty\": \"EC\",\n" +
"      \"crv\": \"P-256\",\n" +
"      \"x\": \"W9pnAHwUz81LldKjL3BzxO1iHe1Pc0fO6rHkrybVy6Y\",\n" +
"      \"y\": \"!!INVALID!!\"\n" +
"    }\n" +
"  ]\n" +
"}");
assertEquals(keys.size(), 0);
}
// JWT round trips across all supported EC curves.
// Note: key name "jwk-ec-p512" pairs with curve P_521 (the NIST curve is P-521).
@Test
public void testJwtEc()
throws Exception
{
assertJwtEc("jwk-ec-p256", SignatureAlgorithm.ES256, EcCurve.P_256);
assertJwtEc("jwk-ec-p384", SignatureAlgorithm.ES384, EcCurve.P_384);
assertJwtEc("jwk-ec-p512", SignatureAlgorithm.ES512, EcCurve.P_521);
assertJwtEc("jwk-ec-secp256k1", SignatureAlgorithm.ES256, EcCurve.SECP256K1);
}
/**
 * Decodes the named EC key from the shared JWK fixture, compares it point- and
 * parameter-wise against its PEM reference, then signs and verifies a short-lived
 * JWT with the matching private key.
 */
private static void assertJwtEc(String keyName, SignatureAlgorithm signatureAlgorithm, ECParameterSpec expectedSpec)
throws Exception
{
String jwkKeys = Resources.toString(Resources.getResource("jwk/jwk-public.json"), UTF_8);
Map<String, PublicKey> keys = decodeKeys(jwkKeys);
ECPublicKey publicKey = (ECPublicKey) keys.get(keyName);
assertNotNull(publicKey);
// assertSame: the decoder is expected to reuse the shared curve spec instance.
assertSame(publicKey.getParams(), expectedSpec);
ECPublicKey expectedPublicKey = (ECPublicKey) PemReader.loadPublicKey(new File(Resources.getResource("jwk/" + keyName + "-public.pem").getPath()));
assertEquals(publicKey.getW(), expectedPublicKey.getW());
assertEquals(publicKey.getParams().getCurve(), expectedPublicKey.getParams().getCurve());
assertEquals(publicKey.getParams().getGenerator(), expectedPublicKey.getParams().getGenerator());
assertEquals(publicKey.getParams().getOrder(), expectedPublicKey.getParams().getOrder());
assertEquals(publicKey.getParams().getCofactor(), expectedPublicKey.getParams().getCofactor());
PrivateKey privateKey = PemReader.loadPrivateKey(new File(Resources.getResource("jwk/" + keyName + "-private.pem").getPath()), Optional.empty());
String jwt = Jwts.builder()
.signWith(signatureAlgorithm, privateKey)
.setHeaderParam(JwsHeader.KEY_ID, keyName)
.setSubject("test-user")
.setExpiration(Date.from(ZonedDateTime.now().plusMinutes(5).toInstant()))
.compact();
Jws<Claims> claimsJws = Jwts.parser()
.setSigningKeyResolver(new SigningKeyResolver() {
@Override
public Key resolveSigningKey(JwsHeader header, Claims claims)
{
return getKey(header);
}
@Override
public Key resolveSigningKey(JwsHeader header, String plaintext)
{
return getKey(header);
}
// Resolver must be asked for exactly the kid we put in the JWT header.
private Key getKey(JwsHeader<?> header)
{
String keyId = header.getKeyId();
assertEquals(keyId, keyName);
return publicKey;
}
})
.parseClaimsJws(jwt);
assertEquals(claimsJws.getBody().getSubject(), "test-user");
}
}
| |
package com.sequenceiq.cloudbreak.orchestrator.salt.poller;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.both;
import static org.hamcrest.core.IsNot.not;
import static org.hamcrest.core.StringContains.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doCallRealMethod;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import com.sequenceiq.cloudbreak.orchestrator.exception.CloudbreakOrchestratorFailedException;
import com.sequenceiq.cloudbreak.orchestrator.exception.CloudbreakOrchestratorInProgressException;
import com.sequenceiq.cloudbreak.orchestrator.exception.CloudbreakOrchestratorTerminateException;
import com.sequenceiq.cloudbreak.orchestrator.salt.SaltErrorResolver;
import com.sequenceiq.cloudbreak.orchestrator.salt.client.SaltConnector;
import com.sequenceiq.cloudbreak.orchestrator.salt.client.target.Target;
import com.sequenceiq.cloudbreak.orchestrator.salt.domain.JobId;
import com.sequenceiq.cloudbreak.orchestrator.salt.domain.JobState;
import com.sequenceiq.cloudbreak.orchestrator.salt.domain.RunningJobsResponse;
import com.sequenceiq.cloudbreak.orchestrator.salt.states.SaltStates;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
@RunWith(PowerMockRunner.class)
@PrepareForTest(SaltStates.class)
public class SaltJobIdTrackerTest {
// Captures the salt Target arguments passed through the mocked static SaltStates calls.
@Captor
private ArgumentCaptor<Target<String>> targetCaptor;
// Initializes @Captor (and any other Mockito annotations) before each test.
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
}
// A NOT_STARTED job with no other running salt jobs is submitted, then reported
// IN_PROGRESS: call() must throw the in-progress exception naming all targets.
@SuppressFBWarnings("RV_RETURN_VALUE_IGNORED_NO_SIDE_EFFECT")
@Test
public void callWithNotStarted() throws Exception {
String jobId = "1";
SaltConnector saltConnector = Mockito.mock(SaltConnector.class);
SaltJobRunner saltJobRunner = Mockito.mock(SaltJobRunner.class);
// Static SaltStates is mocked: the submitted job id reports as still running.
PowerMockito.mockStatic(SaltStates.class);
PowerMockito.when(SaltStates.jobIsRunning(any(), any())).thenReturn(true);
RunningJobsResponse jobsResponse = new RunningJobsResponse();
// Empty result: no pre-existing running jobs, so submission may proceed.
jobsResponse.setResult(List.of());
PowerMockito.when(SaltStates.getRunningJobs(saltConnector)).thenReturn(jobsResponse);
Set<String> targets = new HashSet<>();
targets.add("10.0.0.1");
targets.add("10.0.0.2");
targets.add("10.0.0.3");
when(saltJobRunner.getTargetHostnames()).thenReturn(targets);
when(saltJobRunner.getJid()).thenReturn(JobId.jobId(jobId));
// First poll sees NOT_STARTED, second sees IN_PROGRESS.
when(saltJobRunner.getJobState()).thenReturn(JobState.NOT_STARTED, JobState.IN_PROGRESS);
when(saltJobRunner.submit(any(SaltConnector.class))).thenReturn(jobId);
SaltJobIdTracker saltJobIdTracker = new SaltJobIdTracker(saltConnector, saltJobRunner);
try {
saltJobIdTracker.call();
fail("should throw exception");
} catch (CloudbreakOrchestratorInProgressException e) {
assertThat(e.getMessage(), allOf(containsString("Target:"), containsString("10.0.0.1"),
containsString("10.0.0.2"), containsString("10.0.0.3")));
}
PowerMockito.verifyStatic(SaltStates.class);
SaltStates.jobIsRunning(any(), eq(jobId));
checkTargets(targets, targetCaptor.getAllValues());
verify(saltJobRunner, times(2)).getJobState();
}
// A NOT_STARTED job must not be submitted while another salt job is already running:
// call() throws an in-progress exception that names the running job id instead.
@SuppressFBWarnings("RV_RETURN_VALUE_IGNORED_NO_SIDE_EFFECT")
@Test
public void callWithNotStartedWithAlreadyRunning() throws Exception {
String jobId = "1";
SaltConnector saltConnector = Mockito.mock(SaltConnector.class);
SaltJobRunner saltJobRunner = Mockito.mock(SaltJobRunner.class);
PowerMockito.mockStatic(SaltStates.class);
PowerMockito.when(SaltStates.jobIsRunning(any(), any())).thenReturn(true);
RunningJobsResponse jobsResponse = new RunningJobsResponse();
// Non-empty result: "runningJob" is already executing on the salt master.
jobsResponse.setResult(List.of(Map.of("runningJob", Map.of())));
PowerMockito.when(SaltStates.getRunningJobs(saltConnector)).thenReturn(jobsResponse);
Set<String> targets = new HashSet<>();
targets.add("10.0.0.1");
targets.add("10.0.0.2");
targets.add("10.0.0.3");
when(saltJobRunner.getTargetHostnames()).thenReturn(targets);
when(saltJobRunner.getJid()).thenReturn(JobId.jobId(jobId));
when(saltJobRunner.getJobState()).thenReturn(JobState.NOT_STARTED);
when(saltJobRunner.submit(any(SaltConnector.class))).thenReturn(jobId);
SaltJobIdTracker saltJobIdTracker = new SaltJobIdTracker(saltConnector, saltJobRunner);
try {
saltJobIdTracker.call();
fail("should throw exception");
} catch (CloudbreakOrchestratorInProgressException e) {
assertThat(e.getMessage(), allOf(containsString("There are running job(s) with id:"), containsString("runningJob")));
}
// Job state is consulted exactly once — tracker bails out before submitting.
verify(saltJobRunner, times(1)).getJobState();
}
// A runner already in FAILED state with retryOnFail disabled (third ctor arg false)
// must surface a terminate exception carrying the per-node error details.
@Test
public void callWithFailed() throws Exception {
String jobId = "1";
SaltConnector saltConnector = Mockito.mock(SaltConnector.class);
PowerMockito.mockStatic(SaltStates.class);
PowerMockito.when(SaltStates.jobIsRunning(any(), any())).thenReturn(true);
// Partial mock: real state getter/setter so the FAILED state below actually sticks.
SaltJobRunner saltJobRunner = Mockito.mock(BaseSaltJobRunner.class);
when(saltJobRunner.getJid()).thenReturn(JobId.jobId(jobId));
when(saltJobRunner.getJobState()).thenCallRealMethod();
doCallRealMethod().when(saltJobRunner).setJobState(any());
Set<String> targets = new HashSet<>();
targets.add("10.0.0.1");
targets.add("10.0.0.3");
when(saltJobRunner.getTargetHostnames()).thenReturn(targets);
when(saltJobRunner.submit(any(SaltConnector.class))).thenReturn(jobId);
saltJobRunner.setJobState(JobState.FAILED);
Multimap<String, String> multimap = ArrayListMultimap.create();
multimap.put("10.0.0.1", "some error");
when(saltJobRunner.getNodesWithError()).thenReturn(multimap);
// false => do not retry on failure; a failed job terminates immediately.
SaltJobIdTracker saltJobIdTracker = new SaltJobIdTracker(saltConnector, saltJobRunner, false);
try {
saltJobIdTracker.call();
fail("should throw exception");
} catch (CloudbreakOrchestratorTerminateException e) {
assertThat(e.getMessage(), allOf(containsString("Target:"), containsString("10.0.0.1"), containsString("10.0.0.3"),
containsString("Node: 10.0.0.1 Error(s): some error")));
}
}
/** Asserts that every captured target expression mentions every expected host. */
private void checkTargets(Set<String> targets, List<Target<String>> allValues) {
    allValues.forEach(capturedTarget ->
            targets.forEach(expectedHost ->
                    assertThat(capturedTarget.getTarget(), containsString(expectedHost))));
}
// An IN_PROGRESS job whose jid is still running on the salt master raises the
// in-progress exception listing all targets.
@Test
public void callWithInProgressAndJobIsRunning() throws Exception {
String jobId = "1";
SaltConnector saltConnector = Mockito.mock(SaltConnector.class);
// Partial mock with real state accessors so IN_PROGRESS set below is observed.
SaltJobRunner saltJobRunner = Mockito.mock(BaseSaltJobRunner.class);
when(saltJobRunner.getJid()).thenReturn(JobId.jobId(jobId));
when(saltJobRunner.getJobState()).thenCallRealMethod();
doCallRealMethod().when(saltJobRunner).setJobState(any());
when(saltJobRunner.submit(any(SaltConnector.class))).thenReturn(jobId);
saltJobRunner.setJobState(JobState.IN_PROGRESS);
Set<String> targets = new HashSet<>();
targets.add("10.0.0.1");
targets.add("10.0.0.2");
targets.add("10.0.0.3");
when(saltJobRunner.getTargetHostnames()).thenReturn(targets);
PowerMockito.mockStatic(SaltStates.class);
PowerMockito.when(SaltStates.jobIsRunning(any(), any())).thenReturn(true);
SaltJobIdTracker saltJobIdTracker = new SaltJobIdTracker(saltConnector, saltJobRunner);
try {
saltJobIdTracker.call();
} catch (CloudbreakOrchestratorInProgressException e) {
assertThat(e.getMessage(), allOf(containsString("Target:"), containsString("10.0.0.1"), containsString("10.0.0.2"),
containsString("10.0.0.3")));
}
PowerMockito.verifyStatic(SaltStates.class);
SaltStates.jobIsRunning(any(), eq(jobId));
checkTargets(targets, targetCaptor.getAllValues());
}
// An IN_PROGRESS job whose jid has stopped running and whose jid-info reports no
// node errors completes: call() returns true and the state moves to FINISHED.
@Test
public void callWithInProgressAndJobIsFinished() throws Exception {
String jobId = "1";
SaltConnector saltConnector = Mockito.mock(SaltConnector.class);
SaltJobRunner saltJobRunner = Mockito.mock(BaseSaltJobRunner.class);
when(saltJobRunner.getJid()).thenReturn(JobId.jobId(jobId));
when(saltJobRunner.getJobState()).thenCallRealMethod();
doCallRealMethod().when(saltJobRunner).setJobState(any());
SaltErrorResolver saltErrorResolver = Mockito.mock(SaltErrorResolver.class);
when(saltConnector.getSaltErrorResolver()).thenReturn(saltErrorResolver);
when(saltJobRunner.submit(any(SaltConnector.class))).thenReturn(jobId);
saltJobRunner.setJobState(JobState.IN_PROGRESS);
Set<String> targets = new HashSet<>();
targets.add("10.0.0.1");
targets.add("10.0.0.2");
targets.add("10.0.0.3");
when(saltJobRunner.getTargetHostnames()).thenReturn(targets);
PowerMockito.mockStatic(SaltStates.class);
// Job no longer running and jid-info is empty => clean finish.
PowerMockito.when(SaltStates.jobIsRunning(any(), any())).thenReturn(false);
Multimap<String, Map<String, String>> missingNodesWithReason = ArrayListMultimap.create();
Multimap<String, String> missingNodesWithResolvedReason = ArrayListMultimap.create();
PowerMockito.when(SaltStates.jidInfo(any(), any(), any())).thenReturn(missingNodesWithReason);
when(saltErrorResolver.resolveErrorMessages(missingNodesWithReason)).thenReturn(missingNodesWithResolvedReason);
SaltJobIdTracker underTest = new SaltJobIdTracker(saltConnector, saltJobRunner);
assertTrue(underTest.call());
assertEquals(JobState.FINISHED, saltJobRunner.getJobState());
PowerMockito.verifyStatic(SaltStates.class);
SaltStates.jobIsRunning(any(), eq(jobId));
checkTargets(targets, targetCaptor.getAllValues());
}
// A finished job with one node missing from the results fails: the resolved error
// message for that node must appear in the thrown orchestrator failure.
@Test
public void callWithInProgressAndMissingNodes() throws Exception {
String jobId = "1";
try (SaltConnector saltConnector = Mockito.mock(SaltConnector.class)) {
// Partial mock with real state and error-collection accessors.
SaltJobRunner saltJobRunner = Mockito.mock(BaseSaltJobRunner.class);
when(saltJobRunner.getJid()).thenReturn(JobId.jobId(jobId));
when(saltJobRunner.getJobState()).thenCallRealMethod();
doCallRealMethod().when(saltJobRunner).setJobState(any());
when(saltJobRunner.getNodesWithError()).thenCallRealMethod();
doCallRealMethod().when(saltJobRunner).setNodesWithError(any());
when(saltJobRunner.submit(any(SaltConnector.class))).thenReturn(jobId);
SaltErrorResolver saltErrorResolver = Mockito.mock(SaltErrorResolver.class);
when(saltConnector.getSaltErrorResolver()).thenReturn(saltErrorResolver);
saltJobRunner.setJobState(JobState.IN_PROGRESS);
Set<String> targets = new HashSet<>();
targets.add("10.0.0.1");
targets.add("10.0.0.2");
targets.add("10.0.0.3");
when(saltJobRunner.getTargetHostnames()).thenReturn(targets);
// Raw jid-info reason for the missing node, plus its resolver-translated form.
Multimap<String, Map<String, String>> missingNodesWithReason = ArrayListMultimap.create();
Multimap<String, String> missingNodesWithResolvedReason = ArrayListMultimap.create();
String missingMachine = "10.0.0.1";
missingNodesWithReason.put(missingMachine, Collections.singletonMap("Name", "some-script.sh"));
missingNodesWithResolvedReason.put(missingMachine, "Failed to execute: {Name=some-script.sh}");
when(saltErrorResolver.resolveErrorMessages(missingNodesWithReason)).thenReturn(missingNodesWithResolvedReason);
PowerMockito.mockStatic(SaltStates.class);
PowerMockito.when(SaltStates.jobIsRunning(any(), any())).thenReturn(false);
PowerMockito.when(SaltStates.jidInfo(any(SaltConnector.class), anyString(), any())).thenReturn(missingNodesWithReason);
try {
new SaltJobIdTracker(saltConnector, saltJobRunner).call();
fail("should throw exception");
} catch (CloudbreakOrchestratorFailedException e) {
assertThat(e.getMessage(), allOf(containsString("Node: 10.0.0.1 Error(s): Failed to execute: {Name=some-script.sh}"),
containsString("Target:"), containsString("10.0.0.1"), containsString("10.0.0.2"),
containsString("10.0.0.3")));
}
PowerMockito.verifyStatic(SaltStates.class);
SaltStates.jobIsRunning(any(), eq(jobId));
checkTargets(targets, targetCaptor.getAllValues());
}
}
// Same missing-node scenario, but the jid-info reason includes a Stderr entry:
// the resolver's stderr-derived message must be the one surfaced to the caller.
@Test
public void callWithMissingNodesUsingStderrFailures() throws Exception {
String jobId = "1";
try (SaltConnector saltConnector = Mockito.mock(SaltConnector.class)) {
SaltJobRunner saltJobRunner = Mockito.mock(BaseSaltJobRunner.class);
when(saltJobRunner.getJid()).thenReturn(JobId.jobId(jobId));
when(saltJobRunner.getJobState()).thenCallRealMethod();
doCallRealMethod().when(saltJobRunner).setJobState(any());
when(saltJobRunner.getNodesWithError()).thenCallRealMethod();
doCallRealMethod().when(saltJobRunner).setNodesWithError(any());
when(saltJobRunner.submit(any(SaltConnector.class))).thenReturn(jobId);
SaltErrorResolver saltErrorResolver = Mockito.mock(SaltErrorResolver.class);
when(saltConnector.getSaltErrorResolver()).thenReturn(saltErrorResolver);
saltJobRunner.setJobState(JobState.IN_PROGRESS);
Set<String> targets = new HashSet<>();
targets.add("10.0.0.1");
targets.add("10.0.0.2");
targets.add("10.0.0.3");
when(saltJobRunner.getTargetHostnames()).thenReturn(targets);
// Raw reason carries both the script name and its stderr output.
Multimap<String, Map<String, String>> missingNodesWithReason = ArrayListMultimap.create();
Multimap<String, String> missingNodesWithResolvedReason = ArrayListMultimap.create();
String missingMachine = "10.0.0.1";
missingNodesWithReason.put(missingMachine, Map.of("Name", "/opt/salt/scripts/backup_db.sh", "Stderr", "Could not create backup"));
missingNodesWithResolvedReason.put(missingMachine, "Could not create backup");
when(saltErrorResolver.resolveErrorMessages(missingNodesWithReason)).thenReturn(missingNodesWithResolvedReason);
PowerMockito.mockStatic(SaltStates.class);
PowerMockito.when(SaltStates.jobIsRunning(any(), any())).thenReturn(false);
PowerMockito.when(SaltStates.jidInfo(any(SaltConnector.class), anyString(), any())).thenReturn(missingNodesWithReason);
try {
new SaltJobIdTracker(saltConnector, saltJobRunner).call();
fail("should throw exception");
} catch (CloudbreakOrchestratorFailedException e) {
assertThat(e.getMessage(), allOf(containsString("Node: 10.0.0.1 Error(s): Could not create backup"),
containsString("Target:"), containsString("10.0.0.1"), containsString("10.0.0.2"),
containsString("10.0.0.3")));
}
PowerMockito.verifyStatic(SaltStates.class);
SaltStates.jobIsRunning(any(), eq(jobId));
checkTargets(targets, targetCaptor.getAllValues());
}
}
// With retryOnFail disabled, a FAILED job with recorded node errors raises a
// terminate exception that names both the failing machine and its error message.
@Test
public void callWithInProgressAndMissingNodesAndNoRetryOnFail() throws Exception {
String jobId = "1";
try (SaltConnector saltConnector = Mockito.mock(SaltConnector.class)) {
SaltJobRunner saltJobRunner = Mockito.mock(BaseSaltJobRunner.class);
when(saltJobRunner.getJid()).thenReturn(JobId.jobId(jobId));
when(saltJobRunner.getJobState()).thenCallRealMethod();
doCallRealMethod().when(saltJobRunner).setJobState(any());
// Pre-recorded node error that the exception message must include.
Multimap<String, String> missingNodesWithReason = ArrayListMultimap.create();
String missingMachine = "10.0.0.1";
String errorMessage = "Name: some-script.sh";
missingNodesWithReason.put(missingMachine, errorMessage);
when(saltJobRunner.getNodesWithError()).thenReturn(missingNodesWithReason);
when(saltJobRunner.submit(any(SaltConnector.class))).thenReturn(jobId);
saltJobRunner.setJobState(JobState.FAILED);
Set<String> targets = Sets.newHashSet("10.0.0.1", "10.0.0.2", "10.0.0.3");
when(saltJobRunner.getTargetHostnames()).thenReturn(targets);
try {
// false => no retry; the failed state terminates immediately.
new SaltJobIdTracker(saltConnector, saltJobRunner, false).call();
fail("should throw exception");
} catch (CloudbreakOrchestratorTerminateException e) {
assertThat(e.getMessage(), both(containsString(missingMachine)).and(containsString(errorMessage)));
}
checkTargets(targets, targetCaptor.getAllValues());
}
}
@Test
public void callWithNotStartedAndSlsWithError() throws Exception {
    // When the sls execution blows up inside jidInfo, the tracker must wrap
    // the underlying salt error message without leaking "Exception" noise.
    String jid = "1";
    try (SaltConnector connector = Mockito.mock(SaltConnector.class)) {
        SaltJobRunner jobRunner = Mockito.mock(BaseSaltJobRunner.class);
        when(jobRunner.getJid()).thenReturn(JobId.jobId(jid));
        when(jobRunner.getJobState()).thenCallRealMethod();
        doCallRealMethod().when(jobRunner).setJobState(any());
        when(jobRunner.getNodesWithError()).thenCallRealMethod();
        doCallRealMethod().when(jobRunner).setNodesWithError(any());
        when(jobRunner.submit(any(SaltConnector.class))).thenReturn(jid);
        jobRunner.setJobState(JobState.NOT_STARTED);
        Set<String> targetHosts = Sets.newHashSet("10.0.0.1", "10.0.0.2", "10.0.0.3");
        when(jobRunner.getTargetHostnames()).thenReturn(targetHosts);
        PowerMockito.mockStatic(SaltStates.class);
        PowerMockito.when(SaltStates.jobIsRunning(any(), any())).thenReturn(false);
        PowerMockito.when(SaltStates.jidInfo(any(SaltConnector.class), anyString(), any()))
                .thenThrow(new RuntimeException("Salt execution went wrong: saltErrorDetails"));
        // No other jobs are running, so the tracker proceeds to inspect jidInfo.
        RunningJobsResponse runningJobs = new RunningJobsResponse();
        runningJobs.setResult(List.of());
        PowerMockito.when(SaltStates.getRunningJobs(connector)).thenReturn(runningJobs);
        try {
            new SaltJobIdTracker(connector, jobRunner).call();
            fail("should throw exception");
        } catch (CloudbreakOrchestratorFailedException e) {
            assertThat(e.getMessage(), containsString("Salt execution went wrong: saltErrorDetails"));
            assertThat(e.getMessage(), not(containsString("Exception")));
        }
        PowerMockito.verifyStatic(SaltStates.class);
        SaltStates.jobIsRunning(any(), eq(jid));
        checkTargets(targetHosts, targetCaptor.getAllValues());
    }
}
}
| |
/*
* Copyright 2009-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudfoundry.client.lib.util;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.cloudfoundry.client.lib.domain.CloudDomain;
import org.cloudfoundry.client.lib.domain.CloudEntity;
import org.cloudfoundry.client.lib.domain.CloudOrganization;
import org.cloudfoundry.client.lib.domain.CloudRoute;
import org.cloudfoundry.client.lib.domain.CloudService;
import org.cloudfoundry.client.lib.domain.CloudServiceOffering;
import org.cloudfoundry.client.lib.domain.CloudServicePlan;
import org.cloudfoundry.client.lib.domain.CloudSpace;
import org.cloudfoundry.client.lib.domain.CloudStack;
/**
* Class handling the mapping of the cloud domain objects
*
* @author Thomas Risberg
*/
//TODO: use some more advanced JSON mapping framework?
/**
 * Handles the mapping of raw Cloud Controller (v2) JSON resources — already
 * deserialized into {@code Map<String, Object>} form — onto the cloud domain
 * objects.
 *
 * <p>Each resource follows the CC v2 envelope: a {@code "metadata"} map
 * carrying guid / created_at / updated_at, and an {@code "entity"} map
 * carrying the attributes and embedded resources.
 *
 * @author Thomas Risberg
 */
//TODO: use some more advanced JSON mapping framework?
public class CloudEntityResourceMapper {

    /**
     * Timestamp pattern produced by the Cloud Controller, e.g.
     * "2013-09-19T21:56:36+0000". {@link SimpleDateFormat} is NOT thread-safe,
     * so {@link #parseDate(String)} creates a fresh instance per call instead
     * of sharing a single static formatter across threads.
     */
    private static final String TIMESTAMP_PATTERN = "yyyy-MM-dd'T'HH:mm:ssZ";

    /** Returns the entity's "name" attribute, or {@code null} when absent. */
    public String getNameOfResource(Map<String, Object> resource) {
        return getEntityAttribute(resource, "name", String.class);
    }

    /** Returns the resource guid taken from its metadata block. */
    public UUID getGuidOfResource(Map<String, Object> resource) {
        return getMeta(resource).getGuid();
    }

    /**
     * Maps a raw resource onto one of the supported domain classes.
     *
     * @param resource    raw CC v2 resource (metadata + entity)
     * @param targetClass the domain class to map to
     * @return the mapped domain object
     * @throws IllegalArgumentException if {@code targetClass} is not supported
     */
    @SuppressWarnings("unchecked")
    public <T> T mapResource(Map<String, Object> resource, Class<T> targetClass) {
        if (targetClass == CloudSpace.class) {
            return (T) mapSpaceResource(resource);
        }
        if (targetClass == CloudOrganization.class) {
            return (T) mapOrganizationResource(resource);
        }
        if (targetClass == CloudDomain.class) {
            return (T) mapDomainResource(resource);
        }
        if (targetClass == CloudRoute.class) {
            return (T) mapRouteResource(resource);
        }
//		if (targetClass == CloudApplication.class) {
//			return (T) mapApplicationResource(resource);
//		}
        if (targetClass == CloudService.class) {
            return (T) mapServiceInstanceResource(resource);
        }
        if (targetClass == CloudServiceOffering.class) {
            return (T) mapServiceResource(resource);
        }
        if (targetClass == CloudStack.class) {
            return (T) mapStackResource(resource);
        }
        throw new IllegalArgumentException(
                "Error during mapping - unsupported class for entity mapping " + targetClass.getName());
    }

    /** Maps a space resource; the embedded organization is optional. */
    private CloudSpace mapSpaceResource(Map<String, Object> resource) {
        Map<String, Object> organizationMap = getEmbeddedResource(resource, "organization");
        CloudOrganization organization = null;
        if (organizationMap != null) {
            organization = mapOrganizationResource(organizationMap);
        }
        return new CloudSpace(getMeta(resource), getNameOfResource(resource), organization);
    }

    private CloudOrganization mapOrganizationResource(Map<String, Object> resource) {
        Boolean billingEnabled = getEntityAttribute(resource, "billing_enabled", Boolean.class);
        return new CloudOrganization(getMeta(resource), getNameOfResource(resource), billingEnabled);
    }

    /** Maps a domain resource; shared domains have no owning organization. */
    private CloudDomain mapDomainResource(Map<String, Object> resource) {
        @SuppressWarnings("unchecked")
        Map<String, Object> ownerResource = getEntityAttribute(resource, "owning_organization", Map.class);
        CloudOrganization owner;
        if (ownerResource == null) {
            // shared domain — represent the missing owner with a placeholder org
            owner = new CloudOrganization(CloudEntity.Meta.defaultMeta(), "none");
        } else {
            owner = mapOrganizationResource(ownerResource);
        }
        return new CloudDomain(getMeta(resource), getNameOfResource(resource), owner);
    }

    private CloudRoute mapRouteResource(Map<String, Object> resource) {
        @SuppressWarnings("unchecked")
        List<Object> apps = getEntityAttribute(resource, "apps", List.class);
        String host = getEntityAttribute(resource, "host", String.class);
        CloudDomain domain = mapDomainResource(getEmbeddedResource(resource, "domain"));
        // "apps" may be absent when the route list was fetched without that relation
        int appsUsingRoute = (apps == null) ? 0 : apps.size();
        return new CloudRoute(getMeta(resource), host, domain, appsUsingRoute);
    }

//	@SuppressWarnings({ "unchecked", "rawtypes" })
//	private CloudApplication mapApplicationResource(Map<String, Object> resource) {
//		CloudApplication app = new CloudApplication(
//				getMeta(resource),
//				getNameOfResource(resource));
//		app.setInstances(getEntityAttribute(resource, "instances", Integer.class));
//		app.setServices(new ArrayList<String>());
//		app.setState(CloudApplication.AppState.valueOf(getEntityAttribute(resource, "state", String.class)));
//		//TODO: debug
//		app.setDebug(null);
//
//		Integer runningInstancesAttribute = getEntityAttribute(resource, "running_instances", Integer.class);
//		if (runningInstancesAttribute != null) {
//			app.setRunningInstances(runningInstancesAttribute);
//		}
//		String command = getEntityAttribute(resource, "command", String.class);
//		String buildpack = getEntityAttribute(resource, "buildpack", String.class);
//		Map<String, Object> stackResource = getEmbeddedResource(resource, "stack");
//		CloudStack stack = mapStackResource(stackResource);
//		Integer healthCheckTimeout = getEntityAttribute(resource, "health_check_timeout", Integer.class);
//		Staging staging = new Staging(command, buildpack, stack.getName(), healthCheckTimeout);
//		app.setStaging(staging);
//
//		Map envMap = getEntityAttribute(resource, "environment_json", Map.class);
//		if (envMap.size() > 0) {
//			app.setEnv(envMap);
//		}
//		app.setMemory(getEntityAttribute(resource, "memory", Integer.class));
//		app.setDiskQuota(getEntityAttribute(resource, "disk_quota", Integer.class));
//		List<Map<String, Object>> serviceBindings = getEntityAttribute(resource, "service_bindings", List.class);
//		List<String> serviceList = new ArrayList<String>();
//		for (Map<String, Object> binding : serviceBindings) {
//			Map<String, Object> service = getEntityAttribute(binding, "service_instance", Map.class);
//			String serviceName = getNameOfResource(service);
//			if (serviceName != null) {
//				serviceList.add(serviceName);
//			}
//		}
//		app.setServices(serviceList);
//		return app;
//	}

    /** Maps a service instance, pulling plan/label/provider/version from the embedded plan. */
    private CloudService mapServiceInstanceResource(Map<String, Object> resource) {
        CloudService cloudService = new CloudService(
                getMeta(resource),
                getNameOfResource(resource));
        Map<String, Object> servicePlanResource = getEmbeddedResource(resource, "service_plan");
        if (servicePlanResource != null) {
            cloudService.setPlan(getEntityAttribute(servicePlanResource, "name", String.class));
            Map<String, Object> serviceResource = getEmbeddedResource(servicePlanResource, "service");
            if (serviceResource != null) {
                //TODO: assuming vendor corresponds to the service.provider and not service_instance.vendor_data
                cloudService.setLabel(getEntityAttribute(serviceResource, "label", String.class));
                cloudService.setProvider(getEntityAttribute(serviceResource, "provider", String.class));
                cloudService.setVersion(getEntityAttribute(serviceResource, "version", String.class));
            }
        }
        return cloudService;
    }

    /** Maps a service offering together with its embedded service plans. */
    private CloudServiceOffering mapServiceResource(Map<String, Object> resource) {
        CloudServiceOffering cloudServiceOffering = new CloudServiceOffering(
                getMeta(resource),
                getEntityAttribute(resource, "label", String.class),
                getEntityAttribute(resource, "provider", String.class),
                getEntityAttribute(resource, "version", String.class),
                getEntityAttribute(resource, "description", String.class),
                getEntityAttribute(resource, "active", Boolean.class),
                getEntityAttribute(resource, "bindable", Boolean.class),
                getEntityAttribute(resource, "url", String.class),
                getEntityAttribute(resource, "info_url", String.class),
                getEntityAttribute(resource, "unique_id", String.class),
                getEntityAttribute(resource, "extra", String.class),
                getEntityAttribute(resource, "documentation_url", String.class));
        List<Map<String, Object>> servicePlanList = getEmbeddedResourceList(getEntity(resource), "service_plans");
        if (servicePlanList != null) {
            for (Map<String, Object> servicePlanResource : servicePlanList) {
                CloudServicePlan servicePlan =
                        new CloudServicePlan(
                                getMeta(servicePlanResource),
                                getEntityAttribute(servicePlanResource, "name", String.class),
                                getEntityAttribute(servicePlanResource, "description", String.class),
                                getEntityAttribute(servicePlanResource, "free", Boolean.class),
                                getEntityAttribute(servicePlanResource, "public", Boolean.class),
                                getEntityAttribute(servicePlanResource, "extra", String.class),
                                getEntityAttribute(servicePlanResource, "unique_id", String.class),
                                cloudServiceOffering);
                cloudServiceOffering.addCloudServicePlan(servicePlan);
            }
        }
        return cloudServiceOffering;
    }

    private CloudStack mapStackResource(Map<String, Object> resource) {
        return new CloudStack(getMeta(resource),
                getNameOfResource(resource),
                getEntityAttribute(resource, "description", String.class));
    }

    /**
     * Extracts the {@code "metadata"} block (guid + created/updated timestamps)
     * of a resource.
     */
    @SuppressWarnings("unchecked")
    public static CloudEntity.Meta getMeta(Map<String, Object> resource) {
        Map<String, Object> metadata = (Map<String, Object>) resource.get("metadata");
        UUID guid = UUID.fromString(String.valueOf(metadata.get("guid")));
        Date createdDate = parseDate(String.valueOf(metadata.get("created_at")));
        Date updatedDate = parseDate(String.valueOf(metadata.get("updated_at")));
        return new CloudEntity.Meta(guid, createdDate, updatedDate);
    }

    /**
     * Best-effort parse of a CC timestamp; returns {@code null} for missing or
     * unparsable values. A new SimpleDateFormat is created per call because
     * the class is not thread-safe (the previous shared static instance could
     * produce corrupted dates under concurrent access).
     */
    private static Date parseDate(String dateString) {
        if (dateString != null) {
            try {
                // if the time zone part of the dateString contains a colon (e.g. 2013-09-19T21:56:36+00:00)
                // then remove it before parsing, so it matches the RFC 822 style "Z" pattern
                String isoDateString = dateString.replaceFirst(":(?=[0-9]{2}$)", "");
                return new SimpleDateFormat(TIMESTAMP_PATTERN).parse(isoDateString);
            } catch (Exception ignore) {
                // intentional: unparsable timestamps degrade to null rather than failing the mapping
            }
        }
        return null;
    }

    /** Returns the {@code "entity"} block of a resource. */
    @SuppressWarnings("unchecked")
    public static Map<String, Object> getEntity(Map<String, Object> resource) {
        return (Map<String, Object>) resource.get("entity");
    }

    /**
     * Reads a single attribute from the resource's entity block and coerces it
     * to {@code targetClass}.
     *
     * @return the attribute value, or {@code null} when the resource or the
     *         attribute is absent
     * @throws IllegalArgumentException if {@code targetClass} is not supported
     */
    @SuppressWarnings("unchecked")
    public static <T> T getEntityAttribute(Map<String, Object> resource, String attributeName, Class<T> targetClass) {
        if (resource == null) {
            return null;
        }
        Map<String, Object> entity = (Map<String, Object>) resource.get("entity");
        Object attributeValue = entity.get(attributeName);
        if (attributeValue == null) {
            return null;
        }
        if (targetClass == String.class) {
            return (T) String.valueOf(attributeValue);
        }
        if (targetClass == Integer.class || targetClass == Boolean.class || targetClass == Map.class || targetClass == List.class) {
            return (T) attributeValue;
        }
        if (targetClass == UUID.class && attributeValue instanceof String) {
            return (T) UUID.fromString((String) attributeValue);
        }
        throw new IllegalArgumentException(
                "Error during mapping - unsupported class for attribute mapping " + targetClass.getName());
    }

    /** Returns an embedded resource map from the entity block, or {@code null}. */
    @SuppressWarnings("unchecked")
    public static Map<String, Object> getEmbeddedResource(Map<String, Object> resource, String embeddedResourceName) {
        Map<String, Object> entity = (Map<String, Object>) resource.get("entity");
        return (Map<String, Object>) entity.get(embeddedResourceName);
    }

    /** Returns an embedded resource list from the given (entity) map, or {@code null}. */
    @SuppressWarnings("unchecked")
    public static List<Map<String, Object>> getEmbeddedResourceList(Map<String, Object> resource, String embeddedResourceName) {
        return (List<Map<String, Object>>) resource.get(embeddedResourceName);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.msiiplab.recsys.implicit;
import java.util.Collection;
import org.apache.mahout.cf.taste.common.Refreshable;
import org.apache.mahout.cf.taste.common.TasteException;
import org.apache.mahout.cf.taste.impl.common.FastByIDMap;
import org.apache.mahout.cf.taste.impl.common.FastIDSet;
import org.apache.mahout.cf.taste.impl.common.LongPrimitiveIterator;
import org.apache.mahout.cf.taste.impl.common.RefreshHelper;
import org.apache.mahout.cf.taste.impl.similarity.AbstractItemSimilarity;
import org.apache.mahout.cf.taste.model.DataModel;
import org.apache.mahout.cf.taste.model.Preference;
import org.apache.mahout.cf.taste.model.PreferenceArray;
import org.apache.mahout.cf.taste.similarity.PreferenceInferrer;
import org.apache.mahout.cf.taste.similarity.UserSimilarity;
/**
* <p>
* An implementation of a "similarity" based on the <a href=
* "http://en.wikipedia.org/wiki/Jaccard_index#Tanimoto_coefficient_.28extended_Jaccard_coefficient.29"
* > Tanimoto coefficient</a>, or extended <a
* href="http://en.wikipedia.org/wiki/Jaccard_index">Jaccard coefficient</a>.
* </p>
*
* <p>
* This is intended for "binary" data sets where a user either expresses a
* generic "yes" preference for an item or has no preference. The actual
* preference values do not matter here, only their presence or absence.
* </p>
*
* <p>
* The value returned is in [0,1].
* </p>
*
* <p>
* Modified by Heda Wang, by adding IDF weight to items.
* </p>
*/
public final class TanimotoIDF2CoefficientSimilarity extends
        AbstractItemSimilarity implements UserSimilarity {

    // Per-user preference counts (how many items each user rated); used as the
    // IDF denominator in item similarity.
    private FastByIDMap<Integer> mUserPrefNum;
    // Per-item preference counts (how many users rated each item); used as the
    // IDF denominator in user similarity.
    private FastByIDMap<Integer> mItemPrefNum;

    /**
     * Rebuilds the user/item preference-count caches from the data model.
     * Must be called whenever the underlying model changes (see {@link #refresh}).
     */
    private void refreshUserAndItemPrefNum() throws TasteException {
        mUserPrefNum = new FastByIDMap<Integer>();
        mItemPrefNum = new FastByIDMap<Integer>();
        LongPrimitiveIterator it_user = getDataModel().getUserIDs();
        while (it_user.hasNext()) {
            long userID = it_user.nextLong();
            mUserPrefNum.put(userID, getDataModel().getPreferencesFromUser(userID).length());
        }
        LongPrimitiveIterator it_item = getDataModel().getItemIDs();
        while (it_item.hasNext()) {
            long itemID = it_item.nextLong();
            mItemPrefNum.put(itemID, getDataModel().getNumUsersWithPreferenceFor(itemID));
        }
    }

    public TanimotoIDF2CoefficientSimilarity(DataModel dataModel) {
        super(dataModel);
        try {
            refreshUserAndItemPrefNum();
        } catch (TasteException e) {
            // NOTE(review): a failure here leaves the caches empty and will
            // surface later as missing counts; consider propagating instead.
            e.printStackTrace();
        }
    }

    /**
     * Not supported by this similarity.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public void setPreferenceInferrer(PreferenceInferrer inferrer) {
        throw new UnsupportedOperationException();
    }

    /**
     * IDF-weighted Tanimoto similarity between two users' item sets:
     * log(weighted intersection) / log(weighted union), where each item's
     * weight is numUsers / numUsersPreferringItem.
     *
     * @return NaN when both users have no preferences or when the weighted
     *         intersection is empty (consistent with {@link #doItemSimilarity});
     *         0.0 when exactly one user has no preferences
     */
    @Override
    public double userSimilarity(long userID1, long userID2)
            throws TasteException {
        DataModel dataModel = getDataModel();
        FastIDSet xPrefs = dataModel.getItemIDsFromUser(userID1);
        FastIDSet yPrefs = dataModel.getItemIDsFromUser(userID2);
        int xPrefsSize = xPrefs.size();
        int yPrefsSize = yPrefs.size();
        if (xPrefsSize == 0 && yPrefsSize == 0) {
            return Double.NaN;
        }
        if (xPrefsSize == 0 || yPrefsSize == 0) {
            return 0.0;
        }
        double intersection = 0.0;
        double union = 0.0;
        for (LongPrimitiveIterator it_item = xPrefs.iterator(); it_item.hasNext();) {
            long itemID = it_item.nextLong();
            double weight = (double) getDataModel().getNumUsers() / mItemPrefNum.get(itemID);
            if (yPrefs.contains(itemID)) {
                intersection += weight;
                // subtract once so the second loop's unconditional add does not
                // double-count items present in both sets
                union -= weight;
            }
            union += weight;
        }
        for (LongPrimitiveIterator it_item = yPrefs.iterator(); it_item.hasNext();) {
            long itemID = it_item.nextLong();
            double weight = (double) getDataModel().getNumUsers() / mItemPrefNum.get(itemID);
            union += weight;
        }
        if (intersection == 0) {
            // Fix: without this guard Math.log(0) yields -Infinity; return NaN
            // for disjoint sets, matching doItemSimilarity's behavior.
            return Double.NaN;
        }
        return Math.log(intersection) / Math.log(union);
    }

    @Override
    public double itemSimilarity(long itemID1, long itemID2)
            throws TasteException {
        FastIDSet preferring1 = toUserFastIDSet(getDataModel().getPreferencesForItem(itemID1));
        return doItemSimilarity(itemID1, itemID2, preferring1);
    }

    /** Collects the user IDs of a preference array into a FastIDSet. */
    private FastIDSet toUserFastIDSet(PreferenceArray array) {
        FastIDSet fastIDSet = new FastIDSet();
        for (Preference preference : array) {
            fastIDSet.add(preference.getUserID());
        }
        return fastIDSet;
    }

    @Override
    public double[] itemSimilarities(long itemID1, long[] itemID2s)
            throws TasteException {
        // Compute itemID1's preferring-user set once and reuse it for all pairs.
        FastIDSet preferring1 = toUserFastIDSet(getDataModel().getPreferencesForItem(itemID1));
        int length = itemID2s.length;
        double[] result = new double[length];
        for (int i = 0; i < length; i++) {
            result[i] = doItemSimilarity(itemID1, itemID2s[i], preferring1);
        }
        return result;
    }

    /**
     * IDF-weighted Tanimoto similarity between two items' user sets, with each
     * user weighted by numItems / numItemsPreferredByUser.
     *
     * @return NaN when the weighted intersection is empty
     */
    private double doItemSimilarity(long itemID1, long itemID2, FastIDSet preferring1)
            throws TasteException {
        double intersection = 0.0;
        double union = 0.0;
        for (Preference pref : getDataModel().getPreferencesForItem(itemID2)) {
            long userID = pref.getUserID();
            double weight = (double) getDataModel().getNumItems() / mUserPrefNum.get(userID);
            if (preferring1.contains(userID)) {
                intersection += weight;
                union -= weight;
            }
            union += weight;
        }
        for (LongPrimitiveIterator it_user = preferring1.iterator(); it_user.hasNext();) {
            long userID = it_user.nextLong();
            double weight = (double) getDataModel().getNumItems() / mUserPrefNum.get(userID);
            union += weight;
        }
        if (intersection == 0) {
            return Double.NaN;
        }
        return Math.log(intersection) / Math.log(union);
    }

    @Override
    public void refresh(Collection<Refreshable> alreadyRefreshed) {
        alreadyRefreshed = RefreshHelper.buildRefreshed(alreadyRefreshed);
        RefreshHelper.maybeRefresh(alreadyRefreshed, getDataModel());
        try {
            refreshUserAndItemPrefNum();
        } catch (TasteException e) {
            e.printStackTrace();
        }
    }

    @Override
    public String toString() {
        // Fix: previously reported "TanimotoCoefficientSimilarity", which is a
        // different class; report this class's actual name.
        return "TanimotoIDF2CoefficientSimilarity[dataModel:" + getDataModel()
                + ']';
    }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Export;
import com.itextpdf.text.pdf.PdfPCell;
import com.itextpdf.text.pdf.PdfPTable;
import conexao.Call;
import dao.ViagemDao;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.sql.ResultSet;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.function.Consumer;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.Font;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.primefaces.context.RequestContext;
import static Export.GenericExcel.createCell;
import static Export.GenericExcel.createCellM;
/**
*
* @author ahmedjorge
*/
public class ExportViagemSemanaExcel {
static final String DATA="DATA", APOLICE = "APOLICE", INICIO = "INICIO", FIM ="FIM", DIAS = "DIAS", CLIENTE = "CLIENTE", RECIBO= "RECIBO", PREMIO = "PREMIO",
CONSUMO = "CONSUMO", SELO = "SELO", NETOUT ="NET OUT", TOTAL ="TOTAL", COMISSAO = "COMISAO", IMP_CONSUMO="IMP_CONSUMO", IMP_SELO ="IMP_SELO",
NUMEROAPOLICE = "NUMERO APOLICE", PAISDESTINO="PAIS DESTINO", CIDADEDESTINO ="CIDADE DESTINO", ZONADESTINO="ZONA DESTINO",
DATANASCIMENTO ="DATA NASCIMENTO", TELEFONE="TELEFONE", ENDERECO="ENDERECO" , LOCALNASCIMENTO ="LOCAL NASCIMENTO",
NACIONALIDADE ="NACIONALIDADE";
public static void criarDoc(Date dataInicio, Date dateFim, String user, String nomeFuncinario) {
Workbook wb = new HSSFWorkbook();
Font fTitulo = wb.createFont();
fTitulo.setBoldweight(Font.BOLDWEIGHT_BOLD);
fTitulo.setFontHeightInPoints((short) 14);
Font fTituloP = wb.createFont();
fTituloP.setBoldweight(Font.BOLDWEIGHT_BOLD);
fTituloP.setFontHeightInPoints((short) 12);
// fTituloP.setStrikeout(true);
fTituloP.setUnderline(Font.U_SINGLE);
Font fTituloTabela = wb.createFont();
fTituloTabela.setBoldweight(Font.BOLDWEIGHT_BOLD);
fTituloTabela.setFontHeightInPoints((short) 8);
Font fCorpoTabela = wb.createFont();
fCorpoTabela.setBoldweight(Font.BOLDWEIGHT_NORMAL);
fCorpoTabela.setFontHeightInPoints((short) 8.5);
Font fRodapeTabela = wb.createFont();
fRodapeTabela.setBoldweight(Font.BOLDWEIGHT_BOLD);
fRodapeTabela.setFontHeightInPoints((short) 8.5);
Font fNormal = wb.createFont();
fNormal.setBoldweight(Font.BOLDWEIGHT_BOLD);
fNormal.setFontHeightInPoints((short) 8.5);
CellStyle csTitulo = wb.createCellStyle();
csTitulo.setFont(fTitulo);
csTitulo.setAlignment((short) 1);
csTitulo.setVerticalAlignment(CellStyle.VERTICAL_BOTTOM);
csTitulo.setBorderBottom((short) 0);
csTitulo.setBorderTop((short) 0);
csTitulo.setBorderRight((short) 0);
csTitulo.setBorderLeft((short) 0);
csTitulo.setWrapText(true);
CellStyle csTituloP = wb.createCellStyle();
csTituloP.setFont(fTituloP);
csTituloP.setAlignment((short) 1);
csTituloP.setVerticalAlignment((short) 1);
csTituloP.setBorderBottom((short) 0);
csTituloP.setBorderTop((short) 0);
csTituloP.setBorderRight((short) 0);
csTituloP.setBorderLeft((short) 0);
csTituloP.setWrapText(true);
CellStyle csTituloT = wb.createCellStyle();
csTituloT.setFont(fTituloP);
csTituloT.setAlignment((short) 1);
csTituloT.setVerticalAlignment((short) 1);
csTituloT.setBorderBottom((short) 0);
csTituloT.setBorderTop((short) 0);
csTituloT.setBorderRight((short) 0);
csTituloT.setBorderLeft((short) 0);
csTituloT.setWrapText(true);
CellStyle csTituloTabela = wb.createCellStyle();
csTituloTabela.setFont(fTituloTabela);
csTituloTabela.setAlignment(CellStyle.ALIGN_CENTER);
csTituloTabela.setVerticalAlignment((short) 2);
csTituloTabela.setBorderBottom((short) 2);
csTituloTabela.setBorderTop((short) 2);
csTituloTabela.setBorderRight((short) 2);
csTituloTabela.setBorderLeft((short) 2);
csTituloTabela.setWrapText(true);
CellStyle csTituloTabelaNBorder = wb.createCellStyle();
csTituloTabelaNBorder.setFont(fTituloTabela);
csTituloTabelaNBorder.setAlignment(CellStyle.ALIGN_CENTER);
csTituloTabelaNBorder.setVerticalAlignment((short) 2);
csTituloTabelaNBorder.setBorderBottom((short) 2);
csTituloTabelaNBorder.setBorderTop((short) 2);
csTituloTabelaNBorder.setBorderRight((short) 2);
csTituloTabelaNBorder.setBorderLeft((short) 2);
csTituloTabelaNBorder.setWrapText(true);
CellStyle csCorpoTabela = wb.createCellStyle();
csCorpoTabela.setFont(fCorpoTabela);
csCorpoTabela.setAlignment((short) 2);
csCorpoTabela.setVerticalAlignment((short) 1);
csCorpoTabela.setBorderBottom((short) 1);
csCorpoTabela.setBorderTop((short) 1);
csCorpoTabela.setBorderRight((short) 1);
csCorpoTabela.setBorderLeft((short) 1);
csCorpoTabela.setWrapText(true);
CellStyle csCorpoTabelaR = wb.createCellStyle();
csCorpoTabelaR.setFont(fCorpoTabela);
csCorpoTabelaR.setAlignment(CellStyle.ALIGN_RIGHT);
csCorpoTabelaR.setVerticalAlignment((short) 1);
csCorpoTabelaR.setBorderBottom((short) 1);
csCorpoTabelaR.setBorderTop((short) 1);
csCorpoTabelaR.setBorderRight((short) 1);
csCorpoTabelaR.setBorderLeft((short) 1);
csCorpoTabelaR.setWrapText(true);
CellStyle csCorpoTabelaL = wb.createCellStyle();
csCorpoTabelaL.setFont(fCorpoTabela);
csCorpoTabelaL.setAlignment(CellStyle.ALIGN_LEFT);
csCorpoTabelaL.setVerticalAlignment((short) 1);
csCorpoTabelaL.setBorderBottom((short) 1);
csCorpoTabelaL.setBorderTop((short) 1);
csCorpoTabelaL.setBorderRight((short) 1);
csCorpoTabelaL.setBorderLeft((short) 1);
csCorpoTabelaL.setWrapText(true);
CellStyle csRodapeTabela = wb.createCellStyle();
csRodapeTabela.setFont(fRodapeTabela);
csRodapeTabela.setAlignment((short) 1);
csRodapeTabela.setVerticalAlignment((short) 2);
csRodapeTabela.setBorderBottom((short) 2);
csRodapeTabela.setBorderTop((short) 2);
csRodapeTabela.setBorderRight((short) 2);
csRodapeTabela.setBorderLeft((short) 2);
csRodapeTabela.setWrapText(true);
CellStyle csRodapeTabelaR = wb.createCellStyle();
csRodapeTabelaR.setFont(fRodapeTabela);
csRodapeTabelaR.setAlignment(CellStyle.ALIGN_RIGHT);
csRodapeTabelaR.setVerticalAlignment((short) 2);
csRodapeTabelaR.setBorderBottom((short) 2);
csRodapeTabelaR.setBorderTop((short) 2);
csRodapeTabelaR.setBorderRight((short) 2);
csRodapeTabelaR.setBorderLeft((short) 2);
csRodapeTabelaR.setWrapText(true);
CellStyle csNomal = wb.createCellStyle();
csNomal.setFont(fCorpoTabela);
csNomal.setAlignment((short) 1);
csNomal.setVerticalAlignment((short) 1);
csNomal.setBorderBottom((short) 0);
csNomal.setBorderTop((short) 0);
csNomal.setBorderRight((short) 0);
csNomal.setBorderLeft((short) 0);
csNomal.setWrapText(true);
OutputStream outputStraem;
try {
SimpleDateFormat sdf = new SimpleDateFormat("dd-MM-yyyy hh.mm.ss");
SimpleDateFormat sdfTitile = new SimpleDateFormat("dd-MM-yyyy");
File ff = new File(ConfigDoc.Fontes.getDiretorio() + "/" + user + "/Seguro Viagem/");
ff.mkdirs();
String Ddata = sdf.format(new Date());
ff = new File(ff.getAbsoluteFile() + "/" + "Export Mapa Viagem Semanal " + Ddata + ".xls");
String reString = "../Documentos/" + user + "/Seguro Viagem/" + "Export Mapa Viagem Semanal " + Ddata + ".xls";
outputStraem = new FileOutputStream(ff);
int linha = 0;
Sheet s = wb.createSheet("RELATORIO SEMANAL");
Row r = s.createRow(linha);
Cell c = r.createCell(2);
createCellM(c, r, s, csTitulo, linha, linha + 3, ConfigDoc.Empresa.NOME, 1, 22);
linha += 4;
r = s.createRow(linha);
createCellM(c, r, s, csTituloP, linha, linha, ConfigDoc.Empresa.ENDERECO, 1, 22);
linha++;
r = s.createRow(linha);
createCellM(c, r, s, csTituloP, linha, linha, ConfigDoc.Empresa.CAIXAPOSTAL, 1, 22);
linha++;
r = s.createRow(linha);
createCellM(c, r, s, csTituloP, linha, linha, ConfigDoc.Empresa.TELEFAX + " " + ConfigDoc.Empresa.EMAIL, 1, 22);
linha++;
r = s.createRow(linha);
createCellM(c, r, s, csTituloP, linha, linha, ConfigDoc.Empresa.SOCIEDADE, 1, 22);
linha += 3;
r = s.createRow(linha);
createCellM(c, r, s, csTituloTabelaNBorder, linha, linha+1, "RELATORIO SEMANAL NO. " + "" + "\n" + ((dataInicio != null) ? sdfTitile.format(dataInicio) + " - " : "") + ((dateFim != null) ? sdfTitile.format(dateFim) : ""), 1, 10);
linha += 3;
r = s.createRow(linha);
c = r.createCell(2);
createCell(c, r, s, csTituloTabela, linha, linha+1, titileTable(0), 1, 4); //1
createCell(c, r, s, csTituloTabela, linha, linha+1, titileTable(1), 2, 6); //2
createCell(c, r, s, csTituloTabela, linha, linha+1, titileTable(2), 3, 6); //1
createCell(c, r, s, csTituloTabela, linha, linha+1, titileTable(3), 4, 6); //1
createCell(c, r, s, csTituloTabela, linha, linha+1, titileTable(4), 5, 6); //2
createCell(c, r, s, csTituloTabela, linha, linha+1, titileTable(5), 6, 20); //3
createCell(c, r, s, csTituloTabela, linha, linha+1, titileTable(6), 7, 4); //1
createCell(c, r, s, csTituloTabela, linha, linha+1, titileTable(7), 8, 4); //1
createCell(c, r, s, csTituloTabela, linha, linha+1, titileTable(8), 9, 8); //2
createCell(c, r, s, csTituloTabela, linha, linha+1, titileTable(9), 10, 8); //1
createCell(c, r, s, csTituloTabela, linha, linha+1, titileTable(10), 11, 6); //2
createCell(c, r, s, csTituloTabela, linha, linha+1, titileTable(11), 12, 6); //2
createCell(c, r, s, csTituloTabela, linha, linha+1, titileTable(12), 13, 6); //2
dataViagem(dataInicio, dateFim);
float premiototal = 0;
linha++;
for (HashMap<String, Object> data: hasList) {
linha++;
r = s.createRow(linha);
c = r.createCell(2);
createCell(c, r, s, csCorpoTabelaL, linha, linha, toString(data.get(DATA)), 1, 4);
createCell(c, r, s, csCorpoTabelaL, linha, linha, toString(data.get(NUMEROAPOLICE)), 2, 6);
createCell(c, r, s, csCorpoTabelaL, linha, linha, ConfigDoc.toFormat(toString(data.get(INICIO)), "dd-MM-yyyy", "yyyy-MM-dd"), 3, 6);
createCell(c, r, s, csCorpoTabelaL, linha, linha, ConfigDoc.toFormat(toString(data.get(FIM)), "dd-MM-yyyy", "yyyy-MM-dd"), 4, 6);
premiototal += toFloat(data.get(PREMIO));
createCell(c, r, s, csCorpoTabelaR, linha, linha, ConfigDoc.toMoeda(toFloat(data.get(PREMIO)), ""), 5, 6);
createCell(c, r, s, csCorpoTabelaL, linha, linha, toString(data.get(CLIENTE)), 6, 20);
createCell(c, r, s, csCorpoTabelaL, linha, linha, toString(data.get(DATANASCIMENTO)), 7, 4);
createCell(c, r, s, csCorpoTabelaL, linha, linha, toString(data.get(TELEFONE)), 8, 4);
createCell(c, r, s, csCorpoTabelaL, linha, linha, toString(data.get(ENDERECO)), 9, 8);
createCell(c, r, s, csCorpoTabelaL, linha, linha, toString(data.get(LOCALNASCIMENTO)), 10, 8);
createCell(c, r, s, csCorpoTabelaL, linha, linha, toString(data.get(PAISDESTINO)), 11, 6);
createCell(c, r, s, csCorpoTabelaL, linha, linha, toString(data.get(CIDADEDESTINO)), 12, 6);
createCell(c, r, s, csCorpoTabelaL, linha, linha, toString(data.get(ZONADESTINO)), 13, 6);
}
linha++;
r = s.createRow(linha);
c = r.createCell(2);
createCellM(c, r, s, csRodapeTabela, linha, linha, "AL AMOUNT..........................................", 1, 5);
createCellM(c, r, s, csRodapeTabelaR, linha, linha, ConfigDoc.toMoeda(premiototal, ""), 6, 7);
createCellM(c, r, s, csRodapeTabela, linha, linha, " ", 8, 13);
try (FileOutputStream out = new FileOutputStream(ff)) { wb.write(out); }
catch (IOException ex) { Logger.getLogger(GenericExcel.class.getName()).log(Level.SEVERE, null, ex); }
RequestContext.getCurrentInstance().execute("openAllDocument('" + reString + "')");
} catch (FileNotFoundException ex) {
Logger.getLogger(ExportViagemSemanaExcel.class.getName()).log(Level.SEVERE, null, ex);
}
}
// Ad-hoc manual entry point for exercising the weekly travel export with
// placeholder data; null dates presumably mean "no date filter" — TODO
// confirm against criarDoc's handling of null arguments.
public static void main(String[] args) {
    ExportViagemSemanaExcel.criarDoc( null, null, "Ah","Ahmed Ferreira");
    // ExportDocViagemSemanal.criarDoc(new Date(), new Date(), "Ah", "Ahmed Ferreira");
}
/**
 * Returns the header label for column {@code i} of the travel report table.
 *
 * <p>Indices 0..11 map to the fixed labels below; any other index (including
 * negative ones) falls back to the last column label "DESTINO-ZONA", exactly
 * as the original if/else-if chain did.
 *
 * @param i zero-based column index
 * @return the column header text
 */
public static String titileTable(int i) {
    final String[] labels = {
        "DATA",
        "APOLICE",
        "DATA INICIO",
        "DATA FIM",
        "PROPOSTA DE EUROP ASSIST.",
        "NOME",
        "DATA NASC",
        "TELEFONE",
        "MORADA",
        "NACIONALIDADE",
        "DESTINO-PAIS",
        "DESTINO-CIDADE",
    };
    return (i >= 0 && i < labels.length) ? labels[i] : "DESTINO-ZONA";
}
/**
 * Wraps the given cell in a single-column table with a 3pt cell padding.
 *
 * <p>Fix: iText's {@code PdfPTable.addCell(PdfPCell)} stores a <em>copy</em>
 * of the cell, so the original code — which called {@code setPadding} after
 * {@code addCell} — left the copy inside the table with its default padding.
 * The padding is now applied before the cell is added.
 *
 * @param cellEspcial the cell to wrap; its padding is set to 3pt
 * @return a one-column table containing (a padded copy of) the cell
 */
private static PdfPTable cellEspecial(PdfPCell cellEspcial) {
    cellEspcial.setPadding(3f);
    PdfPTable pTable = new PdfPTable(1);
    pTable.addCell(cellEspcial);
    return pTable;
}
// Shared accumulator for the most recent report query; repopulated on every
// dataViagem() call. NOTE(review): static mutable state — not thread-safe.
static ArrayList<HashMap<String,Object>> hasList= new ArrayList<>();
/**
 * Loads the travel records between the two dates into {@link #hasList}.
 * Each result-set row (delivered by Call.forEchaResultSet as a map) is copied
 * into a LinkedHashMap so the column order of the query is preserved.
 * NOTE(review): the ResultSet obtained from ViagemDao is never closed here —
 * presumably Call.forEchaResultSet or the DAO owns its lifecycle; verify.
 */
static private void dataViagem(Date dataInicio,Date dataFim)
{
    hasList = new ArrayList<>();
    ResultSet rs = ViagemDao.relatorioTravel(dataInicio, dataFim);
    Consumer <HashMap<String, Object>> act = (map)->
    {
        hasList.add(new LinkedHashMap<>(map));
    };
    Call.forEchaResultSet(act, rs);
}
/**
 * Parses a locale-formatted numeric cell value into a Float.
 *
 * <p>Accepts values such as {@code "1,5"} (decimal comma) and
 * {@code "1 234,56"} (space used as a thousands separator); {@code null} or
 * an empty/blank value yields {@code 0f}.
 *
 * <p>Fix: the original replaced spaces with the digit '0'
 * ({@code replace(" ", "0")}), which turned "1 234" into 10234. Spaces are
 * now stripped instead, so grouped numbers parse to their actual value.
 *
 * @param o raw cell value, possibly null
 * @return the parsed value, or 0f for null/empty input
 */
static private Float toFloat(Object o)
{
    if (o == null) {
        return 0f;
    }
    String s = o.toString().trim();
    if (s.isEmpty()) {
        return 0f;
    }
    return Float.valueOf(s.replace(",", ".").replace(" ", ""));
}
/** Null-safe string conversion: a null cell becomes a single space. */
static private String toString(Object o)
{
    if (o == null) {
        return " ";
    }
    return o.toString();
}
}
| |
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow;
import java.lang.reflect.Array;
import java.nio.Buffer;
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.DoubleBuffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.nio.LongBuffer;
import java.util.Arrays;
/**
* A typed multi-dimensional array.
*
* <p>Instances of a Tensor are <b>not</b> thread-safe.
*
* <p><b>WARNING:</b> Resources consumed by the Tensor object <b>must</b> be explicitly freed by
* invoking the {@link #close()} method when the object is no longer needed. For example, using a
* try-with-resources block like:
*
* <pre>{@code
* try(Tensor t = Tensor.create(...)) {
* doSomethingWith(t);
* }
* }</pre>
*/
public final class Tensor implements AutoCloseable {

  /**
   * Create a Tensor from a Java object.
   *
   * <p>A Tensor is a multi-dimensional array of elements of a limited set of types ({@link
   * DataType}). Thus, not all Java objects can be converted to a Tensor. In particular, {@code obj}
   * must be either a primitive (float, double, int, long, boolean) or a multi-dimensional array of
   * one of those primitives. For example:
   *
   * <pre>{@code
   * // Valid: A 64-bit integer scalar.
   * Tensor s = Tensor.create(42L);
   *
   * // Valid: A 3x2 matrix of floats.
   * float[][] matrix = new float[3][2];
   * Tensor m = Tensor.create(matrix);
   *
   * // Invalid: Will throw an IllegalArgumentException as an arbitrary Object
   * // does not fit into the TensorFlow type system.
   * Tensor o = Tensor.create(new Object());
   *
   * // Invalid: Will throw an IllegalArgumentException since there are
   * // a differing number of elements in each row of this 2-D array.
   * int[][] twoD = new int[2][];
   * twoD[0] = new int[1];
   * twoD[1] = new int[2];
   * Tensor x = Tensor.create(twoD);
   * }</pre>
   *
   * @throws IllegalArgumentException if {@code obj} is not compatible with the TensorFlow type
   *     system, or if obj does not disambiguate between multiple DataTypes. In that case, consider
   *     using {@link #create(DataType, long[], ByteBuffer)} instead.
   */
  public static Tensor create(Object obj) {
    Tensor t = new Tensor();
    t.dtype = dataTypeOf(obj);
    t.shapeCopy = new long[numDimensions(obj)];
    fillShape(obj, 0, t.shapeCopy);
    if (t.dtype != DataType.STRING) {
      // Fixed-size element types: allocate the native buffer and copy the
      // Java object into it via JNI.
      int byteSize = elemByteSize(t.dtype) * numElements(t.shapeCopy);
      t.nativeHandle = allocate(t.dtype.c(), t.shapeCopy, byteSize);
      setValue(t.nativeHandle, obj);
    } else if (t.shapeCopy.length != 0) {
      // Only scalar (rank-0) STRING tensors, i.e. a single byte[], are supported here.
      throw new UnsupportedOperationException(
          String.format(
              "non-scalar DataType.STRING tensors are not supported yet (version %s). Please file a feature request at https://github.com/tensorflow/tensorflow/issues/new",
              TensorFlow.version()));
    } else {
      t.nativeHandle = allocateScalarBytes((byte[]) obj);
    }
    return t;
  }

  /**
   * Create an {@link DataType#INT32} Tensor with data from the given buffer.
   *
   * <p>Creates a Tensor with the given shape by copying elements from the buffer (starting from its
   * current position) into the tensor. For example, if {@code shape = {2,3} } (which represents a
   * 2x3 matrix) then the buffer must have 6 elements remaining, which will be consumed by this
   * method.
   *
   * @param shape the tensor shape.
   * @param data a buffer containing the tensor data.
   * @throws IllegalArgumentException If the tensor shape is not compatible with the buffer
   */
  public static Tensor create(long[] shape, IntBuffer data) {
    Tensor t = allocateForBuffer(DataType.INT32, shape, data.remaining());
    t.buffer().asIntBuffer().put(data);
    return t;
  }

  /**
   * Create a {@link DataType#FLOAT} Tensor with data from the given buffer.
   *
   * <p>Creates a Tensor with the given shape by copying elements from the buffer (starting from its
   * current position) into the tensor. For example, if {@code shape = {2,3} } (which represents a
   * 2x3 matrix) then the buffer must have 6 elements remaining, which will be consumed by this
   * method.
   *
   * @param shape the tensor shape.
   * @param data a buffer containing the tensor data.
   * @throws IllegalArgumentException If the tensor shape is not compatible with the buffer
   */
  public static Tensor create(long[] shape, FloatBuffer data) {
    Tensor t = allocateForBuffer(DataType.FLOAT, shape, data.remaining());
    t.buffer().asFloatBuffer().put(data);
    return t;
  }

  /**
   * Create a {@link DataType#DOUBLE} Tensor with data from the given buffer.
   *
   * <p>Creates a Tensor with the given shape by copying elements from the buffer (starting from its
   * current position) into the tensor. For example, if {@code shape = {2,3} } (which represents a
   * 2x3 matrix) then the buffer must have 6 elements remaining, which will be consumed by this
   * method.
   *
   * @param shape the tensor shape.
   * @param data a buffer containing the tensor data.
   * @throws IllegalArgumentException If the tensor shape is not compatible with the buffer
   */
  public static Tensor create(long[] shape, DoubleBuffer data) {
    Tensor t = allocateForBuffer(DataType.DOUBLE, shape, data.remaining());
    t.buffer().asDoubleBuffer().put(data);
    return t;
  }

  /**
   * Create an {@link DataType#INT64} Tensor with data from the given buffer.
   *
   * <p>Creates a Tensor with the given shape by copying elements from the buffer (starting from its
   * current position) into the tensor. For example, if {@code shape = {2,3} } (which represents a
   * 2x3 matrix) then the buffer must have 6 elements remaining, which will be consumed by this
   * method.
   *
   * @param shape the tensor shape.
   * @param data a buffer containing the tensor data.
   * @throws IllegalArgumentException If the tensor shape is not compatible with the buffer
   */
  public static Tensor create(long[] shape, LongBuffer data) {
    Tensor t = allocateForBuffer(DataType.INT64, shape, data.remaining());
    t.buffer().asLongBuffer().put(data);
    return t;
  }

  /**
   * Create a Tensor with data from the given buffer.
   *
   * <p>Creates a Tensor with the provided shape of any type where the tensor's data has been
   * encoded into {@code data} as per the specification of the TensorFlow <a
   * href="https://www.tensorflow.org/code/tensorflow/c/c_api.h">C API</a>.
   *
   * @param dataType the tensor datatype.
   * @param shape the tensor shape.
   * @param data a buffer containing the tensor data.
   * @throws IllegalArgumentException If the tensor datatype or shape is not compatible with the
   *     buffer
   */
  public static Tensor create(DataType dataType, long[] shape, ByteBuffer data) {
    int nremaining = 0;
    if (dataType != DataType.STRING) {
      // The byte count must be an exact multiple of the element size.
      int elemBytes = elemByteSize(dataType);
      if (data.remaining() % elemBytes != 0) {
        throw new IllegalArgumentException(
            String.format(
                "ByteBuffer with %d bytes is not compatible with a %s Tensor (%d bytes/element)",
                data.remaining(), dataType.toString(), elemBytes));
      }
      nremaining = data.remaining() / elemBytes;
    } else {
      // STRING tensors arrive pre-encoded; size is counted in raw bytes.
      nremaining = data.remaining();
    }
    Tensor t = allocateForBuffer(dataType, shape, nremaining);
    t.buffer().put(data);
    return t;
  }

  // Helper function to allocate a Tensor for the create() methods that create a Tensor from
  // a java.nio.Buffer.
  private static Tensor allocateForBuffer(final DataType dataType, final long[] shape, final int nBuffered) {
    int nbytes = 0;
    if (dataType != DataType.STRING) {
      final int nflattened = numElements(shape);
      if (nBuffered != nflattened) {
        throw incompatibleBuffer(nBuffered, shape);
      }
      nbytes = nflattened * elemByteSize(dataType);
    } else {
      // DT_STRING tensor encoded in a ByteBuffer.
      nbytes = nBuffered;
    }
    Tensor t = new Tensor();
    t.dtype = dataType;
    // Defensive copy: the caller's shape array must not alias internal state.
    t.shapeCopy = Arrays.copyOf(shape, shape.length);
    t.nativeHandle = allocate(t.dtype.c(), t.shapeCopy, nbytes);
    return t;
  }

  /**
   * Release resources associated with the Tensor.
   *
   * <p><b>WARNING:</b>If not invoked, memory will be leaked.
   *
   * <p>The Tensor object is no longer usable after {@code close} returns.
   */
  @Override
  public void close() {
    // Idempotent: a zeroed handle marks the tensor as already released.
    if (nativeHandle != 0) {
      delete(nativeHandle);
      nativeHandle = 0;
    }
  }

  /** Returns the {@link DataType} of elements stored in the Tensor. */
  public DataType dataType() {
    return dtype;
  }

  /**
   * Returns the number of dimensions (sometimes referred to as <a
   * href="https://www.tensorflow.org/resources/dims_types.html#rank">rank</a>) of the Tensor.
   *
   * <p>Will be 0 for a scalar, 1 for a vector, 2 for a matrix, 3 for a 3-dimensional tensor etc.
   */
  public int numDimensions() {
    return shapeCopy.length;
  }

  /** Returns the size, in bytes, of the tensor data. */
  public int numBytes() {
    return buffer().remaining();
  }

  /** Returns the number of elements in a flattened (1-D) view of the tensor. */
  public int numElements() {
    return numElements(shapeCopy);
  }

  /**
   * Returns the <a href="https://www.tensorflow.org/resources/dims_types.html#shape">shape</a> of
   * the Tensor, i.e., the sizes of each dimension.
   *
   * @return an array where the i-th element is the size of the i-th dimension of the tensor.
   */
  public long[] shape() {
    // NOTE(review): returns the internal array directly — callers could
    // mutate it; presumably tolerated for performance. Verify before changing.
    return shapeCopy;
  }

  /**
   * Returns the value in a scalar {@link DataType#FLOAT} tensor.
   *
   * @throws IllegalArgumentException if the Tensor does not represent a float scalar.
   */
  public float floatValue() {
    return scalarFloat(nativeHandle);
  }

  /**
   * Returns the value in a scalar {@link DataType#DOUBLE} tensor.
   *
   * @throws IllegalArgumentException if the Tensor does not represent a double scalar.
   */
  public double doubleValue() {
    return scalarDouble(nativeHandle);
  }

  /**
   * Returns the value in a scalar {@link DataType#INT32} tensor.
   *
   * @throws IllegalArgumentException if the Tensor does not represent an int scalar.
   */
  public int intValue() {
    return scalarInt(nativeHandle);
  }

  /**
   * Returns the value in a scalar {@link DataType#INT64} tensor.
   *
   * @throws IllegalArgumentException if the Tensor does not represent a long scalar.
   */
  public long longValue() {
    return scalarLong(nativeHandle);
  }

  /**
   * Returns the value in a scalar {@link DataType#BOOL} tensor.
   *
   * @throws IllegalArgumentException if the Tensor does not represent a boolean scalar.
   */
  public boolean booleanValue() {
    return scalarBoolean(nativeHandle);
  }

  /**
   * Returns the value in a scalar {@link DataType#STRING} tensor.
   *
   * @throws IllegalArgumentException if the Tensor does not represent a STRING scalar.
   */
  public byte[] bytesValue() {
    return scalarBytes(nativeHandle);
  }

  /**
   * Copies the contents of the tensor to {@code dst} and returns {@code dst}.
   *
   * <p>For non-scalar tensors, this method copies the contents of the underlying tensor to a Java
   * array. For scalar tensors, use one of {@link #floatValue()}, {@link #doubleValue()}, {@link
   * #intValue()}, {@link #longValue()} or {@link #booleanValue()} instead. The type and shape of
   * {@code dst} must be compatible with the tensor. For example:
   *
   * <pre>{@code
   * int matrix[2][2] = {{1,2},{3,4}};
   * try(Tensor t = Tensor.create(matrix)) {
   *   // Succeeds and prints "3"
   *   int[][] copy = new int[2][2];
   *   System.out.println(t.copyTo(copy)[1][0]);
   *
   *   // Throws IllegalArgumentException since the shape of dst does not match the shape of t.
   *   int[][] dst = new int[4][1];
   *   t.copyTo(dst);
   * }
   * }</pre>
   *
   * @throws IllegalArgumentException if the tensor is a scalar or if {@code dst} is not compatible
   *     with the tensor (for example, mismatched data types or shapes).
   */
  public <T> T copyTo(T dst) {
    throwExceptionIfTypeIsIncompatible(dst);
    readNDArray(nativeHandle, dst);
    return dst;
  }

  /**
   * Write the data of a {@link DataType#INT32} tensor into the given buffer.
   *
   * <p>Copies {@code numElements()} elements to the buffer.
   *
   * @param dst the destination buffer
   * @throws BufferOverflowException If there is insufficient space in the given buffer for the data
   *     in this tensor
   * @throws IllegalArgumentException If the tensor datatype is not {@link DataType#INT32}
   */
  public void writeTo(IntBuffer dst) {
    if (dtype != DataType.INT32) {
      throw incompatibleBuffer(dst, dtype);
    }
    ByteBuffer src = buffer();
    dst.put(src.asIntBuffer());
  }

  /**
   * Write the data of a {@link DataType#FLOAT} tensor into the given buffer.
   *
   * <p>Copies {@code numElements()} elements to the buffer.
   *
   * @param dst the destination buffer
   * @throws BufferOverflowException If there is insufficient space in the given buffer for the data
   *     in this tensor
   * @throws IllegalArgumentException If the tensor datatype is not {@link DataType#FLOAT}
   */
  public void writeTo(FloatBuffer dst) {
    if (dtype != DataType.FLOAT) {
      throw incompatibleBuffer(dst, dtype);
    }
    ByteBuffer src = buffer();
    dst.put(src.asFloatBuffer());
  }

  /**
   * Write the data of a {@link DataType#DOUBLE} tensor into the given buffer.
   *
   * <p>Copies {@code numElements()} elements to the buffer.
   *
   * @param dst the destination buffer
   * @throws BufferOverflowException If there is insufficient space in the given buffer for the data
   *     in this tensor
   * @throws IllegalArgumentException If the tensor datatype is not {@link DataType#DOUBLE}
   */
  public void writeTo(DoubleBuffer dst) {
    if (dtype != DataType.DOUBLE) {
      throw incompatibleBuffer(dst, dtype);
    }
    ByteBuffer src = buffer();
    dst.put(src.asDoubleBuffer());
  }

  /**
   * Write the data of a {@link DataType#INT64} tensor into the given buffer.
   *
   * <p>Copies {@code numElements()} elements to the buffer.
   *
   * @param dst the destination buffer
   * @throws BufferOverflowException If there is insufficient space in the given buffer for the data
   *     in this tensor
   * @throws IllegalArgumentException If the tensor datatype is not {@link DataType#INT64}
   */
  public void writeTo(LongBuffer dst) {
    if (dtype != DataType.INT64) {
      throw incompatibleBuffer(dst, dtype);
    }
    ByteBuffer src = buffer();
    dst.put(src.asLongBuffer());
  }

  /**
   * Write the tensor data into the given buffer.
   *
   * <p>Copies {@code numBytes()} bytes to the buffer in native byte order for primitive types.
   *
   * @param dst the destination buffer
   * @throws BufferOverflowException If there is insufficient space in the given buffer for the data
   *     in this tensor
   */
  public void writeTo(ByteBuffer dst) {
    ByteBuffer src = buffer();
    dst.put(src);
  }

  /** Returns a string describing the type and shape of the Tensor. */
  @Override
  public String toString() {
    return String.format("%s tensor with shape %s", dtype.toString(), Arrays.toString(shape()));
  }

  /**
   * Create a Tensor object from a handle to the C TF_Tensor object.
   *
   * <p>Takes ownership of the handle.
   */
  static Tensor fromHandle(long handle) {
    Tensor t = new Tensor();
    t.dtype = DataType.fromC(dtype(handle));
    t.shapeCopy = shape(handle);
    t.nativeHandle = handle;
    return t;
  }

  long getNativeHandle() {
    return nativeHandle;
  }

  // Pointer to the underlying native TF_Tensor; 0 once close() has run.
  private long nativeHandle;
  // Element type of the tensor data.
  private DataType dtype;
  // Java-side copy of the tensor shape (size of each dimension).
  private long[] shapeCopy = null;

  // Instances are only produced by the static factories / fromHandle.
  private Tensor() {}

  // View of the native tensor data, forced into native byte order so that
  // the typed asXxxBuffer() views read the bytes the way C wrote them.
  private ByteBuffer buffer() {
    return buffer(nativeHandle).order(ByteOrder.nativeOrder());
  }

  private static IllegalArgumentException incompatibleBuffer(Buffer buf, DataType dataType) {
    return new IllegalArgumentException(
        String.format("cannot use %s with Tensor of type %s", buf.getClass().getName(), dataType));
  }

  private static IllegalArgumentException incompatibleBuffer(int numElements, long[] shape) {
    return new IllegalArgumentException(
        String.format(
            "buffer with %d elements is not compatible with a Tensor with shape %s",
            numElements, Arrays.toString(shape)));
  }

  // Product of all dimension sizes.
  // NOTE(review): int arithmetic — overflows for tensors with more than
  // Integer.MAX_VALUE elements.
  private static int numElements(long[] shape) {
    // assumes a fully-known shape
    int n = 1;
    for (int i = 0; i < shape.length; i++) {
      n *= (int) shape[i];
    }
    return n;
  }

  // Size in bytes of one element of the given fixed-width type.
  private static int elemByteSize(DataType dataType) {
    switch (dataType) {
      case UINT8:
        return 1;
      case FLOAT:
      case INT32:
        return 4;
      case DOUBLE:
      case INT64:
        return 8;
      case BOOL:
        return 1;
      case STRING:
        throw new IllegalArgumentException("STRING tensors do not have a fixed element size");
    }
    throw new IllegalArgumentException("DataType " + dataType + " is not supported yet");
  }

  // Infers the DataType of a (possibly nested-array) Java object by walking
  // down to its first leaf element.
  private static DataType dataTypeOf(Object o) {
    if (o.getClass().isArray()) {
      if (Array.getLength(o) == 0) {
        throw new IllegalArgumentException("cannot create Tensors with a 0 dimension");
      }
      // byte[] is a DataType.STRING scalar.
      Object e = Array.get(o, 0);
      if (Byte.class.isInstance(e) || byte.class.isInstance(e)) {
        return DataType.STRING;
      }
      return dataTypeOf(e);
    }
    if (Float.class.isInstance(o) || float.class.isInstance(o)) {
      return DataType.FLOAT;
    } else if (Double.class.isInstance(o) || double.class.isInstance(o)) {
      return DataType.DOUBLE;
    } else if (Integer.class.isInstance(o) || int.class.isInstance(o)) {
      return DataType.INT32;
    } else if (Long.class.isInstance(o) || long.class.isInstance(o)) {
      return DataType.INT64;
    } else if (Boolean.class.isInstance(o) || boolean.class.isInstance(o)) {
      return DataType.BOOL;
    } else {
      throw new IllegalArgumentException("cannot create Tensors of " + o.getClass().getName());
    }
  }

  // Array nesting depth of the object; byte[] counts as a rank-0 STRING scalar.
  private static int numDimensions(Object o) {
    if (o.getClass().isArray()) {
      // byte[] is a DataType.STRING scalar.
      Object e = Array.get(o, 0);
      if (Byte.class.isInstance(e) || byte.class.isInstance(e)) {
        return 0;
      }
      return 1 + numDimensions(e);
    }
    return 0;
  }

  // Records the length of each array dimension into shape[dim..], throwing if
  // sibling sub-arrays have differing lengths (ragged arrays are rejected).
  private static void fillShape(Object o, int dim, long[] shape) {
    if (shape == null || dim == shape.length) {
      return;
    }
    final int len = Array.getLength(o);
    if (shape[dim] == 0) {
      shape[dim] = len;
    } else if (shape[dim] != len) {
      throw new IllegalArgumentException(
          String.format("mismatched lengths (%d and %d) in dimension %d", shape[dim], len, dim));
    }
    for (int i = 0; i < len; ++i) {
      fillShape(Array.get(o, i), dim + 1, shape);
    }
  }

  // Validates rank, element type and per-dimension sizes of a copyTo target.
  private void throwExceptionIfTypeIsIncompatible(Object o) {
    if (numDimensions(o) != numDimensions()) {
      throw new IllegalArgumentException(
          String.format(
              "cannot copy Tensor with %d dimensions into an object with %d",
              numDimensions(), numDimensions(o)));
    }
    if (dataTypeOf(o) != dtype) {
      throw new IllegalArgumentException(
          String.format(
              "cannot copy Tensor with DataType %s into an object of type %s",
              dtype.toString(), o.getClass().getName()));
    }
    long[] oShape = new long[numDimensions()];
    fillShape(o, 0, oShape);
    for (int i = 0; i < oShape.length; ++i) {
      if (oShape[i] != shape()[i]) {
        throw new IllegalArgumentException(
            String.format(
                "cannot copy Tensor with shape %s into object with shape %s",
                Arrays.toString(shape()), Arrays.toString(oShape)));
      }
    }
  }

  private static native long allocate(int dtype, long[] shape, long byteSize);

  private static native long allocateScalarBytes(byte[] value);

  private static native void delete(long handle);

  private static native ByteBuffer buffer(long handle);

  private static native int dtype(long handle);

  private static native long[] shape(long handle);

  private static native void setValue(long handle, Object value);

  private static native float scalarFloat(long handle);

  private static native double scalarDouble(long handle);

  private static native int scalarInt(long handle);

  private static native long scalarLong(long handle);

  private static native boolean scalarBoolean(long handle);

  private static native byte[] scalarBytes(long handle);

  private static native void readNDArray(long handle, Object value);

  static {
    // Presumably loads/initializes the TensorFlow native library before any
    // native method on this class is invoked.
    TensorFlow.init();
  }
}
| |
/*
* Copyright (C) 2016 Borja Bravo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hewking.custom.textview;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.os.Build;
import androidx.core.content.ContextCompat;
import androidx.appcompat.widget.AppCompatTextView;
import android.text.SpannableStringBuilder;
import android.text.Spanned;
import android.text.TextPaint;
import android.text.method.LinkMovementMethod;
import android.text.style.ClickableSpan;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewTreeObserver;
import com.hewking.custom.R;
/**
 * A TextView that collapses long content — either to a fixed number of lines
 * ({@code TRIM_MODE_LINES}, the default) or characters ({@code TRIM_MODE_LENGTH})
 * — and appends a clickable "read more"/"read less" span that toggles between
 * the collapsed and expanded state.
 *
 * <p>Fix: {@code updateCollapsedText()} could compute a trim index past the end
 * of the text (LINES mode with a short text, where the negative-index fallback
 * {@code trimLength + 1} is not bounded by the text length), which made the
 * {@code SpannableStringBuilder(text, 0, trimEndIndex)} constructor throw
 * {@code IndexOutOfBoundsException}. The index is now clamped to
 * {@code text.length()}.
 */
public class ReadMoreTextView extends AppCompatTextView {

    private static final int TRIM_MODE_LINES = 0;
    private static final int TRIM_MODE_LENGTH = 1;
    private static final int DEFAULT_TRIM_LENGTH = 240;
    private static final int DEFAULT_TRIM_LINES = 3;
    private static final int INVALID_END_INDEX = -1;
    private static final boolean DEFAULT_SHOW_TRIM_EXPANDED_TEXT = true;
    private static final String ELLIPSIZE = "... ";

    // Original (untrimmed) text and the buffer type it was set with.
    private CharSequence text;
    private BufferType bufferType;
    // true = currently collapsed ("read more" shown); toggled by the span.
    private boolean readMore = true;
    private int trimLength;
    private CharSequence trimCollapsedText;
    private CharSequence trimExpandedText;
    private ReadMoreClickableSpan viewMoreSpan;
    private int colorClickableText;
    private boolean showTrimExpandedText;
    protected boolean enableReadmore = true;
    private int trimMode;
    // Character offset of the end of the last visible line in LINES mode.
    private int lineEndIndex;
    private int trimLines;

    public ReadMoreTextView(Context context) {
        this(context, null);
    }

    public ReadMoreTextView(Context context, AttributeSet attrs) {
        super(context, attrs);
        TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.ReadMoreTextView);
        this.trimLength = typedArray.getInt(R.styleable.ReadMoreTextView_trimLength, DEFAULT_TRIM_LENGTH);
        int resourceIdTrimCollapsedText =
                typedArray.getResourceId(R.styleable.ReadMoreTextView_trimCollapsedText, R.string.chat_button_readMore);
        int resourceIdTrimExpandedText =
                typedArray.getResourceId(R.styleable.ReadMoreTextView_trimExpandedText, R.string.chat_button_readLess);
        this.trimCollapsedText = getResources().getString(resourceIdTrimCollapsedText);
        this.trimExpandedText = getResources().getString(resourceIdTrimExpandedText);
        this.trimLines = typedArray.getInt(R.styleable.ReadMoreTextView_trimLines, DEFAULT_TRIM_LINES);
        this.colorClickableText = typedArray.getColor(R.styleable.ReadMoreTextView_colorClickableText,
                ContextCompat.getColor(context, R.color.colorAccent));
        this.showTrimExpandedText =
                typedArray.getBoolean(R.styleable.ReadMoreTextView_showTrimExpandedText, DEFAULT_SHOW_TRIM_EXPANDED_TEXT);
        this.trimMode = typedArray.getInt(R.styleable.ReadMoreTextView_trimMode, TRIM_MODE_LINES);
        this.enableReadmore = typedArray.getBoolean(R.styleable.ReadMoreTextView_enableReadmore, enableReadmore);
        typedArray.recycle();
        viewMoreSpan = new ReadMoreClickableSpan();
        onGlobalLayoutLineEndIndex();
        setText();
    }

    /** Enables/disables the read-more behaviour; re-applies trimming when enabled. */
    public void enableReadmore(boolean enable) {
        this.enableReadmore = enable;
        if (enable) {
            onGlobalLayoutLineEndIndex();
            setText();
        }
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
    }

    // Pushes the (possibly trimmed) text to the TextView and makes the
    // appended span clickable without a highlight colour.
    private void setText() {
        super.setText(getDisplayableText(), bufferType);
        setMovementMethod(LinkMovementMethod.getInstance());
        setHighlightColor(Color.TRANSPARENT);
    }

    private CharSequence getDisplayableText() {
        return getTrimmedText(text);
    }

    @Override
    public void setText(CharSequence text, BufferType type) {
        if (enableReadmore) {
            this.text = text;
            bufferType = type;
            setText();
        } else {
            super.setText(text, type);
        }
    }

    // Returns the collapsed or expanded representation of the text depending
    // on the trim mode and the current readMore state.
    private CharSequence getTrimmedText(CharSequence text) {
        if (trimMode == TRIM_MODE_LENGTH) {
            if (text != null && text.length() > trimLength) {
                if (readMore) {
                    return updateCollapsedText();
                } else {
                    return updateExpandedText();
                }
            }
        }
        if (trimMode == TRIM_MODE_LINES) {
            if (text != null && lineEndIndex > 0) {
                if (readMore) {
                    if (getLayout() != null && getLayout().getLineCount() > trimLines) {
                        return updateCollapsedText();
                    }
                } else {
                    return updateExpandedText();
                }
            }
        }
        return text;
    }

    // Builds "<prefix>... <read more>" with a clickable span on the suffix.
    private CharSequence updateCollapsedText() {
        int trimEndIndex = text.length();
        switch (trimMode) {
            case TRIM_MODE_LINES:
                // Reserve room for the ellipsis + action text on the last line.
                trimEndIndex = lineEndIndex - (ELLIPSIZE.length() + trimCollapsedText.length() + 1);
                if (trimEndIndex < 0) {
                    trimEndIndex = trimLength + 1;
                }
                break;
            case TRIM_MODE_LENGTH:
                trimEndIndex = trimLength + 1;
                break;
        }
        // Fix: never index past the end of the text (possible in LINES mode
        // via the trimLength + 1 fallback above).
        if (trimEndIndex > text.length()) {
            trimEndIndex = text.length();
        }
        SpannableStringBuilder s = new SpannableStringBuilder(text, 0, trimEndIndex)
                .append(ELLIPSIZE)
                .append(trimCollapsedText);
        return addClickableSpan(s, trimCollapsedText);
    }

    // Builds "<full text><read less>" (or just the text when the expanded
    // action is disabled).
    private CharSequence updateExpandedText() {
        if (showTrimExpandedText) {
            SpannableStringBuilder s = new SpannableStringBuilder(text, 0, text.length()).append(trimExpandedText);
            return addClickableSpan(s, trimExpandedText);
        }
        return text;
    }

    private CharSequence addClickableSpan(SpannableStringBuilder s, CharSequence trimText) {
        s.setSpan(viewMoreSpan, s.length() - trimText.length(), s.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
        return s;
    }

    public void setTrimLength(int trimLength) {
        this.trimLength = trimLength;
        setText();
    }

    public void setColorClickableText(int colorClickableText) {
        this.colorClickableText = colorClickableText;
    }

    public void setTrimCollapsedText(CharSequence trimCollapsedText) {
        this.trimCollapsedText = trimCollapsedText;
    }

    public void setTrimExpandedText(CharSequence trimExpandedText) {
        this.trimExpandedText = trimExpandedText;
    }

    public void setTrimMode(int trimMode) {
        this.trimMode = trimMode;
    }

    public void setTrimLines(int trimLines) {
        this.trimLines = trimLines;
    }

    // The clickable suffix span; toggles collapsed/expanded on tap.
    private class ReadMoreClickableSpan extends ClickableSpan {
        @Override
        public void onClick(View widget) {
            readMore = !readMore;
            setText();
        }

        @Override
        public void updateDrawState(TextPaint ds) {
            ds.setColor(colorClickableText);
        }
    }

    // In LINES mode, waits for the first layout pass so the line metrics
    // needed by refreshLineEndIndex() are available, then re-applies the trim.
    private void onGlobalLayoutLineEndIndex() {
        if (trimMode == TRIM_MODE_LINES && enableReadmore) {
            getViewTreeObserver().addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
                @Override
                public void onGlobalLayout() {
                    ViewTreeObserver obs = getViewTreeObserver();
                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
                        obs.removeOnGlobalLayoutListener(this);
                    } else {
                        obs.removeGlobalOnLayoutListener(this);
                    }
                    if (enableReadmore) {
                        refreshLineEndIndex();
                        setText();
                    }
                }
            });
        }
    }

    // Recomputes the character offset where the visible (trimLines-th) line ends.
    private void refreshLineEndIndex() {
        try {
            if (getLayout() == null) {
                return;
            }
            if (trimLines == 0) {
                lineEndIndex = getLayout().getLineEnd(0);
            } else if (trimLines > 0 && getLineCount() >= trimLines) {
                lineEndIndex = getLayout().getLineEnd(trimLines - 1);
            } else {
                lineEndIndex = INVALID_END_INDEX;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.editor.impl;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.command.impl.UndoManagerImpl;
import com.intellij.openapi.command.undo.UndoManager;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.editor.ex.MarkupModelEx;
import com.intellij.openapi.editor.ex.RangeHighlighterEx;
import com.intellij.openapi.editor.ex.RangeMarkerEx;
import com.intellij.openapi.editor.markup.HighlighterTargetArea;
import com.intellij.openapi.editor.markup.MarkupModel;
import com.intellij.openapi.editor.markup.RangeHighlighter;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.util.Trinity;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.impl.PsiDocumentManagerImpl;
import com.intellij.psi.impl.PsiToDocumentSynchronizer;
import com.intellij.testFramework.LeakHunter;
import com.intellij.testFramework.LightPlatformTestCase;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.testFramework.Timings;
import com.intellij.util.CommonProcessors;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.containers.WeakList;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.util.*;
/**
* @author mike
*/
public class RangeMarkerTest extends LightPlatformTestCase {
private PsiDocumentManagerImpl documentManager;
private PsiToDocumentSynchronizer synchronizer;
private Document document;
private PsiFile psiFile;
/**
 * Runs each test inside a write command action (so document mutations are
 * permitted), with red-black-tree verification enabled for functional tests
 * and disabled for performance tests where it would be too slow. Tests whose
 * names contain "NoVerify" or "NoCommand" opt out of the respective wrapping.
 *
 * <p>Cleanup: the original declared a {@code Throwable[] ex} holder that was
 * initialized to {@code {null}}, never written to, and then conditionally
 * rethrown — dead code, removed.
 */
@Override
protected void runTest() throws Throwable {
    if (getTestName(false).contains("NoVerify")) {
        super.runTest();
        return;
    }
    boolean oldVerify = RedBlackTree.VERIFY;
    RedBlackTree.VERIFY = !isPerformanceTest();
    try {
        if (getTestName(false).contains("NoCommand")) {
            super.runTest();
            return;
        }
        WriteCommandAction.runWriteCommandAction(getProject(), new ThrowableComputable<Void, Throwable>() {
            @Override
            public Void compute() throws Throwable {
                // Qualified super call: invoke the base runTest from the
                // anonymous class's scope.
                RangeMarkerTest.super.runTest();
                return null;
            }
        });
    }
    finally {
        // Always restore the global verification flag.
        RedBlackTree.VERIFY = oldVerify;
    }
}
@Override
protected void setUp() throws Exception {
    super.setUp();
    // Cache the project's document manager and its PSI<->document
    // synchronizer for use by the individual tests.
    documentManager = (PsiDocumentManagerImpl)PsiDocumentManager.getInstance(getProject());
    synchronizer = documentManager.getSynchronizer();
}
// A freshly created marker must report exactly the offsets it was created with.
public void testCreation() throws Exception {
    RangeMarker marker = createMarker("0123456789", 2, 5);
    assertEquals(2, marker.getStartOffset());
    assertEquals(5, marker.getEndOffset());
    assertTrue(marker.isValid());
}
// Deleting text entirely before the marker shifts both offsets left by the
// deleted length ([234] marker moves from 2..5 to 1..4).
public void testDeleteBeforeStart() throws Exception {
    RangeMarker marker = createMarker("01[234]56789");
    marker.getDocument().deleteString(0, 1);
    assertEquals(1, marker.getStartOffset());
    assertEquals(4, marker.getEndOffset());
    assertTrue(marker.isValid());
}
public void testInsertIntoRange() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.getDocument().insertString(4, "xxx");
assertEquals(2, marker.getStartOffset());
assertEquals(8, marker.getEndOffset());
assertTrue(marker.isValid());
}
public void testInsertIntoPoint() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 2);
marker.getDocument().insertString(2, "xxx");
assertEquals(2, marker.getStartOffset());
assertEquals(2, marker.getEndOffset());
assertTrue(marker.isValid());
}
public void testDeletePoint() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 2);
marker.getDocument().deleteString(1, 3);
assertFalse(marker.isValid());
}
public void testDeleteRangeInside() throws Exception {
RangeMarker marker = createMarker("0123456789", 1, 7);
marker.getDocument().deleteString(2, 5);
assertTrue(marker.isValid());
}
public void testReplaceRangeToSingleChar() throws Exception {
RangeMarker marker = createMarker("0123456789", 1, 7);
marker.getDocument().replaceString(2, 5, " ");
assertTrue(marker.isValid());
}
public void testReplaceWholeRange() throws Exception {
RangeMarker marker = createMarker("0123456789", 1, 7);
marker.getDocument().replaceString(1, 7, "abc");
assertValidMarker(marker, 1, 4);
}
public void testUpdateInvalid() throws Exception {
RangeMarker marker = createMarker("01[]23456789");
marker.getDocument().deleteString(1, 3);
assertFalse(marker.isValid());
marker.getDocument().insertString(2, "xxx");
assertEquals(2, marker.getStartOffset());
assertEquals(2, marker.getEndOffset());
assertFalse(marker.isValid());
}
public void testInsertAfterEnd() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.getDocument().insertString(6, "xxx");
assertEquals(2, marker.getStartOffset());
assertEquals(5, marker.getEndOffset());
assertTrue(marker.isValid());
}
public void testDeleteEndPart() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.getDocument().deleteString(4, 6);
assertValidMarker(marker, 2, 4);
}
public void testDeleteStartPart() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.getDocument().deleteString(0, 4);
assertValidMarker(marker, 0, 1);
}
public void testReplaceStartPartInvalid() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.getDocument().replaceString(0, 4, "xxxx");
assertValidMarker(marker, 4, 5);
}
public void testDeleteFirstChar() throws Exception {
RangeMarker marker = createMarker("0123456789", 0, 5);
marker.getDocument().deleteString(0, 1);
assertValidMarker(marker, 0, 4);
}
public void testInsertBeforeStart() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.getDocument().insertString(0, "xxx");
assertEquals(5, marker.getStartOffset());
assertEquals(8, marker.getEndOffset());
assertTrue(marker.isValid());
}
public void testInsertIntoStart() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.getDocument().insertString(2, "xxx");
assertValidMarker(marker, 5, 8);
}
public void testInsertIntoStartExpandToLeft() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.setGreedyToLeft(true);
marker.getDocument().insertString(2, "xxx");
assertEquals(2, marker.getStartOffset());
assertEquals(8, marker.getEndOffset());
assertTrue(marker.isValid());
}
public void testInsertIntoEnd() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.getDocument().insertString(5, "xxx");
assertEquals(2, marker.getStartOffset());
assertEquals(5, marker.getEndOffset());
assertTrue(marker.isValid());
}
public void testInsertIntoEndExpandRight() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.setGreedyToRight(true);
marker.getDocument().insertString(5, "xxx");
assertEquals(2, marker.getStartOffset());
assertEquals(8, marker.getEndOffset());
assertTrue(marker.isValid());
}
public void testNoNegative() throws Exception {
RangeMarker marker = createMarker("package safd;\n\n[import javax.swing.JPanel;]\nimport java.util.ArrayList;\n\nclass T{}");
marker.getDocument()
.replaceString(15, 15 + "import javax.swing.JPanel;\nimport java.util.ArrayList;".length(), "import java.util.ArrayList;");
assertEquals(15, marker.getStartOffset());
}
public void testReplaceRightIncludingFirstChar() throws Exception {
String s = "12345\n \n12345";
RangeMarker marker = createMarker(s, 6, 8);
marker.getDocument().replaceString(0, s.length(), s.replaceAll(" ", ""));
assertValidMarker(marker, 6, 7);
}
public void testDeleteRightPart() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.getDocument().deleteString(4, 6);
assertValidMarker(marker, 2, 4);
}
public void testDeleteRightPart2() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.getDocument().deleteString(4, 5);
assertValidMarker(marker, 2, 4);
}
public void testReplaceRightPartInvalid() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.getDocument().replaceString(4, 6, "xxx");
assertValidMarker(marker, 2, 4);
}
public void testDeleteWholeRange() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.getDocument().deleteString(1, 6);
assertFalse(marker.isValid());
}
public void testDeleteExactRange() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.getDocument().deleteString(2, 5);
assertValidMarker(marker, 2, 2);
}
public void testDeleteJustBeforeStart() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.getDocument().deleteString(0, 2);
assertValidMarker(marker, 0, 3);
}
public void testDeleteRightAfterEnd() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 2);
marker.getDocument().deleteString(2, 5);
assertValidMarker(marker, 2, 2);
}
public void testReplacementWithOldTextOverlap() throws Exception {
RangeMarker marker = createMarker("0123456789", 2, 5);
marker.getDocument().replaceString(0, 10, "0123456789");
assertValidMarker(marker, 2, 5);
}
// --- Psi -> Document synchronization ---
// These tests mirror every synchronizer edit on a plain StringBuilder and then
// check that, after commit, the document text matches and markers survived.
public void testPsi2Doc1() throws Exception {
  StringBuilder buffer = new StringBuilder("0123456789");
  RangeMarker marker = createMarker(buffer.toString(), 2, 5);
  synchronizer.startTransaction(getProject(), document, psiFile);
  synchronizer.insertString(document, 3, "a");
  buffer.insert(3, "a");
  synchronizer.commitTransaction(this.document);
  assertEquals(buffer.toString(), document.getText());
  assertValidMarker(marker, 2, 6);
}
public void testDocSynchronizerPrefersLineBoundaryChanges() throws Exception {
  // The committed change should be reported as one whole-line deletion, not a
  // character-level diff; the exact DocumentEvent string pins that behavior.
  String text = "import java.awt.List;\n" +
    "[import java.util.ArrayList;\n]" +
    "import java.util.HashMap;\n" +
    "import java.util.Map;";
  RangeMarker marker = createMarker(text);
  synchronizer.startTransaction(getProject(), document, psiFile);
  String newText = StringUtil.replaceSubstring(document.getText(), TextRange.create(marker), "");
  synchronizer.replaceString(document, 0, document.getTextLength(), newText);
  final List<DocumentEvent> events = new ArrayList<DocumentEvent>();
  document.addDocumentListener(new DocumentAdapter() {
    @Override
    public void documentChanged(DocumentEvent e) {
      events.add(e);
    }
  });
  synchronizer.commitTransaction(document);
  assertEquals(newText, document.getText());
  DocumentEvent event = assertOneElement(events);
  assertEquals("DocumentEventImpl[myOffset=22, myOldLength=28, myNewLength=0, myOldString='import java.util.ArrayList;\n', myNewString=''].", event.toString());
}
public void testPsi2DocReplaceAfterAdd() throws Exception {
  StringBuilder buffer = new StringBuilder("0123456789");
  RangeMarker marker = createMarker(buffer.toString(), 2, 5);
  synchronizer.startTransaction(getProject(), document, psiFile);
  synchronizer.insertString(document, 1, "a");
  buffer.insert(1, "a");
  synchronizer.replaceString(document, 3, 4, "a");
  buffer.replace(3, 4, "a");
  synchronizer.commitTransaction(document);
  assertEquals(buffer.toString(), document.getText());
  assertValidMarker(marker, 3, 6);
}
/**
 * PSI->document sync: a replace overlapping an earlier replace merges into the
 * same affected fragment, leaving two fragments (the insert plus the merged replace).
 */
public void testPsi2DocMergeReplaceAfterAdd() throws Exception {
  StringBuilder buffer = new StringBuilder("0123456789");
  RangeMarker marker = createMarker(buffer.toString(), 2, 5);
  synchronizer.startTransaction(getProject(), document, psiFile);
  synchronizer.insertString(document, 1, "a");
  buffer.insert(1, "a");
  synchronizer.replaceString(document, 3, 4, "a");
  buffer.replace(3, 4, "a");
  synchronizer.replaceString(document, 3, 5, "bb");
  buffer.replace(3, 5, "bb");
  final PsiToDocumentSynchronizer.DocumentChangeTransaction transaction = synchronizer.getTransaction(document);
  final Set<Pair<PsiToDocumentSynchronizer.MutableTextRange, StringBuffer>> affectedFragments = transaction.getAffectedFragments();
  // Fix: expected value goes first (the arguments were swapped, which would
  // produce a misleading failure message); matches the sibling assertions.
  assertEquals(2, affectedFragments.size());
  synchronizer.commitTransaction(document);
  assertEquals(buffer.toString(), document.getText());
  assertValidMarker(marker, 3, 6);
}
public void testPsi2DocMergeReplaceWithMultipleAdditions() throws Exception {
  // A replace followed by adjacent inserts should all coalesce into one fragment.
  StringBuilder buffer = new StringBuilder("0123456789");
  RangeMarker marker = createMarker(buffer.toString(), 2, 5);
  synchronizer.startTransaction(getProject(), document, psiFile);
  synchronizer.replaceString(document, 0, 10, "0");
  buffer.replace(0, 10, "0");
  for (int i = 1; i < 10; i++) {
    synchronizer.insertString(document, i, "" + i);
    buffer.insert(i, "" + i);
  }
  final PsiToDocumentSynchronizer.DocumentChangeTransaction transaction = synchronizer.getTransaction(document);
  final Set<Pair<PsiToDocumentSynchronizer.MutableTextRange, StringBuffer>> affectedFragments = transaction.getAffectedFragments();
  assertEquals(1, affectedFragments.size());
  synchronizer.commitTransaction(document);
  assertEquals(buffer.toString(), document.getText());
  assertValidMarker(marker, 2, 5);
}
public void testPsi2DocMergeMultipleAdditionsWithReplace() throws Exception {
  // Mirror image of the test above: inserts first, then a replace spanning them.
  StringBuilder buffer = new StringBuilder("0123456789");
  RangeMarker marker = createMarker(buffer.toString(), 2, 5);
  synchronizer.startTransaction(getProject(), document, psiFile);
  final PsiToDocumentSynchronizer.DocumentChangeTransaction transaction = synchronizer.getTransaction(document);
  final Set<Pair<PsiToDocumentSynchronizer.MutableTextRange, StringBuffer>> affectedFragments = transaction.getAffectedFragments();
  for (int i = 0; i < 10; i++) {
    synchronizer.insertString(document, i, "" + i);
    buffer.insert(i, "" + i);
  }
  assertEquals(1, affectedFragments.size());
  synchronizer.replaceString(document, 0, 20, "0123456789");
  buffer.replace(0, 20, "0123456789");
  assertEquals(1, affectedFragments.size());
  synchronizer.commitTransaction(document);
  assertEquals(buffer.toString(), document.getText());
  assertValidMarker(marker, 2, 5);
}
public void testPsi2DocSurround() throws Exception {
  // Inserts on both sides of a replace stay as three separate fragments.
  StringBuilder buffer = new StringBuilder("0123456789");
  RangeMarker marker = createMarker(buffer.toString(), 2, 5);
  synchronizer.startTransaction(getProject(), document, psiFile);
  synchronizer.replaceString(document, 3, 5, "3a4");
  buffer.replace(3, 5, "3a4");
  synchronizer.insertString(document, 3, "b");
  buffer.insert(3, "b");
  synchronizer.insertString(document, 7, "d");
  buffer.insert(7, "d");
  final PsiToDocumentSynchronizer.DocumentChangeTransaction transaction = synchronizer.getTransaction(document);
  final Set<Pair<PsiToDocumentSynchronizer.MutableTextRange, StringBuffer>> affectedFragments = transaction.getAffectedFragments();
  assertEquals(3, affectedFragments.size());
  synchronizer.commitTransaction(document);
  assertEquals(buffer.toString(), document.getText());
  assertValidMarker(marker, 2, 7);
}
public void testPsi2DocForwardRangesChanges() throws Exception {
  StringBuilder buffer = new StringBuilder("0123456789");
  RangeMarker marker = createMarker(buffer.toString(), 2, 5);
  synchronizer.startTransaction(getProject(), document, psiFile);
  synchronizer.replaceString(document, 4, 5, "3a4");
  buffer.replace(4, 5, "3a4");
  synchronizer.insertString(document, 7, "b");
  buffer.insert(7, "b");
  synchronizer.insertString(document, 1, "b");
  buffer.insert(1, "b");
  synchronizer.commitTransaction(document);
  assertEquals(buffer.toString(), document.getText());
  assertValidMarker(marker, 3, 8);
}
/** Asserts the marker is still valid and spans exactly [start, end). */
private static void assertValidMarker(@NotNull RangeMarker marker, int start, int end) {
  assertTrue(marker.isValid());
  assertEquals(start, marker.getStartOffset());
  assertEquals(end, marker.getEndOffset());
}
// --- Interval-tree structural tests. ---
// Several markers below are created and never read again: they exist purely to
// populate the marker tree into specific shapes before an edit/dispose, so the
// tree's rebalancing/verification code (RedBlackTree.VERIFY) is exercised.
public void testNested() {
  RangeMarker marker1 = createMarker("0[12345678]9");
  Document document = marker1.getDocument();
  RangeMarker marker2 = document.createRangeMarker(2, 5);
  RangeMarker marker3 = document.createRangeMarker(3, 4);
  document.insertString(0, "x");
  assertEquals(2, marker1.getStartOffset());
  assertEquals(3, marker2.getStartOffset());
  assertEquals(4, marker3.getStartOffset());
}
public void testNestedAfter() {
  RangeMarker marker1 = createMarker("0[12345678]90123");
  Document document = marker1.getDocument();
  RangeMarker marker2 = document.createRangeMarker(2, 5);
  RangeMarker marker3 = document.createRangeMarker(3, 4);
  document.insertString(10, "x");
  assertEquals(1, marker1.getStartOffset());
  assertEquals(2, marker2.getStartOffset());
  assertEquals(3, marker3.getStartOffset());
}
public void testNested3() {
  RangeMarker marker1 = createMarker("01[23]4567890123");
  DocumentEx document = (DocumentEx)marker1.getDocument();
  RangeMarker marker2 = document.createRangeMarker(9, 11);
  RangeMarker marker3 = document.createRangeMarker(1, 12);
  marker3.dispose();
  document.deleteString(marker1.getEndOffset(), marker2.getStartOffset());
}
public void testBranched() {
  RangeMarker marker1 = createMarker("01234567890123456", 0, 1);
  DocumentEx document = (DocumentEx)marker1.getDocument();
  RangeMarker marker2 = document.createRangeMarker(2, 3);
  RangeMarker marker3 = document.createRangeMarker(4, 5);
  RangeMarker marker4 = document.createRangeMarker(6, 7);
  RangeMarker marker5 = document.createRangeMarker(8, 9);
  RangeMarker marker6 = document.createRangeMarker(10, 11);
  RangeMarker marker7 = document.createRangeMarker(12, 13);
  RangeMarker marker8 = document.createRangeMarker(14, 15);
  document.deleteString(1, 2);
}
public void testDevourMarkerWithDeletion() {
  RangeMarker marker1 = createMarker("012345[67890123456]7");
  DocumentEx document = (DocumentEx)marker1.getDocument();
  document.deleteString(1, document.getTextLength());
}
public void testLL() {
  RangeMarker marker1 = createMarker("012345678901234567", 5,6);
  DocumentEx document = (DocumentEx)marker1.getDocument();
  document.createRangeMarker(4, 5);
  document.createRangeMarker(6, 7);
  document.createRangeMarker(0, 4);
  document.deleteString(1, 2);
  document.createRangeMarker(0, 7);
  document.createRangeMarker(0, 7);
}
public void testSwap() {
  RangeMarkerEx marker1 = createMarker("012345678901234567", 5,6);
  DocumentEx document = (DocumentEx)marker1.getDocument();
  document.createRangeMarker(3, 5);
  document.createRangeMarker(6, 7);
  document.createRangeMarker(4, 4);
  marker1.dispose();
}
public void testX() {
  RangeMarkerEx marker1 = createMarker(StringUtil.repeatSymbol(' ', 10), 3,6);
  DocumentEx document = (DocumentEx)marker1.getDocument();
  document.createRangeMarker(2, 3);
  document.createRangeMarker(3, 8);
  document.createRangeMarker(7, 9);
  RangeMarkerEx r1 = (RangeMarkerEx)document.createRangeMarker(6, 8);
  r1.dispose();
  marker1.dispose();
}
/** Creates one range marker per consecutive (start, end) pair in {@code offsets}. */
private static List<RangeMarker> add(DocumentEx document, int... offsets) {
  final List<RangeMarker> created = new ArrayList<RangeMarker>();
  int i = 0;
  while (i < offsets.length) {
    created.add(document.createRangeMarker(offsets[i], offsets[i + 1]));
    i += 2;
  }
  return created;
}
/** Disposes the markers at the given positions in {@code mm} (list is not modified). */
private static void delete(List<RangeMarker> mm, int... indexes) {
  for (int i = 0; i < indexes.length; i++) {
    mm.get(indexes[i]).dispose();
  }
}
public void testX2() {
  // Minimized reproduction: dispose a marker enclosing the others.
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 10));
  List<RangeMarker> mm =
    add(document, 2,9, 0,0, 7,7
    );
  delete(mm, 0);
}
public void testX3() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 10));
  List<RangeMarker> mm =
    add(document, 1,9, 8,8, 8,8, 0,5, 4,5
    );
  delete(mm, 0);
}
// Disabled fuzzer (leading underscore): creates random markers then disposes them in
// random order forever; on failure it prints the add/dispose sequence so the case can
// be turned into a deterministic testX*-style reproduction above.
public void _testRandomAddRemove() {
  int N = 100;
  for (int ti=0; ;ti++) {
    if (ti%10000 ==0) System.out.println(ti);
    DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', N));
    Random gen = new Random();
    List<Pair<RangeMarker, TextRange>> adds = new ArrayList<Pair<RangeMarker, TextRange>>();
    List<Pair<RangeMarker, TextRange>> dels = new ArrayList<Pair<RangeMarker, TextRange>>();
    try {
      for (int i = 0; i < 30; i++) {
        int x = gen.nextInt(N);
        int y = x + gen.nextInt(N - x);
        if (gen.nextBoolean()) {
          x = 0;
          y = document.getTextLength();
        }
        RangeMarkerEx r = (RangeMarkerEx)document.createRangeMarker(x, y);
        adds.add(Pair.create((RangeMarker)r, TextRange.create(r)));
      }
      List<Pair<RangeMarker, TextRange>> candidates = new ArrayList<Pair<RangeMarker, TextRange>>(adds);
      while (!candidates.isEmpty()) {
        int size = candidates.size();
        int x = gen.nextInt(size);
        Pair<RangeMarker, TextRange> c = candidates.remove(x);
        RangeMarkerEx r = (RangeMarkerEx)c.first;
        assertEquals(size-1, candidates.size());
        dels.add(c);
        r.dispose();
      }
    }
    catch (AssertionError e) {
      // Failure path only: dump the recorded sequence for minimization.
      String s= "adds: ";
      for (Pair<RangeMarker, TextRange> c : adds) {
        TextRange t = c.second;
        s += t.getStartOffset() + "," + t.getEndOffset() + ", ";
      }
      s += "\ndels: ";
      for (Pair<RangeMarker, TextRange> c : dels) {
        int index = adds.indexOf(c);
        assertSame(c, adds.get(index));
        s += index + ", ";
      }
      System.err.println(s);
      throw e;
    }
  }
}
/**
 * Applies replace edits described as consecutive (offset, oldLength, newLength)
 * triples; each removed span is replaced by {@code newLength} spaces.
 */
private static void edit(DocumentEx document, int... offsets) {
  int i = 0;
  while (i < offsets.length) {
    final int at = offsets[i];
    final int removed = offsets[i + 1];
    final int inserted = offsets[i + 2];
    document.replaceString(at, at + removed, StringUtil.repeatSymbol(' ', inserted));
    i += 3;
  }
}
// --- testE1..testE17: deterministic reproductions found by the random fuzzers. ---
// Each one rebuilds a specific marker-tree shape via add(), mutates the document
// via edit() (offset, oldLength, newLength triples), and optionally disposes
// markers via delete(); tree invariants are checked by RedBlackTree.VERIFY in runTest().
public void testE1() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 10));
  List<RangeMarker> mm =
    add(document, 3,5, 0,1, 9,9
    );
  edit(document, 3,6,0);
  delete(mm, 0);
}
public void testE2() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 10));
  List<RangeMarker> mm =
    add(document, 0,3, 6,9, 8,8
    );
  edit(document, 0,3,0);
  delete(mm, 0);
}
public void testE3() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 10));
  List<RangeMarker> mm =
    add(document, 4,5, 6,8, 3,4, 4,9, 2,9
    );
  edit(document, 4,6,0);
  delete(mm, 0);
}
public void testE4() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 10));
  List<RangeMarker> mm =
    add(document, 3,5, 5,6, 4,8, 6,9, 8,9
    );
  edit(document, 6,0,0, 3,0,2);
  delete(mm, 1,0);
}
public void testE5() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 10));
  List<RangeMarker> mm =
    add(document, 9,9, 4,4, 1,7, 7,7, 4,7
    );
  edit(document, 1,5,0);
  delete(mm, 3);
}
public void testE6() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 10));
  List<RangeMarker> mm =
    add(document, 4,8, 4,4, 4,9, 0,2, 6,8
    );
  edit(document, 3,2,0);
}
public void testE7() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 10));
  List<RangeMarker> mm =
    add(document, 6,7, 0,3, 3,6, 5,9, 2,9
    );
  edit(document, 5,2,0);
}
public void testE8() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 10));
  List<RangeMarker> mm =
    add(document, 5,5, 8,8, 1,3, 3,9
    );
  edit(document, 4,3,0);
}
public void testE9() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 10));
  List<RangeMarker> mm =
    add(document, 4,5, 9,9, 1,2, 0,3
    );
  edit(document, 0,3,0);
}
public void testE10() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 10));
  List<RangeMarker> mm =
    add(document, 9,9, 6,8, 8,8, 5,9
    );
  edit(document, 2,6,0, 2,0,4);
}
public void testE11() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 10));
  List<RangeMarker> mm =
    add(document, 9,9, 7,7, 1,6, 3,7
    );
  //edit(document, 0,0,0);
  delete(mm, 1);
}
public void testE12() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 10));
  List<RangeMarker> mm =
    add(document, 3,3, 8,8, 5,5, 5,6
    );
  edit(document, 2,0,2);
  delete(mm, 2);
}
public void testE13() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 10));
  List<RangeMarker> mm = add(document, 5,9, 9,9, 7,7, 6,8);
  edit(document, 2,1,0);
  delete(mm, 0, 2);
}
public void testE14() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 100));
  List<RangeMarker> mm = add(document, 6,11, 2,13, 17,17, 13,19, 2,3, 9,10, 10,11, 14,14, 1,3, 4,12, 14,15, 3,10, 14,14, 4,4, 4,8, 6,14, 8,16, 2,12, 11,19, 10,13
  );
  edit(document, 19,0,0, 7,3,0, 16,0,3);
}
public void testE15() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 100));
  List<RangeMarker> mm = add(document, 90,93, 0,9, 44,79, 4,48, 44,99, 53,64, 59,82, 12,99, 81,86, 8,40, 24,55, 32,50, 74,79, 14,94, 7,14
  );
  edit(document, 34,0,4, 99,0,3);
}
public void testE16() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 100));
  List<RangeMarker> mm = add(document, 29,63, 47,52, 72,86, 19,86, 13,55, 18,57, 92,95, 83,99, 41,80, 53,85, 10,30, 28,44, 23,32, 70,95, 14,28
  );
  edit(document, 67,5,0, 1,0,4);
  delete(mm, 11);
}
public void testE17() {
  DocumentEx document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', 100));
  List<RangeMarker> mm = add(document, 15,85, 79,88, 90,94, 43,67, 54,89, 81,98, 1,34, 58,93, 22,23, 44,45, 63,84, 45,76, 58,87, 40,59, 5,81, 95,95, 12,61, 52,65, 80,95, 6,16, 7,67, 59,63, 91,96, 99,99, 50,96, 72,78, 78,78, 85,85, 5,51, 90,91
  );
  edit(document, 20,26,0, 15,0,4, 64,4,0);
}
// Randomized stress test; "NoCommand" in the name makes runTest() skip the write
// command wrapper so each iteration can open its own WriteCommandAction.
public void testRandomEdit_NoCommand() {
  final int N = 100;
  final Random gen = new Random();
  int N_TRIES = Timings.adjustAccordingToMySpeed(7000, false);
  System.out.println("N_TRIES = " + N_TRIES);
  DocumentEx document = null;
  for (int tryn=0; tryn < N_TRIES;tryn++) {
    // Drop undo state from the previous iteration so it cannot keep the old
    // document (and its markers) alive or interfere with this run.
    ((UndoManagerImpl)UndoManager.getInstance(getProject())).flushCurrentCommandMerger();
    ((UndoManagerImpl)UndoManager.getGlobalInstance()).flushCurrentCommandMerger();
    if (document != null) {
      ((UndoManagerImpl)UndoManager.getInstance(getProject())).clearUndoRedoQueueInTests(document);
      ((UndoManagerImpl)UndoManager.getGlobalInstance()).clearUndoRedoQueueInTests(document);
    }
    if (tryn % 10000 == 0) {
      System.out.println(tryn);
    }
    document = (DocumentEx)EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol(' ', N));
    final DocumentEx finalDocument = document;
    new WriteCommandAction(getProject()) {
      @Override
      protected void run(@NotNull Result result) throws Exception {
        // adds/edits/dels record the full action sequence so a failing run can
        // be replayed as a deterministic testE*-style reproduction.
        List<Pair<RangeMarker, TextRange>> adds = new ArrayList<Pair<RangeMarker, TextRange>>();
        List<Pair<RangeMarker, TextRange>> dels = new ArrayList<Pair<RangeMarker, TextRange>>();
        List<Trinity<Integer, Integer, Integer>> edits = new ArrayList<Trinity<Integer, Integer, Integer>>();
        try {
          for (int i = 0; i < 30; i++) {
            int x = gen.nextInt(N);
            int y = x + gen.nextInt(N - x);
            RangeMarkerEx r = (RangeMarkerEx)finalDocument.createRangeMarker(x, y);
            adds.add(Pair.create((RangeMarker)r, TextRange.create(r)));
          }
          for (int i = 0; i < 10; i++) {
            int offset = gen.nextInt(finalDocument.getTextLength());
            if (gen.nextBoolean()) {
              int length = gen.nextInt(5);
              edits.add(Trinity.create(offset, 0, length));
              finalDocument.insertString(offset, StringUtil.repeatSymbol(' ', length));
            }
            else {
              int length = gen.nextInt(finalDocument.getTextLength() - offset);
              edits.add(Trinity.create(offset, length, 0));
              finalDocument.deleteString(offset, offset + length);
            }
          }
          List<Pair<RangeMarker, TextRange>> candidates = new ArrayList<Pair<RangeMarker, TextRange>>(adds);
          while (!candidates.isEmpty()) {
            int size = candidates.size();
            int x = gen.nextInt(size);
            Pair<RangeMarker, TextRange> c = candidates.remove(x);
            RangeMarkerEx r = (RangeMarkerEx)c.first;
            assertEquals(size - 1, candidates.size());
            dels.add(c);
            r.dispose();
          }
        }
        catch (AssertionError e) {
          // Failure path only: dump the recorded sequence for minimization.
          String s = "adds: ";
          for (Pair<RangeMarker, TextRange> c : adds) {
            TextRange t = c.second;
            s += t.getStartOffset() + "," + t.getEndOffset() + ", ";
          }
          s += "\nedits: ";
          for (Trinity<Integer, Integer, Integer> edit : edits) {
            s += edit.first + "," + edit.second + "," + edit.third + ", ";
          }
          s += "\ndels: ";
          for (Pair<RangeMarker, TextRange> c : dels) {
            int index = adds.indexOf(c);
            assertSame(c, adds.get(index));
            s += index + ", ";
          }
          System.err.println(s);
          throw e;
        }
      }
    }.execute();
  }
}
/** Creates a file "x.txt" with {@code text} and returns a marker over [start, end); sets {@link #psiFile}. */
private RangeMarkerEx createMarker(String text, final int start, final int end) {
  psiFile = createFile("x.txt", text);
  return createMarker(psiFile, start, end);
}
/** Returns a marker over [start, end) in the given file's document; sets {@link #document}. */
private RangeMarkerEx createMarker(PsiFile psiFile, final int start, final int end) {
  document = documentManager.getDocument(psiFile);
  return (RangeMarkerEx)document.createRangeMarker(start, end);
}
/**
 * Parses a marked-up string like {@code "01[234]56789"}: the single '[' ']' pair
 * denotes the marker range; the brackets themselves are stripped from the text.
 */
private RangeMarkerEx createMarker(@NonNls String string) {
  final int open = string.indexOf('[');
  assertTrue(open >= 0);
  final String withoutOpen = string.replace("[", "");
  final int close = withoutOpen.indexOf(']');
  assertTrue(close >= 0);
  return createMarker(withoutOpen.replace("]", ""), open, close);
}
// "NoVerify" in the name makes runTest() skip tree verification, so the
// unreferenced markers created here are eligible for garbage collection.
public void testRangeMarkersAreWeakReferenced_NoVerify() throws Exception {
  final Document document = EditorFactory.getInstance().createDocument("[xxxxxxxxxxxxxx]");
  for (int i = 0; i < 10; i++) {
    document.createRangeMarker(0, document.getTextLength());
  }
  LeakHunter.checkLeak(document, RangeMarker.class);
}
public void testRangeMarkersAreLazyCreated() throws Exception {
  // Markers with identical ranges share one interval-tree node until one of them
  // diverges (e.g. becomes greedy); sizes below track markers vs. nodes.
  final Document document = EditorFactory.getInstance().createDocument("[xxxxxxxxxxxxxx]");
  RangeMarker m1 = document.createRangeMarker(2, 4);
  RangeMarker m2 = document.createRangeMarker(2, 4);
  assertEquals(2, ((DocumentImpl)document).getRangeMarkersSize());
  assertEquals(1, ((DocumentImpl)document).getRangeMarkersNodeSize());
  RangeMarker m3 = document.createRangeMarker(2, 5);
  assertEquals(2, ((DocumentImpl)document).getRangeMarkersNodeSize());
  document.deleteString(4,5);
  assertTrue(m1.isValid());
  assertTrue(m2.isValid());
  assertTrue(m3.isValid());
  // After the deletion m3 collapses onto the same range, so nodes merge back to one.
  assertEquals(1, ((DocumentImpl)document).getRangeMarkersNodeSize());
  m1.setGreedyToLeft(true);
  assertTrue(m1.isValid());
  assertEquals(3, ((DocumentImpl)document).getRangeMarkersSize());
  assertEquals(2, ((DocumentImpl)document).getRangeMarkersNodeSize());
  m3.dispose();
  assertTrue(m1.isValid());
  assertTrue(m2.isValid());
  assertFalse(m3.isValid());
  assertEquals(2, ((DocumentImpl)document).getRangeMarkersSize());
  assertEquals(2, ((DocumentImpl)document).getRangeMarkersNodeSize());
}
public void testRangeHighlightersRecreateBug() throws Exception {
  Document document = EditorFactory.getInstance().createDocument("[xxxxxxxxxxxxxx]");
  MarkupModel markupModel = DocumentMarkupModel.forDocument(document, ourProject, true);
  // Two add/removeAll cycles; the highlighter locals are unused on purpose —
  // only their registration in the markup model matters.
  for (int i=0; i<2; i++) {
    RangeMarker m = markupModel.addRangeHighlighter(1, 6, 0, null, HighlighterTargetArea.EXACT_RANGE);
    RangeMarker m2 = markupModel.addRangeHighlighter(2, 7, 0, null, HighlighterTargetArea.EXACT_RANGE);
    RangeMarker m3 = markupModel.addRangeHighlighter(1, 6, 0, null, HighlighterTargetArea.EXACT_RANGE);
    markupModel.removeAllHighlighters();
  }
}
public void testValidationBug() throws Exception {
  Document document = EditorFactory.getInstance().createDocument("[xxxxxxxxxxxxxx]");
  final Editor editor = EditorFactory.getInstance().createEditor(document);
  try {
    final FoldRegion[] fold = new FoldRegion[1];
    editor.getFoldingModel().runBatchFoldingOperation(new Runnable() {
      @Override
      public void run() {
        fold[0] = editor.getFoldingModel().addFoldRegion(0, 2, "");
      }
    });
    RangeMarker marker = document.createRangeMarker(0, 2);
    document.deleteString(1,2);
    assertTrue(marker.isValid());
    //assertFalse(fold[0].isValid());
  }
  finally {
    // Editors must be released explicitly or the platform reports a leak.
    EditorFactory.getInstance().releaseEditor(editor);
  }
}
public void testPersistent() throws Exception {
  // Persistent (survivable) markers stay valid across edits that would kill a normal marker.
  String text = "xxx\nzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz";
  Document document = EditorFactory.getInstance().createDocument(text);
  int startOffset = text.indexOf('z');
  int endOffset = text.lastIndexOf('z');
  RangeMarker marker = document.createRangeMarker(startOffset, endOffset, true);
  document.replaceString(startOffset+1, endOffset-1, "ccc");
  assertTrue(marker.isValid());
}
public void testPersistentMarkerDoesntImpactNormalMarkers() {
  // Same range, different survivability: the multi-line replace kills only the normal marker.
  Document doc = new DocumentImpl("text");
  RangeMarker normal = doc.createRangeMarker(1, 3);
  RangeMarker persistent = doc.createRangeMarker(1, 3, true);
  doc.replaceString(0, 4, "before\ntext\nafter");
  assertTrue(persistent.isValid());
  assertFalse(normal.isValid());
}
public void testMoveTextRetargetsMarkers() throws Exception {
  // moveText relocates a fragment; markers inside the moved fragment follow it.
  RangeMarkerEx marker1 = createMarker("01234567890", 1, 3);
  DocumentEx document = (DocumentEx)marker1.getDocument();
  RangeMarker marker2 = document.createRangeMarker(2, 4);
  document.moveText(0, 5, 8);
  assertEquals("56701234890", document.getText());
  assertValidMarker(marker1, 4, 6);
  assertValidMarker(marker2, 5, 7);
}
public void testMoveTextToTheBeginningRetargetsMarkers() throws Exception {
  RangeMarkerEx marker1 = createMarker("01234567890", 5, 5);
  DocumentEx document = (DocumentEx)marker1.getDocument();
  RangeMarker marker2 = document.createRangeMarker(5, 7);
  document.moveText(4, 7, 1);
  assertEquals("04561237890", document.getText());
  assertValidMarker(marker1, 2, 2);
  assertValidMarker(marker2, 2, 4);
}
public void testRangeHighlighterDisposeVsRemoveAllConflict() throws Exception {
  // Disposing a highlighter after removeAllHighlighters() must be a harmless no-op.
  Document document = EditorFactory.getInstance().createDocument("[xxxxxxxxxxxxxx]");
  MarkupModel markupModel = DocumentMarkupModel.forDocument(document, ourProject, true);
  RangeMarker m = markupModel.addRangeHighlighter(1, 6, 0, null, HighlighterTargetArea.EXACT_RANGE);
  assertTrue(m.isValid());
  markupModel.removeAllHighlighters();
  assertFalse(m.isValid());
  assertEmpty(markupModel.getAllHighlighters());
  m.dispose();
  assertFalse(m.isValid());
}
public void testRangeHighlighterLinesInRangeForLongLinePerformance() throws Exception {
  // One LINES_IN_RANGE highlighter on a single huge line must not make every
  // overlap query degenerate to scanning the whole line.
  final int N = 50000;
  Document document = EditorFactory.getInstance().createDocument(StringUtil.repeatSymbol('x', 2 * N));
  final MarkupModelEx markupModel = (MarkupModelEx)DocumentMarkupModel.forDocument(document, ourProject, true);
  for (int i=0; i<N-1;i++) {
    markupModel.addRangeHighlighter(2*i, 2*i+1, 0, null, HighlighterTargetArea.EXACT_RANGE);
  }
  markupModel.addRangeHighlighter(N / 2, N / 2 + 1, 0, null, HighlighterTargetArea.LINES_IN_RANGE);
  PlatformTestUtil.startPerformanceTest("slow highlighters lookup", (int)(N*Math.log(N)/1000), new ThrowableRunnable() {
    @Override
    public void run() {
      List<RangeHighlighterEx> list = new ArrayList<RangeHighlighterEx>();
      CommonProcessors.CollectProcessor<RangeHighlighterEx> coll = new CommonProcessors.CollectProcessor<RangeHighlighterEx>(list);
      for (int i=0; i<N-1;i++) {
        list.clear();
        markupModel.processRangeHighlightersOverlappingWith(2*i, 2*i+1, coll);
        assertEquals(2, list.size()); // 1 line plus one exact range marker
      }
    }
  }).assertTiming();
}
public void testRangeHighlighterIteratorOrder() throws Exception {
  // Overlap processing yields the LINES_IN_RANGE highlighter before the EXACT_RANGE one.
  Document document = EditorFactory.getInstance().createDocument("1234567890");
  final MarkupModelEx markupModel = (MarkupModelEx)DocumentMarkupModel.forDocument(document, ourProject, true);
  RangeHighlighter exact = markupModel.addRangeHighlighter(3, 6, 0, null, HighlighterTargetArea.EXACT_RANGE);
  RangeHighlighter line = markupModel.addRangeHighlighter(4, 5, 0, null, HighlighterTargetArea.LINES_IN_RANGE);
  List<RangeHighlighter> list = new ArrayList<RangeHighlighter>();
  markupModel.processRangeHighlightersOverlappingWith(2, 9, new CommonProcessors.CollectProcessor<RangeHighlighter>(list));
  assertEquals(Arrays.asList(line, exact), list);
}
// Lifecycle test for lazy range markers: they stay "lazy" (undelegated) until the
// underlying document is modified, and disposing them removes them from the
// per-file registry.
public void testLazyRangeMarkers() {
psiFile = createFile("x.txt", "xxx");
LazyRangeMarkerFactoryImpl factory = (LazyRangeMarkerFactoryImpl)LazyRangeMarkerFactory.getInstance(getProject());
VirtualFile virtualFile = psiFile.getVirtualFile();
LazyRangeMarkerFactoryImpl.LazyMarker marker = (LazyRangeMarkerFactoryImpl.LazyMarker)factory.createRangeMarker(virtualFile, 0);
// The new marker is registered for the file and is the only one there.
WeakList<LazyRangeMarkerFactoryImpl.LazyMarker> markers = LazyRangeMarkerFactoryImpl.getMarkers(virtualFile);
assertSame(marker, assertOneElement(markers));
// Reading the offset alone must NOT force delegation to a real range marker.
assertFalse(marker.isDelegated());
assertTrue(marker.isValid());
assertEquals(0, marker.getStartOffset());
assertFalse(marker.isDelegated());
// Disposal invalidates the marker and unregisters it.
marker.dispose();
assertFalse(marker.isValid());
assertEmpty(LazyRangeMarkerFactoryImpl.getMarkers(virtualFile));
// Second marker: a document change must flip it into the delegated state
// while keeping it valid, and delegation also unregisters the lazy wrapper.
marker = (LazyRangeMarkerFactoryImpl.LazyMarker)factory.createRangeMarker(virtualFile, 0);
assertFalse(marker.isDelegated());
assertTrue(marker.isValid());
assertEquals(0, marker.getStartOffset());
assertFalse(marker.isDelegated());
Document document = marker.getDocument();
document.insertString(2, "yyy");
assertTrue(marker.isDelegated());
assertTrue(marker.isValid());
assertEquals(0, marker.getStartOffset());
assertEmpty(LazyRangeMarkerFactoryImpl.getMarkers(virtualFile));
// Disposing an already-delegated marker must not re-register anything.
marker.dispose();
assertEmpty(LazyRangeMarkerFactoryImpl.getMarkers(virtualFile));
}
}
| |
/* ========================================================================= *
* Boarder *
* http://boarder.mikuz.org/ *
* ========================================================================= *
* Copyright (C) 2013 Boarder *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
* ========================================================================= */
package fi.mikuz.boarder.util.editor;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.ListIterator;
import android.content.Context;
import android.util.Log;
import fi.mikuz.boarder.component.soundboard.GraphicalSound;
import fi.mikuz.boarder.component.soundboard.GraphicalSoundboard;
import fi.mikuz.boarder.component.soundboard.GraphicalSoundboardHolder;
import fi.mikuz.boarder.util.FileProcessor;
/**
* Provides the soundboard editor a relevant soundboard from soundboard holder.
*/
/**
 * Provides the soundboard editor a relevant soundboard from soundboard holder.
 */
public class GraphicalSoundboardProvider {

    public static final String TAG = GraphicalSoundboardProvider.class.getSimpleName();

    /** How a page change should be applied. */
    public enum OverridePage {OVERRIDE_CURRENT, OVERRIDE_NEW, NO_OVERRIDE}

    /** Holder owning every board page managed by this provider. */
    GraphicalSoundboardHolder boardHolder;

    /**
     * Creates new provider with specified orientation.
     *
     * @param orientation screen orientation constant (see GraphicalSoundboard)
     */
    public GraphicalSoundboardProvider(int orientation) {
        boardHolder = new GraphicalSoundboardHolder();
        setOrientationMode(orientation);
        GraphicalSoundboard initialGsb = new GraphicalSoundboard();
        boardHolder.allocateBoardResources(initialGsb);
    }

    /**
     * Loads provider for boardName.
     *
     * @param boardName name of the saved board to load
     */
    public GraphicalSoundboardProvider(String boardName) {
        try {
            boardHolder = FileProcessor.loadGraphicalSoundboardHolder(boardName, true);
        } catch (IOException e) {
            Log.w(TAG, "Unable to load board holder", e);
            // Bug fix: previously boardHolder stayed null after a failed load,
            // so every later call on this provider crashed with an NPE.
            boardHolder = new GraphicalSoundboardHolder();
        }
    }

    public GraphicalSoundboardHolder.OrientationMode getOrientationMode() {
        return boardHolder.getOrientationMode();
    }

    public void setOrientationMode(GraphicalSoundboardHolder.OrientationMode orientationMode) {
        boardHolder.setOrientationMode(orientationMode);
    }

    /**
     * Sets the orientation mode from a screen orientation constant.
     * NOTE(review): an unknown constant maps to a null mode — confirm the holder tolerates null.
     */
    public void setOrientationMode(int screenOrientation) {
        GraphicalSoundboardHolder.OrientationMode orientationMode = screenOrientationToOrientationMode(screenOrientation);
        setOrientationMode(orientationMode);
    }

    /**
     * Adds a new page with the preferred orientation.
     *
     * @return the allocated board page
     */
    public GraphicalSoundboard addBoardPage(int preferredOrientation) {
        return addBoard(preferredOrientation);
    }

    /** Allocates holder resources for a fresh board built from a template. */
    private GraphicalSoundboard addBoard(int preferredOrientation) {
        GraphicalSoundboard gsbTemplate = new GraphicalSoundboard(preferredOrientation);
        GraphicalSoundboard gsb = boardHolder.allocateBoardResources(gsbTemplate);
        return gsb;
    }

    /**
     * Returns a copy of the page matching orientation and page number, or null if absent.
     * A copy is returned so callers cannot mutate the stored board directly.
     */
    public GraphicalSoundboard getPage(Context context, int orientation, int pageNumber) {
        for (GraphicalSoundboard gsb : boardHolder.getBoardList()) {
            if (gsb.getScreenOrientation() == orientation &&
                    gsb.getPageNumber() == pageNumber) {
                return GraphicalSoundboard.copy(context, gsb);
            }
        }
        return null;
    }

    /**
     * Deletes a page and decrements the page numbers of all following pages
     * in the same orientation.
     */
    public void deletePage(Context context, GraphicalSoundboard deleteGsb) {
        Log.v(TAG, "Going to delete page " + deleteGsb.getPageNumber());
        deleteBoardId(deleteGsb.getId());
        Log.v(TAG, "Reducing following page numbers.");
        for (GraphicalSoundboard gsb : boardHolder.getBoardList()) {
            if (gsb.getScreenOrientation() == deleteGsb.getScreenOrientation() &&
                    gsb.getPageNumber() > deleteGsb.getPageNumber()) {
                gsb.setPageNumber(gsb.getPageNumber() - 1);
                overrideBoard(context, gsb);
            }
        }
    }

    /** Maps a screen orientation constant to an orientation mode; null for unknown values. */
    private GraphicalSoundboardHolder.OrientationMode screenOrientationToOrientationMode(int screenOrientation) {
        if (screenOrientation == GraphicalSoundboard.SCREEN_ORIENTATION_PORTRAIT) {
            return GraphicalSoundboardHolder.OrientationMode.ORIENTATION_MODE_PORTRAIT;
        } else if (screenOrientation == GraphicalSoundboard.SCREEN_ORIENTATION_LANDSCAPE) {
            return GraphicalSoundboardHolder.OrientationMode.ORIENTATION_MODE_LANDSCAPE;
        }
        return null;
    }

    /**
     * Persists the board holder under boardName.
     *
     * @throws IOException if saving fails
     */
    public void saveBoard(Context context, String boardName) throws IOException {
        // Since file paths may be altered while saving we need a separate copy for saving
        GraphicalSoundboardHolder savedHolder = GraphicalSoundboardHolder.copy(context, boardHolder);
        FileProcessor.saveGraphicalSoundboardHolder(boardName, savedHolder);
    }

    /**
     * Replaces the stored board that has the same id as tempGsb with a copy of
     * tempGsb. A board with an unknown id is silently ignored.
     */
    public void overrideBoard(Context context, GraphicalSoundboard tempGsb) {
        GraphicalSoundboard gsb = GraphicalSoundboard.copy(context, tempGsb);
        GraphicalSoundboard.unloadImages(gsb);
        List<GraphicalSoundboard> boardList = boardHolder.getBoardList();
        for (int i = 0; i < boardList.size(); i++) {
            GraphicalSoundboard existingGsb = boardList.get(i);
            if (gsb.getId() == existingGsb.getId()) {
                boardList.set(i, gsb);
                break;
            }
        }
    }

    public boolean boardWithOrientationExists(final int screenOrientation) {
        for (GraphicalSoundboard gsb : boardHolder.getBoardList()) {
            if (gsb.getScreenOrientation() == screenOrientation) return true;
        }
        return false;
    }

    /** Removes every page whose orientation matches the given one. */
    public void deletePagesWithOrientation(int orientation) {
        List<GraphicalSoundboard> gsbList = boardHolder.getBoardList();
        ListIterator<GraphicalSoundboard> iterator = gsbList.listIterator();
        while (iterator.hasNext()) {
            GraphicalSoundboard gsb = iterator.next();
            if (gsb.getScreenOrientation() == orientation) {
                Log.v(TAG, "Deleting board id " + gsb.getId() + " since its orientation is " + orientation);
                iterator.remove();
            }
        }
    }

    /** Removes the board with the given id, if present. */
    public void deleteBoardId(int boardId) {
        List<GraphicalSoundboard> gsbList = boardHolder.getBoardList();
        ListIterator<GraphicalSoundboard> iterator = gsbList.listIterator();
        while (iterator.hasNext()) {
            GraphicalSoundboard gsb = iterator.next();
            if (gsb.getId() == boardId) {
                Log.v(TAG, "Deleting board id " + gsb.getId());
                iterator.remove();
                break;
            }
        }
    }

    /**
     * Tells whether any board references the given file as a background image,
     * a sound path, a sound image or an active sound image.
     */
    public boolean boardUsesFile(File file) {
        for (GraphicalSoundboard gsb : boardHolder.getBoardList()) {
            // Explicit null checks instead of the original catch (NullPointerException)
            // control flow, which could also have masked unrelated NPEs.
            File backgroundImage = gsb.getBackgroundImagePath();
            if (backgroundImage != null && file.getName().equals(backgroundImage.getName())) return true;
            for (GraphicalSound sound : gsb.getSoundList()) {
                if (isSameFile(sound.getPath(), file)) return true;
                if (isSameFile(sound.getImagePath(), file)) return true;
                if (isSameFile(sound.getActiveImagePath(), file)) return true;
            }
        }
        return false;
    }

    /** True when candidate is non-null and has the same absolute path as file. */
    private static boolean isSameFile(File candidate, File file) {
        return candidate != null && candidate.getAbsolutePath().equals(file.getAbsolutePath());
    }

    public List<GraphicalSoundboard> getBoardList() {
        return boardHolder.getBoardList();
    }

    public boolean isPaginationSynchronizedBetweenOrientations() {
        return boardHolder.isPaginationSynchronizedBetweenOrientations();
    }

    public void setPaginationSynchronizedBetweenOrientations(boolean paginationSynchronizedBetweenOrientations) {
        boardHolder.setPaginationSynchronizedBetweenOrientations(paginationSynchronizedBetweenOrientations);
    }
}
| |
/*
* Copyright 2015 Qianqian Zhu <zhuqianqian.299@gmail.com> All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.z299studio.pb;
import android.content.Context;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.fragment.app.Fragment;
import androidx.core.view.GravityCompat;
import androidx.drawerlayout.widget.DrawerLayout;
import androidx.appcompat.app.ActionBar;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.app.ActionBarDrawerToggle;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ListView;
import java.util.ArrayList;
import java.util.Hashtable;
import com.z299studio.pb.NavigationDrawerAdapter.NavMenuItem;
/**
 * Fragment hosting the navigation drawer: the category list plus, when the
 * drawer is hidden behind the hamburger, extra action entries (help/rate/settings).
 */
public class NavigationDrawerFragment extends Fragment implements
        AdapterView.OnItemClickListener {

    /** Bundle key for the currently selected category id. */
    private static final String SELECTION_KEY = "current_selection";

    /** Host activity contract for drawer item selections. */
    public interface NavigationDrawerCallbacks {
        void onNavigationDrawerItemSelected(int type, int id);
    }

    private NavigationDrawerCallbacks mCallback;
    private DrawerLayout mDrawerLayout;
    private ListView mMenuList;
    private View mFragmentContainerView;
    public ActionBarDrawerToggle mDrawerToggle;
    private NavigationDrawerAdapter mAdapter;
    // Currently selected category id.
    private int mCategory;
    // Maps a category id to its position in the drawer list.
    private Hashtable<Integer, Integer> mCategory2Navigation;
    private boolean mDrawerHidden;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (savedInstanceState != null) {
            mCategory = savedInstanceState.getInt(SELECTION_KEY);
        } else {
            mCategory = AccountManager.ALL_CATEGORY_ID;
        }
        mCategory2Navigation = new Hashtable<>();
        setHasOptionsMenu(true);
    }

    @Override
    public View onCreateView(@NonNull LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        if (Application.getInstance() == null
                || Application.getInstance().getAccountManager() == null) {
            return null;
        }
        mDrawerHidden = getResources().getBoolean(R.bool.hide_drawer);
        mMenuList = (ListView) inflater.inflate(R.layout.fragment_navigation_drawer,
                container, false);
        mAdapter = new NavigationDrawerAdapter(getActivity(), buildMenuItems());
        mMenuList.setAdapter(mAdapter);
        // Bug fix: the restored category may no longer exist (e.g. deleted while
        // backgrounded); the original unboxed Hashtable.get() directly and could
        // throw a NullPointerException here. Fall back to the "All" category,
        // which buildMenuItems() always registers.
        Integer position = mCategory2Navigation.get(mCategory);
        if (position == null) {
            mCategory = AccountManager.ALL_CATEGORY_ID;
            position = mCategory2Navigation.get(mCategory);
        }
        mAdapter.selectItem(null, position);
        mMenuList.setOnItemClickListener(this);
        mMenuList.setItemChecked(position, true);
        return mMenuList;
    }

    @Override
    public void onSaveInstanceState(@NonNull Bundle outState) {
        outState.putInt(SELECTION_KEY, mCategory);
        super.onSaveInstanceState(outState);
    }

    @Override
    public void onResume() {
        super.onResume();
        // Rebuild the menu when account data changed while this fragment was paused.
        if (Application.getInstance().queryChange(Application.DATA_OTHER) ||
                Application.getInstance().queryChange(Application.DATA_ALL)) {
            mAdapter.setList(buildMenuItems());
            mAdapter.notifyDataSetChanged();
            select(AccountManager.ALL_CATEGORY_ID);
        }
    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        try {
            mCallback = (NavigationDrawerCallbacks) context;
        } catch (ClassCastException e) {
            throw new ClassCastException("Activity must implement NavigationDrawerCallbacks.");
        }
    }

    @Override
    public void onDetach() {
        super.onDetach();
        mCallback = null;
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        if (mDrawerToggle != null) {
            mDrawerToggle.onConfigurationChanged(newConfig);
        }
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        super.onCreateOptionsMenu(menu, inflater);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Let the drawer toggle consume the home/up button first.
        if (mDrawerToggle != null) {
            return mDrawerToggle.onOptionsItemSelected(item);
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
        NavMenuItem item = null;
        if (id == mCategory) {
            return;
        }
        if (mMenuList != null) {
            item = (NavMenuItem) mMenuList.getItemAtPosition(position);
            if (item.mType == NavMenuItem.MENU_SELECTION) {
                mAdapter.selectItem(view, position);
                mCategory = (int) id;
            }
            // Bug fix: guard against unboxing a missing entry (the original could
            // NPE when mCategory had no mapping, e.g. after tapping an action item).
            Integer checkedPosition = mCategory2Navigation.get(mCategory);
            if (checkedPosition != null) {
                mMenuList.setItemChecked(checkedPosition, true);
            }
        }
        if (mDrawerLayout != null) {
            mDrawerLayout.closeDrawer(mFragmentContainerView);
        }
        if (mCallback != null && item != null) {
            mCallback.onNavigationDrawerItemSelected(item.mType, item.mId);
        }
    }

    /**
     * Wires this fragment to its container and the DrawerLayout, installing the
     * ActionBarDrawerToggle. A null drawerLayout means the drawer is always visible.
     */
    public void setUp(int fragmentId, DrawerLayout drawerLayout) {
        mFragmentContainerView = getActivity() == null ? null : getActivity().findViewById(fragmentId);
        mDrawerLayout = drawerLayout;
        if (mDrawerLayout != null) {
            mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
            if (getActivity() != null) {
                ActionBar actionBar = ((AppCompatActivity) getActivity()).getSupportActionBar();
                if (actionBar != null) {
                    actionBar.setDisplayHomeAsUpEnabled(true);
                }
            }
            mDrawerToggle = new ActionBarDrawerToggle(getActivity(), mDrawerLayout,
                    R.string.navigation_drawer_open, R.string.navigation_drawer_close) {
                @Override
                public void onDrawerClosed(View drawerView) {
                    super.onDrawerClosed(drawerView);
                    if (!isAdded()) {
                        return;
                    }
                    getActivity().invalidateOptionsMenu();
                }

                @Override
                public void onDrawerOpened(View drawerView) {
                    super.onDrawerOpened(drawerView);
                    if (!isAdded()) {
                        return;
                    }
                    getActivity().invalidateOptionsMenu();
                }
            };
            // Defer syncState until the layout has been measured.
            mDrawerLayout.post(() -> mDrawerToggle.syncState());
            mDrawerLayout.addDrawerListener(mDrawerToggle);
        }
    }

    /**
     * Builds the drawer entries (header, category selections, optional actions)
     * and refreshes the category-id-to-position map as a side effect.
     */
    private ArrayList<NavMenuItem> buildMenuItems() {
        Application app = Application.getInstance();
        Resources r = getResources();
        AccountManager am = app.getAccountManager();
        ArrayList<NavMenuItem> result = new ArrayList<>();
        int[] icons = Application.getThemedIcons();
        String[] categoryNames = app.getSortedCategoryNames();
        int[] categoryIcons = app.getSortedCategoryIcons();
        int[] categoryIds = app.getSortedCategoryIds();
        int pos = 0, i;
        if (mDrawerHidden) {
            result.add(new NavMenuItem(0, r.getString(R.string.app_name),
                    0, 0, NavMenuItem.MENU_HEADER));
            pos++;
        }
        result.add(new NavMenuItem(R.drawable.pb_all, r.getString(R.string.all_accounts),
                am.getAccountsCountByCategory(AccountManager.ALL_CATEGORY_ID),
                AccountManager.ALL_CATEGORY_ID, NavMenuItem.MENU_SELECTION));
        mCategory2Navigation.put(AccountManager.ALL_CATEGORY_ID, pos++);
        if (Application.Options.mShowOther) {
            result.add(new NavMenuItem(R.drawable.pb_unknown, r.getString(R.string.def_category),
                    am.getAccountsCountByCategory(AccountManager.DEFAULT_CATEGORY_ID),
                    AccountManager.DEFAULT_CATEGORY_ID, NavMenuItem.MENU_SELECTION));
            mCategory2Navigation.put(AccountManager.DEFAULT_CATEGORY_ID, pos++);
        }
        // Index 0 is skipped; presumably it is the built-in default category — TODO confirm.
        for (i = 1; i < categoryIcons.length; ++i) {
            result.add(new NavMenuItem(icons[categoryIcons[i]], categoryNames[i],
                    am.getAccountsCountByCategory(categoryIds[i]), categoryIds[i],
                    NavMenuItem.MENU_SELECTION));
            mCategory2Navigation.put(categoryIds[i], pos++);
        }
        if (mDrawerHidden) {
            result.add(new NavMenuItem(0, null, 0, 0,
                    NavMenuItem.MENU_SEPARATOR));
            int[] stringIds = {R.string.help, R.string.rate, R.string.settings};
            int[] iconIds = {R.drawable.ic_action_help, R.drawable.ic_rate_review,
                    R.drawable.ic_action_settings};
            for (i = 0; i < stringIds.length; ++i) {
                result.add(new NavMenuItem(iconIds[i], r.getString(stringIds[i]),
                        0, stringIds[i], NavMenuItem.MENU_ACTION));
            }
        }
        return result;
    }

    /**
     * Removes a category entry from the drawer; negative ids rebuild the whole menu.
     */
    public void remove(int category) {
        if (category < 0) {
            mAdapter.setList(buildMenuItems());
            mAdapter.notifyDataSetChanged();
        } else {
            Integer pos = mCategory2Navigation.get(category);
            if (pos != null) {
                mAdapter.remove(pos);
                // Re-map the positions of every selection entry that shifted up.
                for (int p = pos; p < mAdapter.getCount(); ++p) {
                    NavMenuItem nmi = (NavMenuItem) mAdapter.getItem(p);
                    if (nmi.mType == NavMenuItem.MENU_SELECTION) {
                        mCategory2Navigation.put(nmi.mId, p);
                    }
                }
            }
        }
    }

    /** Programmatically selects the given category, as if the user tapped it. */
    public void select(int category) {
        Integer pos = mCategory2Navigation.get(category);
        if (pos != null) {
            onItemClick(mMenuList, null, pos, category);
        }
    }

    /** Adjusts the account counter shown next to a category by delta. */
    public void increaseCounterInMenu(int category, int delta) {
        Integer pos = mCategory2Navigation.get(category);
        if (pos != null) {
            int firstVisiblePosition = mMenuList.getFirstVisiblePosition();
            View view = mMenuList.getChildAt(pos - firstVisiblePosition);
            mAdapter.increaseCounterInMenu(view, pos, delta);
        }
    }

    /** Recomputes every category counter from the account manager. */
    public void refreshCategoryCounters() {
        int[] cateIds = Application.getInstance().getSortedCategoryIds();
        AccountManager am = Application.getInstance().getAccountManager();
        Integer pos;
        for (int id : cateIds) {
            pos = mCategory2Navigation.get(id);
            if (pos != null) {
                mAdapter.updateCategoryCounter(null, pos, am.getAccountsCountByCategory(id));
            }
        }
        pos = mCategory2Navigation.get(AccountManager.ALL_CATEGORY_ID);
        if (pos != null) {
            mAdapter.updateCategoryCounter(null, pos,
                    am.getAccountsCountByCategory(AccountManager.ALL_CATEGORY_ID));
        }
        mAdapter.notifyDataSetChanged();
    }

    /** Returns the counter currently shown for a category, or 0 when unknown. */
    public int getCount(int categoryId) {
        Integer pos = mCategory2Navigation.get(categoryId);
        if (pos != null) {
            return mAdapter.getCounterInMenu(pos);
        }
        return 0;
    }

    /** Locks/unlocks the drawer; without a DrawerLayout, toggles the list instead. */
    public void lockDrawer(boolean lock) {
        if (mDrawerLayout != null) {
            mDrawerLayout.setDrawerLockMode(lock ?
                    DrawerLayout.LOCK_MODE_LOCKED_CLOSED : DrawerLayout.LOCK_MODE_UNLOCKED);
        } else {
            mMenuList.setEnabled(!lock);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end;
import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA;
import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_FUNCTION_TABLE;
import static org.apache.phoenix.query.QueryServices.DYNAMIC_JARS_DIR_KEY;
import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL;
import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_TERMINATOR;
import static org.apache.phoenix.util.PhoenixRuntime.PHOENIX_TEST_DRIVER_URL_PARAM;
import static org.apache.phoenix.util.TestUtil.JOIN_ITEM_TABLE_FULL_NAME;
import static org.apache.phoenix.util.TestUtil.JOIN_SUPPLIER_TABLE_FULL_NAME;
import static org.apache.phoenix.util.TestUtil.LOCALHOST;
import static org.junit.Assert.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import javax.tools.JavaCompiler;
import javax.tools.ToolProvider;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.phoenix.expression.function.UDFExpression;
import org.apache.phoenix.jdbc.PhoenixTestDriver;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.schema.FunctionAlreadyExistsException;
import org.apache.phoenix.schema.FunctionNotFoundException;
import org.apache.phoenix.schema.ValueRangeExcpetion;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.QueryUtil;
import org.apache.phoenix.util.ReadOnlyProps;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import com.google.common.collect.Maps;
import org.junit.rules.TestName;
public class UserDefinedFunctionsIT extends BaseOwnClusterIT {
protected static final String TENANT_ID = "ZZTop";
// JDBC url and test driver for the mini cluster, initialized in doSetup().
private static String url;
private static PhoenixTestDriver driver;
private static HBaseTestingUtility util;
// Generated evaluate() body: reverses the bytes of a string argument in place.
private static String STRING_REVERSE_EVALUATE_METHOD =
new StringBuffer()
.append(" public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {\n")
.append(" Expression arg = getChildren().get(0);\n")
.append(" if (!arg.evaluate(tuple, ptr)) {\n")
.append(" return false;\n")
.append(" }\n")
.append(" int targetOffset = ptr.getLength();\n")
.append(" if (targetOffset == 0) {\n")
.append(" return true;\n")
.append(" }\n")
.append(" byte[] source = ptr.get();\n")
.append(" byte[] target = new byte[targetOffset];\n")
.append(" int sourceOffset = ptr.getOffset(); \n")
.append(" int endOffset = sourceOffset + ptr.getLength();\n")
.append(" SortOrder sortOrder = arg.getSortOrder();\n")
.append(" while (sourceOffset < endOffset) {\n")
.append(" int nBytes = StringUtil.getBytesInChar(source[sourceOffset], sortOrder);\n")
.append(" targetOffset -= nBytes;\n")
.append(" System.arraycopy(source, sourceOffset, target, targetOffset, nBytes);\n")
.append(" sourceOffset += nBytes;\n")
.append(" }\n")
.append(" ptr.set(target);\n")
.append(" return true;\n")
.append(" }\n").toString();
// Generated evaluate() body: sums all integer child column values.
private static String SUM_COLUMN_VALUES_EVALUATE_METHOD =
new StringBuffer()
.append(" public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {\n")
.append(" int[] array = new int[getChildren().size()];\n")
.append(" int i = 0;\n")
.append(" for(Expression child:getChildren()) {\n")
.append(" if (!child.evaluate(tuple, ptr)) {\n")
.append(" return false;\n")
.append(" }\n")
.append(" int targetOffset = ptr.getLength();\n")
.append(" if (targetOffset == 0) {\n")
.append(" return true;\n")
.append(" }\n")
.append(" array[i++] = (Integer) PInteger.INSTANCE.toObject(ptr);\n")
.append(" }\n")
.append(" int sum = 0;\n")
.append(" for(i=0;i<getChildren().size();i++) {\n")
.append(" sum+=array[i];\n")
.append(" }\n")
.append(" ptr.set(PInteger.INSTANCE.toBytes((Integer)sum));\n")
.append(" return true;\n")
.append(" }\n").toString();
// Generated evaluate() body: positions ptr at the array element named by the index child.
private static String ARRAY_INDEX_EVALUATE_METHOD =
new StringBuffer()
.append(" public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {\n")
.append(" Expression indexExpr = children.get(1);\n")
.append(" if (!indexExpr.evaluate(tuple, ptr)) {\n")
.append(" return false;\n")
.append(" } else if (ptr.getLength() == 0) {\n")
.append(" return true;\n")
.append(" }\n")
.append(" // Use Codec to prevent Integer object allocation\n")
.append(" int index = PInteger.INSTANCE.getCodec().decodeInt(ptr, indexExpr.getSortOrder());\n")
.append(" if(index < 0) {\n")
.append(" throw new ParseException(\"Index cannot be negative :\" + index);\n")
.append(" }\n")
.append(" Expression arrayExpr = children.get(0);\n")
.append(" return PArrayDataTypeDecoder.positionAtArrayElement(tuple, ptr, index, arrayExpr, getDataType(),getMaxLength());\n")
.append(" }\n").toString();
// Generated evaluate() body: reads the first short of the value as an int.
private static String GETY_EVALUATE_METHOD =
new StringBuffer()
.append(" public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {\n")
.append(" Expression arg = getChildren().get(0);\n")
.append(" if (!arg.evaluate(tuple, ptr)) {\n")
.append(" return false;\n")
.append(" }\n")
.append(" int targetOffset = ptr.getLength();\n")
.append(" if (targetOffset == 0) {\n")
.append(" return true;\n")
.append(" }\n")
.append(" byte[] s = ptr.get();\n")
.append(" int retVal = (int)Bytes.toShort(s);\n")
.append(" ptr.set(PInteger.INSTANCE.toBytes(retVal));\n")
.append(" return true;\n")
.append(" }\n").toString();
// Generated evaluate() body: reads the value as a byte-reversed long.
private static String GETX_EVALUATE_METHOD =
new StringBuffer()
.append(" public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {\n")
.append(" Expression arg = getChildren().get(0);\n")
.append(" if (!arg.evaluate(tuple, ptr)) {\n")
.append(" return false;\n")
.append(" }\n")
.append(" int targetOffset = ptr.getLength();\n")
.append(" if (targetOffset == 0) {\n")
.append(" return true;\n")
.append(" }\n")
.append(" byte[] s = ptr.get();\n")
.append(" Long retVal = Long.reverseBytes(Bytes.toLong(s));\n")
.append(" ptr.set(PLong.INSTANCE.toBytes(retVal));\n")
.append(" return true;\n")
.append(" }\n").toString();
// Class names of the generated UDFs (also their SQL function NAMEs).
private static String MY_REVERSE_CLASS_NAME = "MyReverse";
private static String MY_SUM_CLASS_NAME = "MySum";
private static String MY_ARRAY_INDEX_CLASS_NAME = "MyArrayIndex";
private static String GETX_CLASSNAME = "GetX";
private static String GETY_CLASSNAME = "GetY";
// Full generated source for each UDF class, built via getProgram().
private static String MY_REVERSE_PROGRAM = getProgram(MY_REVERSE_CLASS_NAME, STRING_REVERSE_EVALUATE_METHOD, "return PVarchar.INSTANCE;");
private static String MY_SUM_PROGRAM = getProgram(MY_SUM_CLASS_NAME, SUM_COLUMN_VALUES_EVALUATE_METHOD, "return PInteger.INSTANCE;");
private static String MY_ARRAY_INDEX_PROGRAM = getProgram(MY_ARRAY_INDEX_CLASS_NAME, ARRAY_INDEX_EVALUATE_METHOD, "return PDataType.fromTypeId(children.get(0).getDataType().getSqlType()- PDataType.ARRAY_TYPE_BASE);");
private static String GETX_CLASSNAME_PROGRAM = getProgram(GETX_CLASSNAME, GETX_EVALUATE_METHOD, "return PLong.INSTANCE;");
private static String GETY_CLASSNAME_PROGRAM = getProgram(GETY_CLASSNAME, GETY_EVALUATE_METHOD, "return PInteger.INSTANCE;");
private static Properties EMPTY_PROPS = new Properties();
@Rule
public TestName name = new TestName();
@Override
@After
public void cleanUpAfterTest() throws Exception {
    // Drop every jar registered during the test so each test starts from a
    // clean dynamic-jars directory. Deduplicates the original eight copy-pasted
    // statements, and closes the Statement (the original leaked it).
    try (Connection conn = driver.connect(url, EMPTY_PROPS);
            Statement stmt = conn.createStatement()) {
        String jarDir = util.getConfiguration().get(QueryServices.DYNAMIC_JARS_DIR_KEY);
        // Setup compiles jars 1..6; jars 7 and 8 are presumably created by
        // individual tests — the original cleaned all eight, so keep that.
        for (int i = 1; i <= 8; i++) {
            stmt.execute("delete jar '" + jarDir + "/" + "myjar" + i + ".jar'");
        }
        conn.commit();
    }
}
@Before
public void doSetupBeforeTest() throws Exception {
    // Compile one jar per UDF class; MyArrayIndex is packaged twice (jars 3 and 4).
    String[] classNames = {MY_REVERSE_CLASS_NAME, MY_SUM_CLASS_NAME,
            MY_ARRAY_INDEX_CLASS_NAME, MY_ARRAY_INDEX_CLASS_NAME,
            GETX_CLASSNAME, GETY_CLASSNAME};
    String[] programs = {MY_REVERSE_PROGRAM, MY_SUM_PROGRAM,
            MY_ARRAY_INDEX_PROGRAM, MY_ARRAY_INDEX_PROGRAM,
            GETX_CLASSNAME_PROGRAM, GETY_CLASSNAME_PROGRAM};
    for (int jarNumber = 1; jarNumber <= classNames.length; jarNumber++) {
        compileTestClass(classNames[jarNumber - 1], programs[jarNumber - 1], jarNumber);
    }
}
/**
 * Generates the source of a minimal ScalarFunction subclass used as a UDF in tests.
 *
 * @param className      simple name of the generated class (also used as the UDF's NAME)
 * @param evaluateMethod full source of the evaluate(Tuple, ImmutableBytesWritable) override
 * @param returnType     body of the generated getDataType(), e.g. "return PVarchar.INSTANCE;"
 * @return compilable Java source for the UDF class
 */
private static String getProgram(String className, String evaluateMethod, String returnType) {
    // StringBuilder instead of StringBuffer: local, single-threaded building
    // needs no synchronization. Also drops the dead `+ "" + ""` concatenation.
    // The generated text is unchanged byte-for-byte.
    return new StringBuilder()
        .append("package org.apache.phoenix.end2end;\n")
        .append("import java.sql.SQLException;\n")
        .append("import java.util.List;\n")
        .append("import java.lang.Long;\n")
        .append("import java.lang.Integer;\n")
        .append("import org.apache.hadoop.hbase.io.ImmutableBytesWritable;\n")
        .append("import org.apache.hadoop.hbase.util.Bytes;\n")
        .append("import org.apache.phoenix.schema.types.PLong;")
        .append("import org.apache.phoenix.schema.types.PInteger;")
        .append("import org.apache.phoenix.expression.Expression;\n")
        .append("import org.apache.phoenix.expression.function.ScalarFunction;\n")
        .append("import org.apache.phoenix.schema.SortOrder;\n")
        .append("import org.apache.phoenix.schema.tuple.Tuple;\n")
        .append("import org.apache.phoenix.schema.types.PDataType;\n")
        .append("import org.apache.phoenix.schema.types.PInteger;\n")
        .append("import org.apache.phoenix.schema.types.PVarchar;\n")
        .append("import org.apache.phoenix.util.StringUtil;\n")
        .append("import org.apache.phoenix.schema.types.PArrayDataType;\n")
        .append("import org.apache.phoenix.schema.types.PArrayDataTypeDecoder;\n")
        .append("import org.apache.phoenix.parse.ParseException;\n")
        .append("public class "+className+" extends ScalarFunction{\n")
        .append(" public static final String NAME = \""+className+"\";\n")
        .append(" public "+className+"() {\n")
        .append(" }\n")
        .append(" public "+className+"(List<Expression> children) throws SQLException {\n")
        .append(" super(children);\n")
        .append(" }\n")
        .append(" @Override\n")
        .append(evaluateMethod)
        .append(" @Override\n")
        .append(" public SortOrder getSortOrder() {\n")
        .append(" return getChildren().get(0).getSortOrder();\n")
        .append(" }\n")
        .append(" @Override\n")
        .append(" public PDataType getDataType() {\n")
        .append(returnType+"\n")
        .append(" }\n")
        .append(" @Override\n")
        .append(" public String getName() {\n")
        .append(" return NAME;\n")
        .append(" }\n")
        .append("}\n").toString();
}
@BeforeClass
public static void doSetup() throws Exception {
// Brings up a mini DFS + ZK + HBase cluster with the dynamic-jars dir on DFS,
// then registers a test driver with UDFs enabled. Start order matters: DFS
// must be up before DYNAMIC_JARS_DIR_KEY is derived from fs.defaultFS.
Configuration conf = HBaseConfiguration.create();
setUpConfigForMiniCluster(conf);
util = new HBaseTestingUtility(conf);
util.startMiniDFSCluster(1);
util.startMiniZKCluster(1);
String string = util.getConfiguration().get("fs.defaultFS");
// PHOENIX-4675 setting the trailing slash implicitly tests that we're doing some path normalization
conf.set(DYNAMIC_JARS_DIR_KEY, string+"/hbase/tmpjars/");
util.startMiniHBaseCluster(1, 1);
UDFExpression.setConfig(conf);
String clientPort = util.getConfiguration().get(QueryServices.ZOOKEEPER_PORT_ATTRIB);
url =
JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + LOCALHOST + JDBC_PROTOCOL_SEPARATOR
+ clientPort + JDBC_PROTOCOL_TERMINATOR + PHOENIX_TEST_DRIVER_URL_PARAM;
Map<String, String> props = Maps.newHashMapWithExpectedSize(1);
// UDF support is off by default and must be explicitly enabled for these tests.
props.put(QueryServices.ALLOW_USER_DEFINED_FUNCTIONS_ATTRIB, "true");
props.put(QueryServices.DYNAMIC_JARS_DIR_KEY,string+"/hbase/tmpjars/");
driver = initAndRegisterTestDriver(url, new ReadOnlyProps(props.entrySet().iterator()));
}
@Test
public void testListJars() throws Exception {
    // "list jars" must report exactly the six jars compiled in doSetupBeforeTest,
    // in numeric order. Deduplicates the original copy-pasted assertions and
    // closes the Connection/Statement (the original leaked both).
    try (Connection conn = driver.connect(url, EMPTY_PROPS);
            Statement stmt = conn.createStatement()) {
        Path jarPath = new Path(util.getConfiguration().get(QueryServices.DYNAMIC_JARS_DIR_KEY));
        ResultSet rs = stmt.executeQuery("list jars");
        for (int i = 1; i <= 6; i++) {
            assertTrue(rs.next());
            assertEquals(new Path(jarPath, "myjar" + i + ".jar").toString(), rs.getString("jar_location"));
        }
        assertFalse(rs.next());
    }
}
@Test
public void testDeleteJar() throws Exception {
    Connection conn = driver.connect(url, EMPTY_PROPS);
    Statement stmt = conn.createStatement();
    Path jarDir = new Path(util.getConfiguration().get(QueryServices.DYNAMIC_JARS_DIR_KEY));
    // Precondition: all six jars are listed.
    ResultSet rs = stmt.executeQuery("list jars");
    for (int i = 1; i <= 6; i++) {
        assertTrue(rs.next());
        assertEquals(new Path(jarDir, "myjar" + i + ".jar").toString(), rs.getString("jar_location"));
    }
    assertFalse(rs.next());
    // Delete myjar4 and verify it disappears from the listing while the
    // remaining jars keep their relative order.
    stmt.execute("delete jar '" + new Path(jarDir, "myjar4.jar").toString() + "'");
    rs = stmt.executeQuery("list jars");
    for (int i : new int[] { 1, 2, 3, 5, 6 }) {
        assertTrue(rs.next());
        assertEquals(new Path(jarDir, "myjar" + i + ".jar").toString(), rs.getString("jar_location"));
    }
    assertFalse(rs.next());
}
@Test
public void testCreateFunction() throws Exception {
    // Exercises CREATE FUNCTION end to end: resolving the class from an
    // explicit jar, duplicate-name detection, falling back to the
    // hbase.dynamic.jars.dir classpath when no jar is named, visibility of
    // global functions from tenant-specific connections, and ARRAY-typed
    // UDF arguments.
    Connection conn = driver.connect(url, EMPTY_PROPS);
    Statement stmt = conn.createStatement();
    conn.createStatement().execute("create table t(k integer primary key, firstname varchar, lastname varchar)");
    stmt.execute("upsert into t values(1,'foo','jock')");
    conn.commit();
    // Register the UDF from an explicitly named jar.
    stmt.execute("create function myreverse(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end."+MY_REVERSE_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar1.jar"+"'");
    // 'foo' reversed is 'oof'.
    ResultSet rs = stmt.executeQuery("select myreverse(firstname) from t");
    assertTrue(rs.next());
    assertEquals("oof", rs.getString(1));
    assertFalse(rs.next());
    // The UDF is also usable inside a WHERE clause.
    rs = stmt.executeQuery("select * from t where myreverse(firstname)='oof'");
    assertTrue(rs.next());
    assertEquals(1, rs.getInt(1));
    assertEquals("foo", rs.getString(2));
    assertEquals("jock", rs.getString(3));
    assertFalse(rs.next());
    // Re-creating the same function at global scope must fail.
    try {
        stmt.execute("create function myreverse(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end."+MY_REVERSE_CLASS_NAME+"' using jar "
                + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar1.jar"+"'");
        fail("Duplicate function should not be created.");
    } catch(FunctionAlreadyExistsException e) {
    }
    // without specifying the jar should pick the class from path of hbase.dynamic.jars.dir configuration.
    stmt.execute("create function myreverse2(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end."+MY_REVERSE_CLASS_NAME+"'");
    rs = stmt.executeQuery("select myreverse2(firstname) from t");
    assertTrue(rs.next());
    assertEquals("oof", rs.getString(1));
    assertFalse(rs.next());
    rs = stmt.executeQuery("select * from t where myreverse2(firstname)='oof'");
    assertTrue(rs.next());
    assertEquals(1, rs.getInt(1));
    assertEquals("foo", rs.getString(2));
    assertEquals("jock", rs.getString(3));
    assertFalse(rs.next());
    conn.createStatement().execute("create table t3(tenant_id varchar not null, k integer not null, firstname varchar, lastname varchar constraint pk primary key(tenant_id,k)) MULTI_TENANT=true");
    // Function created with global id should be accessible.
    Connection conn2 = driver.connect(url+";"+PhoenixRuntime.TENANT_ID_ATTRIB+"="+TENANT_ID, EMPTY_PROPS);
    try {
        // On a tenant connection the leading tenant_id PK column is implicit,
        // so the three values map to (k, firstname, lastname).
        conn2.createStatement().execute("upsert into t3 values(1,'foo','jock')");
        conn2.commit();
        // A tenant connection may create its own function with the same name
        // as an existing global one (the catch below would fail the test).
        conn2.createStatement().execute("create function myreverse(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end."+MY_REVERSE_CLASS_NAME+"' using jar "
                + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar1.jar"+"'");
        rs = conn2.createStatement().executeQuery("select myreverse(firstname) from t3");
        assertTrue(rs.next());
        assertEquals("oof", rs.getString(1));
    } catch(FunctionAlreadyExistsException e) {
        fail("FunctionAlreadyExistsException should not be thrown");
    }
    // calling global udf on tenant specific specific connection.
    rs = conn2.createStatement().executeQuery("select myreverse2(firstname) from t3");
    assertTrue(rs.next());
    assertEquals("oof", rs.getString(1));
    // A tenant connection cannot drop the global function ...
    try {
        conn2.createStatement().execute("drop function myreverse2");
        fail("FunctionNotFoundException should be thrown");
    } catch(FunctionNotFoundException e){
    }
    // ... but the global connection can, after which the tenant no longer
    // resolves it either.
    conn.createStatement().execute("drop function myreverse2");
    try {
        rs = conn2.createStatement().executeQuery("select myreverse2(firstname) from t3");
        fail("FunctionNotFoundException should be thrown.");
    } catch(FunctionNotFoundException e){
    }
    // A name that was never registered fails the same way.
    try{
        rs = conn2.createStatement().executeQuery("select unknownFunction(firstname) from t3");
        fail("FunctionNotFoundException should be thrown.");
    } catch(FunctionNotFoundException e) {
    }
    // UDFs with ARRAY-typed arguments: UDF_ARRAY_ELEM(NAME, 2) selects the
    // second element of the NAME array.
    conn.createStatement().execute("CREATE TABLE TESTTABLE10(ID VARCHAR NOT NULL, NAME VARCHAR ARRAY, CITY VARCHAR ARRAY CONSTRAINT pk PRIMARY KEY (ID) )");
    conn.createStatement().execute("create function UDF_ARRAY_ELEM(VARCHAR ARRAY, INTEGER) returns VARCHAR as 'org.apache.phoenix.end2end."+MY_ARRAY_INDEX_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar3.jar"+"'");
    conn.createStatement().execute("UPSERT INTO TESTTABLE10(ID,NAME,CITY) VALUES('111', ARRAY['JOHN','MIKE','BOB'], ARRAY['NYC','LA','SF'])");
    conn.createStatement().execute("UPSERT INTO TESTTABLE10(ID,NAME,CITY) VALUES('112', ARRAY['CHEN','CARL','ALICE'], ARRAY['BOSTON','WASHINGTON','PALO ALTO'])");
    conn.commit();
    rs = conn.createStatement().executeQuery("SELECT ID, UDF_ARRAY_ELEM(NAME, 2) FROM TESTTABLE10");
    assertTrue(rs.next());
    assertEquals("111", rs.getString(1));
    assertEquals("MIKE", rs.getString(2));
    assertTrue(rs.next());
    assertEquals("112", rs.getString(1));
    assertEquals("CARL", rs.getString(2));
    assertFalse(rs.next());
    // The global array UDF is visible from the tenant connection as well.
    rs = conn2.createStatement().executeQuery("SELECT ID, UDF_ARRAY_ELEM(NAME, 2) FROM TESTTABLE10");
    assertTrue(rs.next());
    assertEquals("111", rs.getString(1));
    assertEquals("MIKE", rs.getString(2));
    assertTrue(rs.next());
    assertEquals("112", rs.getString(1));
    assertEquals("CARL", rs.getString(2));
    assertFalse(rs.next());
}
@Test
public void testSameUDFWithDifferentImplementationsInDifferentTenantConnections() throws Exception {
    // Verifies that the same function name can be bound to different
    // implementations -- even with different signatures -- per tenant,
    // independently of the global (non-tenant) definition.
    Connection nonTenantConn = driver.connect(url, EMPTY_PROPS);
    nonTenantConn.createStatement().execute("create function myfunction(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end."+MY_REVERSE_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar1.jar"+"'");
    // Duplicate name at global scope is rejected.
    try {
        nonTenantConn.createStatement().execute("create function myfunction(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end.UnknownClass' using jar "
                + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar1.jar"+"'");
        fail("FunctionAlreadyExistsException should be thrown.");
    } catch(FunctionAlreadyExistsException e) {
    }
    String tenantId1="tenId1";
    String tenantId2="tenId2";
    nonTenantConn.createStatement().execute("create table t7(tenant_id varchar not null, k integer not null, k1 integer, name varchar constraint pk primary key(tenant_id, k)) multi_tenant=true");
    Connection tenant1Conn = driver.connect(url+";"+PhoenixRuntime.TENANT_ID_ATTRIB+"="+tenantId1, EMPTY_PROPS);
    Connection tenant2Conn = driver.connect(url+";"+PhoenixRuntime.TENANT_ID_ATTRIB+"="+tenantId2, EMPTY_PROPS);
    // On tenant connections the tenant_id PK column is implicit, so the
    // values map to (k, k1, name).
    tenant1Conn.createStatement().execute("upsert into t7 values(1,1,'jock')");
    tenant1Conn.commit();
    tenant2Conn.createStatement().execute("upsert into t7 values(1,2,'jock')");
    tenant2Conn.commit();
    // Tenant 1 binds myfunction to the string-reverse implementation.
    tenant1Conn.createStatement().execute("create function myfunction(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end."+MY_REVERSE_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar1.jar"+"'");
    // Duplicate within the same tenant is rejected.
    try {
        tenant1Conn.createStatement().execute("create function myfunction(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end.UnknownClass' using jar "
                + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar1.jar"+"'");
        fail("FunctionAlreadyExistsException should be thrown.");
    } catch(FunctionAlreadyExistsException e) {
    }
    // Tenant 2 binds the same name to the integer-sum implementation
    // (second argument is a CONSTANT defaulting to 10, range [1,15]).
    tenant2Conn.createStatement().execute("create function myfunction(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end."+MY_SUM_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar2.jar"+"'");
    // Even a different signature under the same name is rejected per tenant.
    try {
        tenant2Conn.createStatement().execute("create function myfunction(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end.UnknownClass' using jar "
                + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/unknown.jar"+"'");
        fail("FunctionAlreadyExistsException should be thrown.");
    } catch(FunctionAlreadyExistsException e) {
    }
    // Tenant 1 observes reverse semantics: 'jock' -> 'kcoj'.
    ResultSet rs = tenant1Conn.createStatement().executeQuery("select MYFUNCTION(name) from t7");
    assertTrue(rs.next());
    assertEquals("kcoj", rs.getString(1));
    assertFalse(rs.next());
    rs = tenant1Conn.createStatement().executeQuery("select * from t7 where MYFUNCTION(name)='kcoj'");
    assertTrue(rs.next());
    assertEquals(1, rs.getInt(1));
    assertEquals(1, rs.getInt(2));
    assertEquals("jock", rs.getString(3));
    assertFalse(rs.next());
    // Tenant 2 observes sum semantics over the same name: 1 + 10 = 11.
    rs = tenant2Conn.createStatement().executeQuery("select MYFUNCTION(k) from t7");
    assertTrue(rs.next());
    assertEquals(11, rs.getInt(1));
    assertFalse(rs.next());
    // And in a WHERE clause: k1=2, 2 + 10 = 12.
    rs = tenant2Conn.createStatement().executeQuery("select * from t7 where MYFUNCTION(k1)=12");
    assertTrue(rs.next());
    assertEquals(1, rs.getInt(1));
    assertEquals(2, rs.getInt(2));
    assertEquals("jock", rs.getString(3));
    assertFalse(rs.next());
}
@Test
public void testUDFsWithMultipleConnections() throws Exception {
    // The exact same DDL is issued from both connections, so build it once.
    String createFn = "create function myfunction(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end."
            + MY_REVERSE_CLASS_NAME + "' using jar "
            + "'" + util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar1.jar" + "'";
    Connection firstConn = driver.connect(url, EMPTY_PROPS);
    firstConn.createStatement().execute(createFn);
    // A second connection must see the function the first one created.
    Connection secondConn = driver.connect(url, EMPTY_PROPS);
    try {
        secondConn.createStatement().execute(createFn);
        fail("FunctionAlreadyExistsException should be thrown.");
    } catch (FunctionAlreadyExistsException e) {
    }
    secondConn.createStatement().execute("create table t8(k integer not null primary key, k1 integer, name varchar)");
    secondConn.createStatement().execute("upsert into t8 values(1,1,'jock')");
    secondConn.commit();
    // The second connection can invoke the function: 'jock' -> 'kcoj'.
    ResultSet rs = secondConn.createStatement().executeQuery("select MYFUNCTION(name) from t8");
    assertTrue(rs.next());
    assertEquals("kcoj", rs.getString(1));
    assertFalse(rs.next());
    rs = secondConn.createStatement().executeQuery("select * from t8 where MYFUNCTION(name)='kcoj'");
    assertTrue(rs.next());
    assertEquals(1, rs.getInt(1));
    assertEquals(1, rs.getInt(2));
    assertEquals("jock", rs.getString(3));
    assertFalse(rs.next());
    // Dropping through one connection makes the function unavailable to the other.
    secondConn.createStatement().execute("drop function MYFUNCTION");
    try {
        firstConn.createStatement().executeQuery("select MYFUNCTION(name) from t8");
        fail("FunctionNotFoundException should be thrown");
    } catch (FunctionNotFoundException e) {
    }
}
@Test
public void testUsingUDFFunctionInDifferentQueries() throws Exception {
    // Verifies UDFs inside UPSERT SELECT, DELETE predicates, and UPSERT
    // VALUES expressions -- not only plain SELECT projections.
    Connection conn = driver.connect(url, EMPTY_PROPS);
    Statement stmt = conn.createStatement();
    conn.createStatement().execute("create table t1(k integer primary key, firstname varchar, lastname varchar)");
    stmt.execute("upsert into t1 values(1,'foo','jock')");
    conn.commit();
    conn.createStatement().execute("create table t2(k integer primary key, k1 integer, lastname_reverse varchar)");
    conn.commit();
    // mysum3's second argument is a CONSTANT defaulting to 10, range [1,15].
    stmt.execute("create function mysum3(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end."+MY_SUM_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar2.jar"+"'");
    stmt.execute("create function myreverse3(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end."+MY_REVERSE_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar1.jar"+"'");
    // UPSERT SELECT through the UDFs: k=1 -> mysum3(1)=11, mysum3(1,11)=12,
    // myreverse3('jock')='kcoj'.
    stmt.execute("upsert into t2(k,k1,lastname_reverse) select mysum3(k),mysum3(k,11),myreverse3(lastname) from t1");
    conn.commit();
    ResultSet rs = stmt.executeQuery("select * from t2");
    assertTrue(rs.next());
    assertEquals(11, rs.getInt(1));
    assertEquals(12, rs.getInt(2));
    assertEquals("kcoj", rs.getString(3));
    assertFalse(rs.next());
    // DELETE with UDFs in the predicate: 'kcoj' reversed is 'jock' and
    // mysum3(11)=21, so the single row above is removed.
    stmt.execute("delete from t2 where myreverse3(lastname_reverse)='jock' and mysum3(k)=21");
    conn.commit();
    rs = stmt.executeQuery("select * from t2");
    assertFalse(rs.next());
    // UDF (created without a jar clause, resolved from the dynamic jars dir)
    // used inside an UPSERT VALUES expression.
    stmt.execute("create function myreverse4(VARCHAR CONSTANT defaultValue='null') returns VARCHAR as 'org.apache.phoenix.end2end."+MY_REVERSE_CLASS_NAME+"'");
    stmt.execute("upsert into t2 values(11,12,myreverse4('jock'))");
    conn.commit();
    rs = stmt.executeQuery("select * from t2");
    assertTrue(rs.next());
    assertEquals(11, rs.getInt(1));
    assertEquals(12, rs.getInt(2));
    assertEquals("kcoj", rs.getString(3));
    assertFalse(rs.next());
}
@Test
public void testVerifyCreateFunctionArguments() throws Exception {
    Connection connection = driver.connect(url, EMPTY_PROPS);
    Statement statement = connection.createStatement();
    connection.createStatement().execute("create table t4(k integer primary key, k1 integer, lastname varchar)");
    statement.execute("upsert into t4 values(1,1,'jock')");
    connection.commit();
    // Second argument is a CONSTANT with default 10 and allowed range [1,15].
    statement.execute("create function mysum(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end."
            + MY_SUM_CLASS_NAME + "' using jar "
            + "'" + util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar2.jar" + "'");
    // Explicit constant argument: 1 + 12 = 13.
    ResultSet results = statement.executeQuery("select mysum(k,12) from t4");
    assertTrue(results.next());
    assertEquals(13, results.getInt(1));
    // Omitted constant argument falls back to the default: 1 + 10 = 11.
    results = statement.executeQuery("select mysum(k) from t4");
    assertTrue(results.next());
    assertEquals(11, results.getInt(1));
    // A constant outside [1,15] must be rejected.
    try {
        statement.executeQuery("select mysum(k,20) from t4");
        fail("Value Range Exception should be thrown.");
    } catch (ValueRangeExcpetion e) {
    }
}
@Test
public void testTemporaryFunctions() throws Exception {
    // Verifies CREATE TEMPORARY FUNCTION: invocation with and without the
    // defaulted constant argument, rejection of wrong arities, and DROP.
    Connection conn = driver.connect(url, EMPTY_PROPS);
    Statement stmt = conn.createStatement();
    conn.createStatement().execute("create table t9(k integer primary key, k1 integer, lastname varchar)");
    stmt.execute("upsert into t9 values(1,1,'jock')");
    conn.commit();
    // Second argument is a CONSTANT defaulting to 10, range [1,15].
    stmt.execute("create temporary function mysum9(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end."+MY_SUM_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar2.jar"+"'");
    // Explicit constant: 1 + 12 = 13.
    ResultSet rs = stmt.executeQuery("select mysum9(k,12) from t9");
    assertTrue(rs.next());
    assertEquals(13, rs.getInt(1));
    // Defaulted constant: 1 + 10 = 11.
    rs = stmt.executeQuery("select mysum9(k) from t9");
    assertTrue(rs.next());
    assertEquals(11, rs.getInt(1));
    rs = stmt.executeQuery("select k from t9 where mysum9(k)=11");
    assertTrue(rs.next());
    assertEquals(1, rs.getInt(1));
    // Too many arguments does not resolve to the registered signature.
    try {
        rs = stmt.executeQuery("select k from t9 where mysum9(k,10,'x')=11");
        fail("FunctionNotFoundException should be thrown");
    } catch(FunctionNotFoundException e) {
    } catch(Exception e) {
        fail("FunctionNotFoundException should be thrown");
    }
    // Too few arguments does not resolve either.
    try {
        rs = stmt.executeQuery("select mysum9() from t9");
        fail("FunctionNotFoundException should be thrown");
    } catch(FunctionNotFoundException e) {
    } catch(Exception e) {
        fail("FunctionNotFoundException should be thrown");
    }
    // After DROP the function is gone.
    stmt.execute("drop function mysum9");
    try {
        rs = stmt.executeQuery("select k from t9 where mysum9(k)=11");
        fail("FunctionNotFoundException should be thrown");
    } catch(FunctionNotFoundException e){
    }
}
@Test
public void testDropFunction() throws Exception {
    // Verifies DROP FUNCTION by counting rows in SYSTEM."FUNCTION" before
    // and after create/drop, plus the IF EXISTS variant.
    Connection conn = driver.connect(url, EMPTY_PROPS);
    Statement stmt = conn.createStatement();
    String query = "select count(*) from "+ SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_FUNCTION_TABLE + "\"";
    ResultSet rs = stmt.executeQuery(query);
    rs.next();
    int numRowsBefore = rs.getInt(1);
    stmt.execute("create function mysum6(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end."+MY_SUM_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar2.jar"+"'");
    rs = stmt.executeQuery(query);
    rs.next();
    int numRowsAfter= rs.getInt(1);
    // Creating the function adds exactly 3 rows to SYSTEM."FUNCTION"
    // (presumably one header row plus one per argument -- confirm against
    // the SYSTEM.FUNCTION schema).
    assertEquals(3, numRowsAfter - numRowsBefore);
    // DROP restores the original row count.
    stmt.execute("drop function mysum6");
    rs = stmt.executeQuery(query);
    rs.next();
    assertEquals(numRowsBefore, rs.getInt(1));
    conn.createStatement().execute("create table t6(k integer primary key, k1 integer, lastname varchar)");
    // The dropped function no longer resolves in queries.
    try {
        rs = stmt.executeQuery("select mysum6(k1) from t6");
        fail("FunctionNotFoundException should be thrown");
    } catch(FunctionNotFoundException e) {
    }
    // Dropping an absent function fails ...
    try {
        stmt.execute("drop function mysum6");
        fail("FunctionNotFoundException should be thrown");
    } catch(FunctionNotFoundException e) {
    }
    // ... unless IF EXISTS is specified.
    try {
        stmt.execute("drop function if exists mysum6");
    } catch(FunctionNotFoundException e) {
        fail("FunctionNotFoundException should not be thrown");
    }
    // Re-creating after the drop works and the function resolves again.
    stmt.execute("create function mysum6(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end."+MY_SUM_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar2.jar"+"'");
    try {
        rs = stmt.executeQuery("select mysum6(k1) from t6");
    } catch(FunctionNotFoundException e) {
        fail("FunctionNotFoundException should not be thrown");
    }
}
@Test
public void testUDFsWithLatestTimestamp() throws Exception {
    // Same scenarios as testDropFunction/testReplaceFunction, but a brand-new
    // connection is opened after every DDL so each check runs against the
    // latest metadata timestamp (hence the test name) rather than the cached
    // state of the connection that issued the DDL.  Do not collapse the
    // repeated getConnection calls -- the reconnects are the point.
    Properties props = new Properties();
    Connection conn = DriverManager.getConnection(url, props);
    Statement stmt = conn.createStatement();
    String query = "select count(*) from "+ SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_FUNCTION_TABLE + "\"";
    ResultSet rs = stmt.executeQuery(query);
    rs.next();
    int numRowsBefore = rs.getInt(1);
    stmt.execute("create function mysum61(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end."+MY_SUM_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar2.jar"+"'");
    // Fresh connection: the create must be visible.
    conn = DriverManager.getConnection(url, props);
    stmt = conn.createStatement();
    rs = stmt.executeQuery(query);
    rs.next();
    int numRowsAfter= rs.getInt(1);
    // 3 new rows in SYSTEM."FUNCTION" (presumably one header row plus one
    // per argument -- confirm against the SYSTEM.FUNCTION schema).
    assertEquals(3, numRowsAfter - numRowsBefore);
    stmt.execute("drop function mysum61");
    // Fresh connection: the drop must be visible.
    conn = DriverManager.getConnection(url, props);
    stmt = conn.createStatement();
    rs = stmt.executeQuery(query);
    rs.next();
    assertEquals(numRowsBefore, rs.getInt(1));
    conn.createStatement().execute("create table t62(k integer primary key, k1 integer, lastname varchar)");
    try {
        rs = stmt.executeQuery("select mysum61(k1) from t62");
        fail("FunctionNotFoundException should be thrown");
    } catch(FunctionNotFoundException e) {
    }
    // Dropping an absent function fails without IF EXISTS ...
    try {
        stmt.execute("drop function mysum61");
        fail("FunctionNotFoundException should be thrown");
    } catch(FunctionNotFoundException e) {
    }
    // ... and succeeds silently with it.
    try {
        stmt.execute("drop function if exists mysum61");
    } catch(FunctionNotFoundException e) {
        fail("FunctionNotFoundException should not be thrown");
    }
    stmt.execute("create function mysum61(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end."+MY_SUM_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar2.jar"+"'");
    conn = DriverManager.getConnection(url, props);
    stmt = conn.createStatement();
    try {
        rs = stmt.executeQuery("select mysum61(k1) from t62");
    } catch(FunctionNotFoundException e) {
        fail("FunctionNotFoundException should not be thrown");
    }
    conn.createStatement().execute("create table t61(k integer primary key, k1 integer, lastname varchar)");
    conn = DriverManager.getConnection(url, props);
    stmt = conn.createStatement();
    stmt.execute("upsert into t61 values(1,1,'jock')");
    conn.commit();
    // CREATE OR REPLACE: swap the VARCHAR reverse implementation for the
    // integer sum, then verify on a fresh connection.
    stmt.execute("create function myfunction6(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end."+MY_REVERSE_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar1.jar"+"'");
    stmt.execute("create or replace function myfunction6(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end."+MY_SUM_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar2.jar"+"'");
    conn = DriverManager.getConnection(url, props);
    stmt = conn.createStatement();
    // Sum semantics now: 1 + 12 = 13 and 1 + 10 (default) = 11.
    rs = stmt.executeQuery("select myfunction6(k,12) from t61");
    assertTrue(rs.next());
    assertEquals(13, rs.getInt(1));
    rs = stmt.executeQuery("select myfunction6(k) from t61");
    assertTrue(rs.next());
    assertEquals(11, rs.getInt(1));
    rs = stmt.executeQuery("select k from t61 where myfunction6(k)=11");
    assertTrue(rs.next());
    assertEquals(1, rs.getInt(1));
    // Replace back to the reverse implementation and verify again.
    stmt.execute("create or replace function myfunction6(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end."+MY_REVERSE_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar1.jar"+"'");
    conn = DriverManager.getConnection(url, props);
    stmt = conn.createStatement();
    rs = stmt.executeQuery("select k from t61 where myfunction6(lastname)='kcoj'");
    assertTrue(rs.next());
    assertEquals(1, rs.getInt(1));
    // With UDF support disabled, a call that matches no built-in signature
    // must fail instead of falling back to UDF lookup.
    props.setProperty(QueryServices.ALLOW_USER_DEFINED_FUNCTIONS_ATTRIB, "false");
    conn = DriverManager.getConnection(url, props);
    stmt = conn.createStatement();
    try {
        rs = stmt.executeQuery("select k from t61 where reverse(lastname,11)='kcoj'");
        fail("FunctionNotFoundException should be thrown.");
    } catch(FunctionNotFoundException e) {
    }
}
@Test
public void testFunctionalIndexesWithUDFFunction() throws Exception {
    // Verifies that functional (expression) indexes can be built over a UDF
    // and that the optimizer actually uses them, checked via EXPLAIN plans.
    Connection conn = driver.connect(url, EMPTY_PROPS);
    Statement stmt = conn.createStatement();
    stmt.execute("create table t5(k integer primary key, k1 integer, lastname_reverse varchar)");
    // Jar-less create: class is resolved from the dynamic jars directory.
    stmt.execute("create function myreverse5(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end."+MY_REVERSE_CLASS_NAME+"'");
    stmt.execute("upsert into t5 values(1,1,'jock')");
    conn.commit();
    // Global functional index on the UDF expression.
    stmt.execute("create index idx on t5(myreverse5(lastname_reverse))");
    String query = "select myreverse5(lastname_reverse) from t5";
    // Plan must show a scan over IDX, i.e. the index satisfies the query.
    ResultSet rs = stmt.executeQuery("explain " + query);
    assertEquals("CLIENT PARALLEL 1-WAY FULL SCAN OVER IDX\n"
            + "    SERVER FILTER BY FIRST KEY ONLY", QueryUtil.getExplainPlan(rs));
    rs = stmt.executeQuery(query);
    assertTrue(rs.next());
    assertEquals("kcoj", rs.getString(1));
    assertFalse(rs.next());
    // Local functional index on the same expression.
    stmt.execute("create local index idx2 on t5(myreverse5(lastname_reverse))");
    query = "select k,k1,myreverse5(lastname_reverse) from t5 where myreverse5(lastname_reverse)='kcoj'";
    // Local index plan: range scan over the data table region (local index
    // partition 1) followed by a client merge sort.
    rs = stmt.executeQuery("explain " + query);
    assertEquals("CLIENT PARALLEL 1-WAY RANGE SCAN OVER T5 [1,'kcoj']\n"
            + "    SERVER FILTER BY FIRST KEY ONLY\n"
            +"CLIENT MERGE SORT", QueryUtil.getExplainPlan(rs));
    rs = stmt.executeQuery(query);
    assertTrue(rs.next());
    assertEquals(1, rs.getInt(1));
    assertEquals(1, rs.getInt(2));
    assertEquals("kcoj", rs.getString(3));
    assertFalse(rs.next());
}
/**
 * Creates and populates the item and supplier join tables (plus the MY.SEQ
 * sequence) used by the join-with-UDF test.  The data is identical to the
 * original hand-rolled per-row version: seven items (one deliberately
 * invalid) and six suppliers (two with a null loc_id).
 */
private static void initJoinTableValues(Connection conn) throws Exception {
    conn.createStatement().execute("create table " + JOIN_ITEM_TABLE_FULL_NAME +
            " (\"item_id\" varchar(10) not null primary key, " +
            " name varchar, " +
            " price integer, " +
            " discount1 integer, " +
            " discount2 integer, " +
            " \"supplier_id\" varchar(10), " +
            " description varchar)");
    conn.createStatement().execute("create table " + JOIN_SUPPLIER_TABLE_FULL_NAME +
            " (\"supplier_id\" varchar(10) not null primary key, " +
            " name varchar, " +
            " phone varchar(12), " +
            " address varchar, " +
            " loc_id varchar(5))");
    conn.createStatement().execute("CREATE SEQUENCE my.seq");
    // Each row: item_id, name, price, discount1, discount2, supplier_id, description.
    Object[][] items = {
            { "0000000001", "T1", 100, 5, 10, "0000000001", "Item T1" },
            { "0000000002", "T2", 200, 5, 8, "0000000001", "Item T2" },
            { "0000000003", "T3", 300, 8, 12, "0000000002", "Item T3" },
            { "0000000004", "T4", 400, 6, 10, "0000000002", "Item T4" },
            { "0000000005", "T5", 500, 8, 15, "0000000005", "Item T5" },
            { "0000000006", "T6", 600, 8, 15, "0000000006", "Item T6" },
            { "invalid001", "INVALID-1", 0, 0, 0, "0000000000", "Invalid item for join test" },
    };
    PreparedStatement stmt = conn.prepareStatement(
            "upsert into " + JOIN_ITEM_TABLE_FULL_NAME +
            " (\"item_id\", " +
            " NAME, " +
            " PRICE, " +
            " DISCOUNT1, " +
            " DISCOUNT2, " +
            " \"supplier_id\", " +
            " DESCRIPTION) " +
            "values (?, ?, ?, ?, ?, ?, ?)");
    for (Object[] item : items) {
        stmt.setString(1, (String) item[0]);
        stmt.setString(2, (String) item[1]);
        stmt.setInt(3, (Integer) item[2]);
        stmt.setInt(4, (Integer) item[3]);
        stmt.setInt(5, (Integer) item[4]);
        stmt.setString(6, (String) item[5]);
        stmt.setString(7, (String) item[6]);
        stmt.execute();
    }
    // Each row: supplier_id, name, phone, address, loc_id (nullable).
    String[][] suppliers = {
            { "0000000001", "S1", "888-888-1111", "101 YYY Street", "10001" },
            { "0000000002", "S2", "888-888-2222", "202 YYY Street", "10002" },
            { "0000000003", "S3", "888-888-3333", "303 YYY Street", null },
            { "0000000004", "S4", "888-888-4444", "404 YYY Street", null },
            { "0000000005", "S5", "888-888-5555", "505 YYY Street", "10005" },
            { "0000000006", "S6", "888-888-6666", "606 YYY Street", "10006" },
    };
    stmt = conn.prepareStatement(
            "upsert into " + JOIN_SUPPLIER_TABLE_FULL_NAME +
            " (\"supplier_id\", " +
            " NAME, " +
            " PHONE, " +
            " ADDRESS, " +
            " LOC_ID) " +
            "values (?, ?, ?, ?, ?)");
    for (String[] supplier : suppliers) {
        for (int col = 0; col < supplier.length; col++) {
            stmt.setString(col + 1, supplier[col]);
        }
        stmt.execute();
    }
    conn.commit();
}
@Test
public void testUdfWithJoin() throws Exception {
    // Sort-merge right join keyed on a UDF applied to both sides; the last
    // item references a supplier that does not exist, so its supplier
    // columns come back null.
    String query = "SELECT /*+ USE_SORT_MERGE_JOIN*/ item.\"item_id\", item.name, supp.\"supplier_id\", myreverse8(supp.name) FROM "
            + JOIN_SUPPLIER_TABLE_FULL_NAME + " supp RIGHT JOIN " + JOIN_ITEM_TABLE_FULL_NAME
            + " item ON myreverse8(item.\"supplier_id\") = myreverse8(supp.\"supplier_id\") ORDER BY \"item_id\"";
    Connection conn = driver.connect(url, EMPTY_PROPS);
    initJoinTableValues(conn);
    conn.createStatement().execute(
            "create function myreverse8(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end.MyReverse' using jar "
                    + "'" + util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar1.jar" + "'");
    try {
        // Each row: item_id, item name, supplier_id, myreverse8(supplier name).
        String[][] expected = {
                { "0000000001", "T1", "0000000001", "1S" },
                { "0000000002", "T2", "0000000001", "1S" },
                { "0000000003", "T3", "0000000002", "2S" },
                { "0000000004", "T4", "0000000002", "2S" },
                { "0000000005", "T5", "0000000005", "5S" },
                { "0000000006", "T6", "0000000006", "6S" },
                { "invalid001", "INVALID-1", null, null },
        };
        ResultSet rs = conn.prepareStatement(query).executeQuery();
        for (String[] row : expected) {
            assertTrue(rs.next());
            for (int col = 0; col < row.length; col++) {
                if (row[col] == null) {
                    assertNull(rs.getString(col + 1));
                } else {
                    assertEquals(row[col], rs.getString(col + 1));
                }
            }
        }
        assertFalse(rs.next());
    } finally {
        conn.close();
    }
}
@Test
public void testReplaceFunction() throws Exception {
    Connection conn = driver.connect(url, EMPTY_PROPS);
    Statement stmt = conn.createStatement();
    conn.createStatement().execute("create table t10(k integer primary key, k1 integer, lastname varchar)");
    stmt.execute("upsert into t10 values(1,1,'jock')");
    conn.commit();
    stmt.execute("create function myfunction63(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end."+MY_REVERSE_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar1.jar"+"'");
    // Replace the VARCHAR reverse implementation with the integer sum
    // (second argument is a CONSTANT defaulting to 10, range [1,15]).
    stmt.execute("create or replace function myfunction63(INTEGER, INTEGER CONSTANT defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 'org.apache.phoenix.end2end."+MY_SUM_CLASS_NAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar2.jar"+"'");
    // The replacing connection sees the new definition immediately ...
    verifyMyfunction63Replaced(conn);
    // ... and so does a brand-new connection.
    verifyMyfunction63Replaced(driver.connect(url, EMPTY_PROPS));
}

/** Asserts that myfunction63 now behaves as the two-argument integer sum. */
private void verifyMyfunction63Replaced(Connection conn) throws Exception {
    Statement stmt = conn.createStatement();
    // Explicit constant: 1 + 12 = 13.
    ResultSet rs = stmt.executeQuery("select myfunction63(k,12) from t10");
    assertTrue(rs.next());
    assertEquals(13, rs.getInt(1));
    // Defaulted constant: 1 + 10 = 11.
    rs = stmt.executeQuery("select myfunction63(k) from t10");
    assertTrue(rs.next());
    assertEquals(11, rs.getInt(1));
    rs = stmt.executeQuery("select k from t10 where myfunction63(k)=11");
    assertTrue(rs.next());
    assertEquals(1, rs.getInt(1));
}
@Test
public void testUDFsWithSameChildrenInAQuery() throws Exception {
    // Two different UDFs (and the same UDF twice) applied to the same child
    // expression in one projection must compile and evaluate independently.
    Connection conn = driver.connect(url, EMPTY_PROPS);
    Statement stmt = conn.createStatement();
    conn.createStatement().execute("create table t11(k varbinary primary key, k1 integer, lastname varchar)");
    PreparedStatement upsert = conn.prepareStatement("UPSERT INTO t11(k, k1, lastname) VALUES(?,?,?)");
    upsert.setBytes(1, new byte[] { 0, 0, 0, 0, 0, 0, 0, 1 });
    upsert.setInt(2, 1);
    upsert.setString(3, "jock");
    upsert.execute();
    conn.commit();
    stmt.execute("create function udf1(VARBINARY) returns UNSIGNED_LONG as 'org.apache.phoenix.end2end."+GETX_CLASSNAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar5.jar"+"'");
    stmt.execute("create function udf2(VARBINARY) returns INTEGER as 'org.apache.phoenix.end2end."+GETY_CLASSNAME+"' using jar "
            + "'"+util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/myjar6.jar"+"'");
    // 72057594037927936 == 2^56 == 0x0100000000000000.
    final long expectedX = 72057594037927936L;
    ResultSet rs = stmt.executeQuery("select udf1(k), udf2(k) from t11");
    assertTrue(rs.next());
    assertEquals(expectedX, rs.getLong(1));
    assertEquals(0, rs.getInt(2));
    // Swapping the projection order must not confuse expression compilation.
    rs = stmt.executeQuery("select udf2(k), udf1(k) from t11");
    assertTrue(rs.next());
    assertEquals(0, rs.getInt(1));
    assertEquals(expectedX, rs.getLong(2));
    // The same UDF twice over the same child expression.
    rs = stmt.executeQuery("select udf1(k), udf1(k) from t11");
    assertTrue(rs.next());
    assertEquals(expectedX, rs.getLong(1));
    assertEquals(expectedX, rs.getLong(2));
}
/**
 * Compiles the given source for {@code className} into a .class file,
 * packages it into myjar&lt;counter&gt;.jar, and registers that jar with the
 * cluster via "add jars".
 */
private static void compileTestClass(String className, String program, int counter) throws Exception {
String javaFileName = className+".java";
File javaFile = new File(javaFileName);
String classFileName = className+".class";
File classFile = new File(classFileName);
String jarName = "myjar"+counter+".jar";
String jarPath = "." + File.separator + jarName;
File jarFile = new File(jarPath);
try {
String packageName = "org.apache.phoenix.end2end";
FileOutputStream fos = new FileOutputStream(javaFileName);
fos.write(program.getBytes());
fos.close();
JavaCompiler jc = ToolProvider.getSystemJavaCompiler();
int result = jc.run(null, null, null, javaFileName);
assertEquals(0, result);
Manifest manifest = new Manifest();
manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
FileOutputStream jarFos = new FileOutputStream(jarPath);
JarOutputStream jarOutputStream = new JarOutputStream(jarFos, manifest);
String pathToAdd = packageName.replace('.', '/') + '/';
String jarPathStr = new String(pathToAdd);
Set<String> pathsInJar = new HashSet<String>();
while (pathsInJar.add(jarPathStr)) {
int ix = jarPathStr.lastIndexOf('/', jarPathStr.length() - 2);
if (ix < 0) {
break;
}
jarPathStr = jarPathStr.substring(0, ix);
}
for (String pathInJar : pathsInJar) {
jarOutputStream.putNextEntry(new JarEntry(pathInJar));
jarOutputStream.closeEntry();
}
jarOutputStream.putNextEntry(new JarEntry(pathToAdd + classFile.getName()));
byte[] allBytes = new byte[(int) classFile.length()];
FileInputStream fis = new FileInputStream(classFile);
fis.read(allBytes);
fis.close();
jarOutputStream.write(allBytes);
jarOutputStream.closeEntry();
jarOutputStream.close();
jarFos.close();
assertTrue(jarFile.exists());
Connection conn = driver.connect(url, EMPTY_PROPS);
Statement stmt = conn.createStatement();
stmt.execute("add jars '"+jarFile.getAbsolutePath()+"'");
} finally {
if (javaFile != null) javaFile.delete();
if (classFile != null) classFile.delete();
if (jarFile != null) jarFile.delete();
}
}
/**
 * Test creating functions whose jar location is hbase.dynamic.jars.dir itself,
 * which is the allowed location for UDF jars.
 * @throws Exception
 */
@Test
public void testCreateFunctionDynamicJarDir() throws Exception {
    String tableName = "table" + name.getMethodName();
    // close the global connection once the table exists
    try (Connection conn = driver.connect(url, EMPTY_PROPS)) {
        conn.createStatement().execute("create table " + tableName + "(tenant_id varchar not null, k integer not null, "
            + "firstname varchar, lastname varchar constraint pk primary key(tenant_id,k)) MULTI_TENANT=true");
    }
    String tenantId = "tenId" + name.getMethodName();
    try (Connection tenantConn = driver.connect(url + ";" + PhoenixRuntime.TENANT_ID_ATTRIB + "=" + tenantId, EMPTY_PROPS);
            Statement stmtTenant = tenantConn.createStatement()) {
        stmtTenant.execute("upsert into " + tableName + " values(1,'foo','jock')");
        tenantConn.commit();
        compileTestClass(MY_REVERSE_CLASS_NAME, MY_REVERSE_PROGRAM, 7);
        // get() already returns a String; no toString() needed
        String sql = "create function myfunction(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end." + MY_REVERSE_CLASS_NAME
            + "' using jar '" + util.getConfiguration().get(QueryServices.DYNAMIC_JARS_DIR_KEY) + "'";
        stmtTenant.execute(sql);
        ResultSet rs = stmtTenant.executeQuery("select myfunction(firstname) from " + tableName);
        assertTrue(rs.next());
        assertEquals("oof", rs.getString(1));
    }
}
/**
 * Test that creating a function whose jar lives in a directory other than
 * hbase.dynamic.jars.dir fails with a SecurityException when the function is evaluated.
 * @throws Exception
 */
@Test
public void testCreateFunctionNonDynamicJarDir() throws Exception {
    String tableName = "table" + name.getMethodName();
    // close the global connection once the table exists
    try (Connection conn = driver.connect(url, EMPTY_PROPS)) {
        conn.createStatement().execute("create table " + tableName + "(tenant_id varchar not null, k integer not null, "
            + "firstname varchar, lastname varchar constraint pk primary key(tenant_id,k)) MULTI_TENANT=true");
    }
    String tenantId = "tenId" + name.getMethodName();
    try (Connection tenantConn = driver.connect(url + ";" + PhoenixRuntime.TENANT_ID_ATTRIB + "=" + tenantId, EMPTY_PROPS);
            Statement stmtTenant = tenantConn.createStatement()) {
        tenantConn.commit();
        compileTestClass(MY_REVERSE_CLASS_NAME, MY_REVERSE_PROGRAM, 8);
        Path destJarPathOnHDFS = copyJarsFromDynamicJarsDirToDummyHDFSDir("myjar8.jar");
        try {
            String sql =
                    "create function myfunction(VARCHAR) returns VARCHAR as 'org.apache.phoenix.end2end."
                            + MY_REVERSE_CLASS_NAME + "' using jar '" + destJarPathOnHDFS.toString()
                            + "'";
            stmtTenant.execute(sql);
            // evaluating the UDF forces the jar to be loaded, which must be rejected
            stmtTenant.executeQuery("select myfunction(firstname) from " + tableName);
            fail("expecting java.lang.SecurityException");
        } catch(Exception e) {
            assertTrue(ExceptionUtils.getRootCause(e) instanceof SecurityException);
        } finally {
            stmtTenant.execute("drop function myfunction");
        }
    }
}
/**
 * Copies the named jar from the hbase.dynamic.jars.dir directory into the data test
 * directory on the test file system.
 * @param jarName file name of the jar to copy
 * @return the destination jar file path
 * @throws IOException on any file-system failure
 */
private Path copyJarsFromDynamicJarsDirToDummyHDFSDir(String jarName) throws IOException {
    Path source = new Path(util.getConfiguration().get(DYNAMIC_JARS_DIR_KEY) + "/" + jarName);
    Path destination = new Path(util.getDataTestDirOnTestFS().toString() + "/" + jarName);
    FileSystem sourceFs = source.getFileSystem(util.getConfiguration());
    FileSystem destinationFs = destination.getFileSystem(util.getConfiguration());
    FileUtil.copy(sourceFs, source, destinationFs, destination, false, true, util.getConfiguration());
    return destination;
}
}
| |
package org.apache.lucene.facet.search;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.lucene.facet.collections.IntIterator;
import org.apache.lucene.facet.collections.IntToObjectMap;
import org.apache.lucene.facet.partitions.IntermediateFacetResult;
import org.apache.lucene.facet.partitions.PartitionsFacetResultsHandler;
import org.apache.lucene.facet.search.FacetRequest.SortOrder;
import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.util.PriorityQueue;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Generates {@link FacetResult} from the {@link FacetArrays} aggregated for a
* particular {@link FacetRequest}. The generated {@link FacetResult} is a
* subtree of the taxonomy tree. Its root node,
* {@link FacetResult#getFacetResultNode()}, is the facet specified by
* {@link FacetRequest#categoryPath}, and the enumerated children,
* {@link FacetResultNode#subResults}, of each node in that {@link FacetResult}
* are the top K ( = {@link FacetRequest#numResults}) among its children in the
* taxonomy. The depth (number of levels excluding the root) of the
* {@link FacetResult} tree is specified by {@link FacetRequest#getDepth()}.
* <p>
* Because the number of selected children of each node is restricted, and not
* the overall number of nodes in the {@link FacetResult}, facets not selected
* into {@link FacetResult} might have better values, or ordinals, (typically,
* higher counts), than facets that are selected into the {@link FacetResult}.
* <p>
* The generated {@link FacetResult} also provides with
* {@link FacetResult#getNumValidDescendants()}, which returns the total number
* of facets that are descendants of the root node, no deeper than
* {@link FacetRequest#getDepth()}, and which have valid value. The rootnode
* itself is not counted here. Valid value is determined by the
* {@link FacetResultsHandler}. {@link TopKInEachNodeHandler} defines valid as
* != 0.
* <p>
* <b>NOTE:</b> this code relies on the assumption that
* {@link TaxonomyReader#INVALID_ORDINAL} == -1, a smaller value than any valid
* ordinal.
*
* @lucene.experimental
*/
public class TopKInEachNodeHandler extends PartitionsFacetResultsHandler {
/**
 * Constructs a handler that selects, per result-tree node, the top K children.
 *
 * @param taxonomyReader taxonomy over which facet results are computed
 * @param facetRequest the request (root category, depth, K) being answered
 * @param resolver resolves an index in the aggregated arrays into a facet value
 * @param facetArrays the aggregated facet arrays, one partition at a time
 */
public TopKInEachNodeHandler(TaxonomyReader taxonomyReader, FacetRequest facetRequest, OrdinalValueResolver resolver,
    FacetArrays facetArrays) {
  super(taxonomyReader, facetRequest, resolver, facetArrays);
}
/**
* Recursively explore all facets that can be potentially included in the
* {@link FacetResult} to be generated, and that belong to the given
* partition, so that values can be examined and collected. For each such
* node, gather its top K ({@link FacetRequest#numResults}) children among its
* children that are encountered in the given particular partition (aka
* current counting list).
* @param offset
* to <code>offset</code> + the length of the count arrays within
* <code>arrays</code> (exclusive)
*
* @return {@link IntermediateFacetResult} consisting of
* {@link IntToObjectMap} that maps potential {@link FacetResult}
* nodes to their top K children encountered in the current partition.
* Note that the mapped potential tree nodes need not belong to the
* given partition, only the top K children mapped to. The aim is to
* identify nodes that are certainly excluded from the
* {@link FacetResult} to be eventually (after going through all the
* partitions) returned by this handler, because they have K better
* siblings, already identified in this partition. For the identified
* excluded nodes, we only count number of their descendants in the
* subtree (to be included in
* {@link FacetResult#getNumValidDescendants()}), but not bother with
* selecting top K in these generations, which, by definition, are,
* too, excluded from the FacetResult tree.
* @throws IOException
* in case
* {@link TaxonomyReader#getOrdinal(org.apache.lucene.facet.taxonomy.CategoryPath)}
* does.
* @see PartitionsFacetResultsHandler#fetchPartitionResult(int)
*/
@Override
public IntermediateFacetResult fetchPartitionResult(int offset) throws IOException {
  // get the root of the result tree to be returned, and the depth of that result tree
  // (depth means number of node levels excluding the root).
  int rootNode = this.taxonomyReader.getOrdinal(facetRequest.categoryPath);
  if (rootNode == TaxonomyReader.INVALID_ORDINAL) {
    return null;
  }
  int K = Math.min(facetRequest.numResults,taxonomyReader.getSize()); // number of best results in each node
  // this will grow into the returned IntermediateFacetResult
  IntToObjectMap<AACO> AACOsOfOnePartition = new IntToObjectMap<AACO>();
  // all partitions, except, possibly, the last, have the same length. Hence modulo is OK.
  int partitionSize = facetArrays.arrayLength;
  int depth = facetRequest.getDepth();
  if (depth == 0) {
    // depth 0: the result consists of the root node only.
    IntermediateFacetResultWithHash tempFRWH = new IntermediateFacetResultWithHash(
        facetRequest, AACOsOfOnePartition);
    if (isSelfPartition(rootNode, facetArrays, offset)) {
      tempFRWH.isRootNodeIncluded = true;
      tempFRWH.rootNodeValue = resolver.valueOf(rootNode % partitionSize);
    }
    return tempFRWH;
  }
  // clamp depth so that depth+2-sized stacks below cannot overflow a short-based limit
  if (depth > Short.MAX_VALUE - 3) {
    depth = Short.MAX_VALUE -3;
  }
  int endOffset = offset + partitionSize; // one past the largest ordinal in the partition
  ParallelTaxonomyArrays childrenArray = taxonomyReader.getParallelTaxonomyArrays();
  int[] children = childrenArray.children();
  int[] siblings = childrenArray.siblings();
  int totalNumOfDescendantsConsidered = 0; // total number of facets with value != 0,
  // in the tree. These include those selected as top K in each node, and all the others that
  // were not. Not including rootNode
  // the following priority queue will be used again and again for each node recursed into
  // to select its best K children among its children encountered in the given partition
  PriorityQueue<AggregatedCategory> pq =
    new AggregatedCategoryHeap(K, this.getSuitableACComparator());
  // reusables will feed the priority queue in each use
  AggregatedCategory [] reusables = new AggregatedCategory[2+K];
  for (int i = 0; i < reusables.length; i++) {
    reusables[i] = new AggregatedCategory(1,0);
  }
  /*
   * The returned map is built by a recursive visit of potential tree nodes. Nodes
   * determined to be excluded from the FacetResult are not recursively explored as others,
   * they are only recursed in order to count the number of their descendants.
   * Also, nodes that they and any of their descendants can not be mapped into facets encountered
   * in this partition, are, too, explored no further. These are facets whose ordinal
   * numbers are greater than the ordinals of the given partition. (recall that the Taxonomy
   * maintains that a parent ordinal is smaller than any of its descendants' ordinals).
   * So, when scanning over all children of a potential tree node n: (1) all children with ordinal number
   * greater than those in the given partition are skipped over, (2) among the children of n residing
   * in this partition, the best K children are selected (using pq) for usual further recursion
   * and the rest (those rejected out from the pq) are only recursed for counting total number
   * of descendants, and (3) all the children of ordinal numbers smaller than the given partition
   * are further explored in the usual way, since these may lead to descendants residing in this partition.
   *
   * ordinalStack drives the recursive descent.
   * Top of stack holds the current node which we recurse from.
   * ordinalStack[0] holds the root of the facetRequest, and
   * it is always maintained that parent(ordinalStack[i]) = ordinalStack[i-1].
   * localDepth points to the current top of ordinalStack.
   * Only top of ordinalStack can be TaxonomyReader.INVALID_ORDINAL, and this if and only if
   * the element below it explored all its relevant children.
   */
  int[] ordinalStack = new int[depth+2]; // for 0 and for invalid on top
  ordinalStack[0] = rootNode;
  int localDepth = 0;
  /*
   * bestSignlingsStack[i] maintains the best K children of ordinalStack[i-1], namely,
   * the best K siblings of ordinalStack[i], best K among those residing in the given partition.
   * Note that the residents of ordinalStack need not belong
   * to the current partition, only the residents of bestSignlingsStack.
   * When exploring the children of ordinalStack[i-1] that reside in the current partition
   * (after the top K of them have been determined and stored into bestSignlingsStack[i]),
   * siblingExplored[i] points into bestSignlingsStack[i], to the child now explored, hence
   * residing in ordinalStack[i], and firstToTheLeftOfPartition[i] holds the largest ordinal of
   * a sibling smaller than the ordinals in the partition.
   * When siblingExplored[i] == max int, the top K siblings of ordinalStack[i] among those siblings
   * that reside in this partition have not been determined yet.
   * if siblingExplored[i] < 0, the node in ordinalStack[i] is to the left of partition
   * (i.e. of a smaller ordinal than the current partition)
   * (step (3) above is executed for the children of ordinalStack[i-1])
   */
  int[][] bestSignlingsStack = new int[depth+2][];
  int[] siblingExplored = new int[depth+2];
  int[] firstToTheLeftOfPartition = new int [depth+2];
  int tosOrdinal; // top of stack element, the ordinal at the top of stack
  /*
   * to start the loop, complete the data structures for root node:
   * push its youngest child to ordinalStack; make a note in siblingExplored[] that the children
   * of rootNode, which reside in the current partition have not been read yet to select the top
   * K of them. Also, make rootNode as if, related to its parent, rootNode belongs to the children
   * of ordinal numbers smaller than those of the current partition (this will ease on end condition --
   * we can continue to the older sibling of rootNode once the localDepth goes down, before we verify that
   * it went that down)
   */
  ordinalStack[++localDepth] = children[rootNode];
  siblingExplored[localDepth] = Integer.MAX_VALUE; // we have not verified position wrt current partition
  siblingExplored[0] = -1; // as if rootNode resides to the left of current position
  /*
   * now the whole recursion: loop as long as stack is not empty of elements descendants of
   * facetRequest's root.
   */
  while (localDepth > 0) {
    tosOrdinal = ordinalStack[localDepth];
    if (tosOrdinal == TaxonomyReader.INVALID_ORDINAL) {
      // the brotherhood that has been occupying the top of stack is all exhausted.
      // Hence, element below tos, namely, father of tos, has all its children,
      // and itself, all explored.
      localDepth--;
      // replace this father, now on top of stack, by this father's sibling:
      // this parent's ordinal can not be greater than current partition, as otherwise
      // its child, now just removed, would not have been pushed on it.
      // so the father is either inside the partition, or smaller ordinal
      if (siblingExplored[localDepth] < 0 ) {
        // the parent is already to the left of the partition: simply move to its sibling
        ordinalStack[localDepth] = siblings[ordinalStack[localDepth]];
        continue;
      }
      // at this point, siblingExplored[localDepth] is between 0 and number of bestSiblings;
      // it can not be max int
      siblingExplored[localDepth]--;
      if (siblingExplored[localDepth] == -1 ) {
        // siblings residing in the partition have been all processed, we now move
        // to those of ordinal numbers smaller than the partition
        ordinalStack[localDepth] = firstToTheLeftOfPartition[localDepth];
      } else {
        // still explore siblings residing in the partition
        // just move to the next one
        ordinalStack[localDepth] = bestSignlingsStack[localDepth][siblingExplored[localDepth]];
      }
      continue;
    } // endof tosOrdinal is invalid, and hence removed, and its parent was replaced by this
    // parent's sibling
    // now try to push a kid, but first look at tos whether it 'deserves' its kids explored:
    // it is not to the right of current partition, and we know whether to only count or to
    // select best K siblings.
    if (siblingExplored[localDepth] == Integer.MAX_VALUE) {
      // tosOrdinal was not examined yet for its position relative to current partition
      // and the best K of current partition, among its siblings, have not been determined yet
      while (tosOrdinal >= endOffset) {
        tosOrdinal = siblings[tosOrdinal];
      }
      // now it is inside. Run it and all its siblings inside the partition through a heap
      // and in doing so, count them, find best K
      pq.clear();
      // reusables are consumed as from a stack. The stack starts full and returns full.
      int tosReuslables = reusables.length -1;
      while (tosOrdinal >= offset) { // while tosOrdinal belongs to the given partition; here, too, we use the fact
        // that TaxonomyReader.INVALID_ORDINAL == -1 < offset
        double value = resolver.valueOf(tosOrdinal % partitionSize);
        if (value != 0) { // the value of yc is not 0, it is to be considered.
          totalNumOfDescendantsConsidered++;
          // consume one reusable, and push to the priority queue
          AggregatedCategory ac = reusables[tosReuslables--];
          ac.ordinal = tosOrdinal;
          ac.value = value;
          ac = pq.insertWithOverflow(ac);
          if (null != ac) {
            /* when a facet is excluded from top K, because already in this partition it has
             * K better siblings, it is only recursed for count only.
             */
            // update totalNumOfDescendants by the now excluded node and all its descendants
            totalNumOfDescendantsConsidered--; // reduce the 1 earned when the excluded node entered the heap
            // and now return it and all its descendants. These will never make it to FacetResult
            totalNumOfDescendantsConsidered += countOnly (ac.ordinal, children,
              siblings, partitionSize, offset, endOffset, localDepth, depth);
            reusables[++tosReuslables] = ac;
          }
        }
        tosOrdinal = siblings[tosOrdinal];
      }
      // now pq has best K children of ordinals that belong to the given partition.
      // Populate a new AACO with them.
      // tosOrdinal is now first sibling smaller than partition, make a note of that
      firstToTheLeftOfPartition[localDepth] = tosOrdinal;
      int aaci = pq.size();
      int[] ords = new int[aaci];
      double [] vals = new double [aaci];
      // drain the heap into ords/vals, best first (pop yields worst first, so fill backwards)
      while (aaci > 0) {
        AggregatedCategory ac = pq.pop();
        ords[--aaci] = ac.ordinal;
        vals[aaci] = ac.value;
        reusables[++tosReuslables] = ac;
      }
      // if more than 0 ordinals, add this AACO to the map to be returned,
      // and add ords to sibling stack, and make a note in siblingExplored that these are to
      // be visited now
      if (ords.length > 0) {
        AACOsOfOnePartition.put(ordinalStack[localDepth-1], new AACO(ords,vals));
        bestSignlingsStack[localDepth] = ords;
        siblingExplored[localDepth] = ords.length-1;
        ordinalStack[localDepth] = ords[ords.length-1];
      } else {
        // no ordinals siblings of tosOrdinal in current partition, move to the left of it
        // tosOrdinal is already there (to the left of partition).
        // make a note of it in siblingExplored
        ordinalStack[localDepth] = tosOrdinal;
        siblingExplored[localDepth] = -1;
      }
      continue;
    } // endof we did not check the position of a valid ordinal wrt partition
    // now tosOrdinal is a valid ordinal, inside partition or to the left of it, we need
    // to push its kids on top of it, if not too deep.
    // Make a note that we did not check them yet
    if (localDepth >= depth) {
      // localDepth == depth; current tos exhausted its possible children, mark this by pushing INVALID_ORDINAL
      ordinalStack[++localDepth] = TaxonomyReader.INVALID_ORDINAL;
      continue;
    }
    ordinalStack[++localDepth] = children[tosOrdinal];
    siblingExplored[localDepth] = Integer.MAX_VALUE;
  } // endof loop while stack is not empty
  // now generate a TempFacetResult from AACOsOfOnePartition, and consider self.
  IntermediateFacetResultWithHash tempFRWH = new IntermediateFacetResultWithHash(
      facetRequest, AACOsOfOnePartition);
  if (isSelfPartition(rootNode, facetArrays, offset)) {
    tempFRWH.isRootNodeIncluded = true;
    tempFRWH.rootNodeValue = resolver.valueOf(rootNode % partitionSize);
  }
  tempFRWH.totalNumOfFacetsConsidered = totalNumOfDescendantsConsidered;
  return tempFRWH;
}
/**
 * Recursively counts <code>ordinal</code> (at depth <code>currentDepth</code>) and all of
 * its descendants down to <code>maxDepth</code> (inclusive) whose aggregated value is != 0.
 * Only ordinals within the current partition, [<code>offset</code>, <code>endOffset</code>),
 * contribute to the count. <code>ordinal</code> itself may lie to the left of the partition
 * (ordinal &lt; offset), since its descendants can still fall inside it; the caller
 * guarantees ordinal &lt; endOffset.
 *
 * @param ordinal a facet ordinal.
 * @param youngestChild maps an ordinal to its youngest child in the taxonomy (largest
 *        ordinal number), or to -1 if it has no children.
 * @param olderSibling maps an ordinal to its older sibling, or to -1.
 * @param partitionSize number of ordinals in the given partition.
 * @param offset the first (smallest) ordinal in the given partition.
 * @param endOffset one larger than the largest ordinal that belongs to this partition.
 * @param currentDepth the depth of ordinal, relative to the root of the facetRequest.
 * @param maxDepth maximal depth of descendants considered, relative to the same root.
 * @return the number of nodes of depth &lt;= maxDepth in ordinal's subtree (including
 *         ordinal itself) that reside in the current partition and have value != 0.
 */
private int countOnly(int ordinal, int[] youngestChild, int[] olderSibling, int partitionSize, int offset,
    int endOffset, int currentDepth, int maxDepth) {
  // count this node itself when it lies inside the partition and carries a non-zero value
  int counted = (offset <= ordinal && 0 != resolver.valueOf(ordinal % partitionSize)) ? 1 : 0;
  if (currentDepth >= maxDepth) {
    return counted; // children would exceed the maximal depth
  }
  // skip children beyond the partition's right edge; neither they nor their subtrees can
  // contribute (a parent ordinal is always smaller than its descendants' ordinals)
  int child = youngestChild[ordinal];
  while (child >= endOffset) {
    child = olderSibling[child];
  }
  // recurse into the remaining children; INVALID_ORDINAL == -1 terminates the sibling chain
  for (; child > TaxonomyReader.INVALID_ORDINAL; child = olderSibling[child]) {
    counted += countOnly(child, youngestChild, olderSibling, partitionSize,
        offset, endOffset, currentDepth + 1, maxDepth);
  }
  return counted;
}
/**
 * Merge several partitions' {@link IntermediateFacetResult}-s into one of the
 * same format.
 *
 * @param tmpResults per-partition intermediate results; null entries are skipped
 * @return the merged result, or null if all inputs are null (or there are none)
 * @see PartitionsFacetResultsHandler#mergeResults(IntermediateFacetResult...)
 */
@Override
public IntermediateFacetResult mergeResults(IntermediateFacetResult... tmpResults) {
  if (tmpResults.length == 0) {
    return null;
  }
  int i=0;
  // skip over null tmpResults
  for (; (i < tmpResults.length)&&(tmpResults[i] == null); i++) {}
  if (i == tmpResults.length) {
    // all inputs are null
    return null;
  }
  // i points to the first non-null input
  int K = this.facetRequest.numResults; // number of best results in each node
  // the first non-null input is merged into, and becomes, the returned result
  IntermediateFacetResultWithHash tmpToReturn = (IntermediateFacetResultWithHash)tmpResults[i++];
  // now loop over the rest of tmpResults and merge each into tmpToReturn
  for ( ; i < tmpResults.length; i++) {
    IntermediateFacetResultWithHash tfr = (IntermediateFacetResultWithHash)tmpResults[i];
    tmpToReturn.totalNumOfFacetsConsidered += tfr.totalNumOfFacetsConsidered;
    if (tfr.isRootNodeIncluded) {
      // the root node resides in exactly one partition, so at most one input sets this
      tmpToReturn.isRootNodeIncluded = true;
      tmpToReturn.rootNodeValue = tfr.rootNodeValue;
    }
    // now merge the HashMap of tfr into this of tmpToReturn
    IntToObjectMap<AACO> tmpToReturnMapToACCOs = tmpToReturn.mapToAACOs;
    IntToObjectMap<AACO> tfrMapToACCOs = tfr.mapToAACOs;
    IntIterator tfrIntIterator = tfrMapToACCOs.keyIterator();
    // iterate over all ordinals in tfr that are mapped to their children
    while (tfrIntIterator.hasNext()) {
      int tfrkey = tfrIntIterator.next();
      AACO tmpToReturnAACO = null;
      if (null == (tmpToReturnAACO = tmpToReturnMapToACCOs.get(tfrkey))) {
        // if tmpToReturn does not have any kids of tfrkey, map all the kids
        // from tfr to it as one package, along with their respective values
        tmpToReturnMapToACCOs.put(tfrkey, tfrMapToACCOs.get(tfrkey));
      } else {
        // merge the best K children of tfrkey as appear in tmpToReturn and in tfr:
        // a classic ordered merge of two already-sorted (best-first) arrays
        AACO tfrAACO = tfrMapToACCOs.get(tfrkey);
        int resLength = tfrAACO.ordinals.length + tmpToReturnAACO.ordinals.length;
        if (K < resLength) {
          resLength = K;
        }
        int[] resOrds = new int [resLength];
        double[] resVals = new double [resLength];
        int indexIntoTmpToReturn = 0;
        int indexIntoTFR = 0;
        ACComparator merger = getSuitableACComparator(); // by facet Request
        for (int indexIntoRes = 0; indexIntoRes < resLength; indexIntoRes++) {
          if (indexIntoTmpToReturn >= tmpToReturnAACO.ordinals.length) {
            // tmpToReturnAACO (former result to return) ran out of indices
            // it is all merged into resOrds and resVals
            resOrds[indexIntoRes] = tfrAACO.ordinals[indexIntoTFR];
            resVals[indexIntoRes] = tfrAACO.values[indexIntoTFR];
            indexIntoTFR++;
            continue;
          }
          if (indexIntoTFR >= tfrAACO.ordinals.length) {
            // tfr ran out of indices
            resOrds[indexIntoRes] = tmpToReturnAACO.ordinals[indexIntoTmpToReturn];
            resVals[indexIntoRes] = tmpToReturnAACO.values[indexIntoTmpToReturn];
            indexIntoTmpToReturn++;
            continue;
          }
          // select which goes now to res: next (ord, value) from tmpToReturn or from tfr:
          if (merger.leftGoesNow( tmpToReturnAACO.ordinals[indexIntoTmpToReturn],
              tmpToReturnAACO.values[indexIntoTmpToReturn],
              tfrAACO.ordinals[indexIntoTFR],
              tfrAACO.values[indexIntoTFR])) {
            resOrds[indexIntoRes] = tmpToReturnAACO.ordinals[indexIntoTmpToReturn];
            resVals[indexIntoRes] = tmpToReturnAACO.values[indexIntoTmpToReturn];
            indexIntoTmpToReturn++;
          } else {
            resOrds[indexIntoRes] = tfrAACO.ordinals[indexIntoTFR];
            resVals[indexIntoRes] = tfrAACO.values[indexIntoTFR];
            indexIntoTFR++;
          }
        } // end of merge of best kids of tfrkey that appear in tmpToReturn and its kids that appear in tfr
        // altogether yielding no more than best K kids for tfrkey, now to appear in the new shape of
        // tmpToReturn
        // update the list of best kids of tfrkey as appear in tmpToReturn
        tmpToReturnMapToACCOs.put(tfrkey, new AACO(resOrds, resVals));
      } // endof need to merge both AACO -- children for same ordinal
    } // endof loop over all ordinals in tfr
  } // endof loop over all temporary facet results to merge
  return tmpToReturn;
}
/**
 * A bounded heap of {@link AggregatedCategory} whose ordering is delegated to an
 * {@link ACComparator}; the least-desirable entry sits on top, so it is the one
 * returned on overflow.
 */
private static class AggregatedCategoryHeap extends PriorityQueue<AggregatedCategory> {

  private final ACComparator comparator;

  public AggregatedCategoryHeap(int size, ACComparator comparator) {
    super(size);
    this.comparator = comparator;
  }

  @Override
  protected boolean lessThan(AggregatedCategory first, AggregatedCategory second) {
    // "first < second" iff second would be emitted before first by the comparator
    return comparator.leftGoesNow(second.ordinal, second.value, first.ordinal, first.value);
  }
}
/**
 * A bounded heap of {@link FacetResultNode} whose ordering is delegated to an
 * {@link ACComparator}; the least-desirable node sits on top.
 */
private static class ResultNodeHeap extends PriorityQueue<FacetResultNode> {

  private final ACComparator comparator;

  public ResultNodeHeap(int size, ACComparator comparator) {
    super(size);
    this.comparator = comparator;
  }

  @Override
  protected boolean lessThan(FacetResultNode first, FacetResultNode second) {
    // "first < second" iff second would be emitted before first by the comparator
    return comparator.leftGoesNow(second.ordinal, second.value, first.ordinal, first.value);
  }
}
/**
 * Returns the {@link ACComparator} that reflects the order, expressed in the
 * {@link FacetRequest}, of facets in the {@link FacetResult}: ascending or
 * descending by value, with ordinal as the tie-breaker.
 */
private ACComparator getSuitableACComparator() {
  return facetRequest.getSortOrder() == SortOrder.ASCENDING
      ? new AscValueACComparator()
      : new DescValueACComparator();
}
/**
 * A comparator of two aggregated categories, each given as an (ordinal, value) pair,
 * according to the order (ascending / descending) specified in the FacetRequest for
 * the FacetResult to be generated.
 */
private static abstract class ACComparator {
  ACComparator() { }
  /** Returns true if pair (ord1, val1) should be emitted before pair (ord2, val2). */
  protected abstract boolean leftGoesNow (int ord1, double val1, int ord2, double val2);
}
/** Orders pairs by ascending value; equal values fall back to ascending ordinal. */
private static final class AscValueACComparator extends ACComparator {

  AscValueACComparator() { }

  @Override
  protected boolean leftGoesNow (int ord1, double val1, int ord2, double val2) {
    if (val1 != val2) {
      return val1 < val2;
    }
    return ord1 < ord2; // tie on value: smaller ordinal first
  }
}
/** Orders pairs by descending value; equal values fall back to descending ordinal. */
private static final class DescValueACComparator extends ACComparator {

  DescValueACComparator() { }

  @Override
  protected boolean leftGoesNow (int ord1, double val1, int ord2, double val2) {
    if (val1 != val2) {
      return val1 > val2;
    }
    return ord1 > ord2; // tie on value: larger ordinal first
  }
}
/**
 * Intermediate result to hold counts from one or more partitions processed
 * thus far. Its main field, constructor parameter <i>mapToAACOs</i>, is a map
 * from ordinals to AACOs. The AACOs mapped to contain ordinals and values
 * encountered in the count arrays of the partitions processed thus far. The
 * ordinals mapped from are their parents, and they may be not contained in
 * the partitions processed thus far. All nodes belong to the taxonomy subtree
 * defined at the facet request, constructor parameter <i>facetReq</i>, by its
 * root and depth.
 */
public static class IntermediateFacetResultWithHash implements IntermediateFacetResult {
  // maps a parent ordinal to the best-K (ordinal, value) pairs of its children
  // encountered in the partitions processed so far
  protected IntToObjectMap<AACO> mapToAACOs;
  // the request whose subtree (root + depth) bounds all nodes held here
  FacetRequest facetRequest;
  boolean isRootNodeIncluded; // true if the root's ordinal was among the
  // ordinals in the partitions processed thus far
  double rootNodeValue; // the value of the root node, in case encountered.
  int totalNumOfFacetsConsidered; // total number of facets
  // which belong to facetRequest subtree and have value != 0,
  // and have been encountered thus far in the partitions processed.
  // root node of result tree is not included in this count.

  /**
   * @param facetReq the facet request whose subtree bounds all held nodes
   * @param mapToAACOs map from parent ordinals to their best-K children so far
   */
  public IntermediateFacetResultWithHash(FacetRequest facetReq,
                                          IntToObjectMap<AACO> mapToAACOs) {
    this.mapToAACOs = mapToAACOs;
    this.facetRequest = facetReq;
    this.isRootNodeIncluded = false;
    this.rootNodeValue = 0.0;
    this.totalNumOfFacetsConsidered = 0;
  }

  @Override
  public FacetRequest getFacetRequest() {
    return this.facetRequest;
  }
} // endof FacetResultWithHash
/**
 * Maintains info of one entry in the filled up count array:
 * an ordinal number of a category and the value aggregated for it
 * (typically, that value is the count for that ordinal).
 * Instances are pooled and mutated in place by {@link #fetchPartitionResult(int)}
 * to avoid per-entry allocation.
 */
private static final class AggregatedCategory {
  int ordinal;  // the category's ordinal in the taxonomy
  double value; // the aggregated (e.g. count) value for that ordinal
  AggregatedCategory(int ord, double val) {
    this.ordinal = ord;
    this.value = val;
  }
}
/**
 * Maintains an array of <code>AggregatedCategory</code>. For space consideration, this is implemented as
 * a pair of parallel arrays, <i>ordinals</i> and <i>values</i>, rather than one array of pairs.
 * Enumerated in <i>ordinals</i> are siblings,
 * potential nodes of the {@link FacetResult} tree
 * (i.e., the descendants of the root node, no deeper than the specified depth).
 * No more than K ( = {@link FacetRequest#numResults})
 * siblings are enumerated.
 * @lucene.internal
 */
protected static final class AACO {
  int [] ordinals; // ordinals of the best K children, sorted from best to least
  double [] values; // the respective values for these children (values[i] belongs to ordinals[i])
  AACO (int[] ords, double[] vals) {
    this.ordinals = ords;
    this.values = vals;
  }
}
/**
   * Attaches category-path labels to the nodes of the given result tree.
   * A {@code null} result is silently ignored.
   */
  @Override
  public void labelResult(FacetResult facetResult) throws IOException {
    if (facetResult != null) {
      recursivelyLabel(facetResult.getFacetResultNode(), facetRequest.getNumLabel());
    }
  }
// Labels the given node via the taxonomy reader, then recursively labels up to
  // numToLabel of its children (each of which again labels up to numToLabel of
  // its own children, and so on).
  private void recursivelyLabel(FacetResultNode node, int numToLabel) throws IOException {
    if (node == null) {
      return;
    }
    node.label = taxonomyReader.getPath(node.ordinal);
    int remaining = numToLabel;
    for (FacetResultNode child : node.subResults) {
      recursivelyLabel(child, numToLabel);
      if (--remaining <= 0) {
        return;
      }
    }
  }
// Verifies that the children of each node are sorted by the order
  // specified by the facetRequest.
  // The values in these nodes may have changed due to a re-count, for example
  // following the accumulation by Sampling, so now we test and re-order if
  // necessary. The facetResult tree is rearranged in place and then returned.
  @Override
  public FacetResult rearrangeFacetResult(FacetResult facetResult) {
    PriorityQueue<FacetResultNode> nodesHeap =
      new ResultNodeHeap(this.facetRequest.numResults, this.getSuitableACComparator());
    FacetResultNode topFrn = facetResult.getFacetResultNode();
    rearrangeChilrenOfNode(topFrn, nodesHeap);
    return facetResult;
  }
// Re-sorts the children of the given node (and, recursively, of every
  // descendant) using the heap's comparator, best first. The heap pops the
  // least element first, so the popped elements are written into the result
  // list back-to-front; this replaces the previous add(0, ...) front-insertion,
  // which shifted the whole ArrayList on every pop (accidental O(n^2)).
  private void rearrangeChilrenOfNode(FacetResultNode node, PriorityQueue<FacetResultNode> nodesHeap) {
    nodesHeap.clear(); // just to be safe
    for (FacetResultNode frn : node.subResults) {
      nodesHeap.add(frn);
    }
    int size = nodesHeap.size();
    ArrayList<FacetResultNode> subResults = new ArrayList<FacetResultNode>(size);
    for (int i = 0; i < size; i++) {
      subResults.add(null); // placeholders, overwritten below
    }
    for (int i = size - 1; i >= 0; i--) {
      subResults.set(i, nodesHeap.pop()); // least element lands last
    }
    node.subResults = subResults;
    for (FacetResultNode frn : node.subResults) {
      rearrangeChilrenOfNode(frn, nodesHeap);
    }
  }
/**
   * Turns an intermediate (per-partition, hash-based) result into the final
   * {@link FacetResult} tree rooted at the requested category path. Returns
   * {@code null} when there is no intermediate result or the requested root
   * category does not exist in the taxonomy.
   */
  @Override
  public FacetResult renderFacetResult(IntermediateFacetResult tmpResult) throws IOException {
    final IntermediateFacetResultWithHash intermediate = (IntermediateFacetResultWithHash) tmpResult;
    final int rootOrdinal = this.taxonomyReader.getOrdinal(this.facetRequest.categoryPath);
    if (intermediate == null || rootOrdinal == TaxonomyReader.INVALID_ORDINAL) {
      return null;
    }
    // NaN marks "root value unknown": the root ordinal was never encountered
    // in any processed partition.
    final double rootValue =
        intermediate.isRootNodeIncluded ? intermediate.rootNodeValue : Double.NaN;
    final FacetResultNode root = generateNode(rootOrdinal, rootValue, intermediate.mapToAACOs);
    return new FacetResult(intermediate.facetRequest, root, intermediate.totalNumOfFacetsConsidered);
  }
// Builds the result subtree for one ordinal: creates its node and recursively
  // expands the best-K children recorded for it in mapToAACOs (if any).
  private FacetResultNode generateNode(int ordinal, double val, IntToObjectMap<AACO> mapToAACOs) {
    final FacetResultNode node = new FacetResultNode(ordinal, val);
    final AACO aaco = mapToAACOs.get(ordinal);
    if (aaco == null) {
      // leaf in the accumulated structure: no recorded children
      return node;
    }
    final List<FacetResultNode> children = new ArrayList<FacetResultNode>(aaco.ordinals.length);
    for (int i = 0; i < aaco.ordinals.length; i++) {
      children.add(generateNode(aaco.ordinals[i], aaco.values[i], mapToAACOs));
    }
    node.subResults = children;
    return node;
  }
}
| |
package biz.paluch.spinach;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.lambdaworks.redis.LettuceStrings.isEmpty;
import static com.lambdaworks.redis.LettuceStrings.isNotEmpty;
import java.io.Serializable;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import com.google.common.net.HostAndPort;
import com.lambdaworks.redis.ConnectionPoint;
import io.netty.channel.unix.DomainSocketAddress;
/**
 * Disque URI. Contains connection details for the Disque connections: hosts or Unix domain sockets, database,
 * password, SSL flags and timeouts. Build the object either programmatically via the {@link Builder} or parse it
 * from a URI string using {@link #create(String)}.
 *
 * @author <a href="mailto:mpaluch@paluch.biz">Mark Paluch</a>
 * @since 3.0
 */
@SuppressWarnings("serial")
public class DisqueURI implements Serializable {

    public static final String URI_SCHEME_DISQUE = "disque";
    public static final String URI_SCHEME_DISQUE_SOCKET = "disque-socket";
    public static final String URI_SCHEME_DISQUE_SECURE = "disques";

    /**
     * The default disque port.
     */
    public static final int DEFAULT_DISQUE_PORT = 7711;

    private int database;
    private char[] password;
    private boolean ssl = false;
    private boolean verifyPeer = true;
    private boolean startTls = false;
    private long timeout = 60;
    private TimeUnit unit = TimeUnit.SECONDS;
    private final List<ConnectionPoint> connectionPoints = new ArrayList<ConnectionPoint>();

    /**
     * Default empty constructor.
     */
    public DisqueURI() {
    }

    /**
     * Constructor with host/port and timeout.
     *
     * @param host the host
     * @param port the port
     * @param timeout timeout value
     * @param unit unit of the timeout value
     */
    public DisqueURI(String host, int port, long timeout, TimeUnit unit) {
        DisqueHostAndPort hap = new DisqueHostAndPort();
        hap.setHost(host);
        hap.setPort(port);
        connectionPoints.add(hap);
        this.timeout = timeout;
        this.unit = unit;
    }

    /**
     * Create a Disque URI from an URI string. Supported formats are:
     * <ul>
     * <li>disque://[password@]host[:port][,host2[:port2]][,hostN[:port2N]]</li>
     * <li>disques://[password@]host[:port][,host2[:port2]][,hostN[:port2N]]</li>
     * <li>disque-socket://socket-path</li>
     * </ul>
     *
     * The uri must follow conventions of {@link java.net.URI}
     *
     * @param uri The URI string.
     * @return An instance of {@link DisqueURI} containing details from the URI.
     */
    public static DisqueURI create(String uri) {
        return create(URI.create(uri));
    }

    /**
     * Create a Disque URI from an URI. Supported formats are:
     * <ul>
     * <li>disque://[password@]host[:port][,host2[:port2]][,hostN[:port2N]]</li>
     * <li>disques://[password@]host[:port][,host2[:port2]][,hostN[:port2N]]</li>
     * <li>disque-socket://socket-path</li>
     * </ul>
     *
     * The uri must follow conventions of {@link java.net.URI}
     *
     * @param uri The URI.
     * @return An instance of {@link DisqueURI} containing details from the URI.
     */
    public static DisqueURI create(URI uri) {
        DisqueURI.Builder builder;
        builder = configureDisque(uri);
        if (URI_SCHEME_DISQUE_SECURE.equals(uri.getScheme())) {
            builder.withSsl(true);
        }

        // URI.getUserInfo() is empty for authorities like "pass@h1,h2" that are
        // not parseable as host:port; fall back to splitting the raw authority.
        String userInfo = uri.getUserInfo();
        if (isEmpty(userInfo) && isNotEmpty(uri.getAuthority()) && uri.getAuthority().indexOf('@') > 0) {
            userInfo = uri.getAuthority().substring(0, uri.getAuthority().indexOf('@'));
        }

        if (isNotEmpty(userInfo)) {
            String password = userInfo;
            // user-info of the form ":password" — strip the empty user part
            if (password.startsWith(":")) {
                password = password.substring(1);
            }
            builder.withPassword(password);
        }

        // For host-based schemes, a path like "/2" selects the database
        if (!URI_SCHEME_DISQUE_SOCKET.equals(uri.getScheme())) {
            if (isNotEmpty(uri.getPath())) {
                String pathSuffix = uri.getPath().substring(1);
                if (isNotEmpty(pathSuffix)) {
                    builder.withDatabase(Integer.parseInt(pathSuffix));
                }
            }
        }

        return builder.build();
    }

    /**
     * Initializes a {@link Builder} from the URI: either a socket path, a single
     * host[:port], or a comma-separated list of hosts taken from the authority.
     *
     * @param uri the source URI
     * @return builder seeded with the connection points
     * @throws IllegalArgumentException if no host/socket can be derived
     */
    private static DisqueURI.Builder configureDisque(URI uri) {
        DisqueURI.Builder builder = null;
        if (URI_SCHEME_DISQUE_SOCKET.equals(uri.getScheme())) {
            builder = Builder.disqueSocket(uri.getPath());
        } else if (isNotEmpty(uri.getHost())) {
            // simple single-host form, parsed by java.net.URI itself
            if (uri.getPort() != -1) {
                builder = DisqueURI.Builder.disque(uri.getHost(), uri.getPort());
            } else {
                builder = DisqueURI.Builder.disque(uri.getHost());
            }
        }

        if (builder == null && isNotEmpty(uri.getAuthority())) {
            // multi-host form: URI could not parse a host, split the authority manually
            String authority = uri.getAuthority();
            if (authority.indexOf('@') > -1) {
                authority = authority.substring(authority.indexOf('@') + 1);
            }

            String[] hosts = authority.split("\\,");
            for (String host : hosts) {
                if (uri.getScheme().equals(URI_SCHEME_DISQUE_SOCKET)) {
                    if (builder == null) {
                        builder = DisqueURI.Builder.disqueSocket(host);
                    } else {
                        builder.withSocket(host);
                    }
                } else {
                    HostAndPort hostAndPort = HostAndPort.fromString(host);
                    if (builder == null) {
                        if (hostAndPort.hasPort()) {
                            builder = DisqueURI.Builder.disque(hostAndPort.getHostText(), hostAndPort.getPort());
                        } else {
                            builder = DisqueURI.Builder.disque(hostAndPort.getHostText());
                        }
                    } else {
                        if (hostAndPort.hasPort()) {
                            builder.withDisque(hostAndPort.getHostText(), hostAndPort.getPort());
                        } else {
                            builder.withDisque(hostAndPort.getHostText());
                        }
                    }
                }
            }
        }

        checkArgument(builder != null, "Invalid URI, cannot get host part");
        return builder;
    }

    public char[] getPassword() {
        return password;
    }

    /**
     * Sets the password, stored internally as a {@code char[]}.
     *
     * @param password the password, must not be {@code null}
     */
    public void setPassword(String password) {
        this.password = password.toCharArray();
    }

    public long getTimeout() {
        return timeout;
    }

    public void setTimeout(long timeout) {
        this.timeout = timeout;
    }

    public TimeUnit getUnit() {
        return unit;
    }

    public void setUnit(TimeUnit unit) {
        this.unit = unit;
    }

    public int getDatabase() {
        return database;
    }

    public void setDatabase(int database) {
        this.database = database;
    }

    public boolean isSsl() {
        return ssl;
    }

    public void setSsl(boolean ssl) {
        this.ssl = ssl;
    }

    public boolean isVerifyPeer() {
        return verifyPeer;
    }

    public void setVerifyPeer(boolean verifyPeer) {
        this.verifyPeer = verifyPeer;
    }

    public boolean isStartTls() {
        return startTls;
    }

    public void setStartTls(boolean startTls) {
        this.startTls = startTls;
    }

    public List<ConnectionPoint> getConnectionPoints() {
        return connectionPoints;
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        sb.append(getClass().getSimpleName());
        sb.append(" ").append(connectionPoints);
        return sb.toString();
    }

    /**
     * Builder for Disque URI.
     */
    public static class Builder {

        private final DisqueURI disqueURI = new DisqueURI();

        /**
         * Set Disque host. Creates a new builder.
         *
         * @param host the host name
         * @return New builder with Disque host/port.
         */
        public static Builder disque(String host) {
            return disque(host, DEFAULT_DISQUE_PORT);
        }

        /**
         * Set Disque host and port. Creates a new builder
         *
         * @param host the host name
         * @param port the port
         * @return New builder with Disque host/port.
         */
        public static Builder disque(String host, int port) {
            checkNotNull(host, "Host must not be null");
            Builder builder = new Builder();
            builder.withDisque(host, port);
            return builder;
        }

        /**
         * Set Disque socket. Creates a new builder.
         *
         * @param socket the socket name
         * @return New builder with Disque socket.
         */
        public static Builder disqueSocket(String socket) {
            checkNotNull(socket, "Socket must not be null");
            Builder builder = new Builder();
            builder.withSocket(socket);
            return builder;
        }

        /**
         * Adds a Unix domain socket connection point.
         *
         * @param socket the socket path
         * @return the builder
         */
        public Builder withSocket(String socket) {
            checkNotNull(socket, "Socket must not be null");
            this.disqueURI.connectionPoints.add(new DisqueSocket(socket));
            return this;
        }

        /**
         * Adds a host connection point using the default port.
         *
         * @param host the host name
         * @return the builder
         */
        public Builder withDisque(String host) {
            checkNotNull(host, "Host must not be null");
            return withDisque(host, DEFAULT_DISQUE_PORT);
        }

        /**
         * Adds a host/port connection point.
         *
         * @param host the host name
         * @param port the port
         * @return the builder
         */
        public Builder withDisque(String host, int port) {
            checkNotNull(host, "Host must not be null");
            DisqueHostAndPort hap = new DisqueHostAndPort(host, port);
            this.disqueURI.connectionPoints.add(hap);
            return this;
        }

        /**
         * Adds ssl information to the builder.
         *
         * @param ssl {@literal true} if use SSL
         * @return the builder
         */
        public Builder withSsl(boolean ssl) {
            disqueURI.setSsl(ssl);
            return this;
        }

        /**
         * Enables/disables StartTLS when using SSL.
         *
         * @param startTls {@literal true} if use StartTLS
         * @return the builder
         */
        public Builder withStartTls(boolean startTls) {
            disqueURI.setStartTls(startTls);
            return this;
        }

        /**
         * Enables/disables peer verification.
         *
         * @param verifyPeer {@literal true} to verify hosts when using SSL
         * @return the builder
         */
        public Builder withVerifyPeer(boolean verifyPeer) {
            disqueURI.setVerifyPeer(verifyPeer);
            return this;
        }

        /**
         * Adds database selection.
         *
         * @param database the database number
         * @return the builder
         */
        public Builder withDatabase(int database) {
            disqueURI.setDatabase(database);
            return this;
        }

        /**
         * Adds authentication.
         *
         * @param password the password
         * @return the builder
         */
        public Builder withPassword(String password) {
            checkNotNull(password, "Password must not be null");
            disqueURI.setPassword(password);
            return this;
        }

        /**
         * Adds timeout.
         *
         * @param timeout must be greater or equal 0
         * @param unit the timeout time unit.
         * @return the builder
         */
        public Builder withTimeout(long timeout, TimeUnit unit) {
            checkNotNull(unit, "TimeUnit must not be null");
            checkArgument(timeout >= 0, "Timeout must be greater or equal 0");
            disqueURI.setTimeout(timeout);
            disqueURI.setUnit(unit);
            return this;
        }

        /**
         *
         * @return the DisqueURI.
         */
        public DisqueURI build() {
            return disqueURI;
        }
    }

    /**
     * A Unix domain socket connection point. The resolved address is cached and
     * intentionally transient (not serialized).
     */
    public static class DisqueSocket implements Serializable, ConnectionPoint {
        private String socket;
        private transient SocketAddress resolvedAddress;

        public DisqueSocket() {
        }

        public DisqueSocket(String socket) {
            this.socket = socket;
        }

        @Override
        public String getHost() {
            return null;
        }

        @Override
        public int getPort() {
            return -1;
        }

        @Override
        public String getSocket() {
            return socket;
        }

        public void setSocket(String socket) {
            this.socket = socket;
        }

        public SocketAddress getResolvedAddress() {
            if (resolvedAddress == null) {
                resolvedAddress = new DomainSocketAddress(getSocket());
            }
            return resolvedAddress;
        }

        @Override
        public String toString() {
            // StringBuilder (not the legacy synchronized StringBuffer) for a
            // method-local buffer, consistent with DisqueURI.toString()
            final StringBuilder sb = new StringBuilder();
            sb.append("[socket=").append(socket);
            sb.append(']');
            return sb.toString();
        }
    }

    /**
     * A TCP host/port connection point. The resolved address is cached and
     * intentionally transient (not serialized).
     */
    public static class DisqueHostAndPort implements Serializable, ConnectionPoint {
        private String host;
        private int port;
        private transient SocketAddress resolvedAddress;

        public DisqueHostAndPort() {
        }

        public DisqueHostAndPort(String host, int port) {
            this.host = host;
            this.port = port;
        }

        public String getHost() {
            return host;
        }

        public void setHost(String host) {
            this.host = host;
        }

        public int getPort() {
            return port;
        }

        @Override
        public String getSocket() {
            return null;
        }

        public void setPort(int port) {
            this.port = port;
        }

        public SocketAddress getResolvedAddress() {
            if (resolvedAddress == null) {
                resolvedAddress = new InetSocketAddress(host, port);
            }
            return resolvedAddress;
        }

        @Override
        public String toString() {
            // StringBuilder (not the legacy synchronized StringBuffer) for a
            // method-local buffer, consistent with DisqueURI.toString()
            final StringBuilder sb = new StringBuilder();
            sb.append("[port=").append(port);
            sb.append(", host='").append(host).append('\'');
            sb.append(']');
            return sb.toString();
        }
    }
}
| |
/*
* Copyright 2000-2014 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.client.ui.tree;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import com.google.gwt.aria.client.Roles;
import com.google.gwt.dom.client.Element;
import com.vaadin.client.ApplicationConnection;
import com.vaadin.client.BrowserInfo;
import com.vaadin.client.Paintable;
import com.vaadin.client.TooltipInfo;
import com.vaadin.client.UIDL;
import com.vaadin.client.VConsole;
import com.vaadin.client.WidgetUtil;
import com.vaadin.client.communication.StateChangeEvent;
import com.vaadin.client.ui.AbstractComponentConnector;
import com.vaadin.client.ui.VTree;
import com.vaadin.client.ui.VTree.TreeNode;
import com.vaadin.shared.ui.Connect;
import com.vaadin.shared.ui.MultiSelectMode;
import com.vaadin.shared.ui.tree.TreeConstants;
import com.vaadin.shared.ui.tree.TreeState;
import com.vaadin.ui.Tree;
/**
 * Client-side connector for {@link com.vaadin.ui.Tree}: translates UIDL updates
 * from the server into {@link VTree} widget state (nodes, selection, actions,
 * drag mode and tooltips).
 */
@Connect(Tree.class)
public class TreeConnector extends AbstractComponentConnector implements
        Paintable {

    /** Per-node tooltips; rebuilt from scratch on every full UIDL update. */
    protected final Map<TreeNode, TooltipInfo> tooltipMap = new HashMap<TreeNode, TooltipInfo>();

    @Override
    protected void init() {
        getWidget().connector = this;
    }

    @Override
    public void updateFromUIDL(UIDL uidl, ApplicationConnection client) {
        if (!isRealUpdate(uidl)) {
            return;
        }

        getWidget().rendering = true;

        getWidget().client = client;

        // A partial update only refreshes one expanded subtree; handle it and
        // bail out without rebuilding the whole tree.
        if (uidl.hasAttribute("partialUpdate")) {
            handleUpdate(uidl);

            // IE8 needs a hack to measure the tree again after update
            WidgetUtil.forceIE8Redraw(getWidget().getElement());

            getWidget().rendering = false;
            return;
        }

        getWidget().paintableId = uidl.getId();

        getWidget().immediate = getState().immediate;

        getWidget().disabled = !isEnabled();
        getWidget().readonly = isReadOnly();

        getWidget().dragMode = uidl.hasAttribute("dragMode") ? uidl
                .getIntAttribute("dragMode") : 0;

        getWidget().isNullSelectionAllowed = uidl
                .getBooleanAttribute("nullselect");

        if (uidl.hasAttribute("alb")) {
            getWidget().bodyActionKeys = uidl.getStringArrayAttribute("alb");
        }

        getWidget().body.clear();
        // clear out any references to nodes that no longer are attached
        getWidget().clearNodeToKeyMap();
        tooltipMap.clear();

        TreeNode childTree = null;
        UIDL childUidl = null;
        for (final Iterator<?> i = uidl.getChildIterator(); i.hasNext();) {
            childUidl = (UIDL) i.next();
            if ("actions".equals(childUidl.getTag())) {
                updateActionMap(childUidl);
                continue;
            } else if ("-ac".equals(childUidl.getTag())) {
                getWidget().updateDropHandler(childUidl);
                continue;
            }
            childTree = getWidget().new TreeNode();
            getConnection().getVTooltip().connectHandlersToWidget(childTree);
            updateNodeFromUIDL(childTree, childUidl, 1);
            getWidget().body.add(childTree);
            childTree.addStyleDependentName("root");
            childTree.childNodeContainer.addStyleDependentName("root");
        }
        // Mark the last rendered root-level node for styling
        if (childTree != null && childUidl != null) {
            boolean leaf = !childUidl.getTag().equals("node");
            childTree.addStyleDependentName(leaf ? "leaf-last" : "last");
            childTree.childNodeContainer.addStyleDependentName("last");
        }
        final String selectMode = uidl.getStringAttribute("selectmode");
        getWidget().selectable = !"none".equals(selectMode);
        getWidget().isMultiselect = "multi".equals(selectMode);

        if (getWidget().isMultiselect) {
            Roles.getTreeRole().setAriaMultiselectableProperty(
                    getWidget().getElement(), true);

            if (BrowserInfo.get().isTouchDevice()) {
                // Always use the simple mode for touch devices that do not have
                // shift/ctrl keys (#8595)
                getWidget().multiSelectMode = MultiSelectMode.SIMPLE;
            } else {
                getWidget().multiSelectMode = MultiSelectMode.valueOf(uidl
                        .getStringAttribute("multiselectmode"));
            }
        } else {
            Roles.getTreeRole().setAriaMultiselectableProperty(
                    getWidget().getElement(), false);
        }

        getWidget().selectedIds = uidl.getStringArrayVariableAsSet("selected");

        // Update lastSelection and focusedNode to point to *actual* nodes again
        // after the old ones have been cleared from the body. This fixes focus
        // and keyboard navigation issues as described in #7057 and other
        // tickets.
        if (getWidget().lastSelection != null) {
            getWidget().lastSelection = getWidget().getNodeByKey(
                    getWidget().lastSelection.key);
        }

        if (getWidget().focusedNode != null) {

            Set<String> selectedIds = getWidget().selectedIds;

            // If the focused node is not between the selected nodes, we need to
            // refresh the focused node to prevent an undesired scroll. #12618.
            if (!selectedIds.isEmpty()
                    && !selectedIds.contains(getWidget().focusedNode.key)) {
                String keySelectedId = selectedIds.iterator().next();

                TreeNode nodeToSelect = getWidget().getNodeByKey(keySelectedId);

                getWidget().setFocusedNode(nodeToSelect);
            } else {
                getWidget().setFocusedNode(
                        getWidget().getNodeByKey(getWidget().focusedNode.key));
            }
        }

        if (getWidget().lastSelection == null
                && getWidget().focusedNode == null
                && !getWidget().selectedIds.isEmpty()) {
            getWidget().setFocusedNode(
                    getWidget().getNodeByKey(
                            getWidget().selectedIds.iterator().next()));
            getWidget().focusedNode.setFocused(false);
        }

        // IE8 needs a hack to measure the tree again after update
        WidgetUtil.forceIE8Redraw(getWidget().getElement());

        getWidget().rendering = false;
    }

    @Override
    public void onStateChanged(StateChangeEvent stateChangeEvent) {
        super.onStateChanged(stateChangeEvent);

        // VTree does not implement Focusable
        getWidget().setTabIndex(getState().tabIndex);
    }

    @Override
    public VTree getWidget() {
        return (VTree) super.getWidget();
    }

    /**
     * Handles a partial update: re-renders the children of a single (expanded)
     * node identified by "rootKey" in the UIDL.
     */
    private void handleUpdate(UIDL uidl) {
        final TreeNode rootNode = getWidget().getNodeByKey(
                uidl.getStringAttribute("rootKey"));
        if (rootNode != null) {
            if (!rootNode.getState()) {
                // expanding node happened server side
                rootNode.setState(true, false);
            }

            String levelPropertyString = Roles.getTreeitemRole()
                    .getAriaLevelProperty(rootNode.getElement());
            int levelProperty;
            try {
                // parseInt avoids boxing via Integer.valueOf; it still throws
                // NumberFormatException for null/invalid input, handled below
                levelProperty = Integer.parseInt(levelPropertyString);
            } catch (NumberFormatException e) {
                levelProperty = 1;
                VConsole.error(e);
            }
            // Generic cast instead of the previous raw-type (Iterator) cast;
            // unchecked by necessity, as UIDL does not expose Iterator<UIDL>
            @SuppressWarnings("unchecked")
            final Iterator<UIDL> children = (Iterator<UIDL>) (Iterator<?>) uidl
                    .getChildIterator();
            renderChildNodes(rootNode, children, levelProperty + 1);
        }
    }

    /**
     * Registers action for the root and also for individual nodes
     *
     * @param uidl
     */
    private void updateActionMap(UIDL uidl) {
        final Iterator<?> it = uidl.getChildIterator();
        while (it.hasNext()) {
            final UIDL action = (UIDL) it.next();
            final String key = action.getStringAttribute("key");
            final String caption = action
                    .getStringAttribute(TreeConstants.ATTRIBUTE_ACTION_CAPTION);
            String iconUrl = null;
            if (action.hasAttribute(TreeConstants.ATTRIBUTE_ACTION_ICON)) {
                iconUrl = getConnection()
                        .translateVaadinUri(
                                action.getStringAttribute(TreeConstants.ATTRIBUTE_ACTION_ICON));
            }
            getWidget().registerAction(key, caption, iconUrl);
        }
    }

    /**
     * Populates one tree node (caption, key, actions, children, style,
     * tooltip, expansion, selection and icon) from its UIDL description.
     *
     * @param treeNode the node to populate
     * @param uidl the UIDL describing the node
     * @param level the 1-based ARIA tree level of this node
     */
    public void updateNodeFromUIDL(TreeNode treeNode, UIDL uidl, int level) {
        Roles.getTreeitemRole().setAriaLevelProperty(treeNode.getElement(),
                level);

        String nodeKey = uidl.getStringAttribute("key");
        treeNode.setText(uidl
                .getStringAttribute(TreeConstants.ATTRIBUTE_NODE_CAPTION));
        treeNode.key = nodeKey;

        getWidget().registerNode(treeNode);

        if (uidl.hasAttribute("al")) {
            treeNode.actionKeys = uidl.getStringArrayAttribute("al");
        }

        if (uidl.getTag().equals("node")) {
            if (uidl.getChildCount() == 0) {
                treeNode.childNodeContainer.setVisible(false);
            } else {
                // Generic cast instead of the previous raw-type (Iterator)
                // cast; unchecked by necessity (see handleUpdate)
                @SuppressWarnings("unchecked")
                final Iterator<UIDL> children = (Iterator<UIDL>) (Iterator<?>) uidl
                        .getChildIterator();
                renderChildNodes(treeNode, children, level + 1);
                treeNode.childrenLoaded = true;
            }
        } else {
            treeNode.addStyleName(TreeNode.CLASSNAME + "-leaf");
        }

        if (uidl.hasAttribute(TreeConstants.ATTRIBUTE_NODE_STYLE)) {
            treeNode.setNodeStyleName(uidl
                    .getStringAttribute(TreeConstants.ATTRIBUTE_NODE_STYLE));
        }

        String description = uidl.getStringAttribute("descr");
        if (description != null) {
            tooltipMap.put(treeNode, new TooltipInfo(description, null,
                    treeNode));
        }

        if (uidl.getBooleanAttribute("expanded") && !treeNode.getState()) {
            treeNode.setState(true, false);
        }

        if (uidl.getBooleanAttribute("selected")) {
            treeNode.setSelected(true);
            // ensure that identifier is in selectedIds array (this may be a
            // partial update)
            getWidget().selectedIds.add(nodeKey);
        }

        String iconUrl = uidl
                .getStringAttribute(TreeConstants.ATTRIBUTE_NODE_ICON);
        String iconAltText = uidl
                .getStringAttribute(TreeConstants.ATTRIBUTE_NODE_ICON_ALT);
        treeNode.setIcon(iconUrl, iconAltText);
    }

    /**
     * Renders the given UIDL children into the container node, replacing its
     * previous children, and applies last-child styling.
     */
    void renderChildNodes(TreeNode containerNode, Iterator<UIDL> i, int level) {
        containerNode.childNodeContainer.clear();
        containerNode.childNodeContainer.setVisible(true);
        while (i.hasNext()) {
            final UIDL childUidl = i.next();
            // actions are in bit weird place, don't mix them with children,
            // but current node's actions
            if ("actions".equals(childUidl.getTag())) {
                updateActionMap(childUidl);
                continue;
            }
            final TreeNode childTree = getWidget().new TreeNode();
            getConnection().getVTooltip().connectHandlersToWidget(childTree);
            updateNodeFromUIDL(childTree, childUidl, level);
            containerNode.childNodeContainer.add(childTree);
            if (!i.hasNext()) {
                childTree
                        .addStyleDependentName(childTree.isLeaf() ? "leaf-last"
                                : "last");
                childTree.childNodeContainer.addStyleDependentName("last");
            }
        }
        containerNode.childrenLoaded = true;
    }

    @Override
    public boolean isReadOnly() {
        return super.isReadOnly() || getState().propertyReadOnly;
    }

    @Override
    public TreeState getState() {
        return (TreeState) super.getState();
    }

    @Override
    public TooltipInfo getTooltipInfo(Element element) {

        TooltipInfo info = null;

        // Try to find a tooltip for a node
        if (element != getWidget().getElement()) {
            Object node = WidgetUtil.findWidget(element, TreeNode.class);

            if (node != null) {
                TreeNode tnode = (TreeNode) node;
                if (tnode.isCaptionElement(element)) {
                    info = tooltipMap.get(tnode);
                }
            }
        }

        // If no tooltip found for the node or if the target was not a node, use
        // the default tooltip
        if (info == null) {
            info = super.getTooltipInfo(element);
        }

        return info;
    }

    @Override
    public boolean hasTooltip() {
        /*
         * Item tooltips are not processed until updateFromUIDL, so we can't be
         * sure that there are no tooltips during onStateChange when this method
         * is used.
         */
        return true;
    }
}
| |
/**
* Copyright (C) dbychkov.com.
* <p/>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.testaholic.brewery.activity;
import android.app.AlertDialog;
import android.app.Fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.ActionBar;
import android.support.v7.widget.Toolbar;
import android.view.MenuItem;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import net.testaholic.brewery.R;
import net.testaholic.brewery.adapter.CardPagerAdapter;
import net.testaholic.brewery.anim.ZoomOutPageTransformer;
import net.testaholic.brewery.dagger.component.ActivityComponent;
import net.testaholic.brewery.domain.Ingredients;
import net.testaholic.brewery.fragment.CardContainerFragment;
import net.testaholic.brewery.presentation.StudyFlashcardsActivityPresenter;
import net.testaholic.brewery.view.StudyFlashcardsView;
import net.testaholic.brewery.widgets.ViewPagerCustomDuration;
import java.util.List;
import javax.inject.Inject;
import butterknife.Bind;
import butterknife.BindColor;
import butterknife.BindString;
import butterknife.ButterKnife;
import butterknife.OnClick;
/**
 * Study session activity: shows flashcards in a view pager and lets the user
 * mark each card as known / not known via the two bottom buttons.
 */
public class StudyFlashcardsActivity extends BaseActivity implements StudyFlashcardsView {

    public static final String EXTRA_LESSON_ID = "lessonId";

    @Inject
    StudyFlashcardsActivityPresenter presenter;

    @Bind(R.id.knowButton)
    ImageView knowButton;

    @Bind(R.id.dontKnowButton)
    ImageView dontKnowButton;

    @Bind(R.id.view_pager)
    ViewPagerCustomDuration viewPager;

    @Bind(R.id.toolbar)
    Toolbar toolbar;

    @BindString(R.string.flashcards_activity_title)
    String title;

    @BindString(R.string.flashcards_activity_lesson_ended_title)
    String lessonEndedTitle;

    @BindString(R.string.flashcards_activity_lesson_ended_text)
    String lessonEndedText;

    @BindString(R.string.flashcards_activity_words_learnt_title)
    String wordsLearntTitle;

    @BindString(R.string.flashcards_activity_words_learnt_text)
    String wordsLearntText;

    @BindColor(R.color.know_button_color)
    int knowButtonColor;

    @BindColor(R.color.dont_know_button_color)
    int dontKnowButtonColor;

    @Bind(R.id.main_container)
    RelativeLayout relativeLayout;

    // Lesson currently studied; -1 when the launch intent carried no id.
    private long lessonId;
    private CardPagerAdapter adapter;

    /**
     * Builds the launch intent for this activity.
     *
     * @param context the calling context
     * @param lessonId id of the lesson whose flashcards should be studied
     * @return intent carrying the lesson id under {@link #EXTRA_LESSON_ID}
     */
    public static Intent createIntent(Context context, Long lessonId) {
        Intent intent = new Intent(context, StudyFlashcardsActivity.class);
        intent.putExtra(EXTRA_LESSON_ID, lessonId);
        return intent;
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_test_cards);
        ButterKnife.bind(this);
        initExtra();
        initToolbar();
        initButtons();
        initPresenter();
    }

    private void initExtra() {
        // Read the extra under this class's own key — the same key
        // createIntent() writes it with (it previously read the key constant
        // of FlashcardsActivity, breaking the write/read symmetry).
        lessonId = getIntent().getLongExtra(EXTRA_LESSON_ID, -1L);
    }

    private void initPresenter() {
        presenter.setView(this);
        presenter.initialize(lessonId);
    }

    @Override
    public void injectActivity(ActivityComponent component) {
        component.inject(this);
    }

    @Override
    public void onBackPressed() {
        super.onBackPressed();
        overridePendingTransition(R.anim.slide_in_left_100, R.anim.slide_out_right_100);
    }

    private void initToolbar() {
        setSupportActionBar(toolbar);
        // getSupportActionBar() may return null; an explicit check replaces the
        // former catch-all that silently swallowed every exception.
        final ActionBar ab = getSupportActionBar();
        if (ab != null) {
            ab.setDisplayHomeAsUpEnabled(true);
            ab.setTitle(title);
        }
    }

    private void initButtons() {
        knowButton.setColorFilter(knowButtonColor);
        dontKnowButton.setColorFilter(dontKnowButtonColor);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:
                onBackPressed();
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    @Override
    public void showLessonEndedDialog() {
        buildDialog(lessonEndedText, lessonEndedTitle);
    }

    @Override
    public void showAllWordsLearntDialog() {
        buildDialog(wordsLearntText, wordsLearntTitle);
    }

    // Shows a non-cancelable dialog whose OK button leaves the activity.
    private void buildDialog(String message, String title) {
        new AlertDialog.Builder(this)
                .setTitle(title)
                .setMessage(message)
                .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.dismiss();
                        onBackPressed();
                    }
                })
                .setCancelable(false)
                .create()
                .show();
    }

    @Override
    public void renderFlashcards(List<Ingredients> wordsFromLesson) {
        adapter = new CardPagerAdapter(getFragmentManager(), wordsFromLesson);
        viewPager.setPageTransformer(true, new ZoomOutPageTransformer());
        viewPager.setAdapter(adapter);
        viewPager.setScrollDurationFactor(3);
    }

    /**
     * Looks up the pager fragment at the given position using the tag
     * FragmentPagerAdapter assigns ("android:switcher:<viewId>:<itemId>").
     */
    public Fragment findFragmentByPosition(int position) {
        return getFragmentManager().findFragmentByTag("android:switcher:" + viewPager.getId() + ":"
                + adapter.getItemId(position));
    }

    @Override
    public boolean showCardBack(int position) {
        CardContainerFragment cardContainerFragment = ((CardContainerFragment) findFragmentByPosition(position));
        if (!cardContainerFragment.isFlipped()) {
            cardContainerFragment.flipCard();
            return true;
        }
        return false;
    }

    @OnClick(R.id.knowButton)
    public void onKnowButtonClicked() {
        presenter.knowWordButtonPressed();
    }

    @OnClick(R.id.dontKnowButton)
    public void onDontKnowButtonClicked() {
        presenter.dontKnowWordButtonPressed();
    }

    @Override
    public void showFlashcard(int flashCardNumber) {
        viewPager.setCurrentItem(flashCardNumber);
    }
}
| |
/**
* PerformanceCModuleDec.java
* ---------------------------------
* Copyright (c) 2016
* RESOLVE Software Research Group
* School of Computing
* Clemson University
* All rights reserved.
* ---------------------------------
* This file is subject to the terms and conditions defined in
* file 'LICENSE.txt', which is part of this source code package.
*/
package edu.clemson.cs.r2jt.absyn;
import edu.clemson.cs.r2jt.collections.Iterator;
import edu.clemson.cs.r2jt.collections.List;
import edu.clemson.cs.r2jt.data.PosSymbol;
import edu.clemson.cs.r2jt.data.Symbol;
public class PerformanceCModuleDec extends ModuleDec {
// ===========================================================
// Variables
// ===========================================================
/** The name member. */
    private PosSymbol name;

    /** The profileName1 member. */
    private PosSymbol profileName1;

    /** The Profile's concept name member. */
    private PosSymbol profilecName;

    /** The parameters member. */
    private List<ModuleParameterDec> parameters;

    /** The usesItems member. */
    private List<UsesItem> usesItems;

    /** The requires member. */
    private Exp requires;

    /** The constraints member. */
    private List<Exp> constraints;

    /** The performance initialization member. */
    private PerformanceInitItem perfInit;

    /** The performance finalization member. */
    private PerformanceFinalItem perfFinal;

    /** The facilityInit member. */
    private InitItem facilityInit;

    /** The facilityFinal member. */
    private FinalItem facilityFinal;

    /** The decs member. */
    private List<Dec> decs;
// ===========================================================
// Constructors
// ===========================================================
/** Default (no-argument) constructor. */
    public PerformanceCModuleDec() {}
public PerformanceCModuleDec(PosSymbol name, PosSymbol profileName1,
PosSymbol profilecName, List<ModuleParameterDec> parameters,
List<UsesItem> usesItems, Exp requires, List<Exp> constraints,
PerformanceInitItem perfInit, PerformanceFinalItem perfFinal,
InitItem facilityInit, FinalItem facilityFinal, List<Dec> decs) {
this.name = name;
this.profileName1 = profileName1;
this.profilecName = profilecName;
this.parameters = parameters;
this.usesItems = usesItems;
this.requires = requires;
this.constraints = constraints;
this.perfInit = perfInit;
this.perfFinal = perfFinal;
this.facilityInit = facilityInit;
this.facilityFinal = facilityFinal;
this.decs = decs;
}
// ===========================================================
// Accessor Methods
// ===========================================================
// -----------------------------------------------------------
// Get Methods
// -----------------------------------------------------------
/** Returns the value of the profileNames variable. */
public PosSymbol getName() {
return name;
}
/** Returns the value of the profileNames variable. */
public PosSymbol getProfileName1() {
return profileName1;
}
/** Returns the value of the Profile's concept name variable. */
public PosSymbol getProfilecName() {
return profilecName;
}
/** Returns the value of the parameters variable. */
public List<ModuleParameterDec> getParameters() {
return parameters;
}
/** Returns the value of the usesItems variable. */
public List<UsesItem> getUsesItems() {
return usesItems;
}
/** Returns the value of the requires variable. */
public Exp getRequires() {
return requires;
}
/** Returns the value of the constraints variable. */
public List<Exp> getConstraints() {
return constraints;
}
/** Returns the value of the performance initialization variable. */
public PerformanceInitItem getPerfInit() {
return perfInit;
}
/** Returns the value of the performance finalization variable. */
public PerformanceFinalItem getPerfFinal() {
return perfFinal;
}
/** Returns the value of the facilityInit variable. */
public InitItem getFacilityInit() {
return facilityInit;
}
/** Returns the value of the facilityFinal variable. */
public FinalItem getFacilityFinal() {
return facilityFinal;
}
/** Returns the value of the decs variable. */
public List<Dec> getDecs() {
return decs;
}
/** Returns a list of procedures in this realization. */
public List<Symbol> getLocalProcedureNames() {
List<Symbol> retval = new List<Symbol>();
Iterator<Dec> it = decs.iterator();
while (it.hasNext()) {
Dec d = it.next();
if (d instanceof ProcedureDec) {
retval.add(d.getName().getSymbol());
}
}
return retval;
}
// -----------------------------------------------------------
// Set Methods
// -----------------------------------------------------------
/** Sets the profileName1 variable to the specified value. */
public void setName(PosSymbol name) {
this.name = name;
}
/** Sets the profileName1 variable to the specified value. */
public void setProfileNames1(PosSymbol profileName1) {
this.profileName1 = profileName1;
}
/** Sets the Profile's concept name variable to the specified value. */
public void setProfilecName(PosSymbol profilecName) {
this.profilecName = profilecName;
}
/** Sets the parameters variable to the specified value. */
public void setParameters(List<ModuleParameterDec> parameters) {
this.parameters = parameters;
}
/** Sets the usesItems variable to the specified value. */
public void setUsesItems(List<UsesItem> usesItems) {
this.usesItems = usesItems;
}
/** Sets the requires variable to the specified value. */
public void setRequires(Exp requires) {
this.requires = requires;
}
/** Sets the constraints variable to the specified value. */
public void setConstraints(List<Exp> constraints) {
this.constraints = constraints;
}
/** Sets the performance initialization variable to the specified value. */
public void setPerfInit(PerformanceInitItem perfInit) {
this.perfInit = perfInit;
}
/** Sets the performance finalization variable to the specified value. */
public void setPerfFinal(PerformanceFinalItem perfFinal) {
this.perfFinal = perfFinal;
}
/** Sets the facilityInit variable to the specified value. */
public void setFacilityInit(InitItem facilityInit) {
this.facilityInit = facilityInit;
}
/** Sets the facilityFinal variable to the specified value. */
public void setFacilityFinal(FinalItem facilityFinal) {
this.facilityFinal = facilityFinal;
}
/** Sets the decs variable to the specified value. */
public void setDecs(List<Dec> decs) {
this.decs = decs;
}
// ===========================================================
// Public Methods
// ===========================================================
/** Accepts a ResolveConceptualVisitor. */
public void accept(ResolveConceptualVisitor v) {
v.visitPerformanceCModuleDec(this);
}
/** Returns a formatted text string of this class. */
public String asString(int indent, int increment) {
StringBuffer sb = new StringBuffer();
printSpace(indent, sb);
sb.append("PerformanceCModuleDec\n");
if (name != null) {
sb.append(name.asString(indent + increment, increment));
}
if (profileName1 != null) {
sb.append(profileName1.asString(indent + increment, increment));
}
if (profilecName != null) {
sb.append(profilecName.asString(indent + increment, increment));
}
if (parameters != null) {
sb.append(parameters.asString(indent + increment, increment));
}
if (usesItems != null) {
sb.append(usesItems.asString(indent + increment, increment));
}
if (requires != null) {
sb.append(requires.asString(indent + increment, increment));
}
if (constraints != null) {
sb.append(constraints.asString(indent + increment, increment));
}
if (perfInit != null) {
sb.append(perfInit.asString(indent + increment, increment));
}
if (perfFinal != null) {
sb.append(perfFinal.asString(indent + increment, increment));
}
if (facilityInit != null) {
sb.append(facilityInit.asString(indent + increment, increment));
}
if (facilityFinal != null) {
sb.append(facilityFinal.asString(indent + increment, increment));
}
if (decs != null) {
sb.append(decs.asString(indent + increment, increment));
}
return sb.toString();
}
}
| |
/*
* Copyright 2019 Wultra s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.getlime.security.powerauth.biometry.impl.legacy;
import android.content.Context;
import android.content.pm.PackageManager;
import android.hardware.fingerprint.FingerprintManager;
import android.os.Build;
import android.os.CancellationSignal;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.fragment.app.FragmentManager;
import android.util.Pair;
import javax.crypto.Cipher;
import io.getlime.security.powerauth.biometry.BiometricAuthenticationRequest;
import io.getlime.security.powerauth.biometry.BiometricDialogResources;
import io.getlime.security.powerauth.biometry.BiometricKeyData;
import io.getlime.security.powerauth.biometry.BiometricStatus;
import io.getlime.security.powerauth.biometry.BiometryType;
import io.getlime.security.powerauth.biometry.IBiometricKeyEncryptor;
import io.getlime.security.powerauth.biometry.IBiometricKeystore;
import io.getlime.security.powerauth.biometry.impl.BiometricErrorDialogFragment;
import io.getlime.security.powerauth.biometry.impl.BiometricHelper;
import io.getlime.security.powerauth.biometry.impl.BiometricResultDispatcher;
import io.getlime.security.powerauth.biometry.impl.IBiometricAuthenticator;
import io.getlime.security.powerauth.biometry.impl.PrivateRequestData;
import io.getlime.security.powerauth.exception.PowerAuthErrorCodes;
import io.getlime.security.powerauth.exception.PowerAuthErrorException;
import io.getlime.security.powerauth.networking.interfaces.ICancelable;
import io.getlime.security.powerauth.sdk.impl.CancelableTask;
import io.getlime.security.powerauth.system.PA2Log;
/**
 * The {@code FingerprintAuthenticator} class implements {@link IBiometricAuthenticator} interface with using
 * an old {@link FingerprintManager} class. It is used as a fallback on devices where the modern
 * biometric APIs are not available.
 */
@RequiresApi(api = Build.VERSION_CODES.M)
public class FingerprintAuthenticator implements IBiometricAuthenticator {

    // Private properties

    /** Android context used to resolve services and localized strings. */
    private final @NonNull Context context;
    /** Legacy system service providing fingerprint authentication. */
    private final @NonNull FingerprintManager fingerprintManager;
    /** Keystore managing the biometric key. */
    private final @NonNull IBiometricKeystore keystore;

    // NOTE(review): the former 'alreadyProtectedKey' field was never read or
    // written anywhere in this class, so it has been removed as dead state.

    /** Result of the one-shot key processing. Guarded by {@code synchronized (this)}. */
    private BiometricKeyData processedBiometricKeyData;
    /** Guarantees the biometric key material is encrypted/decrypted at most once. */
    private boolean hasAlreadyProcessedBiometricKeyData;

    // Construction

    /**
     * Create instance of {@link FingerprintAuthenticator}.
     *
     * @param context Android {@link Context} object.
     * @param keystore {@link IBiometricKeystore} managing biometric key.
     * @return Instance of {@link FingerprintAuthenticator} or {@code null} in case that fingerprint
     *         authentication is not supported on the system.
     */
    public static @Nullable IBiometricAuthenticator createAuthenticator(@NonNull Context context, @NonNull IBiometricKeystore keystore) {
        if (!keystore.isKeystoreReady()) {
            return null;
        }
        // Acquire FingerprintManager (API level 23 is slightly different than later SDKs)
        // This is inspired by the androidx.biometric fallback implementation.
        final FingerprintManager fingerprintManager;
        if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) {
            // Get service directly
            fingerprintManager = context.getSystemService(FingerprintManager.class);
        } else if (Build.VERSION.SDK_INT > Build.VERSION_CODES.M && context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_FINGERPRINT)) {
            // Get service only when there's FEATURE_FINGERPRINT
            fingerprintManager = context.getSystemService(FingerprintManager.class);
        } else {
            return null;
        }
        if (fingerprintManager == null) {
            return null;
        }
        // If hardware is not detected, then simply return null. The BiometricAuthentication class
        // will then use a dummy authenticator instead.
        if (!fingerprintManager.isHardwareDetected()) {
            return null;
        }
        return new FingerprintAuthenticator(context, keystore, fingerprintManager);
    }

    /**
     * Private constructor for this class.
     *
     * @param context Android {@link Context} object.
     * @param keystore {@link IBiometricKeystore} managing biometric key.
     * @param manager {@link FingerprintManager} providing fingerprint authentication.
     */
    private FingerprintAuthenticator(@NonNull Context context, @NonNull IBiometricKeystore keystore, @NonNull FingerprintManager manager) {
        this.context = context;
        this.fingerprintManager = manager;
        this.keystore = keystore;
    }

    // IBiometricAuthenticator methods

    @Override
    public boolean isAvailable() {
        return keystore.isKeystoreReady();
    }

    @Override
    public @BiometryType int getBiometryType(@NonNull Context context) {
        return fingerprintManager.isHardwareDetected() ? BiometryType.FINGERPRINT : BiometryType.NONE;
    }

    @Override
    public @BiometricStatus int canAuthenticate() {
        if (!isAvailable()) {
            return BiometricStatus.NOT_AVAILABLE;
        }
        if (!fingerprintManager.hasEnrolledFingerprints()) {
            return BiometricStatus.NOT_ENROLLED;
        }
        return BiometricStatus.OK;
    }

    @NonNull
    @Override
    public IBiometricKeystore getBiometricKeystore() {
        return keystore;
    }

    @NonNull
    @Override
    public ICancelable authenticate(@NonNull final Context context,
                                    @NonNull final FragmentManager fragmentManager,
                                    @NonNull final PrivateRequestData requestData) throws PowerAuthErrorException {
        // Get objects from request data
        final BiometricAuthenticationRequest request = requestData.getRequest();
        final BiometricResultDispatcher dispatcher = requestData.getDispatcher();
        // Now construct appropriate cipher with the biometric key, wrapped in the crypto object.
        final FingerprintManager.CryptoObject cryptoObject = wrapCipherToCryptoObject(request.getBiometricKeyEncryptor().initializeCipher(request.isForceGenerateNewKey()));
        if (cryptoObject == null) {
            throw new PowerAuthErrorException(PowerAuthErrorCodes.PA2ErrorCodeBiometryNotSupported, "Cannot create CryptoObject for biometric authentication.");
        }
        final CancellationSignal cancellationSignal = dispatcher.getCancelableTask().getCancellationSignal();
        // Prepare fingerprint dialog fragment
        final boolean shouldDisplayFingerprintDialog = !BiometricHelper.shouldHideFingerprintDialog(context);
        final FingerprintAuthenticationDialogFragment dialogFragment;
        if (shouldDisplayFingerprintDialog) {
            dialogFragment = new FingerprintAuthenticationDialogFragment.Builder(context)
                    .setTitle(request.getTitle())
                    .setDescription(request.getDescription())
                    .setDialogResources(requestData.getResources())
                    .build();
        } else {
            // Dialog fragment should not be displayed, because the device has its own overlay.
            dialogFragment = null;
        }
        final FingerprintAuthenticationHandler handler = new FingerprintAuthenticationHandler(fingerprintManager, cryptoObject, cancellationSignal, dialogFragment, new FingerprintAuthenticationHandler.ResultCallback() {
            @Override
            public void onAuthenticationSuccess(@NonNull FingerprintManager.AuthenticationResult result) {
                final Cipher cipher = result.getCryptoObject() != null ? result.getCryptoObject().getCipher() : null;
                if (cipher != null) {
                    final BiometricKeyData biometricKeyData = encryptOrDecryptRawKeyData(request);
                    if (biometricKeyData != null) {
                        dispatcher.dispatchSuccess(biometricKeyData);
                        return;
                    }
                    PA2Log.e("Failed to encrypt biometric key.");
                } else {
                    PA2Log.e("Failed to get Cipher from CryptoObject.");
                }
                // If the code ends here, it mostly means that the vendor's implementation is quite off the standard.
                // The device reports success, but we're unable to derive our cryptographic key, due to malfunction in cipher
                // or due to fact, that the previously constructed cipher is not available. The right response for this state
                // is to remove the biometric key from the keychain, show an error dialog and then, finally report "not available" state.
                dispatcher.reportBiometricKeyUnavailable();
                dispatcher.dispatchRunnable(new Runnable() {
                    @Override
                    public void run() {
                        final PowerAuthErrorException exception = new PowerAuthErrorException(PowerAuthErrorCodes.PA2ErrorCodeBiometryNotAvailable, "Failed to encrypt biometric key.");
                        showErrorDialogAfterSuccess(fragmentManager, requestData, exception);
                    }
                });
            }

            @Override
            public void onAuthenticationFailure(@NonNull PowerAuthErrorException exception) {
                if (shouldDisplayFingerprintDialog) {
                    // Fingerprint dialog was displayed, so the failure was already presented to the user.
                    // In this case it's enough just to report exception back to the application.
                    dispatcher.dispatchError(exception);
                } else {
                    // Fingerprint dialog was never displayed, so we should present a separate error
                    // dialog now to inform the user about the failure.
                    showErrorDialogFromException(fragmentManager, requestData, exception);
                }
            }

            @Override
            public void onAuthenticationCancel(boolean userCancel) {
                if (userCancel) {
                    dispatcher.dispatchUserCancel();
                }
            }

            @Override
            public @NonNull String getFallbackErrorMessage(int errorCode) {
                if (errorCode == FingerprintManager.FINGERPRINT_ERROR_LOCKOUT || errorCode == FingerprintManager.FINGERPRINT_ERROR_LOCKOUT_PERMANENT) {
                    return context.getString(requestData.getResources().strings.errorCodeLockout);
                }
                return context.getString(requestData.getResources().strings.errorCodeGeneric);
            }
        });
        // Dismiss dialog when external cancel is requested.
        dispatcher.setOnCancelListener(new CancelableTask.OnCancelListener() {
            @Override
            public void onCancel() {
                if (dialogFragment != null) {
                    dialogFragment.dismiss();
                }
            }
        });
        if (shouldDisplayFingerprintDialog) {
            // The dialog presentation is required, so link the dialog with the handler and make it visible.
            dialogFragment.setFingerprintAuthenticationHandler(handler);
            dialogFragment.show(fragmentManager);
        } else {
            // In case that device has its own overlay, then it's enough to just start listening
            // for the fingerprint manager's events.
            handler.startListening();
        }
        return dispatcher.getCancelableTask();
    }

    // Private methods

    /**
     * Wrap {@link Cipher} into {@link FingerprintManager.CryptoObject}.
     *
     * @param cipher A cipher object that must be wrapped.
     * @return {@link FingerprintManager.CryptoObject} created for given cipher, or {@code null}
     *         when the cipher itself is {@code null}.
     */
    private @Nullable FingerprintManager.CryptoObject wrapCipherToCryptoObject(@Nullable Cipher cipher) {
        // Wrap cipher into required crypto object
        return cipher != null ? new FingerprintManager.CryptoObject(cipher) : null;
    }

    /**
     * Encrypt or decrypt raw key data from biometric request. The operation is performed at most
     * once per authenticator instance; subsequent calls return the cached result.
     *
     * @param request Biometric request data.
     * @return Encrypted bytes or {@code null} in case that encryption fails.
     */
    private @Nullable BiometricKeyData encryptOrDecryptRawKeyData(@NonNull BiometricAuthenticationRequest request) {
        synchronized (this) {
            if (!hasAlreadyProcessedBiometricKeyData) {
                hasAlreadyProcessedBiometricKeyData = true;
                // Let's try to encrypt or decrypt the biometric key
                final byte[] rawKeyData = request.getRawKeyData();
                final IBiometricKeyEncryptor encryptor = request.getBiometricKeyEncryptor();
                if (request.isForceGenerateNewKey()) {
                    processedBiometricKeyData = encryptor.encryptBiometricKey(rawKeyData);
                } else {
                    processedBiometricKeyData = encryptor.decryptBiometricKey(rawKeyData);
                }
            }
            return processedBiometricKeyData;
        }
    }

    /**
     * Shows error dialog despite the fact, that biometric authentication succeeded. This might happen
     * in rare cases, when the vendor's implementation is unable to encrypt the provided biometric key.
     *
     * @param fragmentManager Fragment manager.
     * @param requestData Private request data.
     * @param exception Exception to be reported later to the operation's callback.
     */
    private void showErrorDialogAfterSuccess(
            @NonNull final FragmentManager fragmentManager,
            @NonNull final PrivateRequestData requestData,
            @NonNull final PowerAuthErrorException exception) {

        final BiometricResultDispatcher dispatcher = requestData.getDispatcher();
        if (dispatcher.getCancelableTask().isCancelled()) {
            // Do nothing. Looks like the whole operation was canceled from the application.
            return;
        }
        final BiometricDialogResources resources = requestData.getResources();
        final Pair<Integer, Integer> titleDescription = BiometricHelper.getErrorDialogStringsForBiometricStatus(BiometricStatus.NOT_AVAILABLE, resources);

        final BiometricErrorDialogFragment dialogFragment = new BiometricErrorDialogFragment.Builder(context)
                .setTitle(titleDescription.first)
                .setMessage(titleDescription.second)
                .setCloseButton(resources.strings.ok, resources.colors.closeButtonText)
                .setIcon(resources.drawables.errorIcon)
                .setOnCloseListener(new BiometricErrorDialogFragment.OnCloseListener() {
                    @Override
                    public void onClose() {
                        dispatcher.dispatchError(exception);
                    }
                })
                .build();
        // Handle cancel from the application. Note that this overrides the previous cancel listener.
        dispatcher.setOnCancelListener(new CancelableTask.OnCancelListener() {
            @Override
            public void onCancel() {
                dialogFragment.dismiss();
            }
        });
        // Show fragment
        dialogFragment.show(fragmentManager, BiometricErrorDialogFragment.FRAGMENT_DEFAULT_TAG);
    }

    /**
     * Shows error dialog with message from provided exception.
     *
     * @param fragmentManager Fragment manager.
     * @param requestData Private request data.
     * @param exception Exception to be reported later to the operation's callback.
     */
    private void showErrorDialogFromException(
            @NonNull final FragmentManager fragmentManager,
            @NonNull final PrivateRequestData requestData,
            @NonNull final PowerAuthErrorException exception) {

        final BiometricResultDispatcher dispatcher = requestData.getDispatcher();
        if (dispatcher.getCancelableTask().isCancelled()) {
            // Do nothing. Looks like the whole operation was canceled from the application.
            return;
        }
        final BiometricDialogResources resources = requestData.getResources();
        final Pair<Integer, Integer> titleDescription = BiometricHelper.getErrorDialogStringsForBiometricStatus(BiometricStatus.NOT_AVAILABLE, resources);
        // Prefer the exception's own message; fall back to the generic localized description.
        final String errorMessage = exception.getMessage() != null ? exception.getMessage() : context.getString(titleDescription.second);

        final BiometricErrorDialogFragment dialogFragment = new BiometricErrorDialogFragment.Builder(context)
                .setTitle(titleDescription.first)
                .setMessage(errorMessage)
                .setCloseButton(resources.strings.ok, resources.colors.closeButtonText)
                .setIcon(resources.drawables.errorIcon)
                .setOnCloseListener(new BiometricErrorDialogFragment.OnCloseListener() {
                    @Override
                    public void onClose() {
                        dispatcher.dispatchError(exception);
                    }
                })
                .build();
        // Handle cancel from the application. Note that this overrides the previous cancel listener.
        dispatcher.setOnCancelListener(new CancelableTask.OnCancelListener() {
            @Override
            public void onCancel() {
                dialogFragment.dismiss();
            }
        });
        // Show fragment
        dialogFragment.show(fragmentManager, BiometricErrorDialogFragment.FRAGMENT_DEFAULT_TAG);
    }
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.schemaorg.core;
import com.google.common.collect.ImmutableList;
import com.google.schemaorg.JsonLdContext;
import com.google.schemaorg.SchemaOrgType;
import com.google.schemaorg.core.datatype.Date;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.PopularityScoreSpecification;
import javax.annotation.Nullable;
/** Interface of <a href="http://schema.org/Organization">http://schema.org/Organization</a>. */
public interface Organization extends PlaceOrganizationCommon {
/**
 * Builder interface of <a
 * href="http://schema.org/Organization">http://schema.org/Organization</a>.
 */
public interface Builder extends PlaceOrganizationCommon.Builder {
@Override
Builder addJsonLdContext(@Nullable JsonLdContext context);
@Override
Builder addJsonLdContext(@Nullable JsonLdContext.Builder context);
@Override
Builder setJsonLdId(@Nullable String value);
@Override
Builder setJsonLdReverse(String property, Thing obj);
@Override
Builder setJsonLdReverse(String property, Thing.Builder builder);
/** Add a value to property additionalType. */
Builder addAdditionalType(URL value);
/** Add a value to property additionalType. */
Builder addAdditionalType(String value);
/** Add a value to property address. */
Builder addAddress(PostalAddress value);
/** Add a value to property address. */
Builder addAddress(PostalAddress.Builder value);
/** Add a value to property address. */
Builder addAddress(Text value);
/** Add a value to property address. */
Builder addAddress(String value);
/** Add a value to property aggregateRating. */
Builder addAggregateRating(AggregateRating value);
/** Add a value to property aggregateRating. */
Builder addAggregateRating(AggregateRating.Builder value);
/** Add a value to property aggregateRating. */
Builder addAggregateRating(String value);
/** Add a value to property alternateName. */
Builder addAlternateName(Text value);
/** Add a value to property alternateName. */
Builder addAlternateName(String value);
/** Add a value to property alumni. */
Builder addAlumni(Person value);
/** Add a value to property alumni. */
Builder addAlumni(Person.Builder value);
/** Add a value to property alumni. */
Builder addAlumni(String value);
/** Add a value to property areaServed. */
Builder addAreaServed(AdministrativeArea value);
/** Add a value to property areaServed. */
Builder addAreaServed(AdministrativeArea.Builder value);
/** Add a value to property areaServed. */
Builder addAreaServed(GeoShape value);
/** Add a value to property areaServed. */
Builder addAreaServed(GeoShape.Builder value);
/** Add a value to property areaServed. */
Builder addAreaServed(Place value);
/** Add a value to property areaServed. */
Builder addAreaServed(Place.Builder value);
/** Add a value to property areaServed. */
Builder addAreaServed(Text value);
/** Add a value to property areaServed. */
Builder addAreaServed(String value);
/** Add a value to property award. */
Builder addAward(Text value);
/** Add a value to property award. */
Builder addAward(String value);
/** Add a value to property awards. */
Builder addAwards(Text value);
/** Add a value to property awards. */
Builder addAwards(String value);
/** Add a value to property brand. */
Builder addBrand(Brand value);
/** Add a value to property brand. */
Builder addBrand(Brand.Builder value);
/** Add a value to property brand. */
Builder addBrand(Organization value);
/** Add a value to property brand. */
Builder addBrand(Organization.Builder value);
/** Add a value to property brand. */
Builder addBrand(String value);
/** Add a value to property contactPoint. */
Builder addContactPoint(ContactPoint value);
/** Add a value to property contactPoint. */
Builder addContactPoint(ContactPoint.Builder value);
/** Add a value to property contactPoint. */
Builder addContactPoint(String value);
/** Add a value to property contactPoints. */
Builder addContactPoints(ContactPoint value);
/** Add a value to property contactPoints. */
Builder addContactPoints(ContactPoint.Builder value);
/** Add a value to property contactPoints. */
Builder addContactPoints(String value);
/** Add a value to property department. */
Builder addDepartment(Organization value);
/** Add a value to property department. */
Builder addDepartment(Organization.Builder value);
/** Add a value to property department. */
Builder addDepartment(String value);
/** Add a value to property description. */
Builder addDescription(Text value);
/** Add a value to property description. */
Builder addDescription(String value);
/** Add a value to property dissolutionDate. */
Builder addDissolutionDate(Date value);
/** Add a value to property dissolutionDate. */
Builder addDissolutionDate(String value);
/** Add a value to property duns. */
Builder addDuns(Text value);
/** Add a value to property duns. */
Builder addDuns(String value);
/** Add a value to property email. */
Builder addEmail(Text value);
/** Add a value to property email. */
Builder addEmail(String value);
/** Add a value to property employee. */
Builder addEmployee(Person value);
/** Add a value to property employee. */
Builder addEmployee(Person.Builder value);
/** Add a value to property employee. */
Builder addEmployee(String value);
/** Add a value to property employees. */
Builder addEmployees(Person value);
/** Add a value to property employees. */
Builder addEmployees(Person.Builder value);
/** Add a value to property employees. */
Builder addEmployees(String value);
/** Add a value to property event. */
Builder addEvent(Event value);
/** Add a value to property event. */
Builder addEvent(Event.Builder value);
/** Add a value to property event. */
Builder addEvent(String value);
/** Add a value to property events. */
Builder addEvents(Event value);
/** Add a value to property events. */
Builder addEvents(Event.Builder value);
/** Add a value to property events. */
Builder addEvents(String value);
/** Add a value to property faxNumber. */
Builder addFaxNumber(Text value);
/** Add a value to property faxNumber. */
Builder addFaxNumber(String value);
/** Add a value to property founder. */
Builder addFounder(Person value);
/** Add a value to property founder. */
Builder addFounder(Person.Builder value);
/** Add a value to property founder. */
Builder addFounder(String value);
/** Add a value to property founders. */
Builder addFounders(Person value);
/** Add a value to property founders. */
Builder addFounders(Person.Builder value);
/** Add a value to property founders. */
Builder addFounders(String value);
/** Add a value to property foundingDate. */
Builder addFoundingDate(Date value);
/** Add a value to property foundingDate. */
Builder addFoundingDate(String value);
/** Add a value to property foundingLocation. */
Builder addFoundingLocation(Place value);
/** Add a value to property foundingLocation. */
Builder addFoundingLocation(Place.Builder value);
/** Add a value to property foundingLocation. */
Builder addFoundingLocation(String value);
/** Add a value to property globalLocationNumber. */
Builder addGlobalLocationNumber(Text value);
/** Add a value to property globalLocationNumber. */
Builder addGlobalLocationNumber(String value);
/** Add a value to property hasOfferCatalog. */
Builder addHasOfferCatalog(OfferCatalog value);
/** Add a value to property hasOfferCatalog. */
Builder addHasOfferCatalog(OfferCatalog.Builder value);
/** Add a value to property hasOfferCatalog. */
Builder addHasOfferCatalog(String value);
/** Add a value to property hasPOS. */
Builder addHasPOS(Place value);
/** Add a value to property hasPOS. */
Builder addHasPOS(Place.Builder value);
/** Add a value to property hasPOS. */
Builder addHasPOS(String value);
/** Add a value to property image. */
Builder addImage(ImageObject value);
/** Add a value to property image. */
Builder addImage(ImageObject.Builder value);
/** Add a value to property image. */
Builder addImage(URL value);
/** Add a value to property image. */
Builder addImage(String value);
/** Add a value to property isicV4. */
Builder addIsicV4(Text value);
/** Add a value to property isicV4. */
Builder addIsicV4(String value);
/** Add a value to property legalName. */
Builder addLegalName(Text value);
/** Add a value to property legalName. */
Builder addLegalName(String value);
/** Add a value to property location. */
Builder addLocation(Place value);
/** Add a value to property location. */
Builder addLocation(Place.Builder value);
/** Add a value to property location. */
Builder addLocation(PostalAddress value);
/** Add a value to property location. */
Builder addLocation(PostalAddress.Builder value);
/** Add a value to property location. */
Builder addLocation(Text value);
/** Add a value to property location. */
Builder addLocation(String value);
/** Add a value to property logo. */
Builder addLogo(ImageObject value);
/** Add a value to property logo. */
Builder addLogo(ImageObject.Builder value);
/** Add a value to property logo. */
Builder addLogo(URL value);
/** Add a value to property logo. */
Builder addLogo(String value);
/** Add a value to property mainEntityOfPage. */
Builder addMainEntityOfPage(CreativeWork value);
/** Add a value to property mainEntityOfPage. */
Builder addMainEntityOfPage(CreativeWork.Builder value);
/** Add a value to property mainEntityOfPage. */
Builder addMainEntityOfPage(URL value);
/** Add a value to property mainEntityOfPage. */
Builder addMainEntityOfPage(String value);
/** Add a value to property makesOffer. */
Builder addMakesOffer(Offer value);
/** Add a value to property makesOffer. */
Builder addMakesOffer(Offer.Builder value);
/** Add a value to property makesOffer. */
Builder addMakesOffer(String value);
/** Add a value to property member. */
Builder addMember(Organization value);
/** Add a value to property member. */
Builder addMember(Organization.Builder value);
/** Add a value to property member. */
Builder addMember(Person value);
/** Add a value to property member. */
Builder addMember(Person.Builder value);
/** Add a value to property member. */
Builder addMember(String value);
/** Add a value to property memberOf. */
Builder addMemberOf(Organization value);
/** Add a value to property memberOf. */
Builder addMemberOf(Organization.Builder value);
/** Add a value to property memberOf. */
Builder addMemberOf(ProgramMembership value);
/** Add a value to property memberOf. */
Builder addMemberOf(ProgramMembership.Builder value);
/** Add a value to property memberOf. */
Builder addMemberOf(String value);
/** Add a value to property members. */
Builder addMembers(Organization value);
/** Add a value to property members. */
Builder addMembers(Organization.Builder value);
/** Add a value to property members. */
Builder addMembers(Person value);
/** Add a value to property members. */
Builder addMembers(Person.Builder value);
/** Add a value to property members. */
Builder addMembers(String value);
/** Add a value to property naics. */
Builder addNaics(Text value);
/** Add a value to property naics. */
Builder addNaics(String value);
/** Add a value to property name. */
Builder addName(Text value);
/** Add a value to property name. */
Builder addName(String value);
/** Add a value to property numberOfEmployees. */
Builder addNumberOfEmployees(QuantitativeValue value);
/** Add a value to property numberOfEmployees. */
Builder addNumberOfEmployees(QuantitativeValue.Builder value);
/** Add a value to property numberOfEmployees. */
Builder addNumberOfEmployees(String value);
/** Add a value to property owns. */
Builder addOwns(OwnershipInfo value);
/** Add a value to property owns. */
Builder addOwns(OwnershipInfo.Builder value);
/** Add a value to property owns. */
Builder addOwns(Product value);
/** Add a value to property owns. */
Builder addOwns(Product.Builder value);
/** Add a value to property owns. */
Builder addOwns(String value);
/** Add a value to property parentOrganization. */
Builder addParentOrganization(Organization value);
/** Add a value to property parentOrganization. */
Builder addParentOrganization(Organization.Builder value);
/** Add a value to property parentOrganization. */
Builder addParentOrganization(String value);
/** Add a value to property potentialAction. */
Builder addPotentialAction(Action value);
/** Add a value to property potentialAction. */
Builder addPotentialAction(Action.Builder value);
/** Add a value to property potentialAction. */
Builder addPotentialAction(String value);
/** Add a value to property review. */
Builder addReview(Review value);
/** Add a value to property review. */
Builder addReview(Review.Builder value);
/** Add a value to property review. */
Builder addReview(String value);
/** Add a value to property reviews. */
Builder addReviews(Review value);
/** Add a value to property reviews. */
Builder addReviews(Review.Builder value);
/** Add a value to property reviews. */
Builder addReviews(String value);
/** Add a value to property sameAs. */
Builder addSameAs(URL value);
/** Add a value to property sameAs. */
Builder addSameAs(String value);
/** Add a value to property seeks. */
Builder addSeeks(Demand value);
/** Add a value to property seeks. */
Builder addSeeks(Demand.Builder value);
/** Add a value to property seeks. */
Builder addSeeks(String value);
/** Add a value to property serviceArea. */
Builder addServiceArea(AdministrativeArea value);
/** Add a value to property serviceArea. */
Builder addServiceArea(AdministrativeArea.Builder value);
/** Add a value to property serviceArea. */
Builder addServiceArea(GeoShape value);
/** Add a value to property serviceArea. */
Builder addServiceArea(GeoShape.Builder value);
/** Add a value to property serviceArea. */
Builder addServiceArea(Place value);
/** Add a value to property serviceArea. */
Builder addServiceArea(Place.Builder value);
/** Add a value to property serviceArea. */
Builder addServiceArea(String value);
/** Add a value to property subOrganization. */
Builder addSubOrganization(Organization value);
/** Add a value to property subOrganization. */
Builder addSubOrganization(Organization.Builder value);
/** Add a value to property subOrganization. */
Builder addSubOrganization(String value);
/** Add a value to property taxID. */
Builder addTaxID(Text value);
/** Add a value to property taxID. */
Builder addTaxID(String value);
/** Add a value to property telephone. */
Builder addTelephone(Text value);
/** Add a value to property telephone. */
Builder addTelephone(String value);
/** Add a value to property url. */
Builder addUrl(URL value);
/** Add a value to property url. */
Builder addUrl(String value);
/** Add a value to property vatID. */
Builder addVatID(Text value);
/** Add a value to property vatID. */
Builder addVatID(String value);
/** Add a value to property detailedDescription. */
Builder addDetailedDescription(Article value);
/** Add a value to property detailedDescription. */
Builder addDetailedDescription(Article.Builder value);
/** Add a value to property detailedDescription. */
Builder addDetailedDescription(String value);
/** Add a value to property popularityScore. */
Builder addPopularityScore(PopularityScoreSpecification value);
/** Add a value to property popularityScore. */
Builder addPopularityScore(PopularityScoreSpecification.Builder value);
/** Add a value to property popularityScore. */
Builder addPopularityScore(String value);
/**
* Add a value to property.
*
* @param name The property name.
* @param value The value of the property.
*/
Builder addProperty(String name, SchemaOrgType value);
/**
* Add a value to property.
*
* @param name The property name.
* @param builder The schema.org object builder for the property value.
*/
Builder addProperty(String name, Thing.Builder builder);
/**
* Add a value to property.
*
* @param name The property name.
* @param value The string value of the property.
*/
Builder addProperty(String name, String value);
/** Build a {@link Organization} object. */
Organization build();
}
/**
* Returns the value list of property alumni. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getAlumniList();
/**
* Returns the value list of property areaServed. Empty list is returned if the property not set
* in current object.
*/
ImmutableList<SchemaOrgType> getAreaServedList();
/**
* Returns the value list of property award. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getAwardList();
/**
* Returns the value list of property awards. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getAwardsList();
/**
* Returns the value list of property brand. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getBrandList();
/**
* Returns the value list of property contactPoint. Empty list is returned if the property not set
* in current object.
*/
ImmutableList<SchemaOrgType> getContactPointList();
/**
* Returns the value list of property contactPoints. Empty list is returned if the property not
* set in current object.
*/
ImmutableList<SchemaOrgType> getContactPointsList();
/**
* Returns the value list of property department. Empty list is returned if the property not set
* in current object.
*/
ImmutableList<SchemaOrgType> getDepartmentList();
/**
* Returns the value list of property dissolutionDate. Empty list is returned if the property not
* set in current object.
*/
ImmutableList<SchemaOrgType> getDissolutionDateList();
/**
* Returns the value list of property duns. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getDunsList();
/**
* Returns the value list of property email. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getEmailList();
/**
* Returns the value list of property employee. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getEmployeeList();
/**
* Returns the value list of property employees. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getEmployeesList();
/**
* Returns the value list of property founder. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getFounderList();
/**
* Returns the value list of property founders. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getFoundersList();
/**
* Returns the value list of property foundingDate. Empty list is returned if the property not set
* in current object.
*/
ImmutableList<SchemaOrgType> getFoundingDateList();
/**
* Returns the value list of property foundingLocation. Empty list is returned if the property not
* set in current object.
*/
ImmutableList<SchemaOrgType> getFoundingLocationList();
/**
* Returns the value list of property hasOfferCatalog. Empty list is returned if the property not
* set in current object.
*/
ImmutableList<SchemaOrgType> getHasOfferCatalogList();
/**
* Returns the value list of property hasPOS. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getHasPOSList();
/**
* Returns the value list of property legalName. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getLegalNameList();
/**
* Returns the value list of property location. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getLocationList();
/**
* Returns the value list of property makesOffer. Empty list is returned if the property not set
* in current object.
*/
ImmutableList<SchemaOrgType> getMakesOfferList();
/**
* Returns the value list of property member. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getMemberList();
/**
* Returns the value list of property memberOf. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getMemberOfList();
/**
* Returns the value list of property members. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getMembersList();
/**
* Returns the value list of property naics. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getNaicsList();
/**
* Returns the value list of property numberOfEmployees. Empty list is returned if the property
* not set in current object.
*/
ImmutableList<SchemaOrgType> getNumberOfEmployeesList();
/**
* Returns the value list of property owns. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getOwnsList();
/**
* Returns the value list of property parentOrganization. Empty list is returned if the property
* not set in current object.
*/
ImmutableList<SchemaOrgType> getParentOrganizationList();
/**
* Returns the value list of property seeks. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getSeeksList();
/**
* Returns the value list of property serviceArea. Empty list is returned if the property not set
* in current object.
*/
ImmutableList<SchemaOrgType> getServiceAreaList();
/**
* Returns the value list of property subOrganization. Empty list is returned if the property not
* set in current object.
*/
ImmutableList<SchemaOrgType> getSubOrganizationList();
/**
* Returns the value list of property taxID. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getTaxIDList();
/**
* Returns the value list of property vatID. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getVatIDList();
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.kafka.pubsub;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.util.MockFlowFile;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
/**
 * Unit tests for PublisherLease, covering poisoning on read/send failures,
 * demarcated (multi-message) publishing, and zero-byte messages.
 */
public class TestPublisherLease {
    private ComponentLog componentLog;
    private Producer<byte[], byte[]> kafkaProducer;

    @Before
    @SuppressWarnings("unchecked")
    public void setup() {
        componentLog = Mockito.mock(ComponentLog.class);
        kafkaProducer = Mockito.mock(Producer.class);
    }

    @Test
    public void testPoisonOnException() throws IOException {
        final AtomicInteger poisonCalls = new AtomicInteger(0);

        // Subclass the lease so every poison() call is counted.
        final PublisherLease lease = new PublisherLease(kafkaProducer, 1024 * 1024, 1000L, componentLog) {
            @Override
            public void poison() {
                poisonCalls.incrementAndGet();
                super.poison();
            }
        };

        final FlowFile flowFile = Mockito.spy(new MockFlowFile(1L));
        // Need a size greater than zero so the lease actually reads the InputStream.
        Mockito.when(flowFile.getSize()).thenReturn(1L);

        final String topic = "unit-test";
        final byte[] messageKey = null;
        final byte[] demarcatorBytes = null;

        // Stream that fails on the very first read, simulating an I/O error mid-publish.
        final InputStream failureInputStream = new InputStream() {
            @Override
            public int read() throws IOException {
                throw new IOException("Intentional Unit Test Exception");
            }
        };

        try {
            lease.publish(flowFile, failureInputStream, messageKey, demarcatorBytes, topic);
            Assert.fail("Expected IOException");
        } catch (final IOException ioe) {
            // expected
        }

        assertEquals(1, poisonCalls.get());

        final PublishResult result = lease.complete();
        assertTrue(result.getFailedFlowFiles().contains(flowFile));
        assertFalse(result.getSuccessfulFlowFiles().contains(flowFile));
    }

    @Test
    @SuppressWarnings("unchecked")
    public void testPoisonOnFailure() throws IOException {
        final AtomicInteger poisonCalls = new AtomicInteger(0);
        final PublisherLease lease = new PublisherLease(kafkaProducer, 1024 * 1024, 1000L, componentLog) {
            @Override
            public void poison() {
                poisonCalls.incrementAndGet();
                super.poison();
            }
        };

        final FlowFile flowFile = new MockFlowFile(1L);
        final String topic = "unit-test";
        final byte[] messageKey = null;
        final byte[] demarcatorBytes = null;

        // Complete each send's callback with an exception, simulating a broker-side failure.
        doAnswer(invocation -> {
            final Callback callback = invocation.getArgument(1);
            callback.onCompletion(null, new RuntimeException("Unit Test Intentional Exception"));
            return null;
        }).when(kafkaProducer).send(any(ProducerRecord.class), any(Callback.class));

        lease.publish(flowFile, new ByteArrayInputStream(new byte[1]), messageKey, demarcatorBytes, topic);

        assertEquals(1, poisonCalls.get());

        final PublishResult result = lease.complete();
        assertTrue(result.getFailedFlowFiles().contains(flowFile));
        assertFalse(result.getSuccessfulFlowFiles().contains(flowFile));
    }

    @Test
    @SuppressWarnings("unchecked")
    public void testAllDelimitedMessagesSent() throws IOException {
        final AtomicInteger poisonCalls = new AtomicInteger(0);
        final PublisherLease lease = new PublisherLease(kafkaProducer, 1024 * 1024, 10L, componentLog) {
            @Override
            protected void poison() {
                poisonCalls.incrementAndGet();
                super.poison();
            }
        };

        final AtomicInteger correctMessages = new AtomicInteger(0);
        final AtomicInteger incorrectMessages = new AtomicInteger(0);

        // Count each produced record as correct iff its value is exactly "1234567890".
        doAnswer(invocation -> {
            final ProducerRecord<byte[], byte[]> record = invocation.getArgument(0);
            final String valueString = new String(record.value(), StandardCharsets.UTF_8);
            if ("1234567890".equals(valueString)) {
                correctMessages.incrementAndGet();
            } else {
                incorrectMessages.incrementAndGet();
            }
            return null;
        }).when(kafkaProducer).send(any(ProducerRecord.class), any(Callback.class));

        final FlowFile flowFile = new MockFlowFile(1L);
        final String topic = "unit-test";
        final byte[] messageKey = null;
        final byte[] demarcatorBytes = "\n".getBytes(StandardCharsets.UTF_8);

        // Repeated/trailing demarcators must not yield empty messages.
        final byte[] flowFileContent = "1234567890\n1234567890\n1234567890\n\n\n\n1234567890\n\n\n1234567890\n\n\n\n".getBytes(StandardCharsets.UTF_8);
        lease.publish(flowFile, new ByteArrayInputStream(flowFileContent), messageKey, demarcatorBytes, topic);

        // An empty FlowFile produces no messages.
        final byte[] flowFileContent2 = new byte[0];
        lease.publish(new MockFlowFile(2L), new ByteArrayInputStream(flowFileContent2), messageKey, demarcatorBytes, topic);

        final byte[] flowFileContent3 = "1234567890\n1234567890".getBytes(StandardCharsets.UTF_8); // no trailing new line
        lease.publish(new MockFlowFile(3L), new ByteArrayInputStream(flowFileContent3), messageKey, demarcatorBytes, topic);

        // Content that is nothing but demarcators yields no messages.
        final byte[] flowFileContent4 = "\n\n\n".getBytes(StandardCharsets.UTF_8);
        lease.publish(new MockFlowFile(4L), new ByteArrayInputStream(flowFileContent4), messageKey, demarcatorBytes, topic);

        assertEquals(0, poisonCalls.get());

        verify(kafkaProducer, times(0)).flush();

        final PublishResult result = lease.complete();
        assertTrue(result.getFailedFlowFiles().contains(flowFile));
        assertFalse(result.getSuccessfulFlowFiles().contains(flowFile));

        assertEquals(7, correctMessages.get());
        assertEquals(0, incorrectMessages.get());

        verify(kafkaProducer, times(1)).flush();
    }

    @Test
    @SuppressWarnings("unchecked")
    public void testZeroByteMessageSent() throws IOException {
        final AtomicInteger poisonCalls = new AtomicInteger(0);
        final PublisherLease lease = new PublisherLease(kafkaProducer, 1024 * 1024, 10L, componentLog) {
            @Override
            protected void poison() {
                poisonCalls.incrementAndGet();
                super.poison();
            }
        };

        final AtomicInteger correctMessages = new AtomicInteger(0);
        final AtomicInteger incorrectMessages = new AtomicInteger(0);

        // Without a demarcator, a zero-byte FlowFile should be sent as one empty message.
        doAnswer(invocation -> {
            final ProducerRecord<byte[], byte[]> record = invocation.getArgument(0);
            final String valueString = new String(record.value(), StandardCharsets.UTF_8);
            if ("".equals(valueString)) {
                correctMessages.incrementAndGet();
            } else {
                incorrectMessages.incrementAndGet();
            }
            return null;
        }).when(kafkaProducer).send(any(ProducerRecord.class), any(Callback.class));

        final FlowFile flowFile = new MockFlowFile(1L);
        final String topic = "unit-test";
        final byte[] messageKey = null;
        final byte[] demarcatorBytes = null;
        final byte[] flowFileContent = new byte[0];
        lease.publish(flowFile, new ByteArrayInputStream(flowFileContent), messageKey, demarcatorBytes, topic);

        assertEquals(0, poisonCalls.get());

        verify(kafkaProducer, times(0)).flush();

        final PublishResult result = lease.complete();

        assertEquals(1, correctMessages.get());
        assertEquals(0, incorrectMessages.get());

        verify(kafkaProducer, times(1)).flush();
    }
}
| |
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This file has been modified from the original.
*
* The original file can be found at:
* https://code.google.com/p/replicaisland/
*/
package com.replica.core.components;
import com.replica.core.BaseObject;
import com.replica.core.GameObject;
import com.replica.core.GameObject.ActionType;
import com.replica.core.components.SteeringBehavior.Behavior;
import com.replica.utility.DebugSystem;
import com.replica.utility.Vector2;
import com.replica.utility.VectorPool;
public class NPCComponent extends GameComponent {
    private HitReactionComponent mHitReactComponent;
    private int mGameEvent;
    private boolean mSpawnGameEventOnDeath;
    private boolean mReactToHits;
    private float mDeathTime;
    private float mDeathFadeDelay;
    private Vector2 mTarget;
    private SteeringBehavior mSteering;

    /** Seconds a dead NPC remains before the fade-out begins. */
    private static final float DEATH_FADE_DELAY = 4.0f;
    /** Radius within which the NPC considers its target reached. */
    private static final int DEFAULT_ATTACK_DISTANCE = 20;
    /** Attack duration in seconds; not referenced yet (reserved for the ATTACK state). */
    private static final float TIME_ATTACK = 1.0f;

    /** High-level behaviour states for the NPC state machine. */
    public enum State {
        IDLE, WANDER, SEEKING, ATTACK, HIT_REACT, DEAD,
    }

    private State mState;

    public NPCComponent() {
        super();
        setPhase(ComponentPhases.THINK.ordinal());
        reset();
    }

    /** Restores all fields to defaults so the component can be pooled and reused. */
    @Override
    public void reset() {
        mHitReactComponent = null;
        mSteering = null;
        mGameEvent = -1;
        mSpawnGameEventOnDeath = false;
        mReactToHits = false;
        mDeathTime = -1.0f;
        mDeathFadeDelay = DEATH_FADE_DELAY;
        mTarget = null;
    }

    /**
     * Per-frame think step: handles hit-react/death bookkeeping first, then
     * drives the WANDER/IDLE state machine.
     *
     * @param timeDelta seconds elapsed since the previous update
     * @param parent    the owning GameObject
     */
    @Override
    public void update(float timeDelta, BaseObject parent) {
        GameObject parentObject = (GameObject) parent;
        if (mReactToHits && parentObject.getCurrentAction() == ActionType.HIT_REACT) {
            // hit react here
        } else if (parentObject.getCurrentAction() == ActionType.DEATH) {
            // Accumulate time spent dead; -1 marks "just died" this frame.
            mDeathTime = (mDeathTime < 0) ? timeDelta : mDeathTime + timeDelta;
            if (mSpawnGameEventOnDeath && mGameEvent != -1) {
                // spawn the configured game event (fires at most once)
                mSpawnGameEventOnDeath = false;
            }
            // Consistency fix: use the mDeathFadeDelay field (always initialized
            // to DEATH_FADE_DELAY) instead of the raw constant.
            if (mDeathTime <= mDeathFadeDelay) {
                // fade out
            }
            return;
        } else if (parentObject.life <= 0) {
            parentObject.setCurrentAction(ActionType.DEATH);
            parentObject.getVelocity().zero();
            return;
        } else if (parentObject.getCurrentAction() == ActionType.INVALID
                || (!mReactToHits && parentObject.getCurrentAction() == ActionType.HIT_REACT)) {
            parentObject.setCurrentAction(ActionType.MOVE);
        }
        // (Removed a dead, empty "else if (action == MOVE)" branch — no behavior change.)
        if (parentObject.getCurrentAction() == ActionType.MOVE) {
            if (mState != State.WANDER) {
                gotoWander(parentObject);
            }
        }
        switch (mState) {
            case WANDER:
                wander(parentObject);
                break;
            case IDLE:
                idle(parentObject);
                break;
            default:
                break;
        }
    }

    /** Transition to IDLE (placeholder; action/velocity changes remain commented out). */
    private void gotoIdle(GameObject parentObject) {
        mState = State.IDLE;
        // parentObject.setCurrentAction(ActionType.IDLE);
        // parentObject.getVelocity().set(Vector2.ZERO);
    }

    /** IDLE state: if a target appears out of reach, start wandering toward it. */
    private void idle(GameObject parentObject) {
        lookForTarget();
        if (!closeEnough(parentObject, mTarget)) {
            gotoWander(parentObject);
            // Face the target we are about to move toward.
            parentObject.facingDirection.set(mTarget);
            parentObject.facingDirection.subtract(parentObject.getPosition());
            parentObject.facingDirection.normalize();
        }
        // else {
        // mSteering.postCommand(Behavior.WallAvoidance, null);
        // mSteering.postCommand(Behavior.Wander, null);
        // }
    }

    private void gotoWander(GameObject parentObject) {
        parentObject.setCurrentAction(ActionType.MOVE);
        mState = State.WANDER;
    }

    /**
     * WANDER state: seek the target while it is out of reach, otherwise roam.
     * (Renamed from "Wander" to follow lowerCamelCase method naming.)
     */
    private void wander(GameObject parentObject) {
        lookForTarget();
        if (!closeEnough(parentObject, mTarget)) {
            mSteering.postCommand(Behavior.Seek, mTarget);
            mSteering.postCommand(Behavior.WallAvoidance);
        } else {
            mSteering.postCommand(Behavior.WallAvoidance);
            mSteering.postCommand(Behavior.Wander);
        }
        // Face the direction of travel.
        parentObject.facingDirection.set(parentObject.getVelocity());
        parentObject.facingDirection.normalize();
    }

    /**
     * @return true when target is null or lies within DEFAULT_ATTACK_DISTANCE
     *         of the parent's centred position.
     */
    private boolean closeEnough(GameObject parentObject, Vector2 target) {
        // Fixed: null-check the parameter actually used below rather than the
        // mTarget field. All current callers pass mTarget, so behavior is
        // unchanged, but the old check would break silently for other vectors.
        if (target == null) {
            return true;
        }
        final float x1 = parentObject.getCenteredPositionX();
        final float y1 = parentObject.getCenteredPositionY();
        VectorPool pool = BaseObject.sSystemRegistry.vectorPool;
        Vector2 centerPosition = pool.allocate();
        centerPosition.set(x1, y1);
        final float distance = centerPosition.distance(target);
        pool.release(centerPosition);
        // Equivalent to the original "DEFAULT_ATTACK_DISTANCE - distance > 0".
        return distance < DEFAULT_ATTACK_DISTANCE;
    }

    /** Target acquisition stub; mTarget is never set yet, so closeEnough() is always true. */
    private void lookForTarget() {
    }

    public void setSteering(SteeringBehavior steering) {
        mSteering = steering;
    }

    public void setHitReactionComponent(HitReactionComponent hitReact) {
        mHitReactComponent = hitReact;
    }

    /**
     * Configures the game event raised by this NPC.
     *
     * @param event        event id, or -1 for none
     * @param spawnOnDeath whether to fire the event once when the NPC dies
     */
    public void setGameEvent(int event, boolean spawnOnDeath) {
        mGameEvent = event;
        mSpawnGameEventOnDeath = spawnOnDeath;
    }
}
| |
/**
* SubmergedCore 1.0
* Copyright (C) 2014 CodingBadgers <plugins@mcbadgercraft.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package uk.submergedcode.SubmergedCore.module;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.google.common.base.Preconditions;
import net.milkbowl.vault.permission.Permission;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.Validate;
import org.bukkit.ChatColor;
import org.bukkit.OfflinePlayer;
import org.bukkit.Server;
import org.bukkit.command.CommandSender;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.entity.Player;
import org.bukkit.event.Listener;
import org.bukkit.plugin.java.JavaPlugin;
import uk.submergedcode.SubmergedCore.SubmergedCore;
import uk.submergedcode.SubmergedCore.commands.ModuleCommand;
import uk.submergedcode.SubmergedCore.commands.ModuleCommandHandler;
import uk.submergedcode.SubmergedCore.config.ConfigFactory;
import uk.submergedcode.SubmergedCore.config.ConfigFile;
import uk.submergedcode.SubmergedCore.module.loader.LoadState;
import uk.submergedcode.SubmergedCore.module.loader.Loadable;
import uk.submergedcode.SubmergedCore.player.PlayerData;
import uk.submergedcode.SubmergedCore.update.UpdateThread;
import uk.submergedcode.SubmergedCore.update.Updater;
import uk.thecodingbadgers.bDatabaseManager.Database.BukkitDatabase;
/**
* The base Module class any module should extend this, it also provides
* helper methods for the module.
*/
public abstract class Module extends Loadable implements Listener {
protected static BukkitDatabase m_database = null;
private static Permission m_permissions = null;
protected final SubmergedCore m_plugin;
protected File m_configFile = null;
protected FileConfiguration m_config;
private boolean m_debug = false;
private boolean loadedLanguageFile;
private boolean m_enabled;
private List<Class<? extends ConfigFile>> m_configFiles;
private List<Listener> m_listeners = new ArrayList<Listener>();
private ModuleLogger m_log;
private Map<String, String> m_languageMap = new HashMap<String, String>();
private UpdateThread m_updater;
    /**
     * Instantiates a new module with default settings, pulling shared services
     * (database, permissions, debug flag) from the SubmergedCore singleton.
     *
     * NOTE(review): m_database and m_permissions are static fields re-assigned
     * on every module construction; presumably all modules share the same
     * instances, so this is benign — confirm against SubmergedCore's lifecycle.
     */
    public Module() {
        super();
        m_plugin = SubmergedCore.getInstance();
        m_database = SubmergedCore.getBukkitDatabase();
        m_debug = SubmergedCore.getConfigurationManager().isDebugEnabled();
        m_permissions = SubmergedCore.getPermissions();
    }
    /**
     * Gets the plugin that owns this module.
     *
     * @return the owning SubmergedCore plugin instance
     */
    public JavaPlugin getPlugin() {
        return m_plugin;
    }
    /**
     * One-time initialisation: creates this module's logger and invokes
     * {@link #onLoad()}.
     *
     * @throws IllegalStateException if the module has already been initialized
     */
    public final void init() {
        Preconditions.checkState(m_log == null, "Modules already initialized, cannot reinitialize.");
        m_log = new ModuleLogger(this);
        this.onLoad();
    }
    /**
     * Installs the update checker used by {@link #update()}.
     *
     * @param updater the updater implementation to wrap in an UpdateThread
     */
    protected void setUpdater(Updater updater) {
        m_updater = new UpdateThread(updater);
        // NOTE(review): the chained getUpdater().getUpdater() looks odd —
        // confirm the inner call really returns a printable name/identifier.
        log(Level.INFO, "Set new updater to " + m_updater.getUpdater().getUpdater());
    }
    /**
     * Starts an asynchronous update check if an updater has been configured
     * via {@link #setUpdater(Updater)}; otherwise logs and returns.
     */
    public void update() {
        if (m_updater == null) {
            log(Level.INFO, "Updater is null, cannot check for updates");
            return;
        }
        // NOTE(review): if UpdateThread extends java.lang.Thread, calling
        // update() twice would throw IllegalStateException on the second
        // start() — confirm UpdateThread guards against re-start.
        m_updater.start();
    }
    /**
     * The per-player data type this module wants SubmergedCore to manage.
     *
     * @return the PlayerData subclass for this module, or null if the module
     *         stores no per-player data (the default)
     */
    public Class<? extends PlayerData> getPlayerDataClass() {
        return null;
    }
    /**
     * Loads this module's language file ("&lt;name&gt;_&lt;language&gt;.lang")
     * from the module's data folder into the language map.
     *
     * If the file is missing on disk, a bundled default is first extracted
     * from the module jar (when present); otherwise an error is logged and
     * nothing is loaded.
     *
     * File format: a line starting with '#' sets the current key; subsequent
     * non-empty lines not starting with "//" are stored as that key's value.
     * Keys are stored lower-cased.
     */
    protected void loadLanguageFile() {
        File languageFile = new File(getDataFolder() + File.separator + getName() + "_" + SubmergedCore.getConfigurationManager().getLanguage() + ".lang");
        if (!languageFile.exists()) {
            log(Level.SEVERE, "Missing language file '" + languageFile.getAbsolutePath() + "'!");
            boolean foundLangFile = false;
            InputStream stream = null;
            FileOutputStream fstream = null;
            try {
                stream = getClass().getResourceAsStream("/" + languageFile.getName());
                // if default file exists in jar, copy it out to the right
                // directory
                if (stream != null) {
                    fstream = new FileOutputStream(languageFile);
                    foundLangFile = true;
                    IOUtils.copy(stream, fstream);
                }
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                try {
                    if (stream != null) {
                        stream.close();
                    }
                    if (fstream != null) {
                        fstream.close();
                    }
                } catch (IOException ex) {
                    ex.printStackTrace();
                }
            }
            if (foundLangFile) {
                log(Level.INFO, "Copied default language file from jar file");
            } else {
                // No bundled default either: nothing to load.
                return;
            }
        }
        log(Level.INFO, "Loading Language File: " + languageFile.getName());
        FileInputStream fstream = null;
        DataInputStream in = null;
        BufferedReader br = null;
        // NOTE(review): the reader below uses the platform default charset —
        // confirm language files are written in that encoding (UTF-8 would be
        // safer to specify explicitly).
        try {
            fstream = new FileInputStream(languageFile);
            in = new DataInputStream(fstream);
            br = new BufferedReader(new InputStreamReader(in));
            String line = null;
            String key = null;
            while ((line = br.readLine()) != null) {
                // Skip blanks and "//" comment lines.
                if (line.isEmpty() || line.startsWith("//"))
                    continue;
                // A '#' line introduces the key for the following value line(s).
                if (line.startsWith("#")) {
                    key = line.substring(1);
                    continue;
                }
                // Value line seen before any key: malformed file, skip it.
                if (key == null) {
                    log(Level.WARNING, "Trying to parse a language value, with no key set!");
                    continue;
                }
                m_languageMap.put(key.toLowerCase(), line);
            }
            loadedLanguageFile = true;
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                if (fstream != null) {
                    fstream.close();
                }
                if (in != null) {
                    in.close();
                }
                if (br != null) {
                    br.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
/**
* Log a message console via this modules logger.
*
* @param level the Log level
* @param string the message
*/
public void log(Level level, String string) {
m_log.log(Level.INFO, string);
}
    /**
     * Get the logger associated with this module.
     *
     * @return this module's logger (a ModuleLogger exposed as a
     *         java.util.logging.Logger)
     */
    public Logger getLogger() {
        return m_log;
    }
    /**
     * Register a bukkit event listener with the server and track it in this
     * module's listener list.
     *
     * @param listener the bukkit event listener
     */
    public final void register(Listener listener) {
        m_plugin.getServer().getPluginManager().registerEvents(listener, m_plugin);
        m_listeners.add(listener);
    }
    /**
     * Gets the shared Vault permissions instance.
     *
     * @return the vault permissions instance
     */
    public Permission getPermissions() {
        return m_permissions;
    }
    /**
     * The enable method for this module, called on enabling the module via
     * {@link #setEnabled(boolean)} in the {@link LoadState#ENABLE} phase;
     * this is used to register commands, events and any other things that
     * should be registered on enabling the module.
     */
    public abstract void onEnable();
    /**
     * The disable method for this module, called on disabling the module via
     * {@link #setEnabled(boolean)}; this is used to clean up after the module
     * when it is disabled.
     */
    public abstract void onDisable();
    /**
     * The post enable method for this module, called after all modules have
     * been enabled via the {@link ModuleLoader}; this can be used to hook into
     * other modules that have to be loaded before your own and add custom
     * behaviour that does not warrant a dependency.
     */
    public void onPostEnable() {
    }
    /**
     * The load method for this module, called on loading the module via the
     * {@link ModuleLoader} in the {@link LoadState#LOAD} phase; this is called
     * before any module in that load batch is enabled.
     */
    public void onLoad() {
    }
/**
 * Sets the module enabled status. Calls {@link #onEnable()} when transitioning
 * from disabled to enabled, and {@link #onDisable()} (plus command
 * de-registration) when transitioning from enabled to disabled. Requests that
 * match the current state are ignored.
 *
 * @param enabled if you want to enable or disable the module
 */
public void setEnabled(boolean enabled) {
    if (enabled == m_enabled) {
        // Already in the requested state; nothing to do.
        return;
    }
    if (enabled) {
        onEnable();
    } else {
        onDisable();
        ModuleCommandHandler.deregisterCommand(this);
    }
    m_enabled = enabled;
}
/**
 * Reports whether this module is currently enabled.
 *
 * @return {@code true} if the module is enabled, {@code false} otherwise
 */
public boolean isEnabled() {
    return m_enabled;
}
/**
 * The command handling method for this module; this is only called if the
 * command handling for that {@link ModuleCommand} returns false. Preferably
 * {@link ModuleCommand#onCommand(CommandSender, String, String[])} should be
 * used; this is just left for backwards compatibility. Default implementation
 * handles nothing.
 *
 * @param sender the command sender
 * @param label the command label used
 * @param args the arguments for the command
 * @return true, if the command has been handled, false if it hasn't
 */
public boolean onCommand(CommandSender sender, String label, String[] args) {
return false;
}
/**
 * Checks if a player has a specific permission via Vault.
 *
 * @param player the player to check
 * @param node the permission node
 * @return true, if the player has the permission
 */
public static boolean hasPermission(final Player player, final String node) {
    // Return the check directly; the previous if-true/return-false wrapper was redundant.
    return m_permissions.has(player, node);
}
/**
 * Checks if a command sender has a specific permission via Vault.
 *
 * @param sender the sender to check
 * @param node the permission node
 * @return true, if the sender has the permission
 */
public static boolean hasPermission(final CommandSender sender, final String node) {
    // Return the check directly; the previous if-true/return-false wrapper was redundant.
    return m_permissions.has(sender, node);
}
/**
 * Checks if a player, referenced by name, has a specific permission via Vault.
 *
 * @param sender the player name to check
 * @param node the permission node
 * @return true, if the named player has the permission
 */
public static boolean hasPermission(final String sender, final String node) {
    // Null world argument means a global (world-independent) permission check.
    return m_permissions.has((String) null, sender, node);
}
/**
 * Sends a message to a command sender formatted in the default module style
 * (purple bracketed module name followed by the message).
 *
 * @param name the name of the module
 * @param player the sender to send to
 * @param message the message
 */
public static void sendMessage(String name, CommandSender player, String message) {
    final String prefix = ChatColor.DARK_PURPLE + "[" + name + "] " + ChatColor.RESET;
    player.sendMessage(prefix + message);
}
/**
 * Registers a single command with this module through the central
 * {@link ModuleCommandHandler}.
 *
 * @param command the command to register
 */
protected void registerCommand(ModuleCommand command) {
    ModuleCommandHandler.registerCommand(this, command);
}
/**
 * Scans the given class for command definitions and registers every command
 * found with this module.
 *
 * @param clazz the class to scan for commands
 */
protected void registerCommands(Class<?> clazz) {
    ModuleCommandHandler.findCommands(this, clazz);
}
/**
 * Get all commands registered to this module.
 *
 * @return the commands
 * @deprecated use {@link ModuleCommandHandler#getCommands(Module)} directly
 */
@Deprecated
public List<ModuleCommand> getCommands() {
    return ModuleCommandHandler.getCommands(this);
}
/**
 * Gives the language value for the currently loaded language. Lookups are
 * case insensitive; all keys are forced to lower case.
 *
 * @param key the language key
 * @return the language value, if available, the key with hyphens replaced by
 *         spaces and in lower case otherwise
 */
public String getLanguageValue(String key) {
    Validate.notNull(key, "Language key cannot be null");
    if (!loadedLanguageFile) {
        log(Level.SEVERE, "Cannot get language value before loading language file");
    }
    final String lowerKey = key.toLowerCase();
    final String value = m_languageMap.get(lowerKey);
    // Fall back to a readable form of the key when no translation exists.
    return value != null ? value : lowerKey.replace("-", " ");
}
/**
 * Gives every listener registered to this module via
 * {@link #register(Listener)}, used for cleaning up on disable.
 *
 * @return a list of all registered listeners
 */
public List<Listener> getListeners() {
    return m_listeners;
}
/**
 * Reports whether debug mode is enabled on this module.
 *
 * @return {@code true} if debug output is enabled
 */
public boolean isDebug() {
    return m_debug;
}
/**
 * Turns debug mode for this module on or off.
 *
 * @param debug whether debug output should be enabled
 */
public void setDebug(boolean debug) {
    m_debug = debug;
}
/**
 * Outputs a message to console, prefixed with a debug marker, but only when
 * debug mode is switched on for this module.
 *
 * @param message the message to output
 */
public void debugConsole(String message) {
    if (m_debug) {
        log(Level.INFO, "[Debug] " + message);
    }
}
/**
 * Registers a config class as a config and loads it. The class must extend
 * {@link ConfigFile} and each element that is going to be included in the
 * file should be {@code static} and carry an {@link Element} annotation.
 *
 * @param clazz the config class to register and load
 */
public void registerConfig(Class<? extends ConfigFile> clazz) {
    // Lazily create the backing list on first registration.
    if (m_configFiles == null) {
        m_configFiles = new ArrayList<Class<? extends ConfigFile>>();
    }
    log(Level.INFO, "Load config file for " + clazz.getName());
    try {
        ConfigFactory.load(clazz, getDataFolder());
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    // The class is tracked even if loading failed, matching previous behaviour.
    m_configFiles.add(clazz);
}
/**
 * Get a list of players whose name matches a given string. An exact
 * (case-insensitive) match short-circuits and returns only that player;
 * otherwise all players whose names start with the match are returned.
 *
 * @param match The name to match
 * @param onlineOnly Only return players who are currently online
 * @return A list of offline players whose names match the entry string
 */
public List<OfflinePlayer> matchPlayer(String match, boolean onlineOnly) {
    Server server = m_plugin.getServer();
    List<OfflinePlayer> matches = new ArrayList<OfflinePlayer>();
    // Hoist the lower-cased needle out of the loop instead of recomputing per player.
    final String lowerMatch = match.toLowerCase();
    for (OfflinePlayer player : server.getOfflinePlayers()) {
        if (onlineOnly && !player.isOnline()) {
            continue;
        }
        final String playerName = player.getName();
        // Fix: OfflinePlayer#getName() may return null when the name is not
        // cached; previously this caused a NullPointerException.
        if (playerName == null) {
            continue;
        }
        // Exact name, just return this player alone.
        if (playerName.equalsIgnoreCase(match)) {
            matches.clear();
            matches.add(player);
            return matches;
        }
        // Match is a prefix of this player's name; add them to the list.
        if (playerName.toLowerCase().startsWith(lowerMatch)) {
            matches.add(player);
        }
    }
    return matches;
}
}
| |
package sunning.democollection.custom.view.tabview;
import android.content.Context;
import android.content.res.Resources;
import android.util.Log;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.ArrayAdapter;
import android.widget.LinearLayout;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import sunning.democollection.custom.view.tabview.colorizers.TableDataRowColorizer;
import static android.widget.LinearLayout.LayoutParams;
/**
 * The abstract implementation of an adapter used to bring data to a {@link TableView}.
 * Subclasses provide the per-cell views via {@link #getCellView(int, int, ViewGroup)};
 * this base class lays each row out horizontally according to the column weights of
 * the configured {@link TableColumnModel}.
 *
 * @author ISchwarz
 */
public abstract class TableDataAdapter<T> extends ArrayAdapter<T> {

    private static final String LOG_TAG = TableDataAdapter.class.getName();

    // Defines column count and relative column widths (weights).
    private TableColumnModel columnModel;
    // The backing data rows; also handed to the superclass.
    private final List<T> data;
    // Optional row background colorizer; may be unset until the table view assigns one.
    private TableDataRowColorizer<? super T> rowColoriser;

    /**
     * Creates a new TableDataAdapter backed by a mutable copy of the given array.
     *
     * @param context
     *         The context that shall be used.
     * @param data
     *         The data to display.
     */
    public TableDataAdapter(final Context context, final T[] data) {
        this(context, 0, new ArrayList<>(Arrays.asList(data)));
    }

    /**
     * Creates a new TableDataAdapter backed by the given list (not copied).
     *
     * @param context
     *         The context that shall be used.
     * @param data
     *         The data to display.
     */
    public TableDataAdapter(final Context context, final List<T> data) {
        this(context, 0, data);
    }

    /**
     * Creates a new TableDataAdapter. (internally used)
     *
     * @param context
     *         The context that shall be used.
     * @param columnCount
     *         The number of columns.
     * @param data
     *         The data to display.
     */
    protected TableDataAdapter(final Context context, final int columnCount, final List<T> data) {
        this(context, new TableColumnModel(columnCount), data);
    }

    /**
     * Creates a new TableDataAdapter. (internally used)
     *
     * @param context
     *         The context that shall be used.
     * @param columnModel
     *         The column model to be used.
     * @param data
     *         The data to display.
     */
    protected TableDataAdapter(final Context context, final TableColumnModel columnModel, final List<T> data) {
        super(context, -1, data); // -1: no item layout resource; views are built in getView()
        this.columnModel = columnModel;
        this.data = data;
    }

    /**
     * Gives the data object that shall be displayed in the row with the given index.
     *
     * @param rowIndex
     *         The index of the row to get the data for.
     * @return The data that shall be displayed in the row with the given index.
     */
    public T getRowData(final int rowIndex) {
        return getItem(rowIndex);
    }

    /**
     * Gives the data that is set to this adapter.
     *
     * @return The data this adapter is currently working with.
     */
    public List<T> getData() {
        return data;
    }

    /**
     * Gives the {@link Context} of this adapter. (Hint: use this method in the
     * {@code getCellView()}-method to programmatically initialize new views.)
     *
     * @return The {@link Context} of this adapter.
     */
    public Context getContext() {
        return super.getContext();
    }

    /**
     * Gives the {@link LayoutInflater} of this adapter. (Hint: use this method in the
     * {@code getCellView()}-method to inflate xml-layout-files.)
     *
     * @return The {@link LayoutInflater} of the context of this adapter.
     */
    public LayoutInflater getLayoutInflater() {
        return (LayoutInflater) getContext().getSystemService(Context.LAYOUT_INFLATER_SERVICE);
    }

    /**
     * Gives the {@link Resources} of this adapter. (Hint: use this method in the
     * {@code getCellView()}-method to resolve resources.)
     *
     * @return The {@link Resources} of the context of this adapter.
     */
    public Resources getResources() {
        return getContext().getResources();
    }

    /**
     * Method that gives the cell views for the different table cells.
     *
     * @param rowIndex
     *         The index of the row to return the table cell view.
     * @param columnIndex
     *         The index of the column to return the table cell view.
     * @param parentView
     *         The view to which the returned view will be added.
     * @return The created view for the given cell.
     */
    public abstract View getCellView(int rowIndex, int columnIndex, ViewGroup parentView);

    @Override
    public View getView(final int rowIndex, final View convertView, final ViewGroup parent) {
        final LinearLayout rowView = new LinearLayout(getContext());
        final AbsListView.LayoutParams rowLayoutParams = new AbsListView.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT);
        rowView.setLayoutParams(rowLayoutParams);
        rowView.setGravity(Gravity.CENTER_VERTICAL);

        T rowData = null;
        try {
            rowData = getItem(rowIndex);
        } catch (final IndexOutOfBoundsException e) {
            Log.w(LOG_TAG, "No row data available for row with index " + rowIndex + ". " +
                    "Caught Exception: " + e.getMessage());
        }

        // Fix: guard against a missing colorizer; the call was previously
        // unconditional and threw a NullPointerException when none was set.
        if (rowColoriser != null) {
            rowView.setBackgroundColor(rowColoriser.getRowColor(rowIndex, rowData));
        }

        // Fix: avoid an ArithmeticException (division by zero) when no column
        // weights have been configured yet.
        final int columnWeightSum = columnModel.getColumnWeightSum();
        final int widthUnit = columnWeightSum == 0 ? 0 : (parent.getWidth() / columnWeightSum);

        for (int columnIndex = 0; columnIndex < getColumnCount(); columnIndex++) {
            View cellView = getCellView(rowIndex, columnIndex, rowView);
            if (cellView == null) {
                // Subclass returned nothing; use an empty placeholder cell.
                cellView = new TextView(getContext());
            }
            final int width = widthUnit * columnModel.getColumnWeight(columnIndex);
            final LayoutParams cellLayoutParams = new LayoutParams(width, ViewGroup.LayoutParams.WRAP_CONTENT);
            cellLayoutParams.weight = columnModel.getColumnWeight(columnIndex);
            cellView.setLayoutParams(cellLayoutParams);
            rowView.addView(cellView);
        }
        return rowView;
    }

    /**
     * Sets the {@link TableDataRowColorizer} that will be used to colorise the table data rows.
     *
     * @param rowColorizer
     *         The {@link TableDataRowColorizer} that shall be used.
     */
    protected void setRowColoriser(final TableDataRowColorizer<? super T> rowColorizer) {
        this.rowColoriser = rowColorizer;
    }

    /**
     * Sets the {@link TableColumnModel} that will be used to render the table cells.
     *
     * @param columnModel
     *         The {@link TableColumnModel} that should be set.
     */
    protected void setColumnModel(final TableColumnModel columnModel) {
        this.columnModel = columnModel;
    }

    /**
     * Gives the {@link TableColumnModel} that is currently used to render the table headers.
     *
     * @return The current column model.
     */
    protected TableColumnModel getColumnModel() {
        return columnModel;
    }

    /**
     * Sets the column count which is used to render the table headers.
     *
     * @param columnCount
     *         The column count that should be set.
     */
    protected void setColumnCount(final int columnCount) {
        columnModel.setColumnCount(columnCount);
    }

    /**
     * Gives the column count that is currently used to render the table headers.
     *
     * @return The number of columns.
     */
    protected int getColumnCount() {
        return columnModel.getColumnCount();
    }

    /**
     * Sets the column weight (the relative width of a column) of the column at the given index.
     *
     * @param columnIndex
     *         The index of the column to which this weight should be assigned.
     * @param columnWeight
     *         The weight that should be set to the column at the given index.
     */
    protected void setColumnWeight(final int columnIndex, final int columnWeight) {
        columnModel.setColumnWeight(columnIndex, columnWeight);
    }

    /**
     * Gives the column weight (the relative width of a column) of the column at the given index.
     *
     * @param columnIndex
     *         The index of the column to receive the column weight.
     * @return The column weight of the column at the given index.
     */
    protected int getColumnWeight(final int columnIndex) {
        return columnModel.getColumnWeight(columnIndex);
    }

    /**
     * Gives the overall column weight (sum of all column weights).
     *
     * @return The column weight sum.
     */
    protected int getColumnWeightSum() {
        return columnModel.getColumnWeightSum();
    }
}
| |
package com.codahale.metrics.jdbi;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.codahale.metrics.jdbi.strategies.NameStrategies;
import com.codahale.metrics.jdbi.strategies.ShortNameStrategy;
import com.codahale.metrics.jdbi.strategies.SmartNameStrategy;
import com.codahale.metrics.jdbi.strategies.StatementNameStrategy;
import org.junit.Test;
import org.skife.jdbi.v2.StatementContext;
import java.util.concurrent.TimeUnit;
import static com.codahale.metrics.MetricRegistry.name;
import static org.fest.assertions.api.Assertions.assertThat;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
/**
 * Tests that {@link InstrumentedTimingCollector} records timings under the
 * metric names produced by the various {@link StatementNameStrategy}
 * implementations.
 */
public class InstrumentedTimingCollectorTest {

    private final MetricRegistry registry = new MetricRegistry();

    /** Builds a mocked statement context whose raw SQL is {@code "SELECT 1"}. */
    private static StatementContext selectOneContext() {
        final StatementContext ctx = mock(StatementContext.class);
        doReturn("SELECT 1").when(ctx).getRawSql();
        return ctx;
    }

    /**
     * Collects {@code seconds} worth of elapsed time against {@code ctx} and
     * returns the metric name the strategy resolved for it.
     */
    private String collectAndName(final StatementNameStrategy strategy,
                                  final StatementContext ctx,
                                  final long seconds) {
        final InstrumentedTimingCollector collector = new InstrumentedTimingCollector(registry, strategy);
        collector.collect(TimeUnit.SECONDS.toNanos(seconds), ctx);
        return strategy.getStatementName(ctx);
    }

    /** Fetches the timer registered under the given name. */
    private Timer timerFor(final String timerName) {
        return registry.timer(timerName);
    }

    @Test
    public void updatesTimerForSqlObjects() throws Exception {
        final StatementContext ctx = selectOneContext();
        doReturn(getClass()).when(ctx).getSqlObjectType();
        doReturn(getClass().getMethod("updatesTimerForSqlObjects")).when(ctx).getSqlObjectMethod();

        final String timerName = collectAndName(new SmartNameStrategy(), ctx, 1);

        assertThat(timerName)
                .isEqualTo(name(getClass(), "updatesTimerForSqlObjects"));
        assertThat(timerFor(timerName).getSnapshot().getMax())
                .isEqualTo(1000000000);
    }

    @Test
    public void updatesTimerForSqlObjectsWithoutMethod() throws Exception {
        final StatementContext ctx = selectOneContext();
        doReturn(getClass()).when(ctx).getSqlObjectType();

        final String timerName = collectAndName(new SmartNameStrategy(), ctx, 1);

        assertThat(timerName)
                .isEqualTo(name(getClass(), "SELECT 1"));
        assertThat(timerFor(timerName).getSnapshot().getMax())
                .isEqualTo(1000000000);
    }

    @Test
    public void updatesTimerForRawSql() throws Exception {
        final StatementContext ctx = selectOneContext();

        final String timerName = collectAndName(new SmartNameStrategy(), ctx, 2);

        assertThat(timerName)
                .isEqualTo(name("sql", "raw", "SELECT 1"));
        assertThat(timerFor(timerName).getSnapshot().getMax())
                .isEqualTo(2000000000);
    }

    @Test
    public void updatesTimerForNoRawSql() throws Exception {
        // No raw SQL stubbed at all: the strategy should fall back to "sql.empty".
        final StatementContext ctx = mock(StatementContext.class);

        final String timerName = collectAndName(new SmartNameStrategy(), ctx, 2);

        assertThat(timerName)
                .isEqualTo(name("sql", "empty"));
        assertThat(timerFor(timerName).getSnapshot().getMax())
                .isEqualTo(2000000000);
    }

    @Test
    public void updatesTimerForNonSqlishRawSql() throws Exception {
        final StatementContext ctx = mock(StatementContext.class);
        doReturn("don't know what it is but it's not SQL").when(ctx).getRawSql();

        final String timerName = collectAndName(new SmartNameStrategy(), ctx, 3);

        assertThat(timerName)
                .isEqualTo(name("sql", "raw", "don't know what it is but it's not SQL"));
        assertThat(timerFor(timerName).getSnapshot().getMax())
                .isEqualTo(3000000000L);
    }

    @Test
    public void updatesTimerForContextClass() throws Exception {
        final StatementContext ctx = selectOneContext();
        doReturn(getClass().getName()).when(ctx).getAttribute(NameStrategies.STATEMENT_CLASS);
        doReturn("updatesTimerForContextClass").when(ctx)
                .getAttribute(NameStrategies.STATEMENT_NAME);

        final String timerName = collectAndName(new SmartNameStrategy(), ctx, 3);

        assertThat(timerName)
                .isEqualTo(name(getClass(), "updatesTimerForContextClass"));
        assertThat(timerFor(timerName).getSnapshot().getMax())
                .isEqualTo(3000000000L);
    }

    @Test
    public void updatesTimerForTemplateFile() throws Exception {
        final StatementContext ctx = selectOneContext();
        doReturn("foo/bar.stg").when(ctx).getAttribute(NameStrategies.STATEMENT_GROUP);
        doReturn("updatesTimerForTemplateFile").when(ctx)
                .getAttribute(NameStrategies.STATEMENT_NAME);

        final String timerName = collectAndName(new SmartNameStrategy(), ctx, 4);

        assertThat(timerName)
                .isEqualTo(name("foo", "bar", "updatesTimerForTemplateFile"));
        assertThat(timerFor(timerName).getSnapshot().getMax())
                .isEqualTo(4000000000L);
    }

    @Test
    public void updatesTimerForContextGroupAndName() throws Exception {
        final StatementContext ctx = selectOneContext();
        doReturn("my-group").when(ctx).getAttribute(NameStrategies.STATEMENT_GROUP);
        doReturn("updatesTimerForContextGroupAndName").when(ctx)
                .getAttribute(NameStrategies.STATEMENT_NAME);

        final String timerName = collectAndName(new SmartNameStrategy(), ctx, 4);

        assertThat(timerName)
                .isEqualTo(name("my-group", "updatesTimerForContextGroupAndName", ""));
        assertThat(timerFor(timerName).getSnapshot().getMax())
                .isEqualTo(4000000000L);
    }

    @Test
    public void updatesTimerForContextGroupTypeAndName() throws Exception {
        final StatementContext ctx = selectOneContext();
        doReturn("my-group").when(ctx).getAttribute(NameStrategies.STATEMENT_GROUP);
        doReturn("my-type").when(ctx).getAttribute(NameStrategies.STATEMENT_TYPE);
        doReturn("updatesTimerForContextGroupTypeAndName").when(ctx)
                .getAttribute(NameStrategies.STATEMENT_NAME);

        final String timerName = collectAndName(new SmartNameStrategy(), ctx, 5);

        assertThat(timerName)
                .isEqualTo(name("my-group", "my-type", "updatesTimerForContextGroupTypeAndName"));
        assertThat(timerFor(timerName).getSnapshot().getMax())
                .isEqualTo(5000000000L);
    }

    @Test
    public void updatesTimerForShortSqlObjectStrategy() throws Exception {
        final StatementContext ctx = selectOneContext();
        doReturn(getClass()).when(ctx).getSqlObjectType();
        doReturn(getClass().getMethod("updatesTimerForShortSqlObjectStrategy")).when(ctx)
                .getSqlObjectMethod();

        final String timerName = collectAndName(new ShortNameStrategy("jdbi"), ctx, 1);

        assertThat(timerName)
                .isEqualTo(name("jdbi",
                                getClass().getSimpleName(),
                                "updatesTimerForShortSqlObjectStrategy"));
        assertThat(timerFor(timerName).getSnapshot().getMax())
                .isEqualTo(1000000000);
    }

    @Test
    public void updatesTimerForShortContextClassStrategy() throws Exception {
        final StatementContext ctx = selectOneContext();
        doReturn(getClass().getName()).when(ctx).getAttribute(NameStrategies.STATEMENT_CLASS);
        doReturn("updatesTimerForShortContextClassStrategy").when(ctx)
                .getAttribute(NameStrategies.STATEMENT_NAME);

        final String timerName = collectAndName(new ShortNameStrategy("jdbi"), ctx, 3);

        assertThat(timerName)
                .isEqualTo(name("jdbi",
                                getClass().getSimpleName(),
                                "updatesTimerForShortContextClassStrategy"));
        assertThat(timerFor(timerName).getSnapshot().getMax())
                .isEqualTo(3000000000L);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.get;
import org.apache.lucene.index.Term;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.lucene.uid.VersionsResolver.DocIdAndVersion;
import org.elasticsearch.common.metrics.CounterMetric;
import org.elasticsearch.common.metrics.MeanMetric;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParentFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.ParentFieldSubFetchPhase;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
public final class ShardGetService extends AbstractIndexShardComponent {
private final MapperService mapperService;
private final MeanMetric existsMetric = new MeanMetric();
private final MeanMetric missingMetric = new MeanMetric();
private final CounterMetric currentMetric = new CounterMetric();
private final IndexShard indexShard;
public ShardGetService(IndexSettings indexSettings, IndexShard indexShard,
MapperService mapperService) {
super(indexShard.shardId(), indexSettings);
this.mapperService = mapperService;
this.indexShard = indexShard;
}
public GetStats stats() {
return new GetStats(existsMetric.count(), TimeUnit.NANOSECONDS.toMillis(existsMetric.sum()), missingMetric.count(), TimeUnit.NANOSECONDS.toMillis(missingMetric.sum()), currentMetric.count());
}
public GetResult get(String type, String id, String[] gFields, boolean realtime, long version, VersionType versionType, FetchSourceContext fetchSourceContext) {
currentMetric.inc();
try {
long now = System.nanoTime();
GetResult getResult = innerGet(type, id, gFields, realtime, version, versionType, fetchSourceContext);
if (getResult.isExists()) {
existsMetric.inc(System.nanoTime() - now);
} else {
missingMetric.inc(System.nanoTime() - now);
}
return getResult;
} finally {
currentMetric.dec();
}
}
/**
* Returns {@link GetResult} based on the specified {@link org.elasticsearch.index.engine.Engine.GetResult} argument.
* This method basically loads specified fields for the associated document in the engineGetResult.
* This method load the fields from the Lucene index and not from transaction log and therefore isn't realtime.
* <p>
* Note: Call <b>must</b> release engine searcher associated with engineGetResult!
*/
public GetResult get(Engine.GetResult engineGetResult, String id, String type, String[] fields, FetchSourceContext fetchSourceContext) {
if (!engineGetResult.exists()) {
return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null);
}
currentMetric.inc();
try {
long now = System.nanoTime();
fetchSourceContext = normalizeFetchSourceContent(fetchSourceContext, fields);
GetResult getResult = innerGetLoadFromStoredFields(type, id, fields, fetchSourceContext, engineGetResult, mapperService);
if (getResult.isExists()) {
existsMetric.inc(System.nanoTime() - now);
} else {
missingMetric.inc(System.nanoTime() - now); // This shouldn't happen...
}
return getResult;
} finally {
currentMetric.dec();
}
}
/**
* decides what needs to be done based on the request input and always returns a valid non-null FetchSourceContext
*/
private FetchSourceContext normalizeFetchSourceContent(@Nullable FetchSourceContext context, @Nullable String[] gFields) {
if (context != null) {
return context;
}
if (gFields == null) {
return FetchSourceContext.FETCH_SOURCE;
}
for (String field : gFields) {
if (SourceFieldMapper.NAME.equals(field)) {
return FetchSourceContext.FETCH_SOURCE;
}
}
return FetchSourceContext.DO_NOT_FETCH_SOURCE;
}
private GetResult innerGet(String type, String id, String[] gFields, boolean realtime, long version, VersionType versionType, FetchSourceContext fetchSourceContext) {
fetchSourceContext = normalizeFetchSourceContent(fetchSourceContext, gFields);
Engine.GetResult get = null;
if (type == null || type.equals("_all")) {
for (String typeX : mapperService.types()) {
get = indexShard.get(new Engine.Get(realtime, new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(typeX, id)))
.version(version).versionType(versionType));
if (get.exists()) {
type = typeX;
break;
} else {
get.release();
}
}
if (get == null) {
return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null);
}
if (!get.exists()) {
// no need to release here as well..., we release in the for loop for non exists
return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null);
}
} else {
get = indexShard.get(new Engine.Get(realtime, new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(type, id)))
.version(version).versionType(versionType));
if (!get.exists()) {
get.release();
return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null);
}
}
try {
// break between having loaded it from translog (so we only have _source), and having a document to load
return innerGetLoadFromStoredFields(type, id, gFields, fetchSourceContext, get, mapperService);
} finally {
get.release();
}
}
private GetResult innerGetLoadFromStoredFields(String type, String id, String[] gFields, FetchSourceContext fetchSourceContext, Engine.GetResult get, MapperService mapperService) {
Map<String, GetField> fields = null;
BytesReference source = null;
DocIdAndVersion docIdAndVersion = get.docIdAndVersion();
FieldsVisitor fieldVisitor = buildFieldsVisitors(gFields, fetchSourceContext);
if (fieldVisitor != null) {
try {
docIdAndVersion.context.reader().document(docIdAndVersion.docId, fieldVisitor);
} catch (IOException e) {
throw new ElasticsearchException("Failed to get type [" + type + "] and id [" + id + "]", e);
}
source = fieldVisitor.source();
if (!fieldVisitor.fields().isEmpty()) {
fieldVisitor.postProcess(mapperService);
fields = new HashMap<>(fieldVisitor.fields().size());
for (Map.Entry<String, List<Object>> entry : fieldVisitor.fields().entrySet()) {
fields.put(entry.getKey(), new GetField(entry.getKey(), entry.getValue()));
}
}
}
DocumentMapper docMapper = mapperService.documentMapper(type);
if (docMapper.parentFieldMapper().active()) {
String parentId = ParentFieldSubFetchPhase.getParentId(docMapper.parentFieldMapper(), docIdAndVersion.context.reader(), docIdAndVersion.docId);
if (fields == null) {
fields = new HashMap<>(1);
}
fields.put(ParentFieldMapper.NAME, new GetField(ParentFieldMapper.NAME, Collections.singletonList(parentId)));
}
if (gFields != null && gFields.length > 0) {
for (String field : gFields) {
FieldMapper fieldMapper = docMapper.mappers().smartNameFieldMapper(field);
if (fieldMapper == null) {
if (docMapper.objectMappers().get(field) != null) {
// Only fail if we know it is a object field, missing paths / fields shouldn't fail.
throw new IllegalArgumentException("field [" + field + "] isn't a leaf field");
}
}
}
}
if (!fetchSourceContext.fetchSource()) {
source = null;
} else if (fetchSourceContext.includes().length > 0 || fetchSourceContext.excludes().length > 0) {
Map<String, Object> sourceAsMap;
XContentType sourceContentType = null;
// TODO: The source might parsed and available in the sourceLookup but that one uses unordered maps so different. Do we care?
Tuple<XContentType, Map<String, Object>> typeMapTuple = XContentHelper.convertToMap(source, true);
sourceContentType = typeMapTuple.v1();
sourceAsMap = typeMapTuple.v2();
sourceAsMap = XContentMapValues.filter(sourceAsMap, fetchSourceContext.includes(), fetchSourceContext.excludes());
try {
source = XContentFactory.contentBuilder(sourceContentType).map(sourceAsMap).bytes();
} catch (IOException e) {
throw new ElasticsearchException("Failed to get type [" + type + "] and id [" + id + "] with includes/excludes set", e);
}
}
return new GetResult(shardId.getIndexName(), type, id, get.version(), get.exists(), source, fields);
}
/**
 * Selects the stored-fields visitor for a get request.
 * With explicit field names a CustomFieldsVisitor restricted to those names is returned;
 * otherwise a plain FieldsVisitor (when _source is requested) or null (nothing to load).
 */
private static FieldsVisitor buildFieldsVisitors(String[] fields, FetchSourceContext fetchSourceContext) {
    boolean loadSource = fetchSourceContext.fetchSource();
    if (fields != null && fields.length > 0) {
        return new CustomFieldsVisitor(Sets.newHashSet(fields), loadSource);
    }
    return loadSource ? new FieldsVisitor(true) : null;
}
}
| |
package bboss.org.jgroups.blocks;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import bboss.org.jgroups.Address;
import bboss.org.jgroups.JChannel;
import bboss.org.jgroups.MembershipListener;
import bboss.org.jgroups.View;
import bboss.org.jgroups.annotations.Experimental;
import bboss.org.jgroups.annotations.ManagedAttribute;
import bboss.org.jgroups.annotations.ManagedOperation;
import bboss.org.jgroups.annotations.Unsupported;
import bboss.org.jgroups.logging.Log;
import bboss.org.jgroups.logging.LogFactory;
import bboss.org.jgroups.util.Rsp;
import bboss.org.jgroups.util.RspList;
import bboss.org.jgroups.util.TimeScheduler;
import bboss.org.jgroups.util.Util;
/**
* Cache which allows for replication factors <em>per data items</em>; the factor determines how many replicas
* of a key/value we create across the cluster.<br/>
* See doc/design/ReplCache.txt for details.
* @author Bela Ban
* @version $Id: ReplCache.java,v 1.27 2009/12/30 12:45:59 belaban Exp $
*/
@Experimental @Unsupported
public class ReplCache<K,V> implements MembershipListener, Cache.ChangeListener {
/** The cache in which all entries are located. The value is a tuple, consisting of the replication count and the
 * actual value */
private Cache<K,Value<V>> l2_cache=new Cache<K, Value<V>>();
/** The local bounded cache, to speed up access to frequently accessed entries. Can be disabled or enabled */
private Cache<K,V> l1_cache=null;
private static final Log log=LogFactory.getLog(ReplCache.class);
// Cluster plumbing: channel, local address, current view and RPC dispatcher (all assigned in start())
private JChannel ch=null;
private Address local_addr=null;
private View view;
private RpcDispatcher disp=null;
/** JGroups protocol stack configuration used to create the channel */
@ManagedAttribute(writable=true)
private String props="udp.xml";
@ManagedAttribute(writable=true)
private String cluster_name="ReplCache-Cluster";
/** Timeout (ms) for synchronous remote method calls */
@ManagedAttribute(writable=true)
private long call_timeout=1000L;
@ManagedAttribute(writable=true)
private long caching_time=30000L; // in milliseconds. -1 means don't cache, 0 means cache forever (or until changed)
@ManagedAttribute
private short default_replication_count=1; // no replication by default
// Maps keys to owner nodes; created from hash_function_factory in start() if not set explicitly
private HashFunction<K> hash_function=null;
private HashFunctionFactory<K> hash_function_factory=new HashFunctionFactory<K>() {
    public HashFunction<K> create() {
        return new ConsistentHashFunction<K>();
    }
};
private Set<MembershipListener> membership_listeners=new HashSet<MembershipListener>();
private Set<ChangeListener> change_listeners=new HashSet<ChangeListener>();
/** On a view change, if a member P1 detects that for any given key K, P1 is not the owner of K, then
 * it will compute the new owner P2 and transfer ownership for all Ks for which P2 is the new owner. P1
 * will then also evict those keys from its L2 cache */
@ManagedAttribute(writable=true)
private boolean migrate_data=true;
// IDs of the cluster RPCs; resolved to reflective Method objects via the map below and the
// MethodLookup installed in start()
private static final short PUT = 1;
private static final short PUT_FORCE = 2;
private static final short GET = 3;
private static final short REMOVE = 4;
private static final short REMOVE_MANY = 5;
protected static Map<Short, Method> methods=new ConcurrentHashMap<Short,Method>(8);
private TimeScheduler timer;
// Registers the Method object for each RPC id once, at class-load time
static {
    try {
        methods.put(PUT, ReplCache.class.getMethod("_put",
                                                   Object.class,
                                                   Object.class,
                                                   short.class,
                                                   long.class));
        methods.put(PUT_FORCE, ReplCache.class.getMethod("_put",
                                                         Object.class,
                                                         Object.class,
                                                         short.class,
                                                         long.class, boolean.class));
        methods.put(GET, ReplCache.class.getMethod("_get",
                                                   Object.class));
        methods.put(REMOVE, ReplCache.class.getMethod("_remove", Object.class));
        methods.put(REMOVE_MANY, ReplCache.class.getMethod("_removeMany", Set.class));
    }
    catch(NoSuchMethodException e) {
        throw new RuntimeException(e);
    }
}
public interface HashFunction<K> {
    /**
     * Function that, given a key and a replication count, returns replication_count number of <em>different</em>
     * addresses of nodes.
     * @param key the key to map (used through its hash code)
     * @param replication_count number of distinct node addresses requested
     * @return a list of replication_count different node addresses; may be shorter when the cluster is smaller
     */
    List<Address> hash(K key, short replication_count);
    /**
     * When the topology changes, this method will be called. Implementations will typically cache the node list
     * @param nodes the current cluster membership
     */
    void installNodes(List<Address> nodes);
}
public interface HashFunctionFactory<K> {
    HashFunction<K> create();
}
/**
 * Creates a ReplCache; the channel is only created and connected in start().
 * @param props JGroups protocol stack configuration
 * @param cluster_name name of the cluster to join
 */
public ReplCache(String props, String cluster_name) {
    this.props=props;
    this.cluster_name=cluster_name;
}
// ---------- simple accessors for configuration and JMX attributes ----------
public String getProps() {
    return props;
}
public void setProps(String props) {
    this.props=props;
}
public Address getLocalAddress() {
    return local_addr;
}
@ManagedAttribute
public String getLocalAddressAsString() {
    return local_addr != null? local_addr.toString() : "null";
}
@ManagedAttribute
public String getView() {
    return view != null? view.toString() : "null";
}
@ManagedAttribute
public int getClusterSize() {
    return view != null? view.size() : 0;
}
@ManagedAttribute
public boolean isL1CacheEnabled() {
    return l1_cache != null;
}
public String getClusterName() {
    return cluster_name;
}
public void setClusterName(String cluster_name) {
    this.cluster_name=cluster_name;
}
public long getCallTimeout() {
    return call_timeout;
}
public void setCallTimeout(long call_timeout) {
    this.call_timeout=call_timeout;
}
public long getCachingTime() {
    return caching_time;
}
public void setCachingTime(long caching_time) {
    this.caching_time=caching_time;
}
public boolean isMigrateData() {
    return migrate_data;
}
public void setMigrateData(boolean migrate_data) {
    this.migrate_data=migrate_data;
}
public short getDefaultReplicationCount() {
    return default_replication_count;
}
public void setDefaultReplicationCount(short default_replication_count) {
    this.default_replication_count=default_replication_count;
}
public HashFunction getHashFunction() {
    return hash_function;
}
public void setHashFunction(HashFunction<K> hash_function) {
    this.hash_function=hash_function;
}
public HashFunctionFactory getHashFunctionFactory() {
    return hash_function_factory;
}
public void setHashFunctionFactory(HashFunctionFactory<K> hash_function_factory) {
    this.hash_function_factory=hash_function_factory;
}
public void addMembershipListener(MembershipListener l) {
    membership_listeners.add(l);
}
public void removeMembershipListener(MembershipListener l) {
    membership_listeners.remove(l);
}
public void addChangeListener(ChangeListener l) {
    change_listeners.add(l);
}
public void removeChangeListener(ChangeListener l) {
    change_listeners.remove(l);
}
public Cache<K,V> getL1Cache() {
    return l1_cache;
}
/** Installs a new L1 (near) cache, stopping any previously installed one. Pass null to disable the L1 cache. */
public void setL1Cache(Cache<K,V> cache) {
    if(l1_cache != null)
        l1_cache.stop();
    l1_cache=cache;
}
public Cache<K,Value<V>> getL2Cache() {
    return l2_cache;
}
/** Replaces the main (L2) cache; a null argument is ignored.
 * NOTE(review): unlike setL1Cache(), the old L2 cache is stopped only when a non-null replacement
 * is supplied — presumably because the L2 cache must always exist; confirm. */
public void setL2Cache(Cache<K,Value<V>> cache) {
    if(cache != null) {
        l2_cache.stop();
        l2_cache=cache;
    }
}
/**
 * Creates and connects the channel, sets up the RPC dispatcher (custom marshaller plus the
 * method lookup over the static RPC-id map) and registers this instance as change listener
 * of the L2 cache.
 * @throws Exception if channel creation or connect fails
 */
@ManagedOperation
public void start() throws Exception {
    if(hash_function_factory != null) {
        hash_function=hash_function_factory.create();
    }
    if(hash_function == null)
        hash_function=new ConsistentHashFunction<K>();
    ch=new JChannel(props);
    disp=new RpcDispatcher(ch, null, this, this);
    RpcDispatcher.Marshaller marshaller=new CustomMarshaller();
    disp.setRequestMarshaller(marshaller);
    disp.setResponseMarshaller(marshaller);
    disp.setMethodLookup(new MethodLookup() {
        public Method findMethod(short id) {
            return methods.get(id);
        }
    });
    ch.connect(cluster_name);
    local_addr=ch.getAddress();
    view=ch.getView();
    timer=ch.getProtocolStack().getTransport().getTimer();
    l2_cache.addChangeListener(this);
}
/**
 * Stops the cache. When migrate_data is enabled, every key with replication count 1 that this
 * node would no longer own (computed against the view without this member) is handed to its new
 * owner via a forced put before the channel is closed.
 */
@ManagedOperation
public void stop() {
    if(l1_cache != null)
        l1_cache.stop();
    if(migrate_data) {
        // compute ownership as if we had already left the cluster
        List<Address> members_without_me=new ArrayList<Address>(view.getMembers());
        members_without_me.remove(local_addr);
        HashFunction<K> tmp_hash_function=hash_function_factory.create();
        tmp_hash_function.installNodes(members_without_me);
        for(Map.Entry<K,Cache.Value<Value<V>>> entry: l2_cache.entrySet()) {
            K key=entry.getKey();
            Cache.Value<Value<V>> val=entry.getValue();
            if(val == null)
                continue;
            Value<V> tmp=val.getValue();
            if(tmp == null)
                continue;
            short repl_count=tmp.getReplicationCount();
            if(repl_count != 1) // we only handle keys which are not replicated and which are stored by us
                continue;
            List<Address> nodes=tmp_hash_function.hash(key, repl_count);
            if(nodes == null || nodes.isEmpty())
                continue;
            if(!nodes.contains(local_addr)) {
                Address dest=nodes.get(0); // should only have 1 element anyway
                // NOTE(review): _remove() mutates l2_cache while its entrySet() is iterated;
                // presumably the backing map is concurrent — confirm in Cache
                move(dest, key, tmp.getVal(), repl_count, val.getTimeout(), true);
                _remove(key);
            }
        }
    }
    l2_cache.removeChangeListener(this);
    l2_cache.stop();
    disp.stop();
    ch.close();
}
/**
 * Places a key/value pair into one or several nodes in the cluster.
 * @param key The key, needs to be serializable
 * @param val The value, needs to be serializable
 * @param repl_count Number of replicas. The total number of times a data item should be present in a cluster.
 * Needs to be > 0
 * <ul>
 * <li>-1: create key/val in all the nodes in the cluster
 * <li>1: create key/val only in one node in the cluster, picked by computing the consistent hash of KEY
 * <li>K > 1: create key/val in those nodes in the cluster which match the consistent hashes created for KEY
 * </ul>
 * @param timeout Expiration time for key/value.
 * <ul>
 * <li>-1: don't cache at all in the L1 cache
 * <li>0: cache forever, until removed or evicted because we need space for newer elements
 * <li>> 0: number of milliseconds to keep an idle element in the cache. An element is idle when not accessed.
 * </ul>
 * @param synchronous Whether or not to block until all cluster nodes have applied the change
 */
@ManagedOperation
public void put(K key, V val, short repl_count, long timeout, boolean synchronous) {
    if(repl_count == 0) {
        if(log.isWarnEnabled())
            log.warn("repl_count of 0 is invalid, data will not be stored in the cluster");
        return;
    }
    mcastPut(key, val, repl_count, timeout, synchronous);
    // also populate the local L1 cache, unless L1 caching is disabled for this entry (timeout < 0)
    if(l1_cache != null && timeout >= 0)
        l1_cache.put(key, val, timeout);
}
/**
 * Places a key/value pair into one or several nodes in the cluster.
 * @param key The key, needs to be serializable
 * @param val The value, needs to be serializable
 * @param repl_count Number of replicas. The total number of times a data item should be present in a cluster.
 * Needs to be > 0
 * <ul>
 * <li>-1: create key/val in all the nodes in the cluster
 * <li>1: create key/val only in one node in the cluster, picked by computing the consistent hash of KEY
 * <li>K > 1: create key/val in those nodes in the cluster which match the consistent hashes created for KEY
 * </ul>
 * @param timeout Expiration time for key/value.
 * <ul>
 * <li>-1: don't cache at all in the L1 cache
 * <li>0: cache forever, until removed or evicted because we need space for newer elements
 * <li>> 0: number of milliseconds to keep an idle element in the cache. An element is idle when not accessed.
 * </ul>
 */
@ManagedOperation
public void put(K key, V val, short repl_count, long timeout) {
    put(key, val, repl_count, timeout, false); // don't block (asynchronous put) by default
}
/** Convenience variant using the configured default replication count and caching time. */
@ManagedOperation
public void put(K key, V val) {
    put(key, val, default_replication_count, caching_time);
}
/**
 * Returns the value associated with key
 * @param key The key, has to be serializable
 * @return The value associated with key, or null
 */
@ManagedOperation
public V get(K key) {
    // 1. Try the L1 cache first
    if(l1_cache != null) {
        V val=l1_cache.get(key);
        if(val != null) {
            if(log.isTraceEnabled())
                log.trace("returned value " + val + " for " + key + " from L1 cache");
            return val;
        }
    }
    // 2. Try the local cache
    Cache.Value<Value<V>> val=l2_cache.getEntry(key);
    Value<V> tmp;
    if(val != null) {
        tmp=val.getValue();
        if(tmp !=null) {
            V real_value=tmp.getVal();
            // populate the L1 cache on a hit, unless L1 caching is disabled for this entry (timeout < 0)
            if(real_value != null && l1_cache != null && val.getTimeout() >= 0)
                l1_cache.put(key, real_value, val.getTimeout());
            return tmp.getVal();
        }
    }
    // 3. Execute a cluster wide GET
    try {
        RspList rsps=disp.callRemoteMethods(null,
                                            new MethodCall(GET, new Object[]{key}),
                                            GroupRequest.GET_ALL,
                                            call_timeout);
        // return the first usable (non-null, non-exception) response
        for(Rsp rsp: rsps.values()) {
            Object obj=rsp.getValue();
            if(obj == null || obj instanceof Throwable)
                continue;
            val=(Cache.Value<Value<V>>)rsp.getValue(); // unchecked, but responses are produced by _get()
            if(val != null) {
                tmp=val.getValue();
                if(tmp != null) {
                    V real_value=tmp.getVal();
                    if(real_value != null && l1_cache != null && val.getTimeout() >= 0)
                        l1_cache.put(key, real_value, val.getTimeout());
                    return real_value;
                }
            }
        }
        return null;
    }
    catch(Throwable t) {
        if(log.isWarnEnabled())
            log.warn("get() failed", t);
        return null;
    }
}
/**
 * Removes key in all nodes in the cluster, both from their local hashmaps and L1 caches
 * @param key The key, needs to be serializable
 */
@ManagedOperation
public void remove(K key) {
    remove(key, false); // by default we use asynchronous removals
}
/**
 * Removes key in all nodes in the cluster, both from their local hashmaps and L1 caches
 * @param key The key, needs to be serializable
 * @param synchronous whether to block until all members have processed the removal
 */
@ManagedOperation
public void remove(K key, boolean synchronous) {
    try {
        disp.callRemoteMethods(null, new MethodCall(REMOVE, new Object[]{key}),
                               synchronous? GroupRequest.GET_ALL : GroupRequest.GET_NONE, call_timeout);
        if(l1_cache != null)
            l1_cache.remove(key);
    }
    catch(Throwable t) {
        if(log.isWarnEnabled())
            log.warn("remove() failed", t);
    }
}
/**
 * Removes all keys and values in the L2 and L1 caches
 */
@ManagedOperation
public void clear() {
    // snapshot of the local key set; the cluster-wide REMOVE_MANY clears both caches on every member
    Set<K> keys=new HashSet<K>(l2_cache.getInternalMap().keySet());
    mcastClear(keys, false);
}
/** Handler for the PUT RPC: delegates to the 5-arg variant with acceptance checking enabled. */
public V _put(K key, V val, short repl_count, long timeout) {
    return _put(key, val, repl_count, timeout, false);
}
/**
 * Handler for the PUT / PUT_FORCE RPCs: stores the key/value locally if this node is one of the owners.
 * @param key the key to store
 * @param val the value to store
 * @param repl_count replication count stored alongside the value (-1 == store everywhere)
 * @param timeout expiration time for the entry, in ms
 * @param force Skips acceptance checking and simply adds the key/value
 * @return the previous value, or null if there was none (or the key was not accepted locally)
 */
public V _put(K key, V val, short repl_count, long timeout, boolean force) {
    if(!force) {
        // check if we need to host the data
        boolean accept=repl_count == -1;
        if(!accept) {
            if(view != null && repl_count >= view.size()) {
                accept=true;
            }
            else {
                // accept only if the hash function selects this node as one of the owners
                List<Address> selected_hosts=hash_function != null? hash_function.hash(key, repl_count) : null;
                if(selected_hosts != null) {
                    if(log.isTraceEnabled())
                        log.trace("local=" + local_addr + ", hosts=" + selected_hosts);
                    for(Address addr: selected_hosts) {
                        if(addr.equals(local_addr)) {
                            accept=true;
                            break;
                        }
                    }
                }
                if(!accept)
                    return null;
            }
        }
    }
    if(log.isTraceEnabled())
        log.trace("_put(" + key + ", " + val + ", " + repl_count + ", " + timeout + ")");
    Value<V> value=new Value<V>(val, repl_count);
    Value<V> retval=l2_cache.put(key, value, timeout);
    if(l1_cache != null) // invalidate a possibly stale L1 entry
        l1_cache.remove(key);
    notifyChangeListeners();
    return retval != null? retval.getVal() : null;
}
/** Handler for the GET RPC: returns the local L2 entry (including timeout metadata), or null. */
public Cache.Value<Value<V>> _get(K key) {
    if(log.isTraceEnabled())
        log.trace("_get(" + key + ")");
    return l2_cache.getEntry(key);
}
/** Handler for the REMOVE RPC: removes the key from both the L2 and L1 caches. */
public V _remove(K key) {
    if(log.isTraceEnabled())
        log.trace("_remove(" + key + ")");
    Value<V> retval=l2_cache.remove(key);
    if(l1_cache != null)
        l1_cache.remove(key);
    notifyChangeListeners();
    return retval != null? retval.getVal() : null;
}
/** Handler for the REMOVE_MANY RPC: removes each of the given keys locally. */
public void _removeMany(Set<K> keys) {
    if(log.isTraceEnabled())
        log.trace("_removeMany(): " + keys.size() + " entries");
    for(K key: keys)
        _remove(key);
}
/**
 * MembershipListener callback: installs the new membership into the hash function, notifies the
 * registered membership listeners, and — when a previous view exists — schedules a rebalancing
 * run shortly afterwards.
 */
public void viewAccepted(final View new_view) {
    final List<Address> old_nodes=this.view != null? new ArrayList<Address>(this.view.getMembers()) : null;
    this.view=new_view;
    if(log.isInfoEnabled())
        log.info("new view: " + new_view);
    if(hash_function != null)
        hash_function.installNodes(new_view.getMembers());
    for(MembershipListener l: membership_listeners)
        l.viewAccepted(new_view);
    if(old_nodes != null) {
        // run rebalance() off the callback thread, after a short delay
        timer.schedule(new Runnable() {
            public void run() {
                rebalance(old_nodes, new ArrayList<Address>(new_view.getMembers()));
            }
        }, 100, TimeUnit.MILLISECONDS);
    }
}
public void suspect(Address suspected_mbr) {
}
public void block() {
}
/** Cache.ChangeListener callback from the L2 cache: forwards to our own change listeners. */
public void changed() {
    notifyChangeListeners();
}
/**
 * Returns a short summary with the entry counts of the L1 (if enabled) and L2 caches.
 */
public String toString() {
    StringBuilder sb=new StringBuilder();
    // chained append() calls instead of string concatenation inside append(), which would
    // allocate intermediate strings and defeat the purpose of the builder
    if(l1_cache != null)
        sb.append("L1 cache: ").append(l1_cache.getSize()).append(" entries");
    // output kept byte-identical to the previous implementation, including the trailing "entries()"
    sb.append("\nL2 cache: ").append(l2_cache.getSize()).append(" entries()");
    return sb.toString();
}
/** Dumps the full contents of the L1 (if enabled) and L2 caches. */
@ManagedOperation
public String dump() {
    StringBuilder sb=new StringBuilder();
    if(l1_cache != null) {
        sb.append("L1 cache:\n").append(l1_cache.dump());
    }
    sb.append("\nL2 cache:\n").append(l2_cache.dump());
    return sb.toString();
}
/** Notifies all registered ChangeListeners; a failing listener is logged and doesn't affect the others. */
private void notifyChangeListeners() {
    for(ChangeListener l: change_listeners) {
        try {
            l.changed();
        }
        catch(Throwable t) {
            if(log.isErrorEnabled())
                log.error("failed notifying change listener", t);
        }
    }
}
/**
 * Re-distributes entries after a membership change: for every local key, compares the owner set
 * under the old and the new hash function and pushes/evicts entries whose ownership changed.
 */
private void rebalance(List<Address> old_nodes, List<Address> new_nodes) {
    HashFunction<K> old_func=hash_function_factory.create();
    old_func.installNodes(old_nodes);
    HashFunction<K> new_func=hash_function_factory.create();
    new_func.installNodes(new_nodes);
    boolean is_coord=Util.isCoordinator(ch);
    List<K> keys=new ArrayList<K>(l2_cache.getInternalMap().keySet());
    for(K key: keys) {
        Cache.Value<Value<V>> val=l2_cache.getEntry(key);
        if(log.isTraceEnabled())
            log.trace("==== rebalancing " + key);
        if(val == null) {
            if(log.isWarnEnabled())
                log.warn(key + " has no value associated; ignoring");
            continue;
        }
        Value<V> tmp=val.getValue();
        if(tmp == null) {
            if(log.isWarnEnabled())
                log.warn(key + " has no value associated; ignoring");
            continue;
        }
        V real_value=tmp.getVal();
        short repl_count=tmp.getReplicationCount();
        List<Address> new_mbrs=Util.newMembers(old_nodes, new_nodes);
        if(repl_count == -1) {
            // replicate-everywhere entry: only the coordinator pushes it to the joiners
            if(is_coord) {
                for(Address new_mbr: new_mbrs) {
                    move(new_mbr, key, real_value, repl_count, val.getTimeout(), false);
                }
            }
        }
        else if(repl_count == 1) {
            // single-owner entry: hand it to its new owner and drop the local copy
            List<Address> tmp_nodes=new_func.hash(key, repl_count);
            if(!tmp_nodes.isEmpty()) {
                Address mbr=tmp_nodes.get(0);
                if(!mbr.equals(local_addr)) {
                    move(mbr, key, real_value, repl_count, val.getTimeout(), false);
                    _remove(key);
                }
            }
        }
        else if(repl_count > 1) {
            List<Address> tmp_old=old_func.hash(key, repl_count);
            List<Address> tmp_new=new_func.hash(key, repl_count);
            if(log.isTraceEnabled())
                log.trace("old nodes: " + tmp_old + "\nnew nodes: " + tmp_new);
            if(tmp_old != null && tmp_new != null && tmp_old.equals(tmp_new))
                continue;
            // owner set changed: re-put cluster-wide, then evict locally if we are no longer an owner
            mcastPut(key, real_value, repl_count, val.getTimeout(), false);
            if(tmp_new != null && !tmp_new.contains(local_addr)) {
                _remove(key);
            }
        }
        else {
            throw new IllegalStateException("replication count is invalid (" + repl_count + ")");
        }
    }
}
/** Re-publishes all replicated entries (repl_count > 1) to the cluster, e.g. to repair replicas. */
public void mcastEntries() {
    for(Map.Entry<K,Cache.Value<Value<V>>> entry: l2_cache.entrySet()) {
        K key=entry.getKey();
        Cache.Value<Value<V>> val=entry.getValue();
        if(val == null) {
            if(log.isWarnEnabled())
                log.warn(key + " has no value associated; ignoring");
            continue;
        }
        Value<V> tmp=val.getValue();
        if(tmp == null) {
            if(log.isWarnEnabled())
                log.warn(key + " has no value associated; ignoring");
            continue;
        }
        V real_value=tmp.getVal();
        short repl_count=tmp.getReplicationCount();
        if(repl_count > 1) {
            // NOTE(review): _remove() mutates l2_cache during entrySet() iteration; presumably the
            // backing map is concurrent — confirm in Cache
            _remove(key);
            mcastPut(key, real_value, repl_count, val.getTimeout(), false);
        }
    }
}
/** Multicasts a PUT(key, val, repl_count, caching_time) to all members; failures are only logged. */
private void mcastPut(K key, V val, short repl_count, long caching_time, boolean synchronous) {
    int response_mode=synchronous? GroupRequest.GET_ALL : GroupRequest.GET_NONE;
    try {
        Object[] args={key, val, repl_count, caching_time};
        disp.callRemoteMethods(null, new MethodCall(PUT, args), response_mode, call_timeout);
    }
    catch(Throwable ex) {
        if(log.isWarnEnabled())
            log.warn("put() failed", ex);
    }
}
/** Multicasts a REMOVE_MANY(keys) to all members; failures are only logged. */
private void mcastClear(Set<K> keys, boolean synchronous) {
    int response_mode=synchronous? GroupRequest.GET_ALL : GroupRequest.GET_NONE;
    try {
        MethodCall call=new MethodCall(REMOVE_MANY, new Object[]{keys});
        disp.callRemoteMethods(null, call, response_mode, call_timeout);
    }
    catch(Throwable ex) {
        if(log.isWarnEnabled())
            log.warn("clear() failed", ex);
    }
}
/** Transfers a single key/value to dest via a forced PUT (skips dest's acceptance check); failures are only logged. */
private void move(Address dest, K key, V val, short repl_count, long caching_time, boolean synchronous) {
    int response_mode=synchronous? GroupRequest.GET_ALL : GroupRequest.GET_NONE;
    try {
        Object[] args={key, val, repl_count, caching_time, true};
        disp.callRemoteMethod(dest, new MethodCall(PUT_FORCE, args), response_mode, call_timeout);
    }
    catch(Throwable ex) {
        if(log.isWarnEnabled())
            log.warn("move() failed", ex);
    }
}
/** Callback invoked whenever the contents of the L2 cache change. */
public static interface ChangeListener {
    void changed();
}
/**
 * Default HashFunction: consistent hashing over a fixed ring of HASH_SPACE positions. Every node
 * occupies exactly one position (assigned in installNodes()); a key maps to the first
 * replication_count <em>distinct</em> nodes at or after its ring position, wrapping around.
 */
public static class ConsistentHashFunction<K> implements HashFunction<K> {
    private SortedMap<Short,Address> nodes=new TreeMap<Short,Address>();
    private final static int HASH_SPACE=2000; // must be > max number of nodes in a cluster
    private final static int FACTOR=3737; // to better spread the node out across the space
    /**
     * Returns up to replication_count distinct node addresses for the given key.
     * @param key the key to map (via its hash code)
     * @param replication_count number of distinct addresses wanted
     * @return the owner addresses; fewer when the cluster has fewer nodes
     */
    public List<Address> hash(K key, short replication_count) {
        // abs applied AFTER the modulo: Math.abs(Integer.MIN_VALUE) overflows and stays negative,
        // which would previously have produced a negative ring index
        int index=Math.abs(key.hashCode() % HASH_SPACE);
        Set<Address> results=new LinkedHashSet<Address>();
        SortedMap<Short, Address> tailmap=nodes.tailMap((short)index);
        for(Address val: tailmap.values()) {
            results.add(val);
            if(results.size() >= replication_count)
                break;
        }
        // wrap around to the start of the ring; counting results.size() (a set) instead of a raw
        // counter ensures re-encountered addresses from the tailmap pass are not counted twice
        if(results.size() < replication_count) {
            for(Address val: nodes.values()) {
                results.add(val);
                if(results.size() >= replication_count)
                    break;
            }
        }
        return new ArrayList<Address>(results);
    }
    /** Rebuilds the ring: each node takes its hashed slot, linearly probing to the next free one. */
    public void installNodes(List<Address> new_nodes) {
        nodes.clear();
        for(Address node: new_nodes) {
            // abs after the modulo for the same MIN_VALUE overflow reason as in hash()
            int hash=Math.abs((node.hashCode() * FACTOR) % HASH_SPACE);
            for(int i=hash; i < hash + HASH_SPACE; i++) {
                short new_index=(short)(i % HASH_SPACE);
                if(!nodes.containsKey(new_index)) {
                    nodes.put(new_index, node);
                    break;
                }
            }
        }
        if(log.isTraceEnabled()) {
            StringBuilder sb=new StringBuilder("node mappings:\n");
            for(Map.Entry<Short,Address> entry: nodes.entrySet()) {
                sb.append(entry.getKey()).append(": ").append(entry.getValue()).append("\n");
            }
            log.trace(sb);
        }
    }
}
/**
* Uses arrays to store hash values of addresses, plus addresses.
*/
/* public static class ArrayBasedConsistentHashFunction<K> extends MembershipListenerAdapter implements HashFunction<K> {
Object[] nodes=null;
private final static int HASH_SPACE=2000; // must be > max number of nodes in a cluster
public Address hash(K key, List<Address> members) {
int hash=Math.abs(key.hashCode());
int index=hash % HASH_SPACE;
if(members != null && !members.isEmpty()) {
Object[] tmp=new Object[nodes.length];
System.arraycopy(nodes, 0, tmp, 0, nodes.length);
for(int i=0; i < tmp.length; i+=2) {
if(!members.contains(tmp[i+1])) {
tmp[i]=tmp[i+1]=null;
}
}
return findFirst(tmp, index);
}
return findFirst(nodes, index);
}
public void viewAccepted(View new_view) {
nodes=new Object[new_view.size() * 2];
int index=0;
for(Address node: new_view.getMembers()) {
int hash=Math.abs(node.hashCode()) % HASH_SPACE;
nodes[index++]=hash;
nodes[index++]=node;
}
if(log.isTraceEnabled()) {
StringBuilder sb=new StringBuilder("node mappings:\n");
for(int i=0; i < nodes.length; i+=2) {
sb.append(nodes[i] + ": " + nodes[i+1]).append("\n");
}
log.trace(sb);
}
}
public void suspect(Address suspected_mbr) {
}
public void block() {
}
private static Address findFirst(Object[] array, int index) {
Address retval=null;
if(array == null)
return null;
for(int i=0; i < array.length; i+=2) {
if(array[i] == null)
continue;
if(array[i+1] != null)
retval=(Address)array[i+1];
if(((Integer)array[i]) >= index)
return (Address)array[i+1];
}
return retval;
}
}*/
/**
 * Pair of a value and its replication count; this is what is actually stored in the L2 cache.
 * Serializable because instances travel across the cluster in PUT/PUT_FORCE calls.
 */
public static class Value<V> implements Serializable {
    private final V val;
    private final short replication_count;
    private static final long serialVersionUID=-2892941069742740027L;
    public Value(V val, short replication_count) {
        this.val=val;
        this.replication_count=replication_count;
    }
    public V getVal() {
        return val;
    }
    public short getReplicationCount() {
        return replication_count;
    }
    public String toString() {
        return val + " (" + replication_count + ")";
    }
}
/**
 * (Un)marshaller for RPC requests and responses. Wire format: a one-byte type tag (NULL, OBJ,
 * METHOD_CALL or VALUE) followed by the tag-specific payload. All cluster members must agree on
 * this format, so it must not be changed incompatibly.
 */
private static class CustomMarshaller implements RpcDispatcher.Marshaller {
    static final byte NULL = 0;
    static final byte OBJ = 1;
    static final byte METHOD_CALL = 2;
    static final byte VALUE = 3;
    public byte[] objectToByteBuffer(Object obj) throws Exception {
        ByteArrayOutputStream out_stream=new ByteArrayOutputStream(35);
        DataOutputStream out=new DataOutputStream(out_stream);
        try {
            if(obj == null) {
                out_stream.write(NULL);
                out_stream.flush();
                return out_stream.toByteArray();
            }
            if(obj instanceof MethodCall) {
                // METHOD_CALL: short id, short arg count, then each arg via Util.objectToStream()
                out.writeByte(METHOD_CALL);
                MethodCall call=(MethodCall)obj;
                out.writeShort(call.getId());
                Object[] args=call.getArgs();
                if(args == null || args.length == 0) {
                    out.writeShort(0);
                }
                else {
                    out.writeShort(args.length);
                    for(int i=0; i < args.length; i++) {
                        Util.objectToStream(args[i], out);
                    }
                }
            }
            else if(obj instanceof Cache.Value) {
                // VALUE: long timeout, then the wrapped value
                Cache.Value value=(Cache.Value)obj;
                out.writeByte(VALUE);
                out.writeLong(value.getTimeout());
                Util.objectToStream(value.getValue(), out);
            }
            else {
                out.writeByte(OBJ);
                Util.objectToStream(obj, out);
            }
            out.flush();
            return out_stream.toByteArray();
        }
        finally {
            Util.close(out);
        }
    }
    public Object objectFromByteBuffer(byte[] buf) throws Exception {
        if(buf == null)
            return null;
        DataInputStream in=new DataInputStream(new ByteArrayInputStream(buf));
        byte type=in.readByte();
        if(type == NULL)
            return null;
        if(type == METHOD_CALL) {
            short id=in.readShort();
            short length=in.readShort();
            Object[] args=length > 0? new Object[length] : null;
            if(args != null) {
                for(int i=0; i < args.length; i++)
                    args[i]=Util.objectFromStream(in);
            }
            return new MethodCall(id, args);
        }
        else if(type == VALUE) {
            long expiration_time=in.readLong();
            Object obj=Util.objectFromStream(in);
            return new Cache.Value(obj, expiration_time);
        }
        else
            return Util.objectFromStream(in);
    }
}
}
| |
/*
* Copyright (C) 2015 Giuseppe Cardone <ippatsuman@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.gcardone.junidecode;
/**
* Character map for Unicode characters with codepoint U+26xx.
* @author Giuseppe Cardone
* @version 0.1
*/
class X26 {

    /**
     * Transliteration map for U+26xx: the index is the low byte of the codepoint.
     * The table is piecewise constant, so it is built programmatically instead of
     * being spelled out as a 256-element literal: every slot is the empty string
     * except 0x14-0x18 and 0x72-0xFF, which map to "[?]".
     */
    public static final String[] map = buildMap();

    private static String[] buildMap() {
        String[] m = new String[256];
        java.util.Arrays.fill(m, "");
        java.util.Arrays.fill(m, 0x14, 0x19, "[?]");  // U+2614 .. U+2618
        java.util.Arrays.fill(m, 0x72, 0x100, "[?]"); // U+2672 .. U+26FF
        return m;
    }
}
| |
package com.autodesk.forge.forgeviewer_android_sample;
import android.*;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.view.Menu;
import android.view.MenuItem;
import android.app.ProgressDialog;
import android.os.Environment;
import android.app.Dialog;
import android.app.AlertDialog;
import android.app.AlertDialog.Builder;
import android.content.DialogInterface;
import android.content.pm.*;
import android.support.v4.content.*;
import android.support.v4.app.ActivityCompat;
import android.content.Intent;
import android.net.Uri;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import com.autodesk.client.ApiException;
import com.autodesk.client.ApiResponse;
import com.autodesk.client.api.BucketsApi;
import com.autodesk.client.api.DerivativesApi;
import com.autodesk.client.api.ObjectsApi;
import com.autodesk.client.auth.Credentials;
import com.autodesk.client.auth.OAuth2TwoLegged;
import com.autodesk.client.model.*;
import com.autodesk.client.model.Manifest;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
//import javax.ws.rs.core.UriBuilder;
//import java.awt.*;
import java.io.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Entry activity of the Forge Viewer Android sample.
 *
 * <p>Each button drives one step of the Autodesk Forge workflow: obtain a
 * 2-legged OAuth token, create a bucket, browse local storage for a model
 * file, upload it, post a translation job, fetch the thumbnail, and finally
 * open the translated model in the hosted web viewer. The network work is
 * done by the Async* tasks; this class only wires up UI events.
 */
public class MainActivity extends AppCompatActivity {

    // Buttons for each step of the workflow, bound in onCreate().
    private Button btn_get_token;
    private Button btn_create_bucket;
    private Button btn_browser_model;
    private Button btn_upload_model;
    private Button btn_post_job;
    private Button btn_show_thumbnail;
    private Button btn_display_model;

    // Names of candidate model files found on external storage (see loadFileList()).
    private String[] mFileList;
    // File name picked in the chooser dialog; null/empty until the user picks one.
    private String mChosenFile;
    // Dialog id for the file chooser built by myFileDialog().
    private static final int DIALOG_LOAD_FILE = 1000;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Step 1: fetch a 2-legged OAuth token.
        btn_get_token = (Button) findViewById(R.id.btnGetToken);
        btn_get_token.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                try {
                    ProgressDialog progress = new ProgressDialog(MainActivity.this);
                    AsyncGetToken task_gettoken = new AsyncGetToken(progress, MainActivity.this);
                    task_gettoken.execute();
                }
                catch (Exception ex) {
                    Toast.makeText(
                            getApplicationContext(),
                            ex.toString(),
                            Toast.LENGTH_LONG).show();
                }
            }
        });

        // Step 2: create an OSS bucket to hold the model.
        btn_create_bucket = (Button) findViewById(R.id.btnCreateBucket);
        btn_create_bucket.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                try {
                    ProgressDialog progress = new ProgressDialog(MainActivity.this);
                    AsyncCreateBucket task_createtoken = new AsyncCreateBucket(progress, MainActivity.this);
                    task_createtoken.execute();
                }
                catch (Exception ex) {
                    Toast.makeText(
                            getApplicationContext(),
                            ex.toString(),
                            Toast.LENGTH_LONG).show();
                }
            }
        });

        // Step 3: let the user pick a model file from local storage.
        btn_browser_model = (Button) findViewById(R.id.btnBrowserModel);
        btn_browser_model.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                loadFileList();
                myFileDialog(DIALOG_LOAD_FILE).show();
            }
        });

        // Step 4: upload the chosen model to the bucket.
        btn_upload_model = (Button) findViewById(R.id.btnUploadModel);
        btn_upload_model.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                // FIX: compare string content, not references. The original
                // `mChosenFile == ""` is always false for a runtime-built string.
                if (mChosenFile == null || mChosenFile.isEmpty())
                    return;
                ProgressDialog progress = new ProgressDialog(MainActivity.this);
                AsyncUpload task_upload = new AsyncUpload(progress, MainActivity.this);
                task_upload.execute();
            }
        });

        // Step 5: post a translation job for the uploaded model.
        btn_post_job = (Button) findViewById(R.id.btnPostJob);
        btn_post_job.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                ProgressDialog progress = new ProgressDialog(MainActivity.this);
                AsyncPostJob task_post_job = new AsyncPostJob(progress, MainActivity.this);
                task_post_job.execute();
            }
        });

        // Step 6: fetch and show the translated model's thumbnail.
        btn_show_thumbnail = (Button) findViewById(R.id.btnShowthumbnail);
        btn_show_thumbnail.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                ProgressDialog progress = new ProgressDialog(MainActivity.this);
                AsyncThumbnail task_thumbnail = new AsyncThumbnail(progress, MainActivity.this);
                task_thumbnail.execute();
            }
        });

        // Step 7: open the hosted viewer in a browser with the token and URN
        // currently displayed in the UI.
        btn_display_model = (Button) findViewById(R.id.btndisplaymodel);
        btn_display_model.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                String viewUrl = "https://models.autodesk.io/view.html?";
                TextView txtViewToken = (TextView) findViewById(R.id.textViewToken);
                TextView txtViewUrn = (TextView) findViewById(R.id.textViewUrn);
                viewUrl = viewUrl + "token=" + txtViewToken.getText().toString();
                viewUrl = viewUrl + "&urn=" + txtViewUrn.getText().toString();
                //start the browser activity
                Intent viewModelIntent = new
                        Intent("android.intent.action.VIEW", Uri.parse(viewUrl));
                startActivity(viewModelIntent);
            }
        });
    }

    /**
     * Refreshes {@link #mFileList} with the entries found under the device's
     * DCIM directory; falls back to an empty array when the directory does
     * not exist.
     */
    private void loadFileList() {
        File mPath = new File(Environment.getExternalStorageDirectory() + "/DCIM/");
        try {
            mPath.mkdirs();
        } catch (SecurityException e) {
            // Best effort: without write permission we may still be able to list.
        }
        if (mPath.exists()) {
            // Accept every entry; plug a real filter in here if needed.
            FilenameFilter filter = new FilenameFilter() {
                @Override
                public boolean accept(File dir, String filename) {
                    return true;
                }
            };
            mFileList = mPath.list(filter);
        } else {
            mFileList = new String[0];
        }
    }

    /**
     * Builds the file-chooser dialog. Selecting an item stores the file name
     * in {@link #mChosenFile} and echoes it into the model-name TextView.
     *
     * @param id dialog id; only {@link #DIALOG_LOAD_FILE} is handled
     * @return the (shown) dialog
     */
    protected Dialog myFileDialog(int id) {
        Dialog dialog = null;
        AlertDialog.Builder builder = new Builder(this);
        switch (id) {
            case DIALOG_LOAD_FILE:
                builder.setTitle("Choose your file");
                if (mFileList == null) {
                    // Nothing to list yet; return an empty dialog without showing it.
                    dialog = builder.create();
                    return dialog;
                }
                builder.setItems(mFileList, new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int which) {
                        mChosenFile = mFileList[which];
                        TextView modelName = (TextView) findViewById(R.id.textViewModelName);
                        modelName.setText(mChosenFile);
                    }
                });
                break;
        }
        dialog = builder.show();
        return dialog;
    }

    // In Android 6.0 Marshmallow, application will not be granted any permission at installation time.
    // Instead, application has to ask user for a permission one-by-one at runtime.
    // https://inthecheesefactory.com/blog/things-you-need-to-know-about-android-m-permission-developer-edition/en
    // p: the specific permission, e.g. android.Manifest.permission.WRITE_EXTERNAL_STORAGE
    // conserve in case of use.
    private void grantPermission(String p) {
        int REQUEST_CODE_ASK_PERMISSIONS = 124;
        int hasWriteContactsPermission = ContextCompat.checkSelfPermission(
                getApplicationContext(),
                p);
        // grant this specific permission
        if (hasWriteContactsPermission != PackageManager.PERMISSION_GRANTED) {
            ActivityCompat.requestPermissions(this,
                    new String[] {p},
                    REQUEST_CODE_ASK_PERMISSIONS);
            return;
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }
}
| |
/**
* Copyright (C) 2012 - 2014 Xeiam LLC http://xeiam.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do
* so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.xeiam.xchange.coinfloor;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Random;
import org.bouncycastle.crypto.digests.SHA224Digest;
import org.bouncycastle.crypto.params.ECDomainParameters;
import org.bouncycastle.crypto.params.ECPrivateKeyParameters;
import org.bouncycastle.crypto.signers.ECDSASigner;
import org.bouncycastle.jce.ECNamedCurveTable;
import org.bouncycastle.jce.spec.ECNamedCurveParameterSpec;
import org.bouncycastle.util.encoders.Base64;
import com.xeiam.xchange.ExchangeException;
/**
* @author obsessiveOrange
*/
/**
 * Static helpers for the Coinfloor trading API: ECDSA authentication,
 * server-result checking, and conversions between the scaled integer amounts
 * used on the wire and the {@link BigDecimal}s used locally.
 */
public class CoinfloorUtils {

    /** Currencies supported by Coinfloor. */
    public enum CoinfloorCurrency {
        BTC, GBP
    }

    // Coinfloor signs authentication requests on the secp224k1 curve.
    private static final ECNamedCurveParameterSpec spec = ECNamedCurveTable.getParameterSpec("secp224k1");
    private static final ECDomainParameters secp224k1 = new ECDomainParameters(spec.getCurve(), spec.getG(), spec.getN(), spec.getH());

    /** @return 16 fresh random bytes used as the client half of the auth nonce. */
    public static byte[] buildClientNonce() {
        byte[] clientNonce = new byte[16];
        new Random().nextBytes(clientNonce);
        return clientNonce;
    }

    /**
     * Builds the ECDSA authentication signature for a Coinfloor login.
     *
     * <p>The signing key is derived as SHA-224(userID || passphrase); the signed
     * message is SHA-224(userID || base64-decoded serverNonce || clientNonce).
     *
     * <p>NOTE(review): the {@code cookie} parameter is accepted but never used;
     * kept for API compatibility — confirm against the protocol documentation.
     *
     * @return the two signature components (r, s) as base64 strings
     * @throws ExchangeException if writing into the digest stream fails
     */
    public static List<String> buildSignature(long userID, String cookie, String passphrase, String serverNonce, byte[] clientNonce) {
        try {
            final SHA224Digest sha = new SHA224Digest();
            // Stream adapter that feeds every written byte straight into the digest.
            DataOutputStream dos = new DataOutputStream(new OutputStream() {
                @Override
                public void write(int b) {
                    sha.update((byte) b);
                }
                @Override
                public void write(byte[] buf, int off, int len) {
                    sha.update(buf, off, len);
                }
            });
            // Derive the private key material from userID + passphrase.
            dos.writeLong(userID);
            dos.write(passphrase.getBytes(Charset.forName("UTF-8")));
            dos.flush();
            byte[] digest = new byte[28];
            sha.doFinal(digest, 0); // doFinal also resets the digest for reuse below
            ECDSASigner signer = new ECDSASigner();
            signer.init(true, new ECPrivateKeyParameters(new BigInteger(1, digest), secp224k1));
            // Hash the challenge: userID || decoded server nonce || client nonce.
            dos.writeLong(userID);
            dos.write(Base64.decode(serverNonce));
            dos.write(clientNonce);
            dos.flush();
            dos.close();
            sha.doFinal(digest, 0);
            BigInteger[] signature = signer.generateSignature(digest);
            return Arrays.asList(bigIntegerToBase64(signature[0]), bigIntegerToBase64(signature[1]));
        } catch (IOException e) {
            // FIX: preserve the original cause instead of swallowing it.
            throw new ExchangeException("Could not build signature for authentication", e);
        }
    }

    /** Builds a "::<millis>:" nonce string from the current wall clock. */
    protected static String buildNonceString() {
        long currentTime = System.currentTimeMillis();
        return "::" + currentTime + ":";
    }

    /**
     * Encodes the magnitude of {@code bi} as base64, dropping the leading
     * zero sign byte that {@link BigInteger#toByteArray()} may prepend.
     */
    private static String bigIntegerToBase64(BigInteger bi) {
        byte[] bytes = bi.toByteArray();
        return bytes[0] == 0 ? Base64.toBase64String(bytes, 1, bytes.length - 1) : Base64.toBase64String(bytes);
    }

    /**
     * Throws if the payload carries a non-zero (or non-integer) error_code.
     *
     * @throws ExchangeException describing the server-reported error
     */
    public static void checkSuccess(Map<String, Object> payload) {
        if (payload.containsKey("error_code")) {
            if (!(payload.get("error_code") instanceof Integer) || (Integer) payload.get("error_code") != 0) {
                throw new ExchangeException("Server returned error " + payload.get("error_code") + ": " + payload.get("error_msg"));
            }
        }
    }

    /** Maps a currency symbol ("BTC"/"GBP") to the enum; throws for anything else. */
    public static CoinfloorCurrency currencyOf(String currency) {
        if (currency.equals("BTC")) {
            return CoinfloorCurrency.BTC;
        }
        else if (currency.equals("GBP")) {
            return CoinfloorCurrency.GBP;
        }
        throw new ExchangeException("Currency " + currency + " not supported by coinfloor!");
    }

    /** Maps a wire-format currency code to the enum; 0 means "no currency". */
    public static CoinfloorCurrency getCurrency(int currencyCode) {
        switch (currencyCode) {
        case 0:
            return null;
        case 63488:
            return CoinfloorCurrency.BTC;
        case 64032:
            return CoinfloorCurrency.GBP;
        }
        throw new ExchangeException("Currency Code " + currencyCode + " not supported by coinfloor!");
    }

    /** Maps a currency symbol to its wire-format code. */
    public static int toCurrencyCode(String currency) {
        return toCurrencyCode(CoinfloorUtils.currencyOf(currency));
    }

    /** Maps a currency enum to its wire-format code. */
    public static int toCurrencyCode(CoinfloorCurrency currency) {
        switch (currency) {
        case BTC:
            return 63488;
        case GBP:
            return 64032;
        }
        throw new ExchangeException("Currency " + currency + " not supported by coinfloor!");
    }

    /** Decimal places used on the wire for amounts of the given currency. */
    private static int getCurrencyScale(CoinfloorCurrency currency) {
        switch (currency) {
        case BTC:
            return 4;
        case GBP:
            return 2;
        }
        throw new ExchangeException("Currency " + currency + " not supported by coinfloor!");
    }

    /** Converts a wire-scaled integer amount to a BigDecimal. */
    public static BigDecimal scaleToBigDecimal(String currency, Integer amountToScale) {
        return scaleToBigDecimal(CoinfloorUtils.currencyOf(currency), amountToScale);
    }

    /** Converts a wire-scaled integer amount to a BigDecimal. */
    public static BigDecimal scaleToBigDecimal(CoinfloorCurrency currency, Integer amountToScale) {
        return BigDecimal.valueOf(amountToScale, getCurrencyScale(currency));
    }

    /** Converts a BigDecimal amount to its wire-scaled integer form. */
    public static int scaleToInt(String currency, BigDecimal amountToScale) {
        return scaleToInt(CoinfloorUtils.currencyOf(currency), amountToScale);
    }

    /** Converts a BigDecimal amount to its wire-scaled integer form (truncates). */
    public static int scaleToInt(CoinfloorCurrency currency, BigDecimal amountToScale) {
        return amountToScale.movePointRight(getCurrencyScale(currency)).intValue();
    }

    /**
     * Scale integer price results from API call to BigDecimal for local use.
     *
     * @param amountToScale The integer result received from API Call
     * @return BigDecimal representation of integer amount
     */
    public static BigDecimal scalePriceToBigDecimal(String currency, String counterCurrency, Integer amountToScale) {
        return scalePriceToBigDecimal(CoinfloorUtils.currencyOf(currency), CoinfloorUtils.currencyOf(counterCurrency), amountToScale);
    }

    /** Price scale = counter scale - base scale + 4 (per Coinfloor wire format). */
    public static BigDecimal scalePriceToBigDecimal(CoinfloorCurrency baseCurrency, CoinfloorCurrency counterCurrency, Integer amountToScale) {
        return BigDecimal.valueOf(amountToScale, getCurrencyScale(counterCurrency) - getCurrencyScale(baseCurrency) + 4);
    }

    /**
     * Scale a BigDecimal price to the integer form used by the API.
     *
     * @param amountToScale The local BigDecimal price
     * @return wire-format integer representation of the price
     */
    public static int scalePriceToInt(String baseCurrency, String counterCurrency, BigDecimal amountToScale) {
        return scalePriceToInt(CoinfloorUtils.currencyOf(baseCurrency), CoinfloorUtils.currencyOf(counterCurrency), amountToScale);
    }

    /** Price scale = counter scale - base scale + 4 (per Coinfloor wire format). */
    public static int scalePriceToInt(CoinfloorCurrency baseCurrency, CoinfloorCurrency counterCurrency, BigDecimal amountToScale) {
        return amountToScale.movePointRight(getCurrencyScale(counterCurrency) - getCurrencyScale(baseCurrency) + 4).intValue();
    }
}
| |
/*
* This file is generated by jOOQ.
*/
package com.rpkit.payments.bukkit.database.jooq.rpkit.tables.records;
import com.rpkit.payments.bukkit.database.jooq.rpkit.tables.RpkitPaymentGroupInvite;
import javax.annotation.Generated;
import org.jooq.Field;
import org.jooq.Record1;
import org.jooq.Record3;
import org.jooq.Row3;
import org.jooq.impl.UpdatableRecordImpl;
/**
* This class is generated by jOOQ.
*/
@Generated(
value = {
"http://www.jooq.org",
"jOOQ version:3.10.2"
},
comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
// NOTE: generated by jOOQ. Do not edit by hand — regenerate from the database
// schema instead, or changes will be lost on the next generation run.
public class RpkitPaymentGroupInviteRecord extends UpdatableRecordImpl<RpkitPaymentGroupInviteRecord> implements Record3<Integer, Integer, Integer> {

    // Generator-assigned; changes whenever the record layout is regenerated.
    private static final long serialVersionUID = -483578916;

    /**
     * Setter for <code>rpkit.rpkit_payment_group_invite.id</code>.
     */
    public void setId(Integer value) {
        set(0, value);
    }

    /**
     * Getter for <code>rpkit.rpkit_payment_group_invite.id</code>.
     */
    public Integer getId() {
        return (Integer) get(0);
    }

    /**
     * Setter for <code>rpkit.rpkit_payment_group_invite.payment_group_id</code>.
     */
    public void setPaymentGroupId(Integer value) {
        set(1, value);
    }

    /**
     * Getter for <code>rpkit.rpkit_payment_group_invite.payment_group_id</code>.
     */
    public Integer getPaymentGroupId() {
        return (Integer) get(1);
    }

    /**
     * Setter for <code>rpkit.rpkit_payment_group_invite.character_id</code>.
     */
    public void setCharacterId(Integer value) {
        set(2, value);
    }

    /**
     * Getter for <code>rpkit.rpkit_payment_group_invite.character_id</code>.
     */
    public Integer getCharacterId() {
        return (Integer) get(2);
    }

    // -------------------------------------------------------------------------
    // Primary key information
    // -------------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public Record1<Integer> key() {
        return (Record1) super.key();
    }

    // -------------------------------------------------------------------------
    // Record3 type implementation
    // -------------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public Row3<Integer, Integer, Integer> fieldsRow() {
        return (Row3) super.fieldsRow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Row3<Integer, Integer, Integer> valuesRow() {
        return (Row3) super.valuesRow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Integer> field1() {
        return RpkitPaymentGroupInvite.RPKIT_PAYMENT_GROUP_INVITE.ID;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Integer> field2() {
        return RpkitPaymentGroupInvite.RPKIT_PAYMENT_GROUP_INVITE.PAYMENT_GROUP_ID;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Integer> field3() {
        return RpkitPaymentGroupInvite.RPKIT_PAYMENT_GROUP_INVITE.CHARACTER_ID;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer component1() {
        return getId();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer component2() {
        return getPaymentGroupId();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer component3() {
        return getCharacterId();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer value1() {
        return getId();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer value2() {
        return getPaymentGroupId();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer value3() {
        return getCharacterId();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public RpkitPaymentGroupInviteRecord value1(Integer value) {
        setId(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public RpkitPaymentGroupInviteRecord value2(Integer value) {
        setPaymentGroupId(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public RpkitPaymentGroupInviteRecord value3(Integer value) {
        setCharacterId(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public RpkitPaymentGroupInviteRecord values(Integer value1, Integer value2, Integer value3) {
        value1(value1);
        value2(value2);
        value3(value3);
        return this;
    }

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    /**
     * Create a detached RpkitPaymentGroupInviteRecord
     */
    public RpkitPaymentGroupInviteRecord() {
        super(RpkitPaymentGroupInvite.RPKIT_PAYMENT_GROUP_INVITE);
    }

    /**
     * Create a detached, initialised RpkitPaymentGroupInviteRecord
     */
    public RpkitPaymentGroupInviteRecord(Integer id, Integer paymentGroupId, Integer characterId) {
        super(RpkitPaymentGroupInvite.RPKIT_PAYMENT_GROUP_INVITE);
        set(0, id);
        set(1, paymentGroupId);
        set(2, characterId);
    }
}
| |
package com.asha.md360player4android;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.DrawableRes;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.asha.vrlib.MDVRLibrary;
import com.asha.vrlib.model.MDHitEvent;
import com.asha.vrlib.texture.MD360BitmapTexture;
import com.google.android.apps.muzei.render.GLTextureView;
import com.squareup.picasso.Picasso;
import com.squareup.picasso.Target;
import java.util.ArrayList;
import java.util.List;
/**
 * Demo activity showing 360-degree panorama items mixed with plain text items
 * inside a RecyclerView. Panorama cells render through MDVRLibrary onto a
 * GLTextureView; bitmaps are loaded asynchronously with Picasso. All VR
 * library instances are owned by a shared VRLibManager whose lifecycle is
 * forwarded from this activity.
 */
public class RecyclerViewActivity extends AppCompatActivity {
    private static final String TAG = "MainActivity";
    // Two bundled drawables used as mock panorama sources.
    private Uri[] sMockData;
    // Creates and lifecycle-manages the MDVRLibrary instances for the VR cells.
    private VRLibManager manager;

    /** Convenience launcher for this activity. */
    public static void start(Context context) {
        Intent i = new Intent(context, RecyclerViewActivity.class);
        context.startActivity(i);
    }

    public RecyclerViewActivity() {}

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        sMockData = new Uri[] {
                getDrawableUri(R.drawable.bitmap360)
                ,getDrawableUri(R.drawable.texture)
        };
        setContentView(R.layout.activity_main);
        manager = new VRLibManager(this);
        RecyclerView recyclerView = (RecyclerView) findViewById(R.id.recycler_view);
        final FeedAdapter adapter = new FeedAdapter();
        recyclerView.setLayoutManager(new LinearLayoutManager(this));
        // Item animator disabled so GL texture views are not animated during rebinds.
        recyclerView.setItemAnimator(null);
        recyclerView.setAdapter(adapter);
    }

    // Forward activity lifecycle into the VR library manager so GL rendering
    // pauses/resumes/tears down with the activity.
    @Override
    protected void onResume() {
        super.onResume();
        manager.fireResumed();
    }

    @Override
    protected void onPause() {
        super.onPause();
        manager.firePaused();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        manager.fireDestroy();
    }

    /** Immutable feed item: a source URI plus a view type (0 = panorama, 1 = text). */
    private static class FeedModel {
        private final Uri uri;
        private final int type;
        public FeedModel(int type, Uri uri) {
            this.type = type;
            this.uri = uri;
        }
    }

    /** Base view holder that inflates its layout from the parent view group. */
    private abstract class FeedVH extends RecyclerView.ViewHolder {
        public FeedVH(ViewGroup vp, int layoutId) {
            super(create(vp, layoutId));
        }
        public abstract void bind(FeedModel feedModel);
    }

    /** Plain text cell; nothing to bind in this demo. */
    private class FeedTextVH extends FeedVH {
        public FeedTextVH(ViewGroup vp) {
            super(vp, R.layout.feed_text_layout);
        }
        @Override
        public void bind(FeedModel feedModel) {
        }
    }

    /**
     * Panorama cell. Lazily creates its MDVRLibrary on first bind and serves
     * bitmaps to the library via the IBitmapProvider callback.
     */
    private class FeedVRVH extends FeedVH implements MDVRLibrary.IBitmapProvider {
        private TextView text;
        private GLTextureView glTextureView;
        private ViewGroup parent;
        private MDVRLibrary vrlib;
        private FeedModel model;
        // Timestamp of the last director-brief update, used for throttling.
        private long ts;

        public FeedVRVH(ViewGroup vp) {
            super(vp, R.layout.feed_panorama_layout);
            text = (TextView) itemView.findViewById(R.id.feed_text);
            glTextureView = (GLTextureView) itemView.findViewById(R.id.feed_texture_view);
            parent = (ViewGroup) glTextureView.getParent();
        }

        @Override
        public void bind(FeedModel model) {
            this.model = model;
            ensureVRLib();
            // Tells the library its content source changed; triggers onProvideBitmap.
            vrlib.notifyPlayerChanged();
        }

        // Creates the VR library once per holder; subsequent binds reuse it.
        private void ensureVRLib() {
            if (vrlib == null) {
                vrlib = manager.create(this, glTextureView);
                vrlib.setEyePickChangedListener(new MDVRLibrary.IEyePickListener2() {
                    @Override
                    public void onHotspotHit(MDHitEvent hitEvent) {
                        // Throttle UI updates to at most one per 500 ms.
                        long delta = System.currentTimeMillis() - ts;
                        if (delta < 500) {
                            return;
                        }
                        String brief = vrlib.getDirectorBrief().toString();
                        text.setText(brief);
                        ts = System.currentTimeMillis();
                    }
                });
            }
        }

        // Asynchronously loads the model's bitmap and hands it to the GL texture.
        @Override
        public void onProvideBitmap(final MD360BitmapTexture.Callback callback) {
            if (model == null) {
                return;
            }
            Picasso.with(itemView.getContext()).load(model.uri).into(new Target() {
                @Override
                public void onBitmapLoaded(Bitmap bitmap, Picasso.LoadedFrom from) {
                    // Resize the GL texture to match the bitmap before uploading it.
                    vrlib.onTextureResize(bitmap.getWidth(), bitmap.getHeight());
                    callback.texture(bitmap);
                }
                @Override
                public void onBitmapFailed(Drawable errorDrawable) {
                    // Best effort: a failed load simply leaves the cell blank.
                }
                @Override
                public void onPrepareLoad(Drawable placeHolderDrawable) {
                }
            });
        }
    }

    /** Builds an android.resource:// URI for a bundled drawable. */
    private Uri getDrawableUri(@DrawableRes int resId){
        Resources resources = getResources();
        return Uri.parse(ContentResolver.SCHEME_ANDROID_RESOURCE + "://" + resources.getResourcePackageName(resId) + '/' + resources.getResourceTypeName(resId) + '/' + resources.getResourceEntryName(resId) );
    }

    /** Adapter producing 50 random feed items (~70% panoramas, per the 0.3 cutoff). */
    private class FeedAdapter extends RecyclerView.Adapter<FeedVH> {
        private List<FeedModel> feeds = new ArrayList<>();

        public FeedAdapter() {
            int i = 0;
            while (i++ < 50) {
                Uri uri = sMockData[(int) (Math.random() * sMockData.length)];
                feeds.add(new FeedModel(Math.random() > 0.3 ? 0 : 1, uri));
            }
        }

        @Override
        public FeedVH onCreateViewHolder(ViewGroup parent, int viewType) {
            // viewType 0 -> panorama holder, anything else -> text holder.
            if (viewType == 0) {
                return new FeedVRVH(parent);
            } else {
                return new FeedTextVH(parent);
            }
        }

        @Override
        public void onBindViewHolder(FeedVH holder, int position) {
            holder.bind(feeds.get(position));
        }

        @Override
        public void onViewRecycled(FeedVH holder) {
            super.onViewRecycled(holder);
        }

        @Override
        public int getItemViewType(int position) {
            return feeds.get(position).type;
        }

        @Override
        public int getItemCount() {
            return feeds.size();
        }
    }

    /** Inflates a layout into (but not attached to) the given parent. */
    private static View create(ViewGroup vp, int layout) {
        return LayoutInflater.from(vp.getContext()).inflate(layout, vp, false);
    }
}
| |
/*
* Copyright 2008-2009 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kim.service.impl;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.log4j.Logger;
import org.kuali.rice.core.api.criteria.QueryByCriteria;
import org.kuali.rice.kim.api.common.assignee.Assignee;
import org.kuali.rice.kim.api.group.Group;
import org.kuali.rice.kim.api.group.GroupService;
import org.kuali.rice.kim.api.identity.IdentityService;
import org.kuali.rice.kim.api.identity.CodedAttribute;
import org.kuali.rice.kim.api.identity.affiliation.EntityAffiliationType;
import org.kuali.rice.kim.api.identity.entity.Entity;
import org.kuali.rice.kim.api.identity.entity.EntityDefault;
import org.kuali.rice.kim.api.identity.entity.EntityDefaultQueryResults;
import org.kuali.rice.kim.api.identity.entity.EntityQueryResults;
import org.kuali.rice.kim.api.identity.external.EntityExternalIdentifierType;
import org.kuali.rice.kim.api.identity.principal.Principal;
import org.kuali.rice.kim.api.permission.Permission;
import org.kuali.rice.kim.api.permission.PermissionService;
import org.kuali.rice.kim.api.responsibility.Responsibility;
import org.kuali.rice.kim.api.responsibility.ResponsibilityAction;
import org.kuali.rice.kim.api.responsibility.ResponsibilityService;
import org.kuali.rice.kim.api.services.IdentityManagementService;
import org.kuali.rice.kim.api.services.KimApiServiceLocator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class IdentityManagementServiceImpl implements IdentityManagementService {
private static final Logger LOG = Logger.getLogger( IdentityManagementServiceImpl.class );
private PermissionService permissionService;
private ResponsibilityService responsibilityService;
private IdentityService identityService;
private GroupService groupService;
    // Cache-flush hooks required by the IdentityManagementService contract.
    // Intentional no-ops: nothing is cached in this implementation (see the
    // existing note on flushResponsibilityCaches below).
    @Override
    public void flushAllCaches() {
    }
    @Override
    public void flushEntityPrincipalCaches() {
    }
    @Override
    public void flushGroupCaches() {
    }
    @Override
    public void flushPermissionCaches() {
    }
    @Override
    public void flushResponsibilityCaches() {
        // nothing currently being cached
    }
// AUTHORIZATION SERVICE
@Override
public boolean hasPermission(String principalId, String namespaceCode, String permissionName, Map<String, String> permissionDetails) {
if ( LOG.isDebugEnabled() ) {
logHasPermissionCheck("Permission", principalId, namespaceCode, permissionName, permissionDetails);
}
boolean hasPerm = getPermissionService().hasPermission(principalId, namespaceCode, permissionName, permissionDetails);
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Result: " + hasPerm );
}
return hasPerm;
}
@Override
public boolean isAuthorized(String principalId, String namespaceCode, String permissionName, Map<String, String> permissionDetails, Map<String, String> qualification ) {
if ( qualification == null || qualification.isEmpty() ) {
return hasPermission( principalId, namespaceCode, permissionName, permissionDetails );
}
if ( LOG.isDebugEnabled() ) {
logAuthorizationCheck("Permission", principalId, namespaceCode, permissionName, permissionDetails, qualification);
}
boolean isAuthorized = getPermissionService().isAuthorized(principalId, namespaceCode, permissionName, permissionDetails, qualification);
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Result: " + isAuthorized );
}
return isAuthorized;
}
@Override
public boolean hasPermissionByTemplateName(String principalId, String namespaceCode, String permissionTemplateName, Map<String, String> permissionDetails) {
if ( LOG.isDebugEnabled() ) {
logHasPermissionCheck("Perm Templ", principalId, namespaceCode, permissionTemplateName, permissionDetails);
}
boolean hasPerm = getPermissionService().hasPermissionByTemplateName(principalId, namespaceCode, permissionTemplateName, permissionDetails);
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Result: " + hasPerm );
}
return hasPerm;
}
@Override
public boolean isAuthorizedByTemplateName(String principalId, String namespaceCode, String permissionTemplateName, Map<String, String> permissionDetails, Map<String, String> qualification ) {
if ( qualification == null || qualification.isEmpty() ) {
return hasPermissionByTemplateName( principalId, namespaceCode, permissionTemplateName, new HashMap<String, String>(permissionDetails) );
}
if ( LOG.isDebugEnabled() ) {
logAuthorizationCheck("Perm Templ", principalId, namespaceCode, permissionTemplateName, new HashMap<String, String>(permissionDetails), new HashMap<String, String>(qualification));
}
boolean isAuthorized = getPermissionService().isAuthorizedByTemplateName( principalId, namespaceCode, permissionTemplateName, new HashMap<String, String>(permissionDetails), new HashMap<String, String>(qualification) );
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Result: " + isAuthorized );
}
return isAuthorized;
}
@Override
public List<Permission> getAuthorizedPermissions(String principalId,
String namespaceCode, String permissionName, Map<String, String> permissionDetails, Map<String, String> qualification) {
return getPermissionService().getAuthorizedPermissions(principalId, namespaceCode, permissionName, permissionDetails, qualification);
}
@Override
public List<Permission> getAuthorizedPermissionsByTemplateName(String principalId,
String namespaceCode, String permissionTemplateName, Map<String, String> permissionDetails, Map<String, String> qualification) {
return getPermissionService().getAuthorizedPermissionsByTemplateName(principalId, namespaceCode, permissionTemplateName, permissionDetails, qualification);
}
@Override
public boolean isPermissionDefinedForTemplateName(String namespaceCode, String permissionTemplateName, Map<String, String> permissionDetails) {
return getPermissionService().isPermissionDefinedByTemplateName(namespaceCode, permissionTemplateName,
permissionDetails);
}
@Override
public List<Assignee> getPermissionAssignees(String namespaceCode,
String permissionName, Map<String, String> permissionDetails, Map<String, String> qualification) {
return this.permissionService.getPermissionAssignees(namespaceCode, permissionName,
permissionDetails, qualification);
}
@Override
public List<Assignee> getPermissionAssigneesForTemplateName(String namespaceCode,
String permissionTemplateName, Map<String, String> permissionDetails,
Map<String, String> qualification) {
return this.permissionService.getPermissionAssigneesByTemplateName(namespaceCode, permissionTemplateName,
new HashMap<String, String>(permissionDetails), new HashMap<String, String>(qualification));
}
    // GROUP SERVICE
    // Thin delegation layer over GroupService; no extra logic beyond the
    // null-group guard in the name-based membership check.

    @Override
    public boolean isMemberOfGroup(String principalId, String groupId) {
        return getGroupService().isMemberOfGroup(principalId, groupId);
    }

    /** Name-based membership check; returns false when no such group exists. */
    @Override
    public boolean isMemberOfGroup(String principalId, String namespaceCode, String groupName) {
        Group group = getGroupByName(namespaceCode, groupName);
        return group == null ? false : isMemberOfGroup(principalId, group.getId());
    }

    @Override
    public boolean isGroupMemberOfGroup(String potentialMemberId, String potentialParentId)
    {
        return getGroupService()
                .isGroupMemberOfGroup(potentialMemberId, potentialParentId);
    }

    @Override
    public List<String> getGroupMemberPrincipalIds(String groupId) {
        return getGroupService().getMemberPrincipalIds(groupId);
    }

    @Override
    public List<String> getDirectGroupMemberPrincipalIds(String groupId) {
        return getGroupService().getDirectMemberPrincipalIds(groupId);
    }

    @Override
    public List<String> getGroupIdsForPrincipal(String principalId) {
        return getGroupService().getGroupIdsByPrincipalId(principalId);
    }

    /** As above, but restricted to groups in the given namespace. */
    @Override
    public List<String> getGroupIdsForPrincipal(String principalId, String namespaceCode ) {
        return getGroupService().getGroupIdsByPrincipalIdAndNamespaceCode(principalId, namespaceCode);
    }

    @Override
    public List<Group> getGroupsForPrincipal(String principalId) {
        return getGroupService().getGroupsByPrincipalId(principalId);
    }

    /** As above, but restricted to groups in the given namespace. */
    @Override
    public List<Group> getGroupsForPrincipal(String principalId, String namespaceCode ) {
        return getGroupService().getGroupsByPrincipalIdAndNamespaceCode(principalId, namespaceCode);
    }

    @Override
    public List<String> getMemberGroupIds(String groupId) {
        return getGroupService().getMemberGroupIds(groupId);
    }

    @Override
    public List<String> getDirectMemberGroupIds(String groupId) {
        return getGroupService().getDirectMemberGroupIds(groupId);
    }

    @Override
    public Group getGroup(String groupId) {
        return getGroupService().getGroup(groupId);
    }

    @Override
    public Group getGroupByName(String namespaceCode, String groupName) {
        return getGroupService().getGroupByNameAndNamespaceCode(namespaceCode, groupName);
    }

    @Override
    public List<String> getParentGroupIds(String groupId) {
        return getGroupService().getParentGroupIds(groupId);
    }

    @Override
    public List<String> getDirectParentGroupIds(String groupId) {
        return getGroupService().getDirectParentGroupIds( groupId );
    }

    // Group mutation operations — delegated unchanged to GroupService.

    @Override
    public boolean addGroupToGroup(String childId, String parentId) {
        return getGroupService().addGroupToGroup(childId, parentId);
    }

    @Override
    public boolean addPrincipalToGroup(String principalId, String groupId) {
        return getGroupService().addPrincipalToGroup(principalId, groupId);
    }

    @Override
    public boolean removeGroupFromGroup(String childId, String parentId) {
        return getGroupService().removeGroupFromGroup(childId, parentId);
    }

    @Override
    public boolean removePrincipalFromGroup(String principalId, String groupId) {
        return getGroupService().removePrincipalFromGroup(principalId, groupId);
    }

    @Override
    public Group createGroup(Group group) {
        return getGroupService().createGroup(group);
    }

    @Override
    public void removeAllMembers(String groupId) {
        getGroupService().removeAllMembers(groupId);
    }

    @Override
    public Group updateGroup(String groupId, Group group) {
        return getGroupService().updateGroup(groupId, group);
    }
// IDENTITY SERVICE
// =====================================================================
// IDENTITY SERVICE delegation: every method below forwards unchanged to
// the lazily resolved IdentityService (see getIdentityService()).
// =====================================================================
/** Delegates to IdentityService#getPrincipal(String). */
@Override
public Principal getPrincipal(String principalId) {
return getIdentityService().getPrincipal(principalId);
}
/** Delegates to IdentityService#getPrincipalByPrincipalName(String). */
@Override
public Principal getPrincipalByPrincipalName(String principalName) {
return getIdentityService().getPrincipalByPrincipalName(principalName);
}
/** Delegates to IdentityService#getPrincipalByPrincipalNameAndPassword(String, String). */
@Override
public Principal getPrincipalByPrincipalNameAndPassword(String principalName, String password) {
return getIdentityService().getPrincipalByPrincipalNameAndPassword(principalName, password);
}
/** Delegates to IdentityService#getEntityDefault(String). */
@Override
public EntityDefault getEntityDefaultInfo(String entityId) {
return getIdentityService().getEntityDefault(entityId);
}
/** Delegates to IdentityService#getEntityDefaultByPrincipalId(String). */
@Override
public EntityDefault getEntityDefaultInfoByPrincipalId(
String principalId) {
return getIdentityService().getEntityDefaultByPrincipalId(principalId);
}
/** Delegates to IdentityService#getEntityDefaultByPrincipalName(String). */
@Override
public EntityDefault getEntityDefaultInfoByPrincipalName(
String principalName) {
return getIdentityService().getEntityDefaultByPrincipalName(principalName);
}
/** Delegates to IdentityService#findEntityDefaults(QueryByCriteria). */
@Override
public EntityDefaultQueryResults findEntityDefaults(QueryByCriteria queryByCriteria) {
return getIdentityService().findEntityDefaults(queryByCriteria);
}
/** Delegates to IdentityService#getEntity(String). */
@Override
public Entity getEntity(String entityId) {
return getIdentityService().getEntity(entityId);
}
/** Delegates to IdentityService#getEntityByPrincipalId(String). */
@Override
public Entity getEntityByPrincipalId(String principalId) {
return getIdentityService().getEntityByPrincipalId(principalId);
}
/** Delegates to IdentityService#getEntityByPrincipalName(String). */
@Override
public Entity getEntityByPrincipalName(String principalName) {
return getIdentityService().getEntityByPrincipalName(principalName);
}
/** Delegates to IdentityService#findEntities(QueryByCriteria). */
@Override
public EntityQueryResults findEntities(QueryByCriteria queryByCriteria) {
return getIdentityService().findEntities(queryByCriteria);
}
/** Delegates to IdentityService#getAddressType(String). */
@Override
public CodedAttribute getAddressType( String code ) {
return getIdentityService().getAddressType(code);
}
/** Delegates to IdentityService#getEmailType(String). */
@Override
public CodedAttribute getEmailType( String code ) {
return getIdentityService().getEmailType(code);
}
/** Delegates to IdentityService#getAffiliationType(String). */
@Override
public EntityAffiliationType getAffiliationType( String code ) {
return getIdentityService().getAffiliationType(code);
}
/**
 * Unlike the sibling type lookups, this one rebuilds the service result through
 * CodedAttribute.Builder — presumably because the service returns a different
 * citizenship-status type here (TODO confirm). NOTE(review): Builder.create
 * would likely throw if the service returns null for an unknown code, whereas
 * the sibling methods pass null straight through; verify this asymmetry is
 * intentional.
 */
@Override
public CodedAttribute getCitizenshipStatus( String code ) {
return CodedAttribute.Builder.create(getIdentityService().getCitizenshipStatus(code)).build();
}
/** Delegates to IdentityService#getEmploymentStatus(String). */
@Override
public CodedAttribute getEmploymentStatus( String code ) {
return getIdentityService().getEmploymentStatus(code);
}
/** Delegates to IdentityService#getEmploymentType(String). */
@Override
public CodedAttribute getEmploymentType( String code ) {
return getIdentityService().getEmploymentType(code);
}
/** Delegates to IdentityService#getNameType(String). */
@Override
public CodedAttribute getEntityNameType( String code ) {
return getIdentityService().getNameType(code);
}
/** Delegates to IdentityService#getEntityType(String). */
@Override
public CodedAttribute getEntityType( String code ) {
return getIdentityService().getEntityType(code);
}
/** Delegates to IdentityService#getExternalIdentifierType(String). */
@Override
public EntityExternalIdentifierType getExternalIdentifierType( String code ) {
return getIdentityService().getExternalIdentifierType(code);
}
/** Delegates to IdentityService#getPhoneType(String). */
@Override
public CodedAttribute getPhoneType( String code ) {
return getIdentityService().getPhoneType(code);
}
// ----------------------
// Responsibility Methods
// ----------------------
// Each method below forwards unchanged to the lazily resolved
// ResponsibilityService (see getResponsibilityService()).
/** Delegates to ResponsibilityService#getResponsibility(String). */
@Override
public Responsibility getResponsibility(String responsibilityId) {
return getResponsibilityService().getResponsibility( responsibilityId );
}
/** Delegates to ResponsibilityService#hasResponsibility with the same argument order. */
@Override
public boolean hasResponsibility(String principalId, String namespaceCode,
String responsibilityName, Map<String, String> qualification,
Map<String, String> responsibilityDetails) {
return getResponsibilityService().hasResponsibility( principalId, namespaceCode, responsibilityName,
qualification, responsibilityDetails );
}
/** Delegates to ResponsibilityService#findRespByNamespaceCodeAndName(String, String). */
@Override
public Responsibility getResponsibilityByName( String namespaceCode, String responsibilityName) {
return getResponsibilityService().findRespByNamespaceCodeAndName(namespaceCode, responsibilityName);
}
/** Delegates to ResponsibilityService#getResponsibilityActions with the same argument order. */
@Override
public List<ResponsibilityAction> getResponsibilityActions( String namespaceCode, String responsibilityName,
Map<String, String> qualification, Map<String, String> responsibilityDetails) {
return getResponsibilityService().getResponsibilityActions( namespaceCode, responsibilityName, qualification,
responsibilityDetails );
}
/** Delegates to ResponsibilityService#getResponsibilityActionsByTemplateName with the same argument order. */
@Override
public List<ResponsibilityAction> getResponsibilityActionsByTemplateName(
String namespaceCode, String responsibilityTemplateName,
Map<String, String> qualification, Map<String, String> responsibilityDetails) {
return getResponsibilityService().getResponsibilityActionsByTemplateName(namespaceCode, responsibilityTemplateName,
qualification, responsibilityDetails);
}
/** Delegates to ResponsibilityService#hasResponsibilityByTemplateName with the same argument order. */
@Override
public boolean hasResponsibilityByTemplateName(String principalId,
String namespaceCode, String responsibilityTemplateName,
Map<String, String> qualification, Map<String, String> responsibilityDetails) {
return getResponsibilityService().hasResponsibilityByTemplateName(principalId, namespaceCode, responsibilityTemplateName,
qualification, responsibilityDetails);
}
/**
 * Builds and logs a multi-line description of an "is authorized" check: the
 * check type, namespace/permission name, the principal (with its resolved
 * principal name when available), the permission details and the qualifiers.
 * Logged at TRACE (with a stack trace appended so the caller is visible) when
 * TRACE is enabled, otherwise at DEBUG.
 */
protected void logAuthorizationCheck(String checkType, String principalId, String namespaceCode, String permissionName, Map<String, String> permissionDetails, Map<String, String> qualification ) {
    StringBuilder sb = new StringBuilder();
    sb.append( '\n' );
    sb.append( "Is AuthZ for " ).append( checkType ).append( ": " ).append( namespaceCode ).append( "/" ).append( permissionName ).append( '\n' );
    sb.append( "    Principal:  " ).append( principalId );
    if ( principalId != null ) {
        Principal principal = getPrincipal( principalId );
        if ( principal != null ) {
            sb.append( " (" ).append( principal.getPrincipalName() ).append( ')' );
        }
    }
    sb.append( '\n' );
    sb.append( "    Details:\n" );
    if ( permissionDetails != null && !permissionDetails.isEmpty() ) {
        // Terminate the map dump with a newline so the next label starts on its
        // own line (previously "Qualifiers:" ran onto the same line as the map).
        sb.append( permissionDetails ).append( '\n' );
    } else {
        sb.append( "        [null]\n" );
    }
    sb.append( "    Qualifiers:\n" );
    if ( qualification != null && !qualification.isEmpty() ) {
        sb.append( qualification ).append( '\n' );
    } else {
        sb.append( "        [null]\n" );
    }
    if (LOG.isTraceEnabled()) {
        // Include the current stack trace so we can see who triggered the check.
        LOG.trace( sb.append( ExceptionUtils.getStackTrace(new Throwable()) ).toString() );
    } else {
        LOG.debug(sb.toString());
    }
}
/**
 * Builds and logs a multi-line description of a "has permission" check: the
 * check type, namespace/permission name, the principal (with its resolved
 * principal name when available) and the permission details. Logged at TRACE
 * (with a stack trace appended so the caller is visible) when TRACE is
 * enabled, otherwise at DEBUG.
 */
protected void logHasPermissionCheck(String checkType, String principalId, String namespaceCode, String permissionName, Map<String, String> permissionDetails ) {
    StringBuilder sb = new StringBuilder();
    sb.append( '\n' );
    sb.append( "Has Perm for " ).append( checkType ).append( ": " ).append( namespaceCode ).append( "/" ).append( permissionName ).append( '\n' );
    sb.append( "    Principal:  " ).append( principalId );
    if ( principalId != null ) {
        Principal principal = getPrincipal( principalId );
        if ( principal != null ) {
            sb.append( " (" ).append( principal.getPrincipalName() ).append( ')' );
        }
    }
    sb.append( '\n' );
    sb.append( "    Details:\n" );
    if ( permissionDetails != null && !permissionDetails.isEmpty() ) {
        // Terminate the map dump with a newline so the log stays line-per-item
        // (previously the next log content ran onto the same line as the map).
        sb.append( permissionDetails ).append( '\n' );
    } else {
        sb.append( "        [null]\n" );
    }
    if (LOG.isTraceEnabled()) {
        // Include the current stack trace so we can see who triggered the check.
        LOG.trace( sb.append( ExceptionUtils.getStackTrace(new Throwable()) ).toString() );
    } else {
        LOG.debug(sb.toString());
    }
}
// OTHER METHODS
// Lazy accessors: each caches its KIM service on first use. NOTE(review):
// the null-checks are not synchronized; the worst case appears to be the
// locator being consulted more than once, which looks benign here — confirm.
/** Lazily resolves and caches the IdentityService from KimApiServiceLocator. */
public IdentityService getIdentityService() {
if ( identityService == null ) {
identityService = KimApiServiceLocator.getIdentityService();
}
return identityService;
}
/** Lazily resolves and caches the GroupService from KimApiServiceLocator. */
public GroupService getGroupService() {
if ( groupService == null ) {
groupService = KimApiServiceLocator.getGroupService();
}
return groupService;
}
/** Lazily resolves and caches the PermissionService from KimApiServiceLocator. */
public PermissionService getPermissionService() {
if ( permissionService == null ) {
permissionService = KimApiServiceLocator.getPermissionService();
}
return permissionService;
}
/** Lazily resolves and caches the ResponsibilityService from KimApiServiceLocator. */
public ResponsibilityService getResponsibilityService() {
if ( responsibilityService == null ) {
responsibilityService = KimApiServiceLocator.getResponsibilityService();
}
return responsibilityService;
}
}
| |
/**
* Copyright (c) 2016, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.apis.content;
import com.example.android.apis.R;
//BEGIN_INCLUDE(job)
import android.app.job.JobInfo;
import android.app.job.JobParameters;
import android.app.job.JobScheduler;
import android.app.job.JobService;
import android.content.ComponentName;
import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.os.Environment;
import android.os.Handler;
import android.provider.MediaStore;
import android.util.Log;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.List;
/**
 * Example stub job to monitor when there is a change to photos in the media provider.
 */
public class PhotosContentJob extends JobService {
    // The root URI of the media provider, to monitor for generic changes to its content.
    static final Uri MEDIA_URI = Uri.parse("content://" + MediaStore.AUTHORITY + "/");
    // Path segments for image-specific URIs in the provider.
    static final List<String> EXTERNAL_PATH_SEGMENTS
            = MediaStore.Images.Media.EXTERNAL_CONTENT_URI.getPathSegments();
    // The columns we want to retrieve about a particular image.
    static final String[] PROJECTION = new String[] {
            MediaStore.Images.ImageColumns._ID, MediaStore.Images.ImageColumns.DATA
    };
    static final int PROJECTION_ID = 0;
    static final int PROJECTION_DATA = 1;
    // This is the external storage directory where cameras place pictures.
    static final String DCIM_DIR = Environment.getExternalStoragePublicDirectory(
            Environment.DIRECTORY_DCIM).getPath();
    // A pre-built JobInfo we use for scheduling our job.
    static final JobInfo JOB_INFO;
    static {
        JobInfo.Builder builder = new JobInfo.Builder(JobIds.PHOTOS_CONTENT_JOB,
                new ComponentName("com.example.android.apis", PhotosContentJob.class.getName()));
        // Look for specific changes to images in the provider.
        builder.addTriggerContentUri(new JobInfo.TriggerContentUri(
                MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
                JobInfo.TriggerContentUri.FLAG_NOTIFY_FOR_DESCENDANTS));
        // Also look for general reports of changes in the overall provider.
        builder.addTriggerContentUri(new JobInfo.TriggerContentUri(MEDIA_URI, 0));
        JOB_INFO = builder.build();
    }
    // Fake job work.  A real implementation would do some work on a separate thread.
    // NOTE(review): this Handler binds to the thread that constructs the service,
    // which for a JobService is the main thread.
    final Handler mHandler = new Handler();
    final Runnable mWorker = new Runnable() {
        @Override public void run() {
            // Re-schedule so we keep observing future changes, then tell the
            // scheduler this run is done (false = no reschedule of this run).
            scheduleJob(PhotosContentJob.this);
            jobFinished(mRunningParams, false);
        }
    };
    JobParameters mRunningParams;
    // Schedule this job, replace any existing one.
    public static void scheduleJob(Context context) {
        JobScheduler js = context.getSystemService(JobScheduler.class);
        js.schedule(JOB_INFO);
        Log.i("PhotosContentJob", "JOB SCHEDULED!");
    }
    // Check whether this job is currently scheduled.
    public static boolean isScheduled(Context context) {
        JobScheduler js = context.getSystemService(JobScheduler.class);
        List<JobInfo> jobs = js.getAllPendingJobs();
        if (jobs == null) {
            return false;
        }
        for (int i = 0; i < jobs.size(); i++) {
            if (jobs.get(i).getId() == JobIds.PHOTOS_CONTENT_JOB) {
                return true;
            }
        }
        return false;
    }
    // Cancel this job, if currently scheduled.
    public static void cancelJob(Context context) {
        JobScheduler js = context.getSystemService(JobScheduler.class);
        js.cancel(JobIds.PHOTOS_CONTENT_JOB);
    }
    @Override
    public boolean onStartJob(JobParameters params) {
        Log.i("PhotosContentJob", "JOB STARTED!");
        mRunningParams = params;
        // Instead of real work, we are going to build a string to show to the user.
        StringBuilder sb = new StringBuilder();
        // Did we trigger due to a content change?
        if (params.getTriggeredContentAuthorities() != null) {
            boolean rescanNeeded = false;
            if (params.getTriggeredContentUris() != null) {
                // If we have details about which URIs changed, then iterate through them
                // and collect either the ids that were impacted or note that a generic
                // change has happened.
                ArrayList<String> ids = new ArrayList<>();
                for (Uri uri : params.getTriggeredContentUris()) {
                    List<String> path = uri.getPathSegments();
                    if (path != null && path.size() == EXTERNAL_PATH_SEGMENTS.size() + 1) {
                        // This is a specific file.
                        ids.add(path.get(path.size() - 1));
                    } else {
                        // Oops, there is some general change!
                        rescanNeeded = true;
                    }
                }
                if (ids.size() > 0) {
                    // If we found some ids that changed, we want to determine what they are.
                    // First, we do a query with the content provider to ask about all of them.
                    // Bind the ids through selectionArgs instead of embedding them in the
                    // selection string, so the query stays well-formed for any id value.
                    StringBuilder selection = new StringBuilder();
                    String[] selectionArgs = new String[ids.size()];
                    for (int i = 0; i < ids.size(); i++) {
                        if (selection.length() > 0) {
                            selection.append(" OR ");
                        }
                        selection.append(MediaStore.Images.ImageColumns._ID).append("=?");
                        selectionArgs[i] = ids.get(i);
                    }
                    // Now we iterate through the query, looking at the filenames of
                    // the items to determine if they are ones we are interested in.
                    Cursor cursor = null;
                    boolean haveFiles = false;
                    try {
                        cursor = getContentResolver().query(
                                MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
                                PROJECTION, selection.toString(), selectionArgs, null);
                        if (cursor == null) {
                            // The provider may return null (e.g. if it is unavailable);
                            // previously this crashed with a NullPointerException.
                            sb.append("Error: media provider query failed!\n");
                        } else {
                            while (cursor.moveToNext()) {
                                // We only care about files in the DCIM directory.
                                String dir = cursor.getString(PROJECTION_DATA);
                                if (dir.startsWith(DCIM_DIR)) {
                                    if (!haveFiles) {
                                        haveFiles = true;
                                        sb.append("New photos:\n");
                                    }
                                    sb.append(cursor.getInt(PROJECTION_ID));
                                    sb.append(": ");
                                    sb.append(dir);
                                    sb.append("\n");
                                }
                            }
                        }
                    } catch (SecurityException e) {
                        sb.append("Error: no access to media!");
                    } finally {
                        if (cursor != null) {
                            cursor.close();
                        }
                    }
                }
            } else {
                // We don't have any details about URIs (because too many changed at once),
                // so just note that we need to do a full rescan.
                rescanNeeded = true;
            }
            if (rescanNeeded) {
                sb.append("Photos rescan needed!");
            }
        } else {
            sb.append("(No photos content)");
        }
        Toast.makeText(this, sb.toString(), Toast.LENGTH_LONG).show();
        // We will emulate taking some time to do this work, so we can see batching happen.
        mHandler.postDelayed(mWorker, 10 * 1000);
        return true;
    }
    @Override
    public boolean onStopJob(JobParameters params) {
        mHandler.removeCallbacks(mWorker);
        return false;
    }
}
//END_INCLUDE(job)
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.execute;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Serializable;
import java.rmi.ServerException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.DataSerializable;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.CacheClosedException;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionAttributes;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.Scope;
import org.apache.geode.cache.client.ServerConnectivityException;
import org.apache.geode.cache.client.ServerOperationException;
import org.apache.geode.cache.execute.Execution;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionAdapter;
import org.apache.geode.cache.execute.FunctionContext;
import org.apache.geode.cache.execute.FunctionException;
import org.apache.geode.cache.execute.FunctionInvocationTargetException;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.execute.RegionFunctionContext;
import org.apache.geode.cache.execute.ResultCollector;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.internal.cache.PartitionedRegion;
import org.apache.geode.internal.cache.functions.TestFunction;
import org.apache.geode.internal.cache.tier.sockets.CacheServerTestUtil;
import org.apache.geode.test.dunit.Assert;
import org.apache.geode.test.dunit.IgnoredException;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.SerializableRunnable;
import org.apache.geode.test.dunit.Wait;
import org.apache.geode.test.dunit.WaitCriterion;
import org.apache.geode.test.junit.categories.ClientServerTest;
import org.apache.geode.test.junit.categories.FunctionServiceTest;
@Category({ClientServerTest.class, FunctionServiceTest.class})
public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServerTestBase {
private static final String TEST_FUNCTION7 = TestFunction.TEST_FUNCTION7;
private static final String TEST_FUNCTION2 = TestFunction.TEST_FUNCTION2;
// Flags set by the test methods and read by the static helpers they invoke.
Boolean isByName = null;
private static int retryCount = 0;
Boolean toRegister = null;
// Replicated region created by createReplicatedRegion(); named retryRegionName.
private static Region metaDataRegion;
static final String retryRegionName = "RetryDataRegion";
/**
 * Executing a function that was never registered on the servers must fail with
 * a ServerOperationException whose message names the missing function id.
 */
@Test
public void test_Bug_43126_Function_Not_Registered() throws InterruptedException {
  createScenario();
  try {
    client
        .invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest.executeRegisteredFunction());
    // Previously the test passed silently when no exception was thrown,
    // proving nothing; the execution of an unregistered function must throw.
    fail("Expected execution of an unregistered function to throw");
  } catch (Exception e) {
    // fail() above throws AssertionError, which this catch does not swallow.
    assertTrue(e.getCause() instanceof ServerOperationException);
    assertTrue(
        e.getCause().getMessage().contains("The function is not registered for function id"));
  }
}
/** Once TEST_FUNCTION2 is registered on the servers, the client call must succeed. */
@Test
public void test_Bug43126() throws InterruptedException {
  createScenario();
  registerFunctionAtServer(new TestFunction(true, TEST_FUNCTION2));
  client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest.executeRegisteredFunction());
}
/*
 * Execution of the function on server with single key as the routing object and using the name of
 * the function
 */
@Test
public void testServerSingleKeyExecution_byName() {
  createScenario();
  Function function = new TestFunction(true, TEST_FUNCTION2);
  registerFunctionAtServer(function);
  // Boolean(boolean) is deprecated; use the canonical cached instances instead.
  isByName = Boolean.TRUE;
  toRegister = Boolean.TRUE;
  // Tell the servers which log messages are expected noise during this test.
  SerializableRunnable suspect = new SerializableRunnable() {
    public void run() {
      cache.getLogger()
          .info("<ExpectedException action=add>" + "No target node found for KEY = "
              + "|Server could not send the reply" + "|Unexpected exception during"
              + "</ExpectedException>");
    }
  };
  runOnAllServers(suspect);
  client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest
      .serverSingleKeyExecution(isByName, toRegister));
  SerializableRunnable endSuspect = new SerializableRunnable() {
    public void run() {
      cache.getLogger()
          .info("<ExpectedException action=remove>" + "No target node found for KEY = "
              + "|Server could not send the reply" + "|Unexpected exception during"
              + "</ExpectedException>");
    }
  };
  runOnAllServers(endSuspect);
}
/** Regression test for bug 43513: single-key onRegion execution using the single-connection scenario. */
@Test
public void testServerSingleKeyExecution_Bug43513_OnRegion() {
createScenario_SingleConnection();
client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest
.serverSingleKeyExecutionOnRegion_SingleConnection());
}
/** Single-key execution of a function that sends an exception as a result. */
@Test
public void testServerSingleKeyExecution_SendException() {
  createScenario();
  Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_SEND_EXCEPTION);
  registerFunctionAtServer(function);
  // Boolean(boolean) is deprecated; use the canonical cached instances instead.
  isByName = Boolean.TRUE;
  toRegister = Boolean.TRUE;
  client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest
      .serverSingleKeyExecution_SendException(isByName, toRegister));
}
/** Single-key execution of a function that throws an exception while running. */
@Test
public void testServerSingleKeyExecution_ThrowException() {
  createScenario();
  Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_THROW_EXCEPTION);
  registerFunctionAtServer(function);
  // Boolean(boolean) is deprecated; use the canonical cached instances instead.
  isByName = Boolean.TRUE;
  toRegister = Boolean.TRUE;
  client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest
      .serverSingleKeyExecution_ThrowException(isByName, toRegister));
}
/** Regression test for bug 41832: single-key execution across two regions. */
@Test
public void testClientWithoutPool_Bug41832() {
  createScenarioWith2Regions();
  Function function = new TestFunction(true, TEST_FUNCTION2);
  registerFunctionAtServer(function);
  // Boolean(boolean) is deprecated; use the canonical cached instances instead.
  isByName = Boolean.TRUE;
  toRegister = Boolean.TRUE;
  // Tell the servers which log messages are expected noise during this test.
  SerializableRunnable suspect = new SerializableRunnable() {
    public void run() {
      cache.getLogger()
          .info("<ExpectedException action=add>" + "No target node found for KEY = "
              + "|Server could not send the reply" + "|Unexpected exception during"
              + "</ExpectedException>");
    }
  };
  runOnAllServers(suspect);
  client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest
      .serverSingleKeyExecutionWith2Regions(isByName, toRegister));
  SerializableRunnable endSuspect = new SerializableRunnable() {
    public void run() {
      cache.getLogger()
          .info("<ExpectedException action=remove>" + "No target node found for KEY = "
              + "|Server could not send the reply" + "|Unexpected exception during"
              + "</ExpectedException>");
    }
  };
  runOnAllServers(endSuspect);
}
/*
 * Execution of the function on server with single key as the routing object and using the name of
 * the function
 */
@Test
public void testServerExecution_NoLastResult() {
  createScenario();
  Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_NO_LASTRESULT);
  registerFunctionAtServer(function);
  // Boolean(boolean) is deprecated; use the canonical cached instances instead.
  isByName = Boolean.TRUE;
  toRegister = Boolean.TRUE;
  final IgnoredException ex = IgnoredException.addIgnoredException("did not send last result");
  try {
    client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest
        .serverSingleKeyExecution_NoLastResult(isByName, toRegister));
  } finally {
    // Always remove the ignored-exception filter, even when the invoke fails,
    // so it cannot leak into subsequent tests.
    ex.remove();
  }
}
/** Same as testServerSingleKeyExecution_byName, but the client does not register the function. */
@Test
public void testServerSingleKeyExecution_byName_WithoutRegister() {
  createScenario();
  Function function = new TestFunction(true, TEST_FUNCTION2);
  registerFunctionAtServer(function);
  // Boolean(boolean) is deprecated; use the canonical cached instances instead.
  isByName = Boolean.TRUE;
  toRegister = Boolean.FALSE;
  // Tell the servers which log messages are expected noise during this test.
  SerializableRunnable suspect = new SerializableRunnable() {
    public void run() {
      cache.getLogger()
          .info("<ExpectedException action=add>" + "No target node found for KEY = "
              + "|Server could not send the reply" + "|Unexpected exception during"
              + "</ExpectedException>");
    }
  };
  runOnAllServers(suspect);
  client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest
      .serverSingleKeyExecution(isByName, toRegister));
  SerializableRunnable endSuspect = new SerializableRunnable() {
    public void run() {
      cache.getLogger()
          .info("<ExpectedException action=remove>" + "No target node found for KEY = "
              + "|Server could not send the reply" + "|Unexpected exception during"
              + "</ExpectedException>");
    }
  };
  runOnAllServers(endSuspect);
}
/*
 * Execution of the function on server with single key as the routing. Function throws the
 * FunctionInvocationTargetException. As this is the case of HA then system should retry the
 * function execution. After 5th attempt function will send Boolean as last result.
 */
@Test
public void testserverSingleKeyExecution_FunctionInvocationTargetException() {
createScenario();
// The retry/HA assertions live in the static helper invoked on the client VM.
client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest
.serverSingleKeyExecution_FunctionInvocationTargetException());
}
/** Single-key execution of a function that is slow enough to exercise socket timeouts. */
@Test
public void testServerSingleKeyExecution_SocketTimeOut() {
  createScenario();
  Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_SOCKET_TIMEOUT);
  registerFunctionAtServer(function);
  // Boolean(boolean) is deprecated; use the canonical cached instance instead.
  isByName = Boolean.TRUE;
  client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest
      .serverSingleKeyExecutionSocketTimeOut(isByName));
}
/*
 * Execution of the function on server with single key as the routing object and using the
 * instance of the function
 */
@Test
public void testServerSingleKeyExecution_byInstance() {
  createScenario();
  Function function = new TestFunction(true, TEST_FUNCTION2);
  registerFunctionAtServer(function);
  // Boolean(boolean) is deprecated; use the canonical cached instances instead
  // (this also makes the two assignments stylistically consistent — the
  // original mixed "new Boolean(false)" with an autoboxed "true").
  isByName = Boolean.FALSE;
  toRegister = Boolean.TRUE;
  client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest
      .serverSingleKeyExecution(isByName, toRegister));
}
/*
 * Execution of the inline function on server with single key as the routing object
 */
@Test
public void testServerSingleKeyExecution_byInlineFunction() {
createScenario();
// The inline function is defined inside the static helper invoked on the client VM.
client.invoke(
() -> PRClientServerRegionFunctionExecutionDUnitTest.serverSingleKeyExecution_Inline());
}
/*
 * Execution of the function on server with set multiple keys as the routing object and using the
 * name of the function
 */
@Test
public void testserverMultiKeyExecution_byName() {
  createScenario();
  Function function = new TestFunction(true, TEST_FUNCTION2);
  registerFunctionAtServer(function);
  // Boolean(boolean) is deprecated; use the canonical cached instance instead.
  isByName = Boolean.TRUE;
  client.invoke(
      () -> PRClientServerRegionFunctionExecutionDUnitTest.serverMultiKeyExecution(isByName));
  // Verify bucket placement on each server (see checkBucketsOnServer).
  server1.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest.checkBucketsOnServer());
  server2.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest.checkBucketsOnServer());
  server3.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest.checkBucketsOnServer());
}
/*
 * Execution of the function on server with bucket as filter
 */
@Test
public void testBucketFilter() {
  createScenarioForBucketFilter();
  registerFunctionAtServer(new TestFunction(true, TestFunction.TEST_FUNCTION_BUCKET_FILTER));
  // First run the execution with a filter naming several buckets...
  Set<Integer> multiBucketFilter = new HashSet<Integer>();
  Collections.addAll(multiBucketFilter, 3, 6, 8);
  client.invoke(() -> PRClientServerTestBase.serverBucketFilterExecution(multiBucketFilter));
  // ...then repeat with a filter naming a single bucket.
  Set<Integer> singleBucketFilter = new HashSet<Integer>();
  singleBucketFilter.add(7);
  client.invoke(() -> PRClientServerTestBase.serverBucketFilterExecution(singleBucketFilter));
}
/** Bucket-filter execution combined with an explicit key filter (see the helper's name). */
@Test
public void testBucketFilterOverride() {
  createScenarioForBucketFilter();
  registerFunctionAtServer(new TestFunction(true, TestFunction.TEST_FUNCTION_BUCKET_FILTER));
  // A multi-bucket filter plus an explicit key filter.
  Set<Integer> bucketFilter = new HashSet<Integer>();
  Collections.addAll(bucketFilter, 3, 6, 8);
  Set<Integer> keyFilter = new HashSet<Integer>();
  Collections.addAll(keyFilter, 75, 25);
  client.invoke(
      () -> PRClientServerTestBase.serverBucketFilterOverrideExecution(bucketFilter, keyFilter));
}
/** Multi-key execution of a function that sends an exception as a result. */
@Test
public void testserverMultiKeyExecution_SendException() {
  createScenario();
  Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_SEND_EXCEPTION);
  registerFunctionAtServer(function);
  // Boolean(boolean) is deprecated; use the canonical cached instance instead.
  isByName = Boolean.TRUE;
  client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest
      .serverMultiKeyExecution_SendException(isByName));
}
/** Multi-key execution of a function that throws an exception while running. */
@Test
public void testserverMultiKeyExecution_ThrowException() {
  createScenario();
  Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_THROW_EXCEPTION);
  registerFunctionAtServer(function);
  // Boolean(boolean) is deprecated; use the canonical cached instance instead.
  isByName = Boolean.TRUE;
  client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest
      .serverMultiKeyExecution_ThrowException(isByName));
}
/*
 * Execution of the inline function on server with set multiple keys as the routing object
 */
@Test
public void testserverMultiKeyExecution_byInlineFunction() {
createScenario();
// The inline function is defined inside the static helper invoked on the client VM.
client.invoke(
() -> PRClientServerRegionFunctionExecutionDUnitTest.serverMultiKeyExecution_Inline());
}
/*
 * Execution of the inline function on server with set multiple keys as the routing object
 * Function throws the FunctionInvocationTargetException. As this is the case of HA then system
 * should retry the function execution. After 5th attempt function will send Boolean as last
 * result.
 */
@Test
public void testserverMultiKeyExecution_FunctionInvocationTargetException() {
createScenario();
// Retry/HA behavior is asserted inside the static helper run on the client VM.
client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest
.serverMultiKeyExecution_FunctionInvocationTargetException());
}
/*
 * Execution of the function on server with set multiple keys as the routing object and using the
 * name of the function
 */
@Test
public void testserverMultiKeyExecutionNoResult_byName() {
  createScenario();
  Function function = new TestFunction(false, TEST_FUNCTION7);
  registerFunctionAtServer(function);
  // Boolean(boolean) is deprecated; use the canonical cached instance instead.
  isByName = Boolean.TRUE;
  client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest
      .serverMultiKeyExecutionNoResult(isByName));
}
/*
 * Execution of the function on server with set multiple keys as the routing object and using the
 * instance of the function
 */
@Test
public void testserverMultiKeyExecution_byInstance() {
  createScenario();
  Function function = new TestFunction(true, TEST_FUNCTION2);
  registerFunctionAtServer(function);
  // Boolean(boolean) is deprecated; use the canonical cached instance instead.
  isByName = Boolean.FALSE;
  client.invoke(
      () -> PRClientServerRegionFunctionExecutionDUnitTest.serverMultiKeyExecution(isByName));
}
/*
 * Ensure that the execution is limited to a single bucket put another way, that the routing logic
 * works correctly such that there is not extra execution
 */
@Test
public void testserverMultiKeyExecutionOnASingleBucket_byName() {
  createScenario();
  Function function = new TestFunction(true, TEST_FUNCTION2);
  registerFunctionAtServer(function);
  // Boolean(boolean) is deprecated; use the canonical cached instance instead.
  isByName = Boolean.TRUE;
  client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest
      .serverMultiKeyExecutionOnASingleBucket(isByName));
}
/*
 * Ensure that the execution is limited to a single bucket put another way, that the routing logic
 * works correctly such that there is not extra execution
 */
@Test
public void testserverMultiKeyExecutionOnASingleBucket_byInstance() {
  createScenario();
  Function function = new TestFunction(true, TEST_FUNCTION2);
  registerFunctionAtServer(function);
  // Boolean(boolean) is deprecated; use the canonical cached instance instead.
  isByName = Boolean.FALSE;
  client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest
      .serverMultiKeyExecutionOnASingleBucket(isByName));
}
/**
 * Runs on the client VM: executes {@code function} on the partitioned region with a
 * single routing key and expects the execution to fail with a connectivity- or
 * invocation-target-related exception (the non-HA case); any other exception is
 * rethrown to fail the test.
 */
public static void regionSingleKeyExecutionNonHA(Boolean isByName, Function function,
    Boolean toRegister) throws Exception {
  Region region = cache.getRegion(PartitionedRegionName);
  assertNotNull(region);
  final String testKey = "execKey";
  final Set testKeysSet = new HashSet();
  testKeysSet.add(testKey);
  DistributedSystem.setThreadsSocketPolicy(false);
  if (toRegister.booleanValue()) {
    FunctionService.registerFunction(function);
  } else {
    // Make sure the function really is unregistered before executing.
    FunctionService.unregisterFunction(function.getId());
    assertNull(FunctionService.getFunction(function.getId()));
  }
  Execution dataSet = FunctionService.onRegion(region);
  region.put(testKey, Integer.valueOf(1)); // new Integer(int) is deprecated
  try {
    ArrayList<String> args = new ArrayList<String>();
    args.add(retryRegionName);
    args.add("regionSingleKeyExecutionNonHA");
    // Result intentionally ignored: the call itself is expected to throw.
    execute(dataSet, testKeysSet, args, function, isByName);
    // fail() throws AssertionError, which the catch below does not swallow.
    fail("Expected ServerConnectivityException not thrown!");
  } catch (Exception ex) {
    if (!(ex.getCause() instanceof ServerConnectivityException)
        && !((ex instanceof FunctionInvocationTargetException
            || ex.getCause() instanceof FunctionInvocationTargetException))) {
      throw ex;
    }
  }
}
public static void regionExecutionHAOneServerDown(Boolean isByName, Function function,
Boolean toRegister) throws Exception {
Region region = cache.getRegion(PartitionedRegionName);
assertNotNull(region);
final String testKey = "execKey";
final Set testKeysSet = new HashSet();
testKeysSet.add(testKey);
DistributedSystem.setThreadsSocketPolicy(false);
if (toRegister.booleanValue()) {
FunctionService.registerFunction(function);
} else {
FunctionService.unregisterFunction(function.getId());
assertNull(FunctionService.getFunction(function.getId()));
}
Execution dataSet = FunctionService.onRegion(region);
region.put(testKey, new Integer(1));
ArrayList<String> args = new ArrayList<String>();
args.add(retryRegionName);
args.add("regionExecutionHAOneServerDown");
ResultCollector rs = execute(dataSet, testKeysSet, args, function, isByName);
assertEquals(1, ((List) rs.getResult()).size());
}
public static void regionExecutionHATwoServerDown(Boolean isByName, Function function,
Boolean toRegister) throws Exception {
Region region = cache.getRegion(PartitionedRegionName);
assertNotNull(region);
final String testKey = "execKey";
final Set testKeysSet = new HashSet();
testKeysSet.add(testKey);
DistributedSystem.setThreadsSocketPolicy(false);
if (toRegister.booleanValue()) {
FunctionService.registerFunction(function);
} else {
FunctionService.unregisterFunction(function.getId());
assertNull(FunctionService.getFunction(function.getId()));
}
Execution dataSet = FunctionService.onRegion(region);
region.put(testKey, new Integer(1));
ArrayList<String> args = new ArrayList<String>();
args.add(retryRegionName);
args.add("regionExecutionHATwoServerDown");
ResultCollector rs = execute(dataSet, testKeysSet, args, function, isByName);
assertEquals(1, ((List) rs.getResult()).size());
}
  /** Creates the replicated metadata region (named {@code retryRegionName}) on a server VM. */
  public static void createReplicatedRegion() {
    metaDataRegion = cache.createRegionFactory(RegionShortcut.REPLICATE).create(retryRegionName);
  }
  /**
   * Creates a local EMPTY (proxy) metadata region on the client, backed by the shared pool, with
   * endpoint shuffling disabled so connections are deterministic.
   *
   * @param hostName NOTE(review): currently unused in this method — presumably kept for the
   *        SerializableRunnable/invoke signature used by callers; confirm before removing.
   */
  public static void createProxyRegion(String hostName) {
    CacheServerTestUtil.disableShufflingOfEndpoints();
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.LOCAL);
    // EMPTY data policy: all data lives on the servers, this region is a pure proxy.
    factory.setDataPolicy(DataPolicy.EMPTY);
    factory.setPoolName(pool.getName());
    RegionAttributes attrs = factory.create();
    metaDataRegion = cache.createRegion(retryRegionName, attrs);
    assertNotNull(metaDataRegion);
  }
public static void verifyMetaData(Integer arg1, Integer arg2) {
if (arg1 == 0) {
assertNull(metaDataRegion.get("stopped"));
} else {
assertEquals(metaDataRegion.get("stopped"), arg1);
}
if (arg2 == 0) {
assertNull(metaDataRegion.get("sentresult"));
} else {
assertEquals(metaDataRegion.get("sentresult"), arg2);
}
}
  /**
   * Registers an inline function under the id "Function" that replies "Failure" for a String
   * argument and Boolean.FALSE for a Boolean argument.
   */
  public static void registerFunction() {
    FunctionService.registerFunction(new FunctionAdapter() {
      @Override
      public void execute(FunctionContext context) {
        // NOTE(review): arguments of any other type send no result at all — presumably callers
        // only ever pass String or Boolean; confirm against the tests that use this id.
        if (context.getArguments() instanceof String) {
          context.getResultSender().lastResult("Failure");
        } else if (context.getArguments() instanceof Boolean) {
          context.getResultSender().lastResult(Boolean.FALSE);
        }
      }
      @Override
      public String getId() {
        return "Function";
      }
      @Override
      public boolean hasResult() {
        return true;
      }
    });
  }
  /**
   * Regression check for bug 40714: executes an inline function (same id "Function" as a
   * previously registered one) over the whole region and asserts the inline implementation runs —
   * all results are Boolean.TRUE, not the "Failure"/FALSE replies of the registered version.
   */
  public static void FunctionExecution_Inline_Bug40714() {
    Region region = cache.getRegion(PartitionedRegionName);
    assertNotNull(region);
    final HashSet testKeysSet = new HashSet();
    for (int i = (totalNumBuckets.intValue() * 10); i > 0; i--) {
      testKeysSet.add("execKey-" + i);
    }
    int j = 0;
    for (Iterator i = testKeysSet.iterator(); i.hasNext();) {
      Integer val = new Integer(j++);
      region.put(i.next(), val);
    }
    List list = (List) FunctionService.onRegion(region).setArguments(Boolean.TRUE)
        .execute(new FunctionAdapter() {
          @Override
          public void execute(FunctionContext context) {
            if (context.getArguments() instanceof String) {
              context.getResultSender().lastResult("Success");
            } else if (context.getArguments() instanceof Boolean) {
              context.getResultSender().lastResult(Boolean.TRUE);
            }
          }
          @Override
          public String getId() {
            return "Function";
          }
          @Override
          public boolean hasResult() {
            return true;
          }
        }).getResult();
    // NOTE(review): 3 presumably corresponds to one result per server VM — confirm with the
    // scenario setup used by callers.
    assertEquals(3, list.size());
    Iterator iterator = list.iterator();
    for (int i = 0; i < 3; i++) {
      Boolean res = (Boolean) iterator.next();
      assertEquals(Boolean.TRUE, res);
    }
  }
  /**
   * Waits (up to 3 minutes, polling every second) until the pool reports exactly the expected
   * number of connected (live) servers.
   *
   * @param expectedDeadServers NOTE(review): currently unused by the wait criterion — presumably
   *        retained for signature compatibility with callers; confirm before removing.
   * @param expectedLiveServers number of servers the pool must report as connected
   */
  public static void verifyDeadAndLiveServers(final Integer expectedDeadServers,
      final Integer expectedLiveServers) {
    WaitCriterion wc = new WaitCriterion() {
      String excuse;

      // Satisfied once the connected-server count matches the expectation.
      public boolean done() {
        int sz = pool.getConnectedServerCount();
        LogWriterUtils.getLogWriter().info("Checking for the Live Servers : Expected : "
            + expectedLiveServers + " Available :" + sz);
        if (sz == expectedLiveServers.intValue()) {
          return true;
        }
        excuse = "Expected " + expectedLiveServers.intValue() + " but found " + sz;
        return false;
      }

      public String description() {
        return excuse;
      }
    };
    Wait.waitForCriterion(wc, 3 * 60 * 1000, 1000, true);
  }
  /**
   * Executes TEST_FUNCTION2 by id over a filter covering every bucket and asserts three
   * Boolean.TRUE results. A CacheClosedException is tolerated because callers run this while
   * servers may be shutting down.
   *
   * @throws ServerException on unexpected server-side failure
   * @throws InterruptedException if interrupted while executing
   */
  public static void executeFunction() throws ServerException, InterruptedException {
    Region region = cache.getRegion(PartitionedRegionName);
    assertNotNull(region);
    final HashSet testKeysSet = new HashSet();
    for (int i = (totalNumBuckets.intValue() * 10); i > 0; i--) {
      testKeysSet.add("execKey-" + i);
    }
    DistributedSystem.setThreadsSocketPolicy(false);
    Function function = new TestFunction(true, TEST_FUNCTION2);
    FunctionService.registerFunction(function);
    Execution dataSet = FunctionService.onRegion(region);
    try {
      ResultCollector rc1 =
          dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function.getId());
      List l = ((List) rc1.getResult());
      LogWriterUtils.getLogWriter().info("Result size : " + l.size());
      assertEquals(3, l.size());
      for (Iterator i = l.iterator(); i.hasNext();) {
        assertEquals(Boolean.TRUE, i.next());
      }
    } catch (CacheClosedException e) {
      // okay - ignore
    }
  }
  /**
   * Executes the HA test function by id over a filter covering every bucket and returns the raw
   * result list for the caller to validate (HA tests assert on it after server failover).
   *
   * @return the list of per-server results
   * @throws Exception on execution failure
   */
  public static Object executeFunctionHA() throws Exception {
    Region region = cache.getRegion(PartitionedRegionName);
    final HashSet testKeysSet = new HashSet();
    for (int i = (totalNumBuckets.intValue() * 10); i > 0; i--) {
      testKeysSet.add("execKey-" + i);
    }
    DistributedSystem.setThreadsSocketPolicy(false);
    Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_HA);
    FunctionService.registerFunction(function);
    Execution dataSet = FunctionService.onRegion(region);
    ResultCollector rc1 =
        dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function.getId());
    List l = ((List) rc1.getResult());
    LogWriterUtils.getLogWriter().info("Result size : " + l.size());
    return l;
  }
public static void putOperation() {
Region region = cache.getRegion(PartitionedRegionName);
assertNotNull(region);
final HashSet testKeysSet = new HashSet();
for (int i = (totalNumBuckets.intValue() * 10); i > 0; i--) {
testKeysSet.add("execKey-" + i);
}
int j = 0;
HashSet origVals = new HashSet();
for (Iterator i = testKeysSet.iterator(); i.hasNext();) {
Integer val = new Integer(j++);
origVals.add(val);
region.put(i.next(), val);
}
}
  /** Creates the standard client/server topology with a 13-bucket partitioned region. */
  protected void createScenario() {
    ArrayList commonAttributes =
        createCommonServerAttributes("TestPartitionedRegion", null, 0, 13, null);
    createClientServerScenarion(commonAttributes, 20, 20, 20);
  }
  /**
   * Creates a client/server topology with 113 buckets and a BucketFilterPRResolver so tests can
   * target specific buckets.
   */
  protected void createScenarioForBucketFilter() {
    ArrayList commonAttributes = createCommonServerAttributes("TestPartitionedRegion",
        new BucketFilterPRResolver(), 0, 113, null);
    createClientServerScenarion(commonAttributes, 20, 20, 20);
  }
  /** Creates the client/server topology variant that uses a single client connection. */
  private void createScenario_SingleConnection() {
    ArrayList commonAttributes =
        createCommonServerAttributes("TestPartitionedRegion", null, 0, 13, null);
    createClientServerScenarion_SingleConnection(commonAttributes, 0, 20, 20);
  }
  /** Creates the client/server topology variant that hosts two partitioned regions. */
  private void createScenarioWith2Regions() {
    ArrayList commonAttributes =
        createCommonServerAttributes(PartitionedRegionName, null, 0, 13, null);
    createClientServerScenarionWith2Regions(commonAttributes, 20, 20, 20);
  }
  /** Logs the local bucket-size map of the partitioned region and asserts it is available. */
  public static void checkBucketsOnServer() {
    PartitionedRegion region = (PartitionedRegion) cache.getRegion(PartitionedRegionName);
    HashMap localBucket2RegionMap = (HashMap) region.getDataStore().getSizeLocally();
    LogWriterUtils.getLogWriter().info(
        "Size of the " + PartitionedRegionName + " in this VM :- " + localBucket2RegionMap.size());
    Set entrySet = localBucket2RegionMap.entrySet();
    assertNotNull(entrySet);
  }
public static void serverMultiKeyExecutionOnASingleBucket(Boolean isByName) throws Exception {
Region region = cache.getRegion(PartitionedRegionName);
assertNotNull(region);
final HashSet testKeysSet = new HashSet();
for (int i = (totalNumBuckets.intValue() * 2); i > 0; i--) {
testKeysSet.add("execKey-" + i);
}
int j = 0;
for (Iterator i = testKeysSet.iterator(); i.hasNext();) {
Integer val = new Integer(j++);
region.put(i.next(), val);
}
DistributedSystem.setThreadsSocketPolicy(false);
for (Iterator kiter = testKeysSet.iterator(); kiter.hasNext();) {
Set singleKeySet = Collections.singleton(kiter.next());
Function function = new TestFunction(true, TEST_FUNCTION2);
FunctionService.registerFunction(function);
Execution dataSet = FunctionService.onRegion(region);
ResultCollector rc1 = execute(dataSet, singleKeySet, Boolean.TRUE, function, isByName);
List l = null;
l = ((List) rc1.getResult());
assertEquals(1, l.size());
ResultCollector rc2 =
execute(dataSet, singleKeySet, new HashSet(singleKeySet), function, isByName);
List l2 = null;
l2 = ((List) rc2.getResult());
assertEquals(1, l2.size());
List subList = (List) l2.iterator().next();
assertEquals(1, subList.size());
assertEquals(region.get(singleKeySet.iterator().next()), subList.iterator().next());
}
}
public static void serverMultiKeyExecution(Boolean isByName) {
Region region = cache.getRegion(PartitionedRegionName);
assertNotNull(region);
final HashSet testKeysSet = new HashSet();
for (int i = (totalNumBuckets.intValue() * 2); i > 0; i--) {
testKeysSet.add("execKey-" + i);
}
DistributedSystem.setThreadsSocketPolicy(false);
Function function = new TestFunction(true, TEST_FUNCTION2);
FunctionService.registerFunction(function);
Execution dataSet = FunctionService.onRegion(region);
try {
int j = 0;
HashSet origVals = new HashSet();
for (Iterator i = testKeysSet.iterator(); i.hasNext();) {
Integer val = new Integer(j++);
origVals.add(val);
region.put(i.next(), val);
}
List l = null;
ResultCollector rc1 = execute(dataSet, testKeysSet, Boolean.TRUE, function, isByName);
l = ((List) rc1.getResult());
LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
assertEquals(Boolean.TRUE, i.next());
}
List l2 = null;
ResultCollector rc2 = execute(dataSet, testKeysSet, testKeysSet, function, isByName);
l2 = ((List) rc2.getResult());
assertEquals(3, l2.size());
HashSet foundVals = new HashSet();
for (Iterator i = l2.iterator(); i.hasNext();) {
ArrayList subL = (ArrayList) i.next();
assertTrue(subL.size() > 0);
for (Iterator subI = subL.iterator(); subI.hasNext();) {
assertTrue(foundVals.add(subI.next()));
}
}
assertEquals(origVals, foundVals);
} catch (Exception e) {
Assert.fail("Test failed after the put operation", e);
}
}
public static void serverMultiKeyExecution_SendException(Boolean isByName) {
Region region = cache.getRegion(PartitionedRegionName);
assertNotNull(region);
final HashSet testKeysSet = new HashSet();
for (int i = (totalNumBuckets.intValue() * 2); i > 0; i--) {
testKeysSet.add("execKey-" + i);
}
DistributedSystem.setThreadsSocketPolicy(false);
Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_SEND_EXCEPTION);
FunctionService.registerFunction(function);
Execution dataSet = FunctionService.onRegion(region);
int j = 0;
HashSet origVals = new HashSet();
for (Iterator i = testKeysSet.iterator(); i.hasNext();) {
Integer val = new Integer(j++);
origVals.add(val);
region.put(i.next(), val);
}
try {
List l = null;
ResultCollector rc1 = execute(dataSet, testKeysSet, Boolean.TRUE, function, isByName);
l = ((List) rc1.getResult());
LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
assertTrue(i.next() instanceof MyFunctionExecutionException);
}
} catch (Exception ex) {
ex.printStackTrace();
fail("No Exception Expected");
}
try {
List l = null;
ResultCollector rc1 = execute(dataSet, testKeysSet, testKeysSet, function, isByName);
List resultList = (List) rc1.getResult();
assertEquals(((testKeysSet.size() * 3) + 3), resultList.size());
Iterator resultIterator = resultList.iterator();
int exceptionCount = 0;
while (resultIterator.hasNext()) {
Object o = resultIterator.next();
if (o instanceof MyFunctionExecutionException) {
exceptionCount++;
}
}
assertEquals(3, exceptionCount);
} catch (Exception ex) {
ex.printStackTrace();
fail("No Exception Expected");
}
}
public static void serverMultiKeyExecution_ThrowException(Boolean isByName) {
Region region = cache.getRegion(PartitionedRegionName);
assertNotNull(region);
final HashSet testKeysSet = new HashSet();
for (int i = (totalNumBuckets.intValue() * 2); i > 0; i--) {
testKeysSet.add("execKey-" + i);
}
DistributedSystem.setThreadsSocketPolicy(false);
Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_THROW_EXCEPTION);
FunctionService.registerFunction(function);
Execution dataSet = FunctionService.onRegion(region);
int j = 0;
HashSet origVals = new HashSet();
for (Iterator i = testKeysSet.iterator(); i.hasNext();) {
Integer val = new Integer(j++);
origVals.add(val);
region.put(i.next(), val);
}
try {
List l = null;
ResultCollector rc1 = execute(dataSet, testKeysSet, Boolean.TRUE, function, isByName);
fail("Exception Expected");
} catch (Exception ex) {
ex.printStackTrace();
}
}
public static void serverMultiKeyExecutionSocketTimeOut(Boolean isByName) {
Region region = cache.getRegion(PartitionedRegionName);
assertNotNull(region);
final HashSet testKeysSet = new HashSet();
for (int i = (totalNumBuckets.intValue() * 2); i > 0; i--) {
testKeysSet.add("execKey-" + i);
}
DistributedSystem.setThreadsSocketPolicy(false);
Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_SOCKET_TIMEOUT);
FunctionService.registerFunction(function);
Execution dataSet = FunctionService.onRegion(region);
try {
int j = 0;
HashSet origVals = new HashSet();
for (Iterator i = testKeysSet.iterator(); i.hasNext();) {
Integer val = new Integer(j++);
origVals.add(val);
region.put(i.next(), val);
}
List l = null;
ResultCollector rc1 = execute(dataSet, testKeysSet, Boolean.TRUE, function, isByName);
l = ((List) rc1.getResult());
LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
assertEquals(Boolean.TRUE, i.next());
}
} catch (Exception e) {
Assert.fail("Test failed after the put operation", e);
}
}
  /**
   * Executes the slow TEST_FUNCTION_SOCKET_TIMEOUT on a single key and asserts the expected
   * echoed results for both a Boolean argument and a String-key argument.
   *
   * @param isByName execute by registered id when true, by instance when false
   * @throws Exception on unexpected execution failure
   */
  public static void serverSingleKeyExecutionSocketTimeOut(Boolean isByName) throws Exception {
    Region region = cache.getRegion(PartitionedRegionName);
    assertNotNull(region);
    final String testKey = "execKey";
    final Set testKeysSet = new HashSet();
    testKeysSet.add(testKey);
    DistributedSystem.setThreadsSocketPolicy(false);
    Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_SOCKET_TIMEOUT);
    FunctionService.registerFunction(function);
    Execution dataSet = FunctionService.onRegion(region);
    region.put(testKey, new Integer(1));
    ResultCollector rs = execute(dataSet, testKeysSet, Boolean.TRUE, function, isByName);
    assertEquals(Boolean.TRUE, ((List) rs.getResult()).get(0));
    // With a String argument the function echoes the argument back.
    ResultCollector rs2 = execute(dataSet, testKeysSet, testKey, function, isByName);
    assertEquals(testKey, ((List) rs2.getResult()).get(0));
  }
  /**
   * Executes an inline (unregistered) function over a filter covering every bucket and asserts
   * three Boolean.TRUE results, exercising the path where the function definition itself is
   * shipped to the servers.
   */
  public static void serverMultiKeyExecution_Inline() {
    Region region = cache.getRegion(PartitionedRegionName);
    assertNotNull(region);
    final HashSet testKeysSet = new HashSet();
    for (int i = (totalNumBuckets.intValue() * 2); i > 0; i--) {
      testKeysSet.add("execKey-" + i);
    }
    DistributedSystem.setThreadsSocketPolicy(false);
    Execution dataSet = FunctionService.onRegion(region);
    try {
      int j = 0;
      HashSet origVals = new HashSet();
      for (Iterator i = testKeysSet.iterator(); i.hasNext();) {
        Integer val = new Integer(j++);
        origVals.add(val);
        region.put(i.next(), val);
      }
      List l = null;
      ResultCollector rc1 =
          dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(new FunctionAdapter() {
            @Override
            public void execute(FunctionContext context) {
              if (context.getArguments() instanceof String) {
                context.getResultSender().lastResult("Success");
              } else if (context.getArguments() instanceof Boolean) {
                context.getResultSender().lastResult(Boolean.TRUE);
              }
            }
            @Override
            public String getId() {
              return getClass().getName();
            }
            @Override
            public boolean hasResult() {
              return true;
            }
          });
      l = ((List) rc1.getResult());
      LogWriterUtils.getLogWriter().info("Result size : " + l.size());
      // NOTE(review): 3 presumably corresponds to one result per server VM in the scenario.
      assertEquals(3, l.size());
      for (Iterator i = l.iterator(); i.hasNext();) {
        assertEquals(Boolean.TRUE, i.next());
      }
    } catch (Exception e) {
      LogWriterUtils.getLogWriter().info("Exception : " + e.getMessage());
      e.printStackTrace();
      fail("Test failed after the put operation");
    }
  }
  /**
   * Executes an inline function that throws FunctionInvocationTargetException on the first
   * attempt; on re-execution the function detects isPossibleDuplicate() and returns the retry
   * count, verifying the automatic re-execution path.
   */
  public static void serverMultiKeyExecution_FunctionInvocationTargetException() {
    Region region = cache.getRegion(PartitionedRegionName);
    assertNotNull(region);
    final HashSet testKeysSet = new HashSet();
    for (int i = (totalNumBuckets.intValue() * 2); i > 0; i--) {
      testKeysSet.add("execKey-" + i);
    }
    DistributedSystem.setThreadsSocketPolicy(false);
    Execution dataSet = FunctionService.onRegion(region);
    int j = 0;
    HashSet origVals = new HashSet();
    for (Iterator i = testKeysSet.iterator(); i.hasNext();) {
      Integer val = new Integer(j++);
      origVals.add(val);
      region.put(i.next(), val);
    }
    ResultCollector rc1 = null;
    try {
      rc1 =
          dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(new FunctionAdapter() {
            @Override
            public void execute(FunctionContext context) {
              // On a retry the context is flagged as a possible duplicate; report the count.
              if (((RegionFunctionContext) context).isPossibleDuplicate()) {
                context.getResultSender().lastResult(new Integer(retryCount));
                return;
              }
              // First attempt: force the re-execution machinery to kick in.
              if (context.getArguments() instanceof Boolean) {
                throw new FunctionInvocationTargetException("I have been thrown from TestFunction");
              }
            }
            @Override
            public String getId() {
              return getClass().getName();
            }
            @Override
            public boolean hasResult() {
              return true;
            }
          });
      List list = (ArrayList) rc1.getResult();
      assertEquals(list.get(0), 0);
    } catch (Throwable e) {
      e.printStackTrace();
      Assert.fail("This is not expected Exception", e);
    }
  }
  /**
   * Executes TEST_FUNCTION7 (hasResult() == false) and then calls getResult(), which must raise a
   * FunctionException stating that no result can be returned; the expected exception is bracketed
   * with ExpectedException log markers so the DUnit log scanner ignores it.
   *
   * @param isByName execute by registered id when true, by instance when false
   * @throws Exception on unexpected failure
   */
  public static void serverMultiKeyExecutionNoResult(Boolean isByName) throws Exception {
    Region region = cache.getRegion(PartitionedRegionName);
    assertNotNull(region);
    final HashSet testKeysSet = new HashSet();
    for (int i = (totalNumBuckets.intValue() * 2); i > 0; i--) {
      testKeysSet.add("execKey-" + i);
    }
    DistributedSystem.setThreadsSocketPolicy(false);
    Function function = new TestFunction(false, TEST_FUNCTION7);
    FunctionService.registerFunction(function);
    Execution dataSet = FunctionService.onRegion(region);
    try {
      String msg = "<ExpectedException action=add>" + "FunctionException" + "</ExpectedException>";
      cache.getLogger().info(msg);
      int j = 0;
      HashSet origVals = new HashSet();
      for (Iterator i = testKeysSet.iterator(); i.hasNext();) {
        Integer val = new Integer(j++);
        origVals.add(val);
        region.put(i.next(), val);
      }
      ResultCollector rc1 = execute(dataSet, testKeysSet, Boolean.TRUE, function, isByName);
      // Asking a no-result function for its result must throw.
      rc1.getResult();
      Thread.sleep(20000);
      fail("Test failed after the put operation");
    } catch (FunctionException expected) {
      assertTrue(expected.getMessage()
          .startsWith((String.format("Cannot %s result as the Function#hasResult() is false",
              "return any"))));
    } finally {
      cache.getLogger()
          .info("<ExpectedException action=remove>" + "FunctionException" + "</ExpectedException>");
    }
  }
public static void serverSingleKeyExecutionOnRegion_SingleConnection() {
Region region = cache.getRegion(PartitionedRegionName);
assertNotNull(region);
for (int i = 0; i < 13; i++) {
region.put(new Integer(i), "KB_" + i);
}
Function function = new TestFunction(false, TEST_FUNCTION2);
Execution dataSet = FunctionService.onRegion(region);
dataSet.setArguments(Boolean.TRUE).execute(function);
region.put(new Integer(2), "KB_2");
assertEquals("KB_2", region.get(new Integer(2)));
}
public static void serverSingleKeyExecutionOnServer_SingleConnection() {
Region region = cache.getRegion(PartitionedRegionName);
assertNotNull(region);
Function function = new TestFunction(false, TEST_FUNCTION2);
Execution dataSet = FunctionService.onServer(pool);
dataSet.setArguments(Boolean.TRUE).execute(function);
region.put(new Integer(1), "KB_1");
assertEquals("KB_1", region.get(new Integer(1)));
}
  /**
   * Exercises single-key execution of TEST_FUNCTION2: first before the key exists (tolerating
   * known "no target node" style failures), then asserts the Boolean/echo/HashMap-put argument
   * behaviors after the key is present.
   *
   * @param isByName execute by registered id when true, by instance when false
   * @param toRegister register the function first when true, unregister it when false
   * @throws Exception if an unexpected exception type is raised
   */
  public static void serverSingleKeyExecution(Boolean isByName, Boolean toRegister)
      throws Exception {
    Region region = cache.getRegion(PartitionedRegionName);
    assertNotNull(region);
    final String testKey = "execKey";
    final Set testKeysSet = new HashSet();
    testKeysSet.add(testKey);
    DistributedSystem.setThreadsSocketPolicy(false);
    Function function = new TestFunction(true, TEST_FUNCTION2);
    if (toRegister.booleanValue()) {
      FunctionService.registerFunction(function);
    } else {
      FunctionService.unregisterFunction(function.getId());
      assertNull(FunctionService.getFunction(function.getId()));
    }
    Execution dataSet = FunctionService.onRegion(region);
    try {
      // The key has not been put yet, so routing may legitimately fail here.
      execute(dataSet, testKeysSet, Boolean.TRUE, function, isByName);
    } catch (Exception ex) {
      if (!(ex.getMessage().contains("No target node found for KEY = " + testKey)
          || ex.getMessage().startsWith("Server could not send the reply")
          || ex.getMessage().startsWith("Unexpected exception during"))) {
        throw ex;
      }
    }
    region.put(testKey, new Integer(1));
    ResultCollector rs = execute(dataSet, testKeysSet, Boolean.TRUE, function, isByName);
    assertEquals(Boolean.TRUE, ((List) rs.getResult()).get(0));
    // A String argument makes the function return the stored value for that key.
    ResultCollector rs2 = execute(dataSet, testKeysSet, testKey, function, isByName);
    assertEquals(new Integer(1), ((List) rs2.getResult()).get(0));
    // A HashMap argument makes the function put its entries into the region.
    HashMap putData = new HashMap();
    putData.put(testKey + "1", new Integer(2));
    putData.put(testKey + "2", new Integer(3));
    ResultCollector rs1 = execute(dataSet, testKeysSet, putData, function, isByName);
    assertEquals(Boolean.TRUE, ((List) rs1.getResult()).get(0));
    assertEquals(new Integer(2), region.get(testKey + "1"));
    assertEquals(new Integer(3), region.get(testKey + "2"));
  }
  /**
   * Executes the registered TEST_FUNCTION2 by id twice: first after clearing the client's cached
   * function attributes (forcing them to be re-fetched from the server), then with the cached
   * attributes present, asserting a TRUE result both times.
   */
  public static void executeRegisteredFunction() {
    Region region = cache.getRegion(PartitionedRegionName);
    assertNotNull(region);
    final String testKey = "execKey";
    final Set testKeysSet = new HashSet();
    testKeysSet.add(testKey);
    Execution dataSet = FunctionService.onRegion(region);
    region.put(testKey, new Integer(1));
    // Drop the cached attributes so the first execution must fetch them from the server.
    ((AbstractExecution) dataSet).removeFunctionAttributes(TestFunction.TEST_FUNCTION2);
    ResultCollector rs = dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE)
        .execute(TestFunction.TEST_FUNCTION2);
    assertEquals(Boolean.TRUE, ((List) rs.getResult()).get(0));
    // The execution should have repopulated the attribute cache.
    byte[] functionAttributes =
        ((AbstractExecution) dataSet).getFunctionAttributes(TestFunction.TEST_FUNCTION2);
    assertNotNull(functionAttributes);
    rs = dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE)
        .execute(TestFunction.TEST_FUNCTION2);
    assertEquals(Boolean.TRUE, ((List) rs.getResult()).get(0));
    assertNotNull(functionAttributes);
  }
  /**
   * Executes TEST_FUNCTION_SEND_EXCEPTION on a single key and verifies the sendException
   * protocol: the Boolean run returns a MyFunctionExecutionException as its first result, and the
   * key-set run returns (keys + 1) results of which exactly one is the exception.
   *
   * @param isByName execute by registered id when true, by instance when false
   * @param toRegister register the function first when true, unregister it when false
   * @throws Exception on unexpected failure
   */
  public static void serverSingleKeyExecution_SendException(Boolean isByName, Boolean toRegister)
      throws Exception {
    Region region = cache.getRegion(PartitionedRegionName);
    assertNotNull(region);
    final String testKey = "execKey";
    final Set testKeysSet = new HashSet();
    testKeysSet.add(testKey);
    DistributedSystem.setThreadsSocketPolicy(false);
    Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_SEND_EXCEPTION);
    if (toRegister.booleanValue()) {
      FunctionService.registerFunction(function);
    } else {
      FunctionService.unregisterFunction(function.getId());
      assertNull(FunctionService.getFunction(function.getId()));
    }
    Execution dataSet = FunctionService.onRegion(region);
    region.put(testKey, new Integer(1));
    ResultCollector rs = null;
    rs = execute(dataSet, testKeysSet, Boolean.TRUE, function, isByName);
    assertTrue(((List) rs.getResult()).get(0) instanceof MyFunctionExecutionException);
    rs = execute(dataSet, testKeysSet, (Serializable) testKeysSet, function, isByName);
    List resultList = (List) rs.getResult();
    assertEquals((testKeysSet.size() + 1), resultList.size());
    // Exactly one of the results must be the sent exception.
    Iterator resultIterator = resultList.iterator();
    int exceptionCount = 0;
    while (resultIterator.hasNext()) {
      Object o = resultIterator.next();
      if (o instanceof MyFunctionExecutionException) {
        exceptionCount++;
      }
    }
    assertEquals(1, exceptionCount);
  }
public static void serverSingleKeyExecution_ThrowException(Boolean isByName, Boolean toRegister) {
Region region = cache.getRegion(PartitionedRegionName);
assertNotNull(region);
final String testKey = "execKey";
final Set testKeysSet = new HashSet();
testKeysSet.add(testKey);
DistributedSystem.setThreadsSocketPolicy(false);
Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_THROW_EXCEPTION);
if (toRegister.booleanValue()) {
FunctionService.registerFunction(function);
} else {
FunctionService.unregisterFunction(function.getId());
assertNull(FunctionService.getFunction(function.getId()));
}
Execution dataSet = FunctionService.onRegion(region);
region.put(testKey, new Integer(1));
ResultCollector rs = null;
try {
rs = execute(dataSet, testKeysSet, Boolean.TRUE, function, isByName);
fail("Exception Expected");
} catch (Exception ex) {
}
}
  /**
   * Executes TEST_FUNCTION2 without a filter against two regions: region "1" must return FALSE,
   * while region "2" must fail with "No Replicated Region found for executing function".
   *
   * @param isByName NOTE(review): currently unused — both executions go by id; confirm whether a
   *        by-instance variant was intended.
   * @param toRegister register the function first when true, unregister it when false
   */
  public static void serverSingleKeyExecutionWith2Regions(Boolean isByName, Boolean toRegister) {
    Region region1 = cache.getRegion(PartitionedRegionName + "1");
    assertNotNull(region1);
    final String testKey = "execKey";
    final Set testKeysSet = new HashSet();
    testKeysSet.add(testKey);
    DistributedSystem.setThreadsSocketPolicy(false);
    Function function = new TestFunction(true, TEST_FUNCTION2);
    if (toRegister.booleanValue()) {
      FunctionService.registerFunction(function);
    } else {
      FunctionService.unregisterFunction(function.getId());
      assertNull(FunctionService.getFunction(function.getId()));
    }
    Execution dataSet1 = FunctionService.onRegion(region1);
    region1.put(testKey, new Integer(1));
    ResultCollector rs = dataSet1.execute(function.getId());
    assertEquals(Boolean.FALSE, ((List) rs.getResult()).get(0));
    Region region2 = cache.getRegion(PartitionedRegionName + "2");
    assertNotNull(region2);
    Execution dataSet2 = FunctionService.onRegion(region2);
    region2.put(testKey, new Integer(1));
    try {
      rs = dataSet2.execute(function.getId());
      assertEquals(Boolean.TRUE, ((List) rs.getResult()).get(0));
      fail("Expected FunctionException");
    } catch (Exception ex) {
      if (!ex.getMessage().startsWith("No Replicated Region found for executing function")) {
        throw ex;
      }
    }
  }
  /**
   * Executes TEST_FUNCTION_NO_LASTRESULT (a function that never calls lastResult) on a single key
   * and requires a FunctionException whose message contains "did not send last result".
   *
   * @param isByName execute by registered id when true, by instance when false
   * @param toRegister register the function first when true, unregister it when false
   * @throws Exception if an unexpected exception type is raised
   */
  public static void serverSingleKeyExecution_NoLastResult(Boolean isByName, Boolean toRegister)
      throws Exception {
    Region region = cache.getRegion(PartitionedRegionName);
    assertNotNull(region);
    final String testKey = "execKey";
    final Set testKeysSet = new HashSet();
    testKeysSet.add(testKey);
    DistributedSystem.setThreadsSocketPolicy(false);
    Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_NO_LASTRESULT);
    if (toRegister.booleanValue()) {
      FunctionService.registerFunction(function);
    } else {
      FunctionService.unregisterFunction(function.getId());
      assertNull(FunctionService.getFunction(function.getId()));
    }
    Execution dataSet = FunctionService.onRegion(region);
    region.put(testKey, new Integer(1));
    try {
      ResultCollector rs = execute(dataSet, testKeysSet, Boolean.TRUE, function, isByName);
      assertEquals(Boolean.TRUE, ((List) rs.getResult()).get(0));
      fail("Expected FunctionException : Function did not send last result");
    } catch (Exception ex) {
      if (!ex.getMessage().contains("did not send last result")) {
        throw ex;
      }
    }
  }
  /**
   * Executes TEST_FUNCTION_REEXECUTE_EXCEPTION on a single key (by instance) and asserts the
   * reported re-execution count is at least 5, verifying automatic retry on
   * FunctionInvocationTargetException.
   *
   * @throws Exception on unexpected failure
   */
  public static void serverSingleKeyExecution_FunctionInvocationTargetException() throws Exception {
    Region region = cache.getRegion(PartitionedRegionName);
    assertNotNull(region);
    final String testKey = "execKey";
    final Set testKeysSet = new HashSet();
    testKeysSet.add(testKey);
    DistributedSystem.setThreadsSocketPolicy(false);
    Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_REEXECUTE_EXCEPTION);
    FunctionService.registerFunction(function);
    Execution dataSet = FunctionService.onRegion(region);
    region.put(testKey, new Integer(1));
    ResultCollector rs = execute(dataSet, testKeysSet, Boolean.TRUE, function, false);
    ArrayList list = (ArrayList) rs.getResult();
    // The function reports how many times it was re-executed; at least 5 retries are expected.
    assertTrue(((Integer) list.get(0)) >= 5);
  }
  /**
   * Executes inline (unregistered) functions on a single key: first before the key exists
   * (tolerating known routing failures, bracketed by ExpectedException markers), then asserts a
   * "Failure" result for a Boolean argument and a "Success" result for a String argument.
   *
   * @throws Exception if an unexpected exception type is raised
   */
  public static void serverSingleKeyExecution_Inline() throws Exception {
    Region region = cache.getRegion(PartitionedRegionName);
    assertNotNull(region);
    final String testKey = "execKey";
    final Set testKeysSet = new HashSet();
    testKeysSet.add(testKey);
    DistributedSystem.setThreadsSocketPolicy(false);
    Execution dataSet = FunctionService.onRegion(region);
    try {
      cache.getLogger()
          .info("<ExpectedException action=add>" + "No target node found for KEY = "
              + "|Server could not send the reply" + "|Unexpected exception during"
              + "</ExpectedException>");
      // The key has not been put yet, so routing may legitimately fail here.
      dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(new FunctionAdapter() {
        @Override
        public void execute(FunctionContext context) {
          if (context.getArguments() instanceof String) {
            context.getResultSender().lastResult("Success");
          }
          context.getResultSender().lastResult("Failure");
        }
        @Override
        public String getId() {
          return getClass().getName();
        }
        @Override
        public boolean hasResult() {
          return true;
        }
      });
    } catch (Exception ex) {
      if (!(ex.getMessage().contains("No target node found for KEY = " + testKey)
          || ex.getMessage().startsWith("Server could not send the reply")
          || ex.getMessage().startsWith("Unexpected exception during"))) {
        throw ex;
      }
    } finally {
      cache.getLogger()
          .info("<ExpectedException action=remove>" + "No target node found for KEY = "
              + "|Server could not send the reply" + "|Unexpected exception during"
              + "</ExpectedException>");
    }
    region.put(testKey, new Integer(1));
    // Boolean argument: the non-String branch sends "Failure".
    ResultCollector rs =
        dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(new FunctionAdapter() {
          @Override
          public void execute(FunctionContext context) {
            if (context.getArguments() instanceof String) {
              context.getResultSender().lastResult("Success");
            } else {
              context.getResultSender().lastResult("Failure");
            }
          }
          @Override
          public String getId() {
            return getClass().getName();
          }
          @Override
          public boolean hasResult() {
            return true;
          }
        });
    assertEquals("Failure", ((List) rs.getResult()).get(0));
    // String argument: the String branch sends "Success".
    ResultCollector rs2 =
        dataSet.withFilter(testKeysSet).setArguments(testKey).execute(new FunctionAdapter() {
          @Override
          public void execute(FunctionContext context) {
            if (context.getArguments() instanceof String) {
              context.getResultSender().lastResult("Success");
            } else {
              context.getResultSender().lastResult("Failure");
            }
          }
          @Override
          public String getId() {
            return getClass().getName();
          }
          @Override
          public boolean hasResult() {
            return true;
          }
        });
    assertEquals("Success", ((List) rs2.getResult()).get(0));
  }
/**
* This class can be serialized but its deserialization will always fail
*
*/
private static class UnDeserializable implements DataSerializable {
public void toData(DataOutput out) throws IOException {}
public void fromData(DataInput in) throws IOException, ClassNotFoundException {
throw new RuntimeException("deserialization is not allowed on this class");
}
}
/**
 * Executes a region function whose argument cannot be deserialized on the
 * server. Only the anticipated deserialization failure is tolerated; any other
 * exception is rethrown so the test fails loudly.
 */
public static void serverBug43430() {
  final String deserializationFailure =
      "Could not create an instance of org.apache.geode.internal.cache.execute.PRClientServerRegionFunctionExecutionDUnitTest$UnDeserializable";
  Region region = cache.getRegion(PartitionedRegionName);
  assertNotNull(region);
  final String key = "execKey";
  final Set filterKeys = new HashSet();
  filterKeys.add(key);
  DistributedSystem.setThreadsSocketPolicy(false);
  Execution execution = FunctionService.onRegion(region);
  region.put(key, new Integer(1));
  try {
    cache.getLogger()
        .info("<ExpectedException action=add>" + deserializationFailure + "</ExpectedException>");
    execution.withFilter(filterKeys).setArguments(new UnDeserializable())
        .execute(new FunctionAdapter() {
          @Override
          public void execute(FunctionContext context) {
            if (context.getArguments() instanceof String) {
              context.getResultSender().lastResult("Success");
            }
            context.getResultSender().lastResult("Failure");
          }

          @Override
          public String getId() {
            return getClass().getName();
          }

          @Override
          public boolean hasResult() {
            return true;
          }
        });
  } catch (Exception expected) {
    // Anything other than the expected argument-deserialization failure is a
    // genuine error and must propagate.
    if (!expected.getCause().getMessage().contains(deserializationFailure)) {
      throw expected;
    }
  } finally {
    cache.getLogger()
        .info("<ExpectedException action=remove>" + deserializationFailure + "</ExpectedException>");
  }
}
/**
 * Runs {@code function} over {@code dataSet}, restricted to {@code testKeysSet}
 * with {@code args}, dispatching either by registered id or by instance.
 */
private static ResultCollector execute(Execution dataSet, Set testKeysSet, Serializable args,
    Function function, Boolean isByName) throws Exception {
  Execution prepared = dataSet.withFilter(testKeysSet).setArguments(args);
  // By-name dispatch requires the function to be registered on the servers.
  return isByName.booleanValue() ? prepared.execute(function.getId())
      : prepared.execute(function);
}
/**
 * Attempt to do a client server function execution with an arg that fails
 * deserialization on the server. The client should see an exception instead of
 * a hang if bug 43430 is fixed.
 */
@Test
public void testBug43430() {
  createScenario();
  Function function = new TestFunction(true, TEST_FUNCTION2);
  registerFunctionAtServer(function);
  // The server side legitimately logs these while the bad argument is rejected.
  final String suspectBody = "No target node found for KEY = "
      + "|Server could not send the reply" + "|Unexpected exception during";
  runOnAllServers(new SerializableRunnable() {
    public void run() {
      cache.getLogger()
          .info("<ExpectedException action=add>" + suspectBody + "</ExpectedException>");
    }
  });
  client.invoke(() -> PRClientServerRegionFunctionExecutionDUnitTest.serverBug43430());
  runOnAllServers(new SerializableRunnable() {
    public void run() {
      cache.getLogger()
          .info("<ExpectedException action=remove>" + suspectBody + "</ExpectedException>");
    }
  });
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.axis2.description;
import org.apache.axiom.om.OMAbstractFactory;
import org.apache.axiom.om.OMAttribute;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.OMFactory;
import org.apache.axiom.om.OMNamespace;
import org.apache.axiom.om.OMNode;
import org.apache.axiom.soap.SOAP11Constants;
import org.apache.axiom.soap.SOAP12Constants;
import org.apache.axis2.AxisFault;
import org.apache.axis2.Constants;
import org.apache.axis2.addressing.AddressingConstants;
import org.apache.axis2.addressing.AddressingHelper;
import org.apache.axis2.description.java2wsdl.Java2WSDLConstants;
import org.apache.axis2.engine.AxisConfiguration;
import org.apache.axis2.util.ExternalPolicySerializer;
import org.apache.axis2.util.JavaUtils;
import org.apache.axis2.util.PolicyLocator;
import org.apache.axis2.util.PolicyUtil;
import org.apache.axis2.util.WSDLSerializationUtil;
import org.apache.axis2.util.XMLUtils;
import org.apache.axis2.wsdl.WSDLConstants;
import org.apache.neethi.Policy;
import org.apache.neethi.PolicyComponent;
import org.apache.neethi.PolicyReference;
import org.apache.ws.commons.schema.XmlSchema;
import org.apache.ws.commons.schema.XmlSchemaComplexType;
import org.apache.ws.commons.schema.XmlSchemaElement;
import org.apache.ws.commons.schema.XmlSchemaParticle;
import org.apache.ws.commons.schema.XmlSchemaSequence;
import org.apache.ws.commons.schema.XmlSchemaSequenceMember;
import org.apache.ws.commons.schema.XmlSchemaSimpleType;
import org.apache.ws.commons.schema.XmlSchemaType;
import javax.xml.namespace.QName;
import javax.xml.stream.FactoryConfigurationError;
import javax.xml.stream.XMLStreamException;
import java.io.ByteArrayInputStream;
import java.io.StringWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Serializes an {@link AxisService} into a WSDL 2.0 {@code description}
 * document. {@link #generateDescription} resolves/declares the namespaces and
 * caches them in fields, which the other {@code generate*} methods then reuse,
 * so {@code generateOM()} drives the overall sequencing.
 */
public class AxisService2WSDL20 implements WSDL2Constants {

    /** The service being rendered into WSDL. */
    protected AxisService axisService;

    /** Name written on the wsdl2:service element; defaults to the AxisService name. */
    protected String serviceName;

    /** Optional explicit endpoint addresses used when generating default endpoints. */
    private String[] eprs = null;

    // Namespaces resolved by generateDescription() and shared across the
    // generator methods.
    private OMNamespace wsaw;
    private OMNamespace wsdl;
    private OMNamespace wsoap;
    private OMNamespace whttp;
    private OMNamespace wsdlx;
    private OMNamespace wrpc;
    private OMNamespace tns;

    /** Interface name resolved by generateInterface(). */
    private String interfaceName;

    /** Policies referenced from the description, keyed by reference URI with any leading '#' stripped. */
    private HashMap policiesInDescription = new HashMap();

    /** Serializer that filters out locally-configured policy assertions. */
    private ExternalPolicySerializer filter = null;

    /** When true (the default), endpoints whose transport is inactive are skipped. */
    private boolean checkIfEndPointActive = true;

    public AxisService2WSDL20() { }

    // Extension hook; intentionally a no-op here. NOTE(review): no caller is
    // visible in this class — confirm subclass usage before changing.
    protected void init() throws AxisFault {
    }

    /**
     * @param service the service to serialize; its own name becomes the WSDL
     *                service name
     */
    public AxisService2WSDL20(AxisService service) {
        this.axisService = service;
        this.serviceName = service.getName();
    }

    /**
     * @param service     the service to serialize
     * @param serviceName overrides the name written on the service element
     */
    public AxisService2WSDL20(AxisService service, String serviceName) {
        this.axisService = service;
        this.serviceName = serviceName;
    }
/**
 * Controls whether an endpoint's transport must be active for that endpoint to
 * be written into the generated WSDL. When {@code true} (the default), inactive
 * endpoints are skipped; when {@code false}, every endpoint is emitted.
 *
 * @param flag true=check if endpoint is active before adding endpoint.
 *             false=add endpoint independent of whether endpoint is active.
 */
public void setCheckIfEndPointActive(boolean flag) {
    this.checkIfEndPointActive = flag;
}
/**
 * Generates the complete WSDL 2.0 document for this web service: description
 * root, optional documentation, types, interface, service/bindings, and
 * finally the policies referenced from the description.
 *
 * @return the wsdl2:description document element
 * @throws Exception if any stage of generation fails
 */
public OMElement generateOM() throws Exception {
    OMFactory factory = OMAbstractFactory.getOMFactory();
    OMElement description = generateDescription(factory);

    OMElement documentation = generateDocumentation(factory);
    if (documentation != null) {
        description.addChild(documentation);
    }

    OMElement typesElement = generateTypes(factory);
    if (typesElement != null) {
        description.addChild(typesElement);
    }

    OMElement interfaceElement = generateInterface(factory);
    if (interfaceElement != null) {
        description.addChild(interfaceElement);
    }

    // Adds the service (and bindings) directly to the description element.
    generateService(factory, description, isDisableREST(), isDisableSOAP12(),
            isDisableSOAP11());
    addPoliciesToDescriptionElement(getPoliciesInDefinitions(), description);
    return description;
}
/**
 * Generates the wsdl2:interface element for the service: one operation child
 * per non-control AxisOperation, plus one fault child per distinct fault
 * message name. Child ordering is preserved from the original implementation:
 * fault elements are appended as they are discovered, and all operation
 * elements are appended after them.
 *
 * @param wsdl          the WSDL 2.0 namespace
 * @param tns           the target namespace
 * @param wsdlx         the WSDL extensions namespace
 * @param wrpc          the WSDL RPC-style namespace
 * @param fac           the active OMFactory
 * @param interfaceName the name of the interface
 * @return the generated interface element
 * @throws FactoryConfigurationError
 * @throws XMLStreamException
 */
private OMElement getInterfaceElement(OMNamespace wsdl, OMNamespace tns, OMNamespace wsdlx,
                                      OMNamespace wrpc, OMFactory fac, String interfaceName)
        throws URISyntaxException, AxisFault, XMLStreamException, FactoryConfigurationError {
    OMElement interfaceElement = fac.createOMElement(WSDL2Constants.INTERFACE_LOCAL_NAME, wsdl);
    interfaceElement.addAttribute(fac.createOMAttribute(WSDL2Constants.ATTRIBUTE_NAME, null,
                                                        interfaceName));
    addPolicyAsExtensibleElement(axisService, interfaceElement);

    List<OMElement> interfaceOperations = new ArrayList<OMElement>();
    // Fault names already declared on the interface; a set gives O(1) dedup
    // instead of the previous ArrayList.contains scan.
    Set<String> declaredFaults = new HashSet<String>();
    for (Iterator iterator = axisService.getOperations(); iterator.hasNext();) {
        AxisOperation axisOperation = (AxisOperation) iterator.next();
        if (axisOperation.isControlOperation()) {
            // Control operations are internal and not part of the public contract.
            continue;
        }
        interfaceOperations.add(generateInterfaceOperationElement(axisOperation, wsdl, tns,
                                                                  wsdlx, wrpc));
        for (Iterator faultsIterator = axisOperation.getFaultMessages().iterator();
             faultsIterator.hasNext();) {
            AxisMessage faultMessage = (AxisMessage) faultsIterator.next();
            String name = faultMessage.getName();
            // Each fault name is declared once, even if several operations share it.
            if (declaredFaults.add(name)) {
                OMElement faultElement =
                        fac.createOMElement(WSDL2Constants.FAULT_LOCAL_NAME, wsdl);
                faultElement.addAttribute(
                        fac.createOMAttribute(WSDL2Constants.ATTRIBUTE_NAME, null, name));
                faultElement.addAttribute(fac.createOMAttribute(
                        WSDL2Constants.ATTRIBUTE_ELEMENT, null, WSDLSerializationUtil
                                .getElementName(faultMessage, axisService.getNamespaceMap())));
                interfaceElement.addChild(faultElement);
            }
        }
    }
    // Operations are appended after all fault declarations.
    for (OMElement operationElement : interfaceOperations) {
        interfaceElement.addChild(operationElement);
    }
    return interfaceElement;
}
/**
 * Builds the wsdl2:service element, naming it with {@link #serviceName} and
 * pointing its {@code interface} attribute at the given interface in the
 * target namespace.
 *
 * @param wsdl          the WSDL namespace
 * @param tns           the target namespace
 * @param omFactory     the active OMFactory
 * @param interfaceName the name of the interface
 * @return the generated service element
 */
private OMElement getServiceElement(OMNamespace wsdl, OMNamespace tns, OMFactory omFactory,
                                    String interfaceName) {
    OMElement serviceElement =
            omFactory.createOMElement(WSDL2Constants.SERVICE_LOCAL_NAME, wsdl);
    serviceElement.addAttribute(
            omFactory.createOMAttribute(WSDL2Constants.ATTRIBUTE_NAME, null, serviceName));
    // The interface attribute is a QName in the target namespace.
    String qualifiedInterfaceName = tns.getPrefix() + ":" + interfaceName;
    serviceElement.addAttribute(omFactory.createOMAttribute(
            WSDL2Constants.INTERFACE_LOCAL_NAME, null, qualifiedInterfaceName));
    return serviceElement;
}
/**
 * Generates the interface-level wsdl2:operation element for an AxisOperation:
 * name, style (cached per operation), message-exchange pattern, optional
 * wsdlx:safe flag, and input/output/fault message references with their
 * wsaw:Action attributes. As with the binding operations we don't need to ask
 * AxisMessage to serialize itself, because AxisMessage carries no
 * binding-specific properties.
 *
 * @param axisOperation the operation to write
 * @param wsdl  the WSDL namespace
 * @param tns   the target namespace
 * @param wsdlx the WSDL extensions namespace (WSDL 2.0)
 * @param wrpc  the WSDL RPC-style namespace
 * @return the generated &lt;operation&gt; element
 * @throws FactoryConfigurationError
 * @throws XMLStreamException
 */
public OMElement generateInterfaceOperationElement(AxisOperation axisOperation,
                                                   OMNamespace wsdl,
                                                   OMNamespace tns,
                                                   OMNamespace wsdlx,
                                                   OMNamespace wrpc) throws
        URISyntaxException, AxisFault, XMLStreamException, FactoryConfigurationError {
    OMFactory omFactory = OMAbstractFactory.getOMFactory();
    OMElement axisOperationElement =
            omFactory.createOMElement(WSDL2Constants.OPERATION_LOCAL_NAME, wsdl);
    WSDLSerializationUtil
            .addWSDLDocumentationElement(axisOperation, axisOperationElement, omFactory, wsdl);
    axisOperationElement.addAttribute(omFactory.createOMAttribute(WSDL2Constants.ATTRIBUTE_NAME,
                                                                  null,
                                                                  axisOperation
                                                                          .getName().getLocalPart()));
    addPolicyAsExtensibleElement(axisOperation, axisOperationElement);
    // Styles are computed once by checkStyle() and cached as an operation
    // parameter so repeated serialization does not re-run the schema analysis.
    URI[] opStyle = (URI[]) axisOperation.getParameterValue(WSDL2Constants.OPERATION_STYLE);
    if (opStyle == null) {
        opStyle = checkStyle(axisOperation);
        Parameter opStyleParameter = new Parameter();
        opStyleParameter.setName(WSDL2Constants.OPERATION_STYLE);
        opStyleParameter.setValue(opStyle);
        axisOperation.addParameter(opStyleParameter);
    }
    if (opStyle != null && opStyle.length > 0) {
        // style attribute is a space-separated list of style URIs.
        String style = opStyle[0].toString();
        for (int i = 1; i < opStyle.length; i++) {
            URI uri = opStyle[i];
            style = style + " " + uri;
        }
        axisOperationElement.addAttribute(
                omFactory.createOMAttribute(WSDL2Constants.ATTRIBUTE_STYLE, null, style));
        if (style.indexOf(WSDL2Constants.STYLE_RPC) >= 0) {
            // NOTE(review): the wrpc:signature parameter value could be null if it
            // was never set for this operation — confirm checkStyle() always sets
            // it when RPC style is reported.
            axisOperationElement.addAttribute(
                    omFactory.createOMAttribute(WSDL2Constants.ATTRIBUTE_SIGNATURE, wrpc,
                                                (String) axisOperation.getParameterValue(
                                                        WSDL2Constants.ATTR_WRPC_SIGNATURE)));
        }
    }
    axisOperationElement.addAttribute(omFactory.createOMAttribute(
            WSDL2Constants.ATTRIBUTE_NAME_PATTERN, null, axisOperation.getMessageExchangePattern()));
    Parameter param = axisOperation.getParameter(WSDL2Constants.ATTR_WSDLX_SAFE);
    if (param != null) {
        // wsdlx:safe marks the operation as "safe" in the WSDL 2.0 sense.
        axisOperationElement.addAttribute(omFactory.createOMAttribute(
                WSDL2Constants.ATTRIBUTE_SAFE, wsdlx, (param.getValue()).toString()));
    }
    AxisService axisService = axisOperation.getAxisService();
    Map nameSpaceMap = axisService.getNamespaceMap();

    // Add the input element
    AxisMessage inMessage = (AxisMessage) axisOperation.getChild(WSDLConstants.WSDL_MESSAGE_IN_MESSAGE);
    if (inMessage != null) {
        OMElement inMessageElement = omFactory.createOMElement(WSDL2Constants.IN_PUT_LOCAL_NAME, wsdl);
        inMessageElement.addAttribute(omFactory.createOMAttribute(
                WSDL2Constants.ATTRIBUTE_ELEMENT, null,
                WSDLSerializationUtil.getElementName(inMessage, nameSpaceMap)));
        WSDLSerializationUtil.addWSAWActionAttribute(inMessageElement, axisOperation.getInputAction(),wsaw);
        WSDLSerializationUtil.addWSDLDocumentationElement(inMessage, inMessageElement, omFactory, wsdl);
        axisOperationElement.addChild(inMessageElement);
    }

    // Add the output element.
    // Here we need to consider the MEP, since AxisOperationFactory creates an
    // InOutAxisOperation even for robust-in-only messages:
    //   case WSDLConstants.MEP_CONSTANT_ROBUST_IN_ONLY : {
    //       abOpdesc = new InOutAxisOperation();
    //       abOpdesc.setMessageExchangePattern(WSDL2Constants.MEP_URI_ROBUST_IN_ONLY);
    //       break;
    //   }
    // Same logic as in the AxisService-to-WSDL11 serializer.
    String mep = axisOperation.getMessageExchangePattern();
    if (WSDL2Constants.MEP_URI_OUT_ONLY.equals(mep)
            || WSDL2Constants.MEP_URI_OUT_OPTIONAL_IN.equals(mep)
            || WSDL2Constants.MEP_URI_IN_OPTIONAL_OUT.equals(mep)
            || WSDL2Constants.MEP_URI_ROBUST_OUT_ONLY.equals(mep)
            || WSDL2Constants.MEP_URI_IN_OUT.equals(mep)) {
        AxisMessage outMessage = (AxisMessage) axisOperation.getChild(WSDLConstants.WSDL_MESSAGE_OUT_MESSAGE);
        if (outMessage != null) {
            OMElement outMessageElement = omFactory.createOMElement(WSDL2Constants.OUT_PUT_LOCAL_NAME, wsdl);
            outMessageElement.addAttribute(omFactory.createOMAttribute(
                    WSDL2Constants.ATTRIBUTE_ELEMENT, null,
                    WSDLSerializationUtil.getElementName(outMessage, nameSpaceMap)));
            WSDLSerializationUtil.addWSAWActionAttribute(outMessageElement, axisOperation.getOutputAction(), wsaw);
            WSDLSerializationUtil.addWSDLDocumentationElement(outMessage, outMessageElement, omFactory, wsdl);
            axisOperationElement.addChild(outMessageElement);
        }
    }

    // Add the fault elements: infault or outfault depending on the message
    // direction, each referencing the interface-level fault by QName.
    ArrayList faults = axisOperation.getFaultMessages();
    if (faults != null) {
        Iterator iterator = faults.iterator();
        while (iterator.hasNext()) {
            AxisMessage faultMessage = (AxisMessage) iterator.next();
            OMElement faultElement;
            if (WSDLConstants.WSDL_MESSAGE_DIRECTION_IN.equals(faultMessage.getDirection())) {
                faultElement = omFactory.createOMElement(WSDL2Constants.IN_FAULT_LOCAL_NAME, wsdl);
            } else {
                faultElement = omFactory.createOMElement(WSDL2Constants.OUT_FAULT_LOCAL_NAME, wsdl);
            }
            faultElement.addAttribute(omFactory.createOMAttribute(WSDL2Constants.ATTRIBUTE_REF,
                                                                  null, tns.getPrefix() + ":" +
                                                                        faultMessage.getName()));
            WSDLSerializationUtil.addWSAWActionAttribute(faultElement,
                                                         axisOperation.getFaultAction(
                                                                 faultMessage.getName()), wsaw);
            WSDLSerializationUtil
                    .addWSDLDocumentationElement(faultMessage, faultElement, omFactory, wsdl);
            axisOperationElement.addChild(faultElement);
        }
    }
    return axisOperationElement;
}
/**
 * Sets the endpoint references (service addresses) used when generating
 * default endpoints for this service.
 *
 * @param eprs the endpoint addresses; may be {@code null}. The array is copied
 *             defensively so later mutation by the caller cannot change the
 *             generated WSDL.
 */
public void setEPRs(String[] eprs) {
    this.eprs = (eprs == null) ? null : eprs.clone();
}
/**
 * This function checks the schema and returns the WSDL 2.0 styles that it
 * conforms to. It checks for RPC, IRI and Multipart styles. For full details
 * on the rules please refer
 * http://www.w3.org/TR/2007/REC-wsdl20-adjuncts-20070626/#styles
 *
 * As a side effect, when the operation qualifies for RPC style the
 * wrpc:signature value is computed and stored as an operation parameter.
 *
 * @param axisOperation - The axisOperation that needs to be checked
 * @return String [] - An array of styles that the operation adheres to;
 *         empty when the input message is not introspectable.
 */
private URI [] checkStyle(AxisOperation axisOperation) throws URISyntaxException, AxisFault {
    boolean isRPC = true;
    boolean isMultipart = true;
    boolean isIRI = true;
    ArrayList styles = new ArrayList(3);
    String mep = axisOperation.getMessageExchangePattern();
    // RPC style is only defined for in-only and in-out MEPs.
    if (!(WSDL2Constants.MEP_URI_IN_ONLY.equals(mep) ||
            WSDL2Constants.MEP_URI_IN_OUT.equals(mep))) {
        isRPC = false;
    }
    QName inMessageElementQname;
    // element-name -> type QName, in declaration order (the ordering matters
    // for the generated RPC signature).
    Map inMessageElementDetails = new LinkedHashMap();
    AxisMessage inMessage = axisOperation.getMessage(WSDL2Constants.MESSAGE_LABEL_IN);
    if (inMessage != null) {
        QName qName = inMessage.getElementQName();
        if (qName == null || Constants.XSD_ANY.equals(qName)) {
            return new URI [0];
        }
        XmlSchemaElement schemaElement = inMessage.getSchemaElement();
        if (schemaElement != null) {
            // RPC requires the wrapper element to be named after the operation.
            if (!axisOperation.getName().getLocalPart().equals(schemaElement.getName())) {
                return new URI [0];
            }
            inMessageElementQname = schemaElement.getQName();
            XmlSchemaType type = schemaElement.getSchemaType();
            if (type != null && type instanceof XmlSchemaComplexType) {
                XmlSchemaComplexType complexType = (XmlSchemaComplexType) type;
                XmlSchemaParticle particle = complexType.getParticle();
                if (particle != null && particle instanceof XmlSchemaSequence) {
                    XmlSchemaSequence xmlSchemaSequence = (XmlSchemaSequence) particle;
                    if (!xmlSchemaSequence.getItems().isEmpty()) {
                        for (XmlSchemaSequenceMember member : xmlSchemaSequence.getItems()) {
                            // Only local element declarations are allowed.
                            if (!(member instanceof XmlSchemaElement)) {
                                return new URI[0];
                            }
                            XmlSchemaElement innerElement = (XmlSchemaElement) member;
                            if (innerElement.getRef().getTargetQName() != null) {
                                return new URI[0];
                            }
                            // Multipart requires exactly-once cardinality.
                            if (innerElement.getMinOccurs() != 1 ||
                                    innerElement.getMaxOccurs() != 1) {
                                isMultipart = false;
                            }
                            XmlSchemaType schemaType = innerElement.getSchemaType();
                            QName innerElementQName = innerElement.getSchemaTypeName();
                            // IRI style requires simple types serializable in a URI.
                            if (schemaType instanceof XmlSchemaSimpleType) {
                                if (Constants.XSD_QNAME.equals(innerElementQName) ||
                                        Constants.XSD_NOTATION.equals(innerElementQName) ||
                                        Constants.XSD_HEXBIN.equals(innerElementQName) ||
                                        Constants.XSD_BASE64.equals(innerElementQName)) {
                                    isIRI = false;
                                }
                            } else {
                                isIRI = false;
                            }
                            if (Constants.XSD_ANY.equals(innerElementQName)) {
                                isRPC = false;
                            }
                            String name = innerElement.getName();
                            // Duplicate part names disqualify RPC and Multipart.
                            if (inMessageElementDetails.get(name) != null) {
                                isRPC = false;
                                isMultipart = false;
                            }
                            inMessageElementDetails.put(name, innerElementQName);
                        }
                    }
                } else {
                    return new URI[0];
                }
            } else {
                return new URI[0];
            }
        } else {
            return new URI [0];
        }
    } else {
        return new URI [0];
    }
    AxisMessage outMessage = null;
    Map outMessageElementDetails = new LinkedHashMap();
    if (isRPC && !WSDL2Constants.MEP_URI_IN_ONLY.equals(mep)) {
        // NOTE(review): assumes an out message exists for in-out MEPs — confirm
        // getMessage(MESSAGE_LABEL_OUT) cannot return null here.
        outMessage = axisOperation.getMessage(WSDL2Constants.MESSAGE_LABEL_OUT);
        QName qName = outMessage.getElementQName();
        // BUGFIX: was "qName == null && XSD_ANY.equals(qName)", which can never
        // be true; mirror the in-message check above.
        if (qName == null || Constants.XSD_ANY.equals(qName)) {
            isRPC = false;
        }
        XmlSchemaElement schemaElement = outMessage.getSchemaElement();
        if (schemaElement != null) {
            // RPC requires the response wrapper to be <operationName>Response.
            if (!(axisOperation.getName().getLocalPart() + Java2WSDLConstants.RESPONSE)
                    .equals(schemaElement.getName())) {
                isRPC = false;
            }
            if (!schemaElement.getQName().getNamespaceURI()
                    .equals(inMessageElementQname.getNamespaceURI())) {
                isRPC = false;
            }
            XmlSchemaType type = schemaElement.getSchemaType();
            if (type != null && type instanceof XmlSchemaComplexType) {
                XmlSchemaComplexType complexType = (XmlSchemaComplexType) type;
                XmlSchemaParticle particle = complexType.getParticle();
                if (particle != null && particle instanceof XmlSchemaSequence) {
                    XmlSchemaSequence xmlSchemaSequence = (XmlSchemaSequence) particle;
                    if (!xmlSchemaSequence.getItems().isEmpty()) {
                        for (XmlSchemaSequenceMember member : xmlSchemaSequence.getItems()) {
                            if (!(member instanceof XmlSchemaElement)) {
                                isRPC = false;
                            }
                            XmlSchemaElement innerElement = (XmlSchemaElement) member;
                            QName schemaTypeName = innerElement.getSchemaTypeName();
                            String name = innerElement.getName();
                            if (innerElement.getRef().getTargetQName() != null) {
                                isRPC = false;
                            }
                            if (outMessageElementDetails.get(name) != null) {
                                isRPC = false;
                            }
                            QName inMessageElementType =
                                    (QName) inMessageElementDetails.get(name);
                            // BUGFIX: compare QNames by value, not by reference;
                            // identity comparison spuriously rejected equal
                            // in/out part types.
                            if (inMessageElementType != null &&
                                    !inMessageElementType.equals(schemaTypeName)) {
                                isRPC = false;
                            }
                            outMessageElementDetails.put(name, schemaTypeName);
                        }
                    }
                } else {
                    isRPC = false;
                }
            } else {
                isRPC = false;
            }
        } else {
            isRPC = false;
        }
    }
    int count = 0;
    if (isRPC) {
        styles.add(new URI(WSDL2Constants.STYLE_RPC));
        axisOperation.addParameter(WSDL2Constants.ATTR_WRPC_SIGNATURE, generateRPCSignature(
                inMessageElementDetails, outMessageElementDetails));
        count ++;
    }
    if (isIRI) {
        styles.add(new URI(WSDL2Constants.STYLE_IRI));
        count ++;
    }
    if (isMultipart) {
        styles.add(new URI(WSDL2Constants.STYLE_MULTIPART));
        count ++;
    }
    return (URI[]) styles.toArray(new URI[count]);
}
/**
 * Builds the wrpc:signature value: each in-only part followed by the RPC_IN
 * token, then each return-only part with RPC_RETURN, then each in-out part
 * with RPC_INOUT, all space-separated.
 *
 * Note: parts present in both maps are removed from
 * {@code outMessageElementDetails} (via its key-set view) as they are
 * classified as in-out.
 */
private String generateRPCSignature(Map inMessageElementDetails, Map outMessageElementDetails) {
    StringBuilder in = new StringBuilder();
    StringBuilder out = new StringBuilder();
    StringBuilder inOut = new StringBuilder();
    Set outElementSet = outMessageElementDetails.keySet();
    for (Iterator inElements = inMessageElementDetails.keySet().iterator();
         inElements.hasNext();) {
        String inElementName = (String) inElements.next();
        if (outElementSet.contains(inElementName)) {
            inOut.append(inElementName).append(" ")
                    .append(WSDL2Constants.RPC_INOUT).append(" ");
            // Removing through the key-set view also removes the map entry, so
            // the part is not reported again as a return value below.
            outElementSet.remove(inElementName);
        } else {
            in.append(inElementName).append(" ")
                    .append(WSDL2Constants.RPC_IN).append(" ");
        }
    }
    for (Iterator outElements = outElementSet.iterator(); outElements.hasNext();) {
        out.append((String) outElements.next()).append(" ")
                .append(WSDL2Constants.RPC_RETURN).append(" ");
    }
    return in.toString() + out.toString() + inOut.toString();
}
/**
 * Serializes the policy components attached to {@code axisDescription} onto
 * {@code descriptionElement}: attached {@link Policy} objects are written as
 * wsp:PolicyReference elements, attached {@link PolicyReference}s are written
 * directly. Each element is inserted before the element's first child (or
 * appended when the element is empty). Every referenced Policy is recorded in
 * {@link #policiesInDescription} keyed by its reference URI with any leading
 * '#' stripped, so addPoliciesToDescriptionElement can emit the definitions
 * later.
 *
 * @throws AxisFault when an attached PolicyReference cannot be resolved to a
 *                   Policy in this service's scope
 */
private void addPolicyAsExtensibleElement(AxisDescription axisDescription,
        OMElement descriptionElement) throws XMLStreamException,
        FactoryConfigurationError, AxisFault {
    PolicySubject policySubject = axisDescription.getPolicySubject();
    Collection attachPolicyComponents = policySubject
            .getAttachedPolicyComponents();
    for (Iterator iterator = attachPolicyComponents.iterator(); iterator
            .hasNext();) {
        Object policyElement = iterator.next();
        if (policyElement instanceof Policy) {
            // An inline Policy is emitted as a reference; the Policy itself is
            // recorded for later serialization into the description.
            PolicyReference policyReference = PolicyUtil
                    .createPolicyReference((Policy) policyElement);
            OMElement policyRefElement = PolicyUtil
                    .getPolicyComponentAsOMElement(
                            (PolicyComponent) policyReference, filter);
            OMNode firstChildElem = descriptionElement.getFirstElement();
            if (firstChildElem == null) {
                descriptionElement.addChild(policyRefElement);
            } else {
                firstChildElem.insertSiblingBefore(policyRefElement);
            }
            // Normalize fragment-style URIs ("#id" -> "id") for the map key.
            String key = ((PolicyReference) policyReference).getURI();
            if (key.startsWith("#")) {
                key = key.substring(key.indexOf("#") + 1);
            }
            policiesInDescription.put(key, (Policy) policyElement);
        } else if (policyElement instanceof PolicyReference) {
            OMElement child = PolicyUtil
                    .getPolicyComponentAsOMElement((PolicyComponent) policyElement);
            OMElement firstChildElem = descriptionElement.getFirstElement();
            if (firstChildElem == null) {
                descriptionElement.addChild(child);
            } else {
                firstChildElem.insertSiblingBefore(child);
            }
            String key = ((PolicyReference) policyElement).getURI();
            if (key.startsWith("#")) {
                key = key.substring(key.indexOf("#") + 1);
            }
            // The reference must resolve to an actual Policy so the definition
            // can be emitted into the description.
            PolicyLocator locator = new PolicyLocator(axisService);
            Policy p = locator.lookup(key);
            if (p == null) {
                throw new AxisFault("Policy not found for uri : " + key);
            }
            policiesInDescription.put(key, p);
        }
    }
}
/**
 * Serializes each Policy and inserts it at the head of
 * {@code descriptionElement}'s children.
 *
 * The first child is recomputed on every iteration, so each policy is placed
 * before the previously inserted one (later policies end up ahead of earlier
 * ones).
 */
protected void addPoliciesToDescriptionElement(List policies,
        OMElement descriptionElement) throws XMLStreamException,
        FactoryConfigurationError {
    for (Iterator iterator = policies.iterator(); iterator.hasNext();) {
        Policy policy = (Policy) iterator.next();
        OMElement policyElement = PolicyUtil.getPolicyComponentAsOMElement(
                policy, filter);
        OMNode firstChild = descriptionElement.getFirstOMChild();
        if (firstChild == null) {
            descriptionElement.addChild(policyElement);
        } else {
            firstChild.insertSiblingBefore(policyElement);
        }
    }
}
/**
 * Creates the wsdl2:description root element, sets its target namespace, and
 * resolves every namespace the generator needs (wsdl, wsaw, wsoap, whttp,
 * wsdlx, wrpc, tns), caching them in the corresponding fields for the other
 * generate* methods. When the service's own namespace map already contains a
 * required namespace its existing prefix is reused; otherwise the namespace is
 * declared on the description element with the default prefix.
 *
 * @param omFactory the active OMFactory
 * @return the description element (namespaces declared, no children yet)
 */
protected OMElement generateDescription(OMFactory omFactory) {
    Map nameSpacesMap = axisService.getNamespaceMap();
    // The serializer filters out assertions configured locally on this server.
    filter = new ExternalPolicySerializer();
    AxisConfiguration axisConfiguration = axisService
            .getAxisConfiguration();
    if (axisConfiguration != null) {
        filter.setAssertionsToFilter(axisConfiguration
                .getLocalPolicyAssertions());
    }
    // Reuse an existing prefix for the WSDL namespace when the service's
    // namespace map already declares it.
    if (nameSpacesMap != null && nameSpacesMap.containsValue(WSDL2Constants.WSDL_NAMESPACE)) {
        wsdl = omFactory
                .createOMNamespace(WSDL2Constants.WSDL_NAMESPACE,
                                   WSDLSerializationUtil.getPrefix(
                                           WSDL2Constants.WSDL_NAMESPACE, nameSpacesMap));
    } else {
        wsdl = omFactory
                .createOMNamespace(WSDL2Constants.WSDL_NAMESPACE,
                                   WSDL2Constants.DEFAULT_WSDL_NAMESPACE_PREFIX);
    }
    OMElement descriptionElement = omFactory.createOMElement(WSDL2Constants.DESCRIPTION, wsdl);
    // Declare all the defined namespaces in the document
    WSDLSerializationUtil.populateNamespaces(descriptionElement, nameSpacesMap);
    descriptionElement.declareNamespace(axisService.getTargetNamespace(),
                                        axisService.getTargetNamespacePrefix());
    wsaw = descriptionElement.declareNamespace(AddressingConstants.Final.WSAW_NAMESPACE, "wsaw");
    // Need to add the targetnamespace as an attribute according to the WSDL 2.0 spec.
    OMAttribute targetNamespace = omFactory
            .createOMAttribute(WSDL2Constants.TARGET_NAMESPACE, null,
                               axisService.getTargetNamespace());
    descriptionElement.addAttribute(targetNamespace);
    // Check whether the required namespaces are already in namespaceMap; if
    // they are not present, declare them with their default prefixes.
    tns = omFactory
            .createOMNamespace(axisService.getTargetNamespace(),
                               axisService.getTargetNamespacePrefix());
    if (nameSpacesMap != null && nameSpacesMap.containsValue(WSDL2Constants.URI_WSDL2_SOAP)) {
        wsoap = omFactory
                .createOMNamespace(WSDL2Constants.URI_WSDL2_SOAP,
                                   WSDLSerializationUtil.getPrefix(
                                           WSDL2Constants.URI_WSDL2_SOAP, nameSpacesMap));
    } else {
        wsoap = descriptionElement
                .declareNamespace(WSDL2Constants.URI_WSDL2_SOAP, WSDL2Constants.SOAP_PREFIX);
    }
    if (nameSpacesMap != null && nameSpacesMap.containsValue(WSDL2Constants.URI_WSDL2_HTTP)) {
        whttp = omFactory
                .createOMNamespace(WSDL2Constants.URI_WSDL2_HTTP,
                                   WSDLSerializationUtil.getPrefix(
                                           WSDL2Constants.URI_WSDL2_HTTP, nameSpacesMap));
    } else {
        whttp = descriptionElement
                .declareNamespace(WSDL2Constants.URI_WSDL2_HTTP, WSDL2Constants.HTTP_PREFIX);
    }
    if (nameSpacesMap != null && nameSpacesMap.containsValue(WSDL2Constants.URI_WSDL2_EXTENSIONS)) {
        wsdlx = omFactory
                .createOMNamespace(WSDL2Constants.URI_WSDL2_EXTENSIONS,
                                   WSDLSerializationUtil.getPrefix(
                                           WSDL2Constants.URI_WSDL2_EXTENSIONS, nameSpacesMap));
    } else {
        wsdlx = descriptionElement.declareNamespace(WSDL2Constants.URI_WSDL2_EXTENSIONS,
                                                    WSDL2Constants.WSDL_EXTENTION_PREFIX);
    }
    if (nameSpacesMap != null && nameSpacesMap.containsValue(WSDL2Constants.URI_WSDL2_RPC)) {
        wrpc = omFactory
                .createOMNamespace(WSDL2Constants.URI_WSDL2_RPC,
                                   WSDLSerializationUtil.getPrefix(
                                           WSDL2Constants.URI_WSDL2_RPC, nameSpacesMap));
    } else {
        wrpc = descriptionElement.declareNamespace(WSDL2Constants.URI_WSDL2_RPC,
                                                   WSDL2Constants.WSDL_RPC_PREFIX);
    }
    return descriptionElement;
}
/**
 * Builds the wsdl2:documentation element for the service via
 * WSDLSerializationUtil. The result may be null (generateOM() checks for this
 * before adding it) — presumably when the service carries no documentation;
 * confirm against WSDLSerializationUtil.
 */
protected OMElement generateDocumentation(OMFactory omFactory) {
    OMElement documentationElement = WSDLSerializationUtil
            .generateDocumentationElement(axisService, omFactory, wsdl);
    return documentationElement;
}
/**
 * Generates the wsdl2:types element containing every schema of the service,
 * skipping the XML Schema namespace itself. Each schema is serialized to text
 * and re-parsed into the OM tree.
 *
 * @param omFactory the active OMFactory
 * @return the types element (never null; may have no children)
 * @throws AxisFault if a serialized schema cannot be re-parsed
 */
protected OMElement generateTypes(OMFactory omFactory) throws AxisFault {
    OMElement typesElement = omFactory.createOMElement(WSDL2Constants.TYPES_LOCAL_NALE, wsdl);
    axisService.populateSchemaMappings();
    ArrayList schemas = axisService.getSchema();
    for (int i = 0; i < schemas.size(); i++) {
        XmlSchema schema = axisService.getSchema(i);
        // The schema-for-schemas namespace is never inlined into the WSDL.
        if (org.apache.axis2.namespace.Constants.URI_2001_SCHEMA_XSD
                .equals(schema.getTargetNamespace())) {
            continue;
        }
        StringWriter writer = new StringWriter();
        schema.write(writer);
        String schemaString = writer.toString();
        if (!"".equals(schemaString)) {
            try {
                // BUGFIX: encode with an explicit charset. The previous
                // no-arg getBytes() used the platform default and corrupted
                // non-ASCII schema content on non-UTF-8 platforms; UTF-8
                // matches the XML parser's default for a stream without an
                // encoding declaration (assumes schema.write emits no
                // conflicting declaration — TODO confirm).
                typesElement.addChild(
                        XMLUtils.toOM(new ByteArrayInputStream(
                                schemaString.getBytes(java.nio.charset.StandardCharsets.UTF_8))));
            } catch (XMLStreamException e) {
                throw AxisFault.makeFault(e);
            }
        }
    }
    return typesElement;
}
/**
 * Generates the wsdl2:interface element. The interface name comes from the
 * service's "interface" parameter when present, otherwise the default
 * interface name; the resolved name is cached in {@link #interfaceName} for
 * the service/binding serialization.
 */
protected OMElement generateInterface(OMFactory omFactory) throws AxisFault,
        URISyntaxException, XMLStreamException, FactoryConfigurationError {
    Parameter parameter = axisService.getParameter(WSDL2Constants.INTERFACE_LOCAL_NAME);
    interfaceName = (parameter == null)
            ? WSDL2Constants.DEFAULT_INTERFACE_NAME
            : (String) parameter.getValue();
    return getInterfaceElement(wsdl, tns, wsdlx, wrpc, omFactory, interfaceName);
}
/**
 * Generates the wsdl2:service element (and the bindings it depends on) and
 * adds them to {@code descriptionElement}. When the service already has
 * endpoints they are serialized (subject to the active/disabled checks);
 * otherwise default SOAP 1.1 / SOAP 1.2 / HTTP bindings and endpoints are
 * generated.
 *
 * NOTE(review): the {@code disableREST}/{@code disableSOAP12}/
 * {@code disableSOAP11} parameters are never read — the body calls
 * isDisableREST()/isDisableSOAP12()/isDisableSOAP11() instead. Confirm whether
 * the parameters can be honored or documented as ignored.
 *
 * @return the service element that was added to the description
 */
protected OMElement generateService(OMFactory omFactory, OMElement descriptionElement,
        boolean disableREST, boolean disableSOAP12, boolean disableSOAP11) throws AxisFault {
    // Check whether the axisService has any endpoints. If they exist, serialize
    // them; else generate default endpoint elements.
    OMElement serviceElement;
    Set bindings = new HashSet();
    Map endpointMap = axisService.getEndpoints();
    Object value = axisService.getParameterValue("isCodegen");
    boolean isCodegen = false;
    if (JavaUtils.isTrueExplicitly(value)) {
        isCodegen = true;
    }
    if (endpointMap != null && endpointMap.size() > 0) {
        serviceElement = getServiceElement(wsdl, tns, omFactory, interfaceName);
        Iterator iterator = endpointMap.values().iterator();
        while (iterator.hasNext()) {
            // With the new binding hierarchy in place we need to do some extra checking here.
            // If a service has both http and https listeners up we should show two separate eprs.
            // If the service was deployed with a WSDL and it had two endpoints for http and
            // https then we have two endpoints populated, so we should serialize them instead
            // of updating the endpoints.
            AxisEndpoint axisEndpoint = (AxisEndpoint) iterator.next();
            /*
             * Some transports might not be active at runtime.
             */
            if (!isCodegen && checkIfEndPointActive && !axisEndpoint.isActive()) {
                continue;
            }
            AxisBinding axisBinding = axisEndpoint.getBinding();
            String type = axisBinding.getType();
            // If HTTP binding is disabled, do not add.
            if (WSDL2Constants.URI_WSDL2_HTTP.equals(type)) {
                if (isDisableREST()) {
                    continue;
                }
            }
            // If SOAP 1.2 binding is disabled, do not add.
            String propertySOAPVersion =
                    (String) axisBinding.getProperty(WSDL2Constants.ATTR_WSOAP_VERSION);
            if (SOAP12Constants.SOAP_ENVELOPE_NAMESPACE_URI.equals(propertySOAPVersion)) {
                if (isDisableSOAP12()) {
                    continue;
                }
            }
            // If SOAP 1.1 binding is disabled, do not add.
            if (SOAP11Constants.SOAP_ENVELOPE_NAMESPACE_URI.equals(propertySOAPVersion)) {
                if (isDisableSOAP11()) {
                    continue;
                }
            }
            bindings.add(axisBinding);
            OMElement endpointElement = axisEndpoint.toWSDL20(wsdl, tns, whttp);
            // Deduplicate: skip if an endpoint with the same binding and address
            // was already serialized.
            boolean endpointAlreadyAdded = false;
            Iterator endpointsAdded = serviceElement.getChildren();
            while (endpointsAdded.hasNext()) {
                OMElement endpoint = (OMElement) endpointsAdded.next();
                // Checking whether an endpoint with the same binding and address exists.
                if (endpoint.getAttribute(new QName(WSDL2Constants.BINDING_LOCAL_NAME))
                        .getAttributeValue().equals(endpointElement.getAttribute(
                        new QName(WSDL2Constants.BINDING_LOCAL_NAME)).getAttributeValue())
                        && endpoint
                        .getAttribute(new QName(WSDL2Constants.ATTRIBUTE_ADDRESS))
                        .getAttributeValue().equals(endpointElement.getAttribute(
                        new QName(WSDL2Constants.ATTRIBUTE_ADDRESS)).getAttributeValue())) {
                    endpointAlreadyAdded = true;
                }
            }
            if (!endpointAlreadyAdded) {
                // addPolicyAsExtensibleElement(axisEndpoint, endpointElement);
                // Optionally rewrite the endpoint address to the runtime URL when
                // the modifyUserWSDLPortAddress parameter is set to true.
                Parameter modifyAddressParam = axisService
                        .getParameter("modifyUserWSDLPortAddress");
                if (modifyAddressParam != null) {
                    if (Boolean.parseBoolean((String) modifyAddressParam
                            .getValue())) {
                        String endpointURL = axisEndpoint
                                .calculateEndpointURL();
                        endpointElement
                                .getAttribute(
                                        new QName(
                                                WSDL2Constants.ATTRIBUTE_ADDRESS))
                                .setAttributeValue(endpointURL);
                    }
                }
                serviceElement.addChild(modifyEndpoint(endpointElement));
            }
        }
        // Serialize each distinct binding once, after all endpoints.
        Iterator iter = bindings.iterator();
        while (iter.hasNext()) {
            AxisBinding binding = (AxisBinding) iter.next();
            OMElement bindingElement = binding.toWSDL20(wsdl, tns, wsoap, whttp,
                                                        interfaceName,
                                                        axisService.getNamespaceMap(),
                                                        AddressingHelper.getAddressingRequirementParemeterValue(axisService),
                                                        serviceName,wsaw);
            descriptionElement
                    .addChild(modifyBinding(bindingElement));
        }
        descriptionElement.addChild(serviceElement);
    } else {
        // There are no endpoints defined, hence generate default bindings and endpoints.
        // NOTE(review): this SOAP 1.1 binding is added unconditionally and without
        // modifyBinding(), yet another SOAP 1.1 binding is added below when SOAP 1.1
        // is not disabled — this looks like an accidental duplicate; confirm intent.
        descriptionElement.addChild(
                WSDLSerializationUtil.generateSOAP11Binding(omFactory, axisService, wsdl, wsoap,
                                                            tns, serviceName));
        if (!isDisableSOAP12()) {
            descriptionElement.addChild(modifyBinding(
                    WSDLSerializationUtil.generateSOAP12Binding(omFactory, axisService, wsdl, wsoap,
                                                                tns, serviceName)));
        }
        if (!isDisableSOAP11()) {
            descriptionElement.addChild(modifyBinding(
                    WSDLSerializationUtil.generateSOAP11Binding(omFactory, axisService, wsdl, wsoap,
                                                                tns, serviceName)));
        }
        if (!isDisableREST()) {
            descriptionElement.addChild(modifyBinding(
                    WSDLSerializationUtil.generateHTTPBinding(omFactory, axisService, wsdl,
                                                              whttp,
                                                              tns, serviceName)));
        }
        serviceElement = generateServiceElement(omFactory, wsdl, tns, axisService,
                                                isDisableREST(), isDisableSOAP12(), isDisableSOAP11(), eprs, serviceName);
        descriptionElement.addChild(serviceElement);
    }
    return serviceElement;
}
/**
 * Checks whether generation of the REST (HTTP) binding has been switched off
 * for this service via the {@code disableREST} parameter (typically configured
 * in axis2.xml or services.xml).
 *
 * @return {@code true} if the HTTP binding/endpoint must not be generated
 */
protected boolean isDisableREST() {
    // The parameter is only honoured when it is present AND explicitly true.
    Parameter disableRESTParameter =
            axisService.getParameter(org.apache.axis2.Constants.Configuration.DISABLE_REST);
    return disableRESTParameter != null
            && JavaUtils.isTrueExplicitly(disableRESTParameter.getValue());
}
/**
 * Checks whether generation of the SOAP 1.1 binding has been switched off
 * for this service via the {@code disableSOAP11} parameter (typically
 * configured in axis2.xml or services.xml).
 *
 * @return {@code true} if the SOAP 1.1 binding/endpoint must not be generated
 */
protected boolean isDisableSOAP11() {
    // The parameter is only honoured when it is present AND explicitly true.
    Parameter disableSOAP11Parameter =
            axisService.getParameter(org.apache.axis2.Constants.Configuration.DISABLE_SOAP11);
    return disableSOAP11Parameter != null
            && JavaUtils.isTrueExplicitly(disableSOAP11Parameter.getValue());
}
/**
 * Checks whether generation of the SOAP 1.2 binding has been switched off
 * for this service via the {@code disableSOAP12} parameter (typically
 * configured in axis2.xml or services.xml).
 *
 * @return {@code true} if the SOAP 1.2 binding/endpoint must not be generated
 */
protected boolean isDisableSOAP12() {
    // The parameter is only honoured when it is present AND explicitly true.
    Parameter disableSOAP12Parameter =
            axisService.getParameter(org.apache.axis2.Constants.Configuration.DISABLE_SOAP12);
    return disableSOAP12Parameter != null
            && JavaUtils.isTrueExplicitly(disableSOAP12Parameter.getValue());
}
/**
 * Returns the policies collected at the WSDL description (definitions) level.
 *
 * @return a fresh list containing the values of {@code policiesInDescription};
 *         callers may modify the returned list without affecting internal state
 */
protected List getPoliciesInDefinitions() {
    return new ArrayList(policiesInDescription.values());
}
/**
 * Extension hook invoked for every generated WSDL 2.0 {@code endpoint} element
 * before it is attached to the service element. The default implementation
 * returns the element unchanged; subclasses may override to customize it.
 *
 * @param endpoint the generated endpoint element
 * @return the (possibly modified) endpoint element to attach
 */
protected OMElement modifyEndpoint(OMElement endpoint) {
    return endpoint;
}
/**
 * Extension hook invoked for every generated WSDL 2.0 {@code binding} element
 * before it is attached to the description element. The default implementation
 * returns the element unchanged; subclasses may override to customize it.
 *
 * @param binding the generated binding element
 * @return the (possibly modified) binding element to attach
 */
protected OMElement modifyBinding(OMElement binding) {
    return binding;
}
/**
 * Generates a default service element. Convenience overload that derives the
 * endpoint addresses (EPRs) from the {@code AxisService} by delegating to
 * {@link #generateServiceElement(OMFactory, OMNamespace, OMNamespace, AxisService,
 * boolean, boolean, boolean, String[], String)} with a {@code null} EPR array.
 * @param omFactory - The OMFactory
 * @param wsdl the WSDL namespace
 * @param tns - The targetnamespace
 * @param axisService - The AxisService
 * @param disableREST only generate REST endpoint if this is false
 * @param disableSOAP12 only generate SOAP 1.2 endpoint if this is false
 * @param disableSOAP11 only generate SOAP 1.1 endpoint if this is false
 * @param serviceName name used for the service element and its endpoints
 * @return - The generated service element
 * @throws AxisFault - Thrown in case an exception occurs
 */
public OMElement generateServiceElement(OMFactory omFactory, OMNamespace wsdl,
                                        OMNamespace tns, AxisService axisService,
                                        boolean disableREST, boolean disableSOAP12, boolean disableSOAP11,
                                        String serviceName)
        throws AxisFault {
    return generateServiceElement(omFactory, wsdl, tns, axisService, disableREST, disableSOAP12,disableSOAP11,
                                  null, serviceName);
}
/**
 * Generates a default service element containing one endpoint per EPR for each
 * enabled binding type (SOAP 1.1, SOAP 1.2, HTTP).
 * @param omFactory - The OMFactory
 * @param wsdl the WSDL namespace
 * @param tns - The targetnamespace
 * @param axisService - The AxisService
 * @param disableREST only generate REST endpoint if this is false
 * @param disableSOAP12 only generate SOAP 1.2 endpoint if this is false
 * @param disableSOAP11 only generate SOAP 1.1 endpoint if this is false
 * @param eprs endpoint addresses to expose; if {@code null}, taken from the
 *             service (falling back to the service name as a last resort)
 * @param serviceName name used for the service element, endpoint names and
 *                    binding references
 * @return - The generated service element
 * @throws AxisFault - Thrown in case an exception occurs
 */
public OMElement generateServiceElement(OMFactory omFactory, OMNamespace wsdl,
                                        OMNamespace tns, AxisService axisService,
                                        boolean disableREST, boolean disableSOAP12, boolean disableSOAP11,
                                        String[] eprs, String serviceName)
        throws AxisFault {
    // Resolve the endpoint addresses: explicit argument wins, then the
    // service's own EPRs, then the bare service name as a placeholder.
    if(eprs == null){
        eprs = axisService.getEPRs();
        if (eprs == null) {
            eprs = new String[]{serviceName};
        }
    }
    // <service name="..." interface="tns:ServiceInterface">
    OMElement serviceElement;
    serviceElement = omFactory.createOMElement(WSDL2Constants.SERVICE_LOCAL_NAME, wsdl);
    serviceElement.addAttribute(omFactory.createOMAttribute(WSDL2Constants.ATTRIBUTE_NAME,
                                                            null, serviceName));
    serviceElement.addAttribute(omFactory.createOMAttribute(
            WSDL2Constants.INTERFACE_LOCAL_NAME, null,
            tns.getPrefix() + ":" + WSDL2Constants.DEFAULT_INTERFACE_NAME));
    // Emit up to three endpoints (SOAP 1.1 / SOAP 1.2 / HTTP) per address.
    for (int i = 0; i < eprs.length; i++) {
        // HTTPS addresses get a distinguishing prefix on the endpoint name so
        // they do not clash with the plain-HTTP endpoint names.
        String name = "";
        String epr = eprs[i];
        if (epr.startsWith("https://")) {
            name = WSDL2Constants.DEFAULT_HTTPS_PREFIX;
        }
        OMElement soap11EndpointElement =
                null;
        if (!disableSOAP11) {
            soap11EndpointElement = omFactory.createOMElement(WSDL2Constants.ENDPOINT_LOCAL_NAME, wsdl);
            soap11EndpointElement.addAttribute(omFactory.createOMAttribute(
                    WSDL2Constants.ATTRIBUTE_NAME, null,
                    name + WSDL2Constants.DEFAULT_SOAP11_ENDPOINT_NAME));
            soap11EndpointElement.addAttribute(omFactory.createOMAttribute(
                    WSDL2Constants.BINDING_LOCAL_NAME, null,
                    tns.getPrefix() + ":" + serviceName +
                            Java2WSDLConstants.BINDING_NAME_SUFFIX));
            soap11EndpointElement.addAttribute(
                    omFactory.createOMAttribute(WSDL2Constants.ATTRIBUTE_ADDRESS, null, epr));
            serviceElement.addChild(modifyEndpoint(soap11EndpointElement));
        }
        OMElement soap12EndpointElement = null;
        if (!disableSOAP12) {
            soap12EndpointElement =
                    omFactory.createOMElement(WSDL2Constants.ENDPOINT_LOCAL_NAME, wsdl);
            soap12EndpointElement.addAttribute(omFactory.createOMAttribute(
                    WSDL2Constants.ATTRIBUTE_NAME, null,
                    name + WSDL2Constants.DEFAULT_SOAP12_ENDPOINT_NAME));
            soap12EndpointElement.addAttribute(omFactory.createOMAttribute(
                    WSDL2Constants.BINDING_LOCAL_NAME, null,
                    tns.getPrefix() + ":" + serviceName +
                            Java2WSDLConstants.SOAP12BINDING_NAME_SUFFIX));
            soap12EndpointElement.addAttribute(
                    omFactory.createOMAttribute(WSDL2Constants.ATTRIBUTE_ADDRESS, null, epr));
            serviceElement.addChild(modifyEndpoint(soap12EndpointElement));
        }
        OMElement httpEndpointElement = null;
        if (!disableREST) {
            httpEndpointElement =
                    omFactory.createOMElement(WSDL2Constants.ENDPOINT_LOCAL_NAME, wsdl);
            httpEndpointElement.addAttribute(omFactory.createOMAttribute(
                    WSDL2Constants.ATTRIBUTE_NAME, null,
                    name + WSDL2Constants.DEFAULT_HTTP_ENDPOINT_NAME));
            httpEndpointElement.addAttribute(omFactory.createOMAttribute(
                    WSDL2Constants.BINDING_LOCAL_NAME, null,
                    tns.getPrefix() + ":" + serviceName + Java2WSDLConstants
                            .HTTP_BINDING));
            httpEndpointElement.addAttribute(
                    omFactory.createOMAttribute(WSDL2Constants.ATTRIBUTE_ADDRESS, null, epr));
            serviceElement.addChild(modifyEndpoint(httpEndpointElement));
        }
        // Attach a human-readable <documentation> child to each endpoint,
        // describing its transport. The endpoint elements referenced here are
        // guaranteed non-null because the same disable flags guard both the
        // creation above and the use below.
        if (epr.startsWith("https://")) {
            if (!disableSOAP11) {
                OMElement soap11Documentation = omFactory.createOMElement(WSDL2Constants.DOCUMENTATION, wsdl);
                soap11Documentation.setText("This endpoint exposes a SOAP 11 binding over a HTTPS");
                soap11EndpointElement.addChild(soap11Documentation);
            }
            if (!disableSOAP12) {
                OMElement soap12Documentation = omFactory.createOMElement(WSDL2Constants.DOCUMENTATION, wsdl);
                soap12Documentation.setText("This endpoint exposes a SOAP 12 binding over a HTTPS");
                soap12EndpointElement.addChild(soap12Documentation);
            }
            if (!disableREST) {
                OMElement httpDocumentation =
                        omFactory.createOMElement(WSDL2Constants.DOCUMENTATION, wsdl);
                httpDocumentation.setText("This endpoint exposes a HTTP binding over a HTTPS");
                httpEndpointElement.addChild(httpDocumentation);
            }
        } else if (epr.startsWith("http://")) {
            if (!disableSOAP11) {
                OMElement soap11Documentation = omFactory.createOMElement(WSDL2Constants.DOCUMENTATION, wsdl);
                soap11Documentation.setText("This endpoint exposes a SOAP 11 binding over a HTTP");
                soap11EndpointElement.addChild(soap11Documentation);
            }
            if (!disableSOAP12) {
                OMElement soap12Documentation = omFactory.createOMElement(WSDL2Constants.DOCUMENTATION, wsdl);
                soap12Documentation.setText("This endpoint exposes a SOAP 12 binding over a HTTP");
                soap12EndpointElement.addChild(soap12Documentation);
            }
            if (!disableREST) {
                OMElement httpDocumentation =
                        omFactory.createOMElement(WSDL2Constants.DOCUMENTATION, wsdl);
                httpDocumentation.setText("This endpoint exposes a HTTP binding over a HTTP");
                httpEndpointElement.addChild(httpDocumentation);
            }
        }
    }
    return serviceElement;
}
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.util.vma;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
import org.lwjgl.vulkan.*;
/**
* Description of an Allocator to be created.
*
* <h3>Layout</h3>
*
* <pre><code>
* struct VmaAllocatorCreateInfo {
* VmaAllocatorCreateFlags {@link #flags};
* VkPhysicalDevice {@link #physicalDevice};
* VkDevice {@link #device};
* VkDeviceSize {@link #preferredLargeHeapBlockSize};
* {@link VkAllocationCallbacks VkAllocationCallbacks} const * {@link #pAllocationCallbacks};
* {@link VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks} const * {@link #pDeviceMemoryCallbacks};
* uint32_t {@link #frameInUseCount};
* VkDeviceSize const * {@link #pHeapSizeLimit};
* {@link VmaVulkanFunctions VmaVulkanFunctions} const * {@link #pVulkanFunctions};
* {@link VmaRecordSettings VmaRecordSettings} const * {@link #pRecordSettings};
* VkInstance {@link #instance};
* uint32_t {@link #vulkanApiVersion};
* VkExternalMemoryHandleTypeFlagsKHR const * {@link #pTypeExternalMemoryHandleTypes};
* }</code></pre>
*/
public class VmaAllocatorCreateInfo extends Struct implements NativeResource {

    /** The struct size in bytes. */
    public static final int SIZEOF;

    /** The struct alignment in bytes. */
    public static final int ALIGNOF;

    /** The struct member offsets. */
    public static final int
        FLAGS,
        PHYSICALDEVICE,
        DEVICE,
        PREFERREDLARGEHEAPBLOCKSIZE,
        PALLOCATIONCALLBACKS,
        PDEVICEMEMORYCALLBACKS,
        FRAMEINUSECOUNT,
        PHEAPSIZELIMIT,
        PVULKANFUNCTIONS,
        PRECORDSETTINGS,
        INSTANCE,
        VULKANAPIVERSION,
        PTYPEEXTERNALMEMORYHANDLETYPES;

    // Compute the native layout (size, alignment, member offsets) once;
    // member order and widths mirror the C struct declaration in the class doc.
    static {
        Layout layout = __struct(
            __member(4),
            __member(POINTER_SIZE),
            __member(POINTER_SIZE),
            __member(8),
            __member(POINTER_SIZE),
            __member(POINTER_SIZE),
            __member(4),
            __member(POINTER_SIZE),
            __member(POINTER_SIZE),
            __member(POINTER_SIZE),
            __member(POINTER_SIZE),
            __member(4),
            __member(POINTER_SIZE)
        );

        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();

        FLAGS = layout.offsetof(0);
        PHYSICALDEVICE = layout.offsetof(1);
        DEVICE = layout.offsetof(2);
        PREFERREDLARGEHEAPBLOCKSIZE = layout.offsetof(3);
        PALLOCATIONCALLBACKS = layout.offsetof(4);
        PDEVICEMEMORYCALLBACKS = layout.offsetof(5);
        FRAMEINUSECOUNT = layout.offsetof(6);
        PHEAPSIZELIMIT = layout.offsetof(7);
        PVULKANFUNCTIONS = layout.offsetof(8);
        PRECORDSETTINGS = layout.offsetof(9);
        INSTANCE = layout.offsetof(10);
        VULKANAPIVERSION = layout.offsetof(11);
        PTYPEEXTERNALMEMORYHANDLETYPES = layout.offsetof(12);
    }

    /**
     * Creates a {@code VmaAllocatorCreateInfo} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public VmaAllocatorCreateInfo(ByteBuffer container) {
        super(memAddress(container), __checkContainer(container, SIZEOF));
    }

    @Override
    public int sizeof() { return SIZEOF; }

    /** flags for created allocator. Use {@code VmaAllocatorCreateFlagBits} enum. One of:<br><table><tr><td>{@link Vma#VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT}</td></tr><tr><td>{@link Vma#VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT}</td></tr><tr><td>{@link Vma#VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT}</td></tr><tr><td>{@link Vma#VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT}</td></tr><tr><td>{@link Vma#VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT}</td></tr><tr><td>{@link Vma#VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT}</td></tr><tr><td>{@link Vma#VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT}</td></tr></table> */
    @NativeType("VmaAllocatorCreateFlags")
    public int flags() { return nflags(address()); }
    /** Vulkan physical device. It must be valid throughout whole lifetime of created allocator. */
    @NativeType("VkPhysicalDevice")
    public long physicalDevice() { return nphysicalDevice(address()); }
    /** Vulkan device. It must be valid throughout whole lifetime of created allocator. */
    @NativeType("VkDevice")
    public long device() { return ndevice(address()); }
    /**
     * preferred size of a single {@code VkDeviceMemory} block to be allocated from large heaps > 1 GiB. Set to 0 to use default, which is currently 256
     * MiB. Optional.
     */
    @NativeType("VkDeviceSize")
    public long preferredLargeHeapBlockSize() { return npreferredLargeHeapBlockSize(address()); }
    /** custom CPU memory allocation callbacks. Optional, can be null. When specified, will also be used for all CPU-side memory allocations. Optional. */
    @Nullable
    @NativeType("VkAllocationCallbacks const *")
    public VkAllocationCallbacks pAllocationCallbacks() { return npAllocationCallbacks(address()); }
    /** informative callbacks for {@code vkAllocateMemory}, {@code vkFreeMemory}. Optional. */
    @Nullable
    @NativeType("VmaDeviceMemoryCallbacks const *")
    public VmaDeviceMemoryCallbacks pDeviceMemoryCallbacks() { return npDeviceMemoryCallbacks(address()); }
    /**
     * Maximum number of additional frames that are in use at the same time as current frame.
     *
     * <p>This value is used only when you make allocations with {@link Vma#VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT ALLOCATION_CREATE_CAN_BECOME_LOST_BIT} flag. Such allocation cannot become lost if
     * {@code allocation.lastUseFrameIndex >= allocator.currentFrameIndex - frameInUseCount}.</p>
     *
     * <p>For example, if you double-buffer your command buffers, so resources used for rendering in previous frame may still be in use by the GPU at the moment
     * you allocate resources needed for the current frame, set this value to 1.</p>
     *
     * <p>If you want to allow any allocations other than used in the current frame to become lost, set this value to 0.</p>
     */
    @NativeType("uint32_t")
    public int frameInUseCount() { return nframeInUseCount(address()); }
    /**
     * @param capacity the number of elements in the returned buffer
     *
     * @return Either {@code NULL} or a pointer to an array of limits on maximum number of bytes that can be allocated out of particular Vulkan memory heap.
     *
     * <p>If not {@code NULL}, it must be a pointer to an array of {@code VkPhysicalDeviceMemoryProperties::memoryHeapCount} elements, defining limit on maximum number
     * of bytes that can be allocated out of particular Vulkan memory heap.</p>
     *
     * <p>Any of the elements may be equal to {@code VK_WHOLE_SIZE}, which means no limit on that heap. This is also the default in case of
     * {@code pHeapSizeLimit = NULL}.</p>
     *
     * <p>If there is a limit defined for a heap:</p>
     *
     * <ul>
     * <li>If user tries to allocate more memory from that heap using this allocator, the allocation fails with {@code VK_ERROR_OUT_OF_DEVICE_MEMORY}.</li>
     * <li>If the limit is smaller than heap size reported in {@code VkMemoryHeap::size}, the value of this limit will be reported instead when using
     * {@link Vma#vmaGetMemoryProperties GetMemoryProperties}.</li>
     * </ul>
     *
     * <p>Warning! Using this feature may not be equivalent to installing a GPU with smaller amount of memory, because graphics driver doesn't necessary fail new
     * allocations with {@code VK_ERROR_OUT_OF_DEVICE_MEMORY} result when memory capacity is exceeded. It may return success and just silently migrate some
     * device memory blocks to system RAM. This driver behavior can also be controlled using {@code VK_AMD_memory_overallocation_behavior} extension.</p>
     */
    @Nullable
    @NativeType("VkDeviceSize const *")
    public LongBuffer pHeapSizeLimit(int capacity) { return npHeapSizeLimit(address(), capacity); }
    /** pointers to Vulkan functions */
    @NativeType("VmaVulkanFunctions const *")
    public VmaVulkanFunctions pVulkanFunctions() { return npVulkanFunctions(address()); }
    /**
     * parameters for recording of VMA calls. Can be null.
     *
     * <p>If not null, it enables recording of calls to VMA functions to a file. If support for recording is not enabled using {@code VMA_RECORDING_ENABLED}
     * macro, creation of the allocator object fails with {@code VK_ERROR_FEATURE_NOT_PRESENT}.</p>
     */
    @Nullable
    @NativeType("VmaRecordSettings const *")
    public VmaRecordSettings pRecordSettings() { return npRecordSettings(address()); }
    /** handle to Vulkan instance object. */
    @NativeType("VkInstance")
    public long instance() { return ninstance(address()); }
    /**
     * the highest version of Vulkan that the application is designed to use. (optional)
     *
     * <p>It must be a value in the format as created by macro {@code VK_MAKE_VERSION} or a constant like: {@code VK_API_VERSION_1_1},
     * {@code VK_API_VERSION_1_0}. The patch version number specified is ignored. Only the major and minor versions are considered. It must be less or equal
     * (preferably equal) to value as passed to {@code vkCreateInstance} as {@code VkApplicationInfo::apiVersion}. Only versions 1.0, 1.1 and 1.2 are supported by
     * the current implementation.</p>
     *
     * <p>Leaving it initialized to zero is equivalent to {@code VK_API_VERSION_1_0}.</p>
     */
    @NativeType("uint32_t")
    public int vulkanApiVersion() { return nvulkanApiVersion(address()); }
    /**
     * @param capacity the number of elements in the returned buffer
     *
     * @return Either null or a pointer to an array of external memory handle types for each Vulkan memory type.
     *
     * <p>If not {@code NULL}, it must be a pointer to an array of {@code VkPhysicalDeviceMemoryProperties::memoryTypeCount} elements, defining external memory handle
     * types of particular Vulkan memory type, to be passed using {@code VkExportMemoryAllocateInfoKHR}.</p>
     *
     * <p>Any of the elements may be equal to 0, which means not to use {@code VkExportMemoryAllocateInfoKHR} on this memory type. This is also the default in
     * case of {@code pTypeExternalMemoryHandleTypes = NULL}.</p>
     */
    @Nullable
    @NativeType("VkExternalMemoryHandleTypeFlagsKHR const *")
    public IntBuffer pTypeExternalMemoryHandleTypes(int capacity) { return npTypeExternalMemoryHandleTypes(address(), capacity); }

    /** Sets the specified value to the {@link #flags} field. */
    public VmaAllocatorCreateInfo flags(@NativeType("VmaAllocatorCreateFlags") int value) { nflags(address(), value); return this; }
    /** Sets the specified value to the {@link #physicalDevice} field. */
    public VmaAllocatorCreateInfo physicalDevice(VkPhysicalDevice value) { nphysicalDevice(address(), value); return this; }
    /** Sets the specified value to the {@link #device} field. */
    public VmaAllocatorCreateInfo device(VkDevice value) { ndevice(address(), value); return this; }
    /** Sets the specified value to the {@link #preferredLargeHeapBlockSize} field. */
    public VmaAllocatorCreateInfo preferredLargeHeapBlockSize(@NativeType("VkDeviceSize") long value) { npreferredLargeHeapBlockSize(address(), value); return this; }
    /** Sets the address of the specified {@link VkAllocationCallbacks} to the {@link #pAllocationCallbacks} field. */
    public VmaAllocatorCreateInfo pAllocationCallbacks(@Nullable @NativeType("VkAllocationCallbacks const *") VkAllocationCallbacks value) { npAllocationCallbacks(address(), value); return this; }
    /** Sets the address of the specified {@link VmaDeviceMemoryCallbacks} to the {@link #pDeviceMemoryCallbacks} field. */
    public VmaAllocatorCreateInfo pDeviceMemoryCallbacks(@Nullable @NativeType("VmaDeviceMemoryCallbacks const *") VmaDeviceMemoryCallbacks value) { npDeviceMemoryCallbacks(address(), value); return this; }
    /** Sets the specified value to the {@link #frameInUseCount} field. */
    public VmaAllocatorCreateInfo frameInUseCount(@NativeType("uint32_t") int value) { nframeInUseCount(address(), value); return this; }
    /** Sets the address of the specified {@link LongBuffer} to the {@link #pHeapSizeLimit} field. */
    public VmaAllocatorCreateInfo pHeapSizeLimit(@Nullable @NativeType("VkDeviceSize const *") LongBuffer value) { npHeapSizeLimit(address(), value); return this; }
    /** Sets the address of the specified {@link VmaVulkanFunctions} to the {@link #pVulkanFunctions} field. */
    public VmaAllocatorCreateInfo pVulkanFunctions(@NativeType("VmaVulkanFunctions const *") VmaVulkanFunctions value) { npVulkanFunctions(address(), value); return this; }
    /** Sets the address of the specified {@link VmaRecordSettings} to the {@link #pRecordSettings} field. */
    public VmaAllocatorCreateInfo pRecordSettings(@Nullable @NativeType("VmaRecordSettings const *") VmaRecordSettings value) { npRecordSettings(address(), value); return this; }
    /** Sets the specified value to the {@link #instance} field. */
    public VmaAllocatorCreateInfo instance(VkInstance value) { ninstance(address(), value); return this; }
    /** Sets the specified value to the {@link #vulkanApiVersion} field. */
    public VmaAllocatorCreateInfo vulkanApiVersion(@NativeType("uint32_t") int value) { nvulkanApiVersion(address(), value); return this; }
    /** Sets the address of the specified {@link IntBuffer} to the {@link #pTypeExternalMemoryHandleTypes} field. */
    public VmaAllocatorCreateInfo pTypeExternalMemoryHandleTypes(@Nullable @NativeType("VkExternalMemoryHandleTypeFlagsKHR const *") IntBuffer value) { npTypeExternalMemoryHandleTypes(address(), value); return this; }

    /** Initializes this struct with the specified values. */
    public VmaAllocatorCreateInfo set(
        int flags,
        VkPhysicalDevice physicalDevice,
        VkDevice device,
        long preferredLargeHeapBlockSize,
        @Nullable VkAllocationCallbacks pAllocationCallbacks,
        @Nullable VmaDeviceMemoryCallbacks pDeviceMemoryCallbacks,
        int frameInUseCount,
        @Nullable LongBuffer pHeapSizeLimit,
        VmaVulkanFunctions pVulkanFunctions,
        @Nullable VmaRecordSettings pRecordSettings,
        VkInstance instance,
        int vulkanApiVersion,
        @Nullable IntBuffer pTypeExternalMemoryHandleTypes
    ) {
        flags(flags);
        physicalDevice(physicalDevice);
        device(device);
        preferredLargeHeapBlockSize(preferredLargeHeapBlockSize);
        pAllocationCallbacks(pAllocationCallbacks);
        pDeviceMemoryCallbacks(pDeviceMemoryCallbacks);
        frameInUseCount(frameInUseCount);
        pHeapSizeLimit(pHeapSizeLimit);
        pVulkanFunctions(pVulkanFunctions);
        pRecordSettings(pRecordSettings);
        instance(instance);
        vulkanApiVersion(vulkanApiVersion);
        pTypeExternalMemoryHandleTypes(pTypeExternalMemoryHandleTypes);

        return this;
    }

    /**
     * Copies the specified struct data to this struct.
     *
     * @param src the source struct
     *
     * @return this struct
     */
    public VmaAllocatorCreateInfo set(VmaAllocatorCreateInfo src) {
        memCopy(src.address(), address(), SIZEOF);
        return this;
    }

    // -----------------------------------

    /** Returns a new {@code VmaAllocatorCreateInfo} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static VmaAllocatorCreateInfo malloc() {
        return wrap(VmaAllocatorCreateInfo.class, nmemAllocChecked(SIZEOF));
    }

    /** Returns a new {@code VmaAllocatorCreateInfo} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static VmaAllocatorCreateInfo calloc() {
        return wrap(VmaAllocatorCreateInfo.class, nmemCallocChecked(1, SIZEOF));
    }

    /** Returns a new {@code VmaAllocatorCreateInfo} instance allocated with {@link BufferUtils}. */
    public static VmaAllocatorCreateInfo create() {
        ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
        return wrap(VmaAllocatorCreateInfo.class, memAddress(container), container);
    }

    /** Returns a new {@code VmaAllocatorCreateInfo} instance for the specified memory address. */
    public static VmaAllocatorCreateInfo create(long address) {
        return wrap(VmaAllocatorCreateInfo.class, address);
    }

    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VmaAllocatorCreateInfo createSafe(long address) {
        return address == NULL ? null : wrap(VmaAllocatorCreateInfo.class, address);
    }

    // -----------------------------------

    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated public static VmaAllocatorCreateInfo mallocStack() { return malloc(stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated public static VmaAllocatorCreateInfo callocStack() { return calloc(stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated public static VmaAllocatorCreateInfo mallocStack(MemoryStack stack) { return malloc(stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated public static VmaAllocatorCreateInfo callocStack(MemoryStack stack) { return calloc(stack); }

    /**
     * Returns a new {@code VmaAllocatorCreateInfo} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static VmaAllocatorCreateInfo malloc(MemoryStack stack) {
        return wrap(VmaAllocatorCreateInfo.class, stack.nmalloc(ALIGNOF, SIZEOF));
    }

    /**
     * Returns a new {@code VmaAllocatorCreateInfo} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static VmaAllocatorCreateInfo calloc(MemoryStack stack) {
        return wrap(VmaAllocatorCreateInfo.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }

    // -----------------------------------

    /** Unsafe version of {@link #flags}. */
    public static int nflags(long struct) { return UNSAFE.getInt(null, struct + VmaAllocatorCreateInfo.FLAGS); }
    /** Unsafe version of {@link #physicalDevice}. */
    public static long nphysicalDevice(long struct) { return memGetAddress(struct + VmaAllocatorCreateInfo.PHYSICALDEVICE); }
    /** Unsafe version of {@link #device}. */
    public static long ndevice(long struct) { return memGetAddress(struct + VmaAllocatorCreateInfo.DEVICE); }
    /** Unsafe version of {@link #preferredLargeHeapBlockSize}. */
    public static long npreferredLargeHeapBlockSize(long struct) { return UNSAFE.getLong(null, struct + VmaAllocatorCreateInfo.PREFERREDLARGEHEAPBLOCKSIZE); }
    /** Unsafe version of {@link #pAllocationCallbacks}. */
    @Nullable public static VkAllocationCallbacks npAllocationCallbacks(long struct) { return VkAllocationCallbacks.createSafe(memGetAddress(struct + VmaAllocatorCreateInfo.PALLOCATIONCALLBACKS)); }
    /** Unsafe version of {@link #pDeviceMemoryCallbacks}. */
    @Nullable public static VmaDeviceMemoryCallbacks npDeviceMemoryCallbacks(long struct) { return VmaDeviceMemoryCallbacks.createSafe(memGetAddress(struct + VmaAllocatorCreateInfo.PDEVICEMEMORYCALLBACKS)); }
    /** Unsafe version of {@link #frameInUseCount}. */
    public static int nframeInUseCount(long struct) { return UNSAFE.getInt(null, struct + VmaAllocatorCreateInfo.FRAMEINUSECOUNT); }
    /** Unsafe version of {@link #pHeapSizeLimit(int) pHeapSizeLimit}. */
    @Nullable public static LongBuffer npHeapSizeLimit(long struct, int capacity) { return memLongBufferSafe(memGetAddress(struct + VmaAllocatorCreateInfo.PHEAPSIZELIMIT), capacity); }
    /** Unsafe version of {@link #pVulkanFunctions}. */
    public static VmaVulkanFunctions npVulkanFunctions(long struct) { return VmaVulkanFunctions.create(memGetAddress(struct + VmaAllocatorCreateInfo.PVULKANFUNCTIONS)); }
    /** Unsafe version of {@link #pRecordSettings}. */
    @Nullable public static VmaRecordSettings npRecordSettings(long struct) { return VmaRecordSettings.createSafe(memGetAddress(struct + VmaAllocatorCreateInfo.PRECORDSETTINGS)); }
    /** Unsafe version of {@link #instance}. */
    public static long ninstance(long struct) { return memGetAddress(struct + VmaAllocatorCreateInfo.INSTANCE); }
    /** Unsafe version of {@link #vulkanApiVersion}. */
    public static int nvulkanApiVersion(long struct) { return UNSAFE.getInt(null, struct + VmaAllocatorCreateInfo.VULKANAPIVERSION); }
    /** Unsafe version of {@link #pTypeExternalMemoryHandleTypes(int) pTypeExternalMemoryHandleTypes}. */
    @Nullable public static IntBuffer npTypeExternalMemoryHandleTypes(long struct, int capacity) { return memIntBufferSafe(memGetAddress(struct + VmaAllocatorCreateInfo.PTYPEEXTERNALMEMORYHANDLETYPES), capacity); }

    /** Unsafe version of {@link #flags(int) flags}. */
    public static void nflags(long struct, int value) { UNSAFE.putInt(null, struct + VmaAllocatorCreateInfo.FLAGS, value); }
    /** Unsafe version of {@link #physicalDevice(VkPhysicalDevice) physicalDevice}. */
    public static void nphysicalDevice(long struct, VkPhysicalDevice value) { memPutAddress(struct + VmaAllocatorCreateInfo.PHYSICALDEVICE, value.address()); }
    /** Unsafe version of {@link #device(VkDevice) device}. */
    public static void ndevice(long struct, VkDevice value) { memPutAddress(struct + VmaAllocatorCreateInfo.DEVICE, value.address()); }
    /** Unsafe version of {@link #preferredLargeHeapBlockSize(long) preferredLargeHeapBlockSize}. */
    public static void npreferredLargeHeapBlockSize(long struct, long value) { UNSAFE.putLong(null, struct + VmaAllocatorCreateInfo.PREFERREDLARGEHEAPBLOCKSIZE, value); }
    /** Unsafe version of {@link #pAllocationCallbacks(VkAllocationCallbacks) pAllocationCallbacks}. */
    public static void npAllocationCallbacks(long struct, @Nullable VkAllocationCallbacks value) { memPutAddress(struct + VmaAllocatorCreateInfo.PALLOCATIONCALLBACKS, memAddressSafe(value)); }
    /** Unsafe version of {@link #pDeviceMemoryCallbacks(VmaDeviceMemoryCallbacks) pDeviceMemoryCallbacks}. */
    public static void npDeviceMemoryCallbacks(long struct, @Nullable VmaDeviceMemoryCallbacks value) { memPutAddress(struct + VmaAllocatorCreateInfo.PDEVICEMEMORYCALLBACKS, memAddressSafe(value)); }
    /** Unsafe version of {@link #frameInUseCount(int) frameInUseCount}. */
    public static void nframeInUseCount(long struct, int value) { UNSAFE.putInt(null, struct + VmaAllocatorCreateInfo.FRAMEINUSECOUNT, value); }
    /** Unsafe version of {@link #pHeapSizeLimit(LongBuffer) pHeapSizeLimit}. */
    public static void npHeapSizeLimit(long struct, @Nullable LongBuffer value) { memPutAddress(struct + VmaAllocatorCreateInfo.PHEAPSIZELIMIT, memAddressSafe(value)); }
    /** Unsafe version of {@link #pVulkanFunctions(VmaVulkanFunctions) pVulkanFunctions}. */
    public static void npVulkanFunctions(long struct, VmaVulkanFunctions value) { memPutAddress(struct + VmaAllocatorCreateInfo.PVULKANFUNCTIONS, value.address()); }
    /** Unsafe version of {@link #pRecordSettings(VmaRecordSettings) pRecordSettings}. */
    public static void npRecordSettings(long struct, @Nullable VmaRecordSettings value) { memPutAddress(struct + VmaAllocatorCreateInfo.PRECORDSETTINGS, memAddressSafe(value)); }
    /** Unsafe version of {@link #instance(VkInstance) instance}. */
    public static void ninstance(long struct, VkInstance value) { memPutAddress(struct + VmaAllocatorCreateInfo.INSTANCE, value.address()); }
    /** Unsafe version of {@link #vulkanApiVersion(int) vulkanApiVersion}. */
    public static void nvulkanApiVersion(long struct, int value) { UNSAFE.putInt(null, struct + VmaAllocatorCreateInfo.VULKANAPIVERSION, value); }
    /** Unsafe version of {@link #pTypeExternalMemoryHandleTypes(IntBuffer) pTypeExternalMemoryHandleTypes}. */
    public static void npTypeExternalMemoryHandleTypes(long struct, @Nullable IntBuffer value) { memPutAddress(struct + VmaAllocatorCreateInfo.PTYPEEXTERNALMEMORYHANDLETYPES, memAddressSafe(value)); }

    /**
     * Validates pointer members that should not be {@code NULL}.
     *
     * @param struct the struct to validate
     */
    public static void validate(long struct) {
        check(memGetAddress(struct + VmaAllocatorCreateInfo.PHYSICALDEVICE));
        check(memGetAddress(struct + VmaAllocatorCreateInfo.DEVICE));
        long pAllocationCallbacks = memGetAddress(struct + VmaAllocatorCreateInfo.PALLOCATIONCALLBACKS);
        if (pAllocationCallbacks != NULL) {
            VkAllocationCallbacks.validate(pAllocationCallbacks);
        }
        long pVulkanFunctions = memGetAddress(struct + VmaAllocatorCreateInfo.PVULKANFUNCTIONS);
        check(pVulkanFunctions);
        VmaVulkanFunctions.validate(pVulkanFunctions);
        long pRecordSettings = memGetAddress(struct + VmaAllocatorCreateInfo.PRECORDSETTINGS);
        if (pRecordSettings != NULL) {
            VmaRecordSettings.validate(pRecordSettings);
        }
        check(memGetAddress(struct + VmaAllocatorCreateInfo.INSTANCE));
    }

}
| |
/*
* Copyright 2014-2018 JKOOL, LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jkoolcloud.tnt4j.streams.inputs;
import java.io.*;
import java.nio.charset.Charset;
import java.util.jar.JarInputStream;
import java.util.regex.Pattern;
import java.util.zip.GZIPInputStream;
import java.util.zip.InflaterInputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.apache.commons.lang3.StringUtils;
import com.jkoolcloud.tnt4j.core.OpLevel;
import com.jkoolcloud.tnt4j.sink.EventSink;
import com.jkoolcloud.tnt4j.streams.configure.StreamProperties;
import com.jkoolcloud.tnt4j.streams.utils.LoggerUtils;
import com.jkoolcloud.tnt4j.streams.utils.StreamsResources;
import com.jkoolcloud.tnt4j.streams.utils.Utils;
/**
* Implements a zipped content activity stream, where each line of the zipped file entry is assumed to represent a
* single activity or event which should be recorded. Zip file and entry names to stream are defined using "FileName"
* property in stream configuration.
* <p>
* This activity stream requires parsers that can support {@link String} data.
* <p>
* This activity stream supports the following configuration properties (in addition to those supported by
* {@link TNTParseableInputStream}):
* <ul>
* <li>FileName - defines zip file path and concrete zip file entry name or entry name pattern defined using characters
* '*' and '?'. Definition pattern is "zipFilePath!entryNameWildcard". I.e.:
* "./tnt4j-streams-core/samples/zip-stream/sample.zip!2/*.txt". (Required)</li>
* <li>ArchType - defines archive type. Can be one of: ZIP, GZIP, JAR. Default value - ZIP. (Optional)</li>
 * <li>Charset - charset name used to decode data contained in the file(s). The charset name must comply with the Java
 * specification (resolvable by {@link java.nio.charset.Charset#forName(String)}) to be handled properly. Default value
 * - one returned by {@link java.nio.charset.Charset#defaultCharset()}. (Optional)</li>
* </ul>
*
* @version $Revision: 1 $
*
* @see com.jkoolcloud.tnt4j.streams.parsers.ActivityParser#isDataClassSupported(Object)
*/
public class ZipLineStream extends TNTParseableInputStream<String> {
    private static final EventSink LOGGER = LoggerUtils.getLoggerSink(ZipLineStream.class);

    /** Separator between the archive file path and the entry name pattern in "FileName". */
    private static final String ZIP_PATH_SEPARATOR = "!"; // NON-NLS

    /** Raw "FileName" property value: {@code "zipFilePath!entryNameWildcard"}. */
    private String zipFileName;
    /** Archive type name: one of ZIP, GZIP, JAR. */
    private String archType;
    /** File system path of the archive file (part before the separator). */
    private String zipPath;
    /** Anchored regex matching entry names to stream, or {@code null} to stream all entries. */
    private String zipEntriesMask;

    /** Reader over the currently streamed entry; {@code null} until an entry is opened. */
    private LineNumberReader lineReader;
    /** Underlying archive input stream (ZIP, JAR or GZIP). */
    private InflaterInputStream zipStream;
    /** Charset used to decode entry bytes into text lines. */
    private Charset charset = Charset.defaultCharset();

    /** Line number of the line last read within the current entry. */
    private int lineNumber = 0;
    /** Sum of streamed entry sizes; long to avoid int overflow on large archives. */
    private long totalBytesCount = 0;

    /**
     * Constructs a new ZipLineStream.
     */
    public ZipLineStream() {
        archType = ArchiveTypes.ZIP.name();
    }

    @Override
    protected EventSink logger() {
        return LOGGER;
    }

    @Override
    public void setProperty(String name, String value) {
        super.setProperty(name, value);

        if (StreamProperties.PROP_FILENAME.equalsIgnoreCase(name)) {
            zipFileName = value;
            if (StringUtils.isNotEmpty(zipFileName)) {
                // Split "zipFilePath!entryNameWildcard" into archive path and entry pattern.
                // NOTE: String.split() never returns null, so no null check is needed.
                String[] zdp = zipFileName.split(Pattern.quote(ZIP_PATH_SEPARATOR));
                if (zdp.length > 0) {
                    zipPath = zdp[0];
                }
                if (zdp.length > 1) {
                    zipEntriesMask = StringUtils.isEmpty(zdp[1]) ? null
                            : Utils.wildcardToRegex2(zdp[1].replace("\\", "/")); // NON-NLS
                    if (zipEntriesMask != null) {
                        // Anchor the pattern so the whole entry name has to match it.
                        zipEntriesMask = '^' + zipEntriesMask + '$'; // NON-NLS
                    }
                }
            }
        } else if (StreamProperties.PROP_ARCH_TYPE.equalsIgnoreCase(name)) {
            archType = value;
        } else if (StreamProperties.PROP_CHARSET.equalsIgnoreCase(name)) {
            charset = Charset.forName(value);
        }
    }

    @Override
    public Object getProperty(String name) {
        if (StreamProperties.PROP_FILENAME.equalsIgnoreCase(name)) {
            return zipFileName;
        }
        if (StreamProperties.PROP_ARCH_TYPE.equalsIgnoreCase(name)) {
            return archType;
        }
        if (StreamProperties.PROP_CHARSET.equalsIgnoreCase(name)) {
            return charset.name();
        }
        return super.getProperty(name);
    }

    @Override
    protected void applyProperties() throws Exception {
        super.applyProperties();

        // "FileName" is the only mandatory property of this stream.
        if (StringUtils.isEmpty(zipFileName)) {
            throw new IllegalStateException(StreamsResources.getStringFormatted(StreamsResources.RESOURCE_BUNDLE_NAME,
                    "TNTInputStream.property.undefined", StreamProperties.PROP_FILENAME));
        }
    }

    @Override
    protected void initialize() throws Exception {
        super.initialize();

        logger().log(OpLevel.DEBUG, StreamsResources.getBundle(StreamsResources.RESOURCE_BUNDLE_NAME),
                "ZipLineStream.initializing.stream", zipFileName);

        InputStream fis = loadFile(zipPath);
        try {
            if (ArchiveTypes.JAR.name().equalsIgnoreCase(archType)) {
                zipStream = new JarInputStream(fis);
            } else if (ArchiveTypes.GZIP.name().equalsIgnoreCase(archType)) {
                zipStream = new GZIPInputStream(fis);
            } else {
                zipStream = new ZipInputStream(fis);
            }
        } catch (IOException exc) {
            // Don't leak the file stream when archive stream construction fails.
            Utils.close(fis);
            throw exc;
        }

        if (zipStream instanceof GZIPInputStream) {
            // GZIP has no entries: the whole decompressed stream is one text body.
            lineReader = new LineNumberReader(new BufferedReader(new InputStreamReader(zipStream, charset)));
        } else {
            // ZIP/JAR: position the reader on the first entry matching the mask.
            hasNextEntry();
        }
    }

    /**
     * Loads zip file as input stream to read.
     *
     * @param zipPath
     *            system dependent zip file path
     * @return file input stream to read
     * @throws Exception
     *             If path defined file is not found
     */
    protected InputStream loadFile(String zipPath) throws Exception {
        return new FileInputStream(zipPath);
    }

    /**
     * {@inheritDoc}
     * <p>
     * This method returns a string containing the contents of the next line in the zip file entry.
     */
    @Override
    public String getNextItem() throws Exception {
        if (lineReader == null) {
            throw new IllegalStateException(StreamsResources.getString(StreamsResources.RESOURCE_BUNDLE_NAME,
                    "ZipLineStream.zip.input.not.opened"));
        }

        String line = Utils.getNonEmptyLine(lineReader);
        lineNumber = lineReader.getLineNumber();

        // Current entry exhausted: advance to the next matching entry, if any.
        if (line == null && hasNextEntry()) {
            line = getNextItem();
        }

        if (line != null) {
            // NOTE(review): counts bytes using the platform default charset rather than
            // the configured one; kept as-is to preserve reported statistics. Confirm
            // whether line.getBytes(charset) is intended here.
            addStreamedBytesCount(line.getBytes().length);
        }

        return line;
    }

    /**
     * {@inheritDoc}
     * <p>
     * This method returns line number of the zip file entry last read.
     */
    @Override
    public int getActivityPosition() {
        return lineNumber;
    }

    @Override
    public long getTotalBytes() {
        return totalBytesCount;
    }

    @Override
    protected void cleanup() {
        Utils.close(lineReader);
        lineReader = null;

        if (zipStream instanceof ZipInputStream) {
            try {
                ((ZipInputStream) zipStream).closeEntry();
            } catch (IOException ignored) {
                // Best effort: the stream itself is closed right below.
            }
        }
        Utils.close(zipStream);
        zipStream = null;

        super.cleanup();
    }

    /**
     * Advances the ZIP/JAR stream to the next entry matching the configured entry name mask and opens a line reader
     * over it.
     *
     * @return {@code true} if a matching entry was opened, {@code false} if no more entries remain or the underlying
     *         stream is not a ZIP/JAR stream
     * @throws IOException
     *             if reading the archive fails
     */
    private boolean hasNextEntry() throws IOException {
        if (zipStream instanceof ZipInputStream) {
            ZipInputStream zis = (ZipInputStream) zipStream;
            ZipEntry entry;
            while ((entry = zis.getNextEntry()) != null) {
                String entryName = entry.getName();
                long entrySize = entry.getSize();
                // Skip zero-size entries; entries of unknown size (-1) are still streamed.
                if (entrySize != 0 && (zipEntriesMask == null || entryName.matches(zipEntriesMask))) {
                    if (entrySize > 0) {
                        // Unknown sizes (-1) must not corrupt the running byte total.
                        totalBytesCount += entrySize;
                    }
                    lineReader = new LineNumberReader(new BufferedReader(new InputStreamReader(zis, charset)));
                    lineNumber = 0;
                    logger().log(OpLevel.DEBUG, StreamsResources.getBundle(StreamsResources.RESOURCE_BUNDLE_NAME),
                            "ZipLineStream.opening.entry", entryName);
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Supported archive types.
     */
    private enum ArchiveTypes {
        /**
         * Zip archive type.
         */
        ZIP,
        /**
         * GZip archive type.
         */
        GZIP,
        /**
         * Jar archive type.
         */
        JAR
    }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.projectWizard;
import com.intellij.diagnostic.PluginException;
import com.intellij.framework.addSupport.FrameworkSupportInModuleProvider;
import com.intellij.ide.JavaUiBundle;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.ide.util.frameworkSupport.FrameworkRole;
import com.intellij.ide.util.frameworkSupport.FrameworkSupportUtil;
import com.intellij.ide.util.newProjectWizard.*;
import com.intellij.ide.util.newProjectWizard.impl.FrameworkSupportModelBase;
import com.intellij.ide.util.projectWizard.*;
import com.intellij.ide.wizard.CommitStepException;
import com.intellij.internal.statistic.eventLog.FeatureUsageData;
import com.intellij.internal.statistic.service.fus.collectors.FUCounterUsageLogger;
import com.intellij.internal.statistic.utils.PluginInfoDetectorKt;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleType;
import com.intellij.openapi.module.WebModuleTypeBase;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModifiableRootModel;
import com.intellij.openapi.roots.ui.configuration.ModulesProvider;
import com.intellij.openapi.roots.ui.configuration.projectRoot.LibrariesContainer;
import com.intellij.openapi.roots.ui.configuration.projectRoot.LibrariesContainerFactory;
import com.intellij.openapi.ui.popup.ListItemDescriptorAdapter;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.platform.ProjectTemplate;
import com.intellij.platform.ProjectTemplateEP;
import com.intellij.platform.ProjectTemplatesFactory;
import com.intellij.platform.templates.*;
import com.intellij.psi.impl.DebugUtil;
import com.intellij.ui.CollectionListModel;
import com.intellij.ui.ListSpeedSearch;
import com.intellij.ui.SingleSelectionModel;
import com.intellij.ui.components.JBLabel;
import com.intellij.ui.components.JBList;
import com.intellij.ui.popup.list.GroupedItemsListRenderer;
import com.intellij.util.Function;
import com.intellij.util.PlatformUtils;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.Convertor;
import com.intellij.util.containers.FactoryMap;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.update.UiNotifyConnector;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.awt.*;
import java.net.URL;
import java.util.List;
import java.util.*;
/**
* @author Dmitry Avdeev
*/
@SuppressWarnings("unchecked")
public final class ProjectTypeStep extends ModuleWizardStep implements SettingsStep, Disposable {
private static final Logger LOG = Logger.getInstance(ProjectTypeStep.class);
// Extension point contributing additional project categories to the left-hand list.
private static final ExtensionPointName<ProjectCategory> CATEGORY_EP =
new ExtensionPointName<>("com.intellij.projectWizard.projectCategory");
// Extension point contributing bundled project templates.
private static final ExtensionPointName<ProjectTemplateEP> TEMPLATE_EP = new ExtensionPointName<>("com.intellij.projectTemplate");
private static final Convertor<FrameworkSupportInModuleProvider, String> PROVIDER_STRING_CONVERTOR =
o -> o.getId();
private static final Function<FrameworkSupportNode, String> NODE_STRING_FUNCTION = FrameworkSupportNodeBase::getId;
// Card ids for the CardLayout in myOptionsPanel (custom builder steps use the builder id as card id).
private static final String TEMPLATES_CARD = "templates card";
private static final String FRAMEWORKS_CARD = "frameworks card";
// PropertiesComponent key remembering the last selected group between wizard runs.
private static final String PROJECT_WIZARD_GROUP = "project.wizard.group";
private final WizardContext myContext;
private final NewProjectWizard myWizard;
private final ModulesProvider myModulesProvider;
private final AddSupportForFrameworksPanel myFrameworksPanel;
private final ModuleBuilder.ModuleConfigurationUpdater myConfigurationUpdater;
// Lazily creates and caches one ModuleBuilder per template.
private final Map<ProjectTemplate, ModuleBuilder> myBuilders = FactoryMap.create(key -> (ModuleBuilder)key.createModuleBuilder());
// Cache of custom option steps keyed by card id (builder id).
private final Map<String, ModuleWizardStep> myCustomSteps = new HashMap<>();
// All known templates, grouped; concurrent because remote templates are added from a background task.
private final MultiMap<TemplatesGroup,ProjectTemplate> myTemplatesMap;
private JPanel myPanel;
private JPanel myOptionsPanel;
private JBList<TemplatesGroup> myProjectTypeList;
private ProjectTemplateList myTemplatesList;
private JPanel myFrameworksPanelPlaceholder;
private JPanel myHeaderPanel;
private JBLabel myFrameworksLabel;
// Settings step supplied by the selected builder, if any.
@Nullable
private ModuleWizardStep mySettingsStep;
// Id of the card currently showing in myOptionsPanel.
private String myCurrentCard;
private TemplatesGroup myLastSelectedGroup;
/**
 * Builds the step UI: fills the group list, wires selection listeners, creates the
 * frameworks panel, registers wizard steps for every known builder and restores the
 * previously selected group.
 */
public ProjectTypeStep(WizardContext context, NewProjectWizard wizard, ModulesProvider modulesProvider) {
myContext = context;
myWizard = wizard;
myTemplatesMap = MultiMap.createConcurrent();
final List<TemplatesGroup> groups = fillTemplatesMap(context);
LOG.debug("groups=" + groups);
// Left-hand list of template groups / project types.
myProjectTypeList.setModel(new CollectionListModel<>(groups));
myProjectTypeList.setSelectionModel(new SingleSelectionModel());
myProjectTypeList.addListSelectionListener(__ -> updateSelection());
// Renderer showing the group name/icon/tooltip and a separator between top-level group runs.
myProjectTypeList.setCellRenderer(new GroupedItemsListRenderer<>(new ListItemDescriptorAdapter<TemplatesGroup>() {
@Nullable
@Override
public String getTextFor(TemplatesGroup value) {
return value.getName();
}
@Nullable
@Override
public String getTooltipFor(TemplatesGroup value) {
return value.getDescription();
}
@Nullable
@Override
public Icon getIconFor(TemplatesGroup value) {
return value.getIcon();
}
@Override
public boolean hasSeparatorAboveOf(TemplatesGroup value) {
// Separator before a group when the previous list entry belongs to a different parent group.
int index = groups.indexOf(value);
if (index < 1) return false;
TemplatesGroup upper = groups.get(index - 1);
if (upper.getParentGroup() == null && value.getParentGroup() == null) return true;
return !Objects.equals(upper.getParentGroup(), value.getParentGroup()) &&
!Objects.equals(upper.getName(), value.getParentGroup());
}
}) {
@Override
protected JComponent createItemComponent() {
JComponent component = super.createItemComponent();
myTextLabel.setBorder(JBUI.Borders.empty(3));
return component;
}
});
// Type-to-search over group names.
new ListSpeedSearch(myProjectTypeList) {
@Override
protected String getElementText(Object element) {
return ((TemplatesGroup)element).getName();
}
};
myModulesProvider = modulesProvider;
Project project = context.getProject();
final LibrariesContainer container = LibrariesContainerFactory.createContainer(context, modulesProvider);
// Framework support model resolves the builder from the current list selection.
FrameworkSupportModelBase model = new FrameworkSupportModelBase(project, null, container) {
@NotNull
@Override
public String getBaseDirectoryForLibrariesPath() {
ModuleBuilder builder = getSelectedBuilder();
return StringUtil.notNullize(builder.getContentEntryPath());
}
@Override
public ModuleBuilder getModuleBuilder() {
return getSelectedBuilder();
}
};
myFrameworksPanel = new AddSupportForFrameworksPanel(Collections.emptyList(), model, true, myHeaderPanel);
Disposer.register(this, myFrameworksPanel);
myFrameworksPanelPlaceholder.add(myFrameworksPanel.getMainPanel());
myFrameworksLabel.setLabelFor(myFrameworksPanel.getFrameworksTree());
myFrameworksLabel.setBorder(JBUI.Borders.empty(3));
// Applies selected framework support to the module once it is created, but only in frameworks mode.
myConfigurationUpdater = new ModuleBuilder.ModuleConfigurationUpdater() {
@Override
public void update(@NotNull Module module, @NotNull ModifiableRootModel rootModel) {
if (isFrameworksMode()) {
myFrameworksPanel.addSupport(module, rootModel);
}
}
};
myProjectTypeList.getSelectionModel().addListSelectionListener(__ -> projectTypeChanged());
myTemplatesList.addListSelectionListener(__ -> updateSelection());
// Register wizard steps for every group builder and every template builder up front.
for (TemplatesGroup templatesGroup : myTemplatesMap.keySet()) {
ModuleBuilder builder = templatesGroup.getModuleBuilder();
if (builder != null) {
myWizard.getSequence().addStepsForBuilder(builder, context, modulesProvider);
}
for (ProjectTemplate template : myTemplatesMap.get(templatesGroup)) {
myWizard.getSequence().addStepsForBuilder(myBuilders.get(template), context, modulesProvider);
}
}
// Restore the group selected in a previous wizard session; fall back to the first group.
final String groupId = PropertiesComponent.getInstance().getValue(PROJECT_WIZARD_GROUP);
LOG.debug("saved groupId=" + groupId);
if (groupId != null) {
TemplatesGroup group = ContainerUtil.find(groups, group1 -> groupId.equals(group1.getId()));
if (group != null) {
myProjectTypeList.setSelectedValue(group, true);
}
}
if (myProjectTypeList.getSelectedValue() == null) {
myProjectTypeList.setSelectedIndex(0);
}
myTemplatesList.restoreSelection();
}
/**
 * Returns the module type produced by the group's module builder, or {@code null}
 * when the group has no builder.
 */
private static ModuleType getModuleType(TemplatesGroup group) {
  ModuleBuilder builder = group.getModuleBuilder();
  if (builder == null) {
    return null;
  }
  return builder.getModuleType();
}
/**
 * Checks whether {@code framework} is applicable to {@code projectCategory}: a
 * framework declaring no roles matches every category, otherwise at least one of
 * its roles must be acceptable for the category.
 */
private static boolean matchFramework(ProjectCategory projectCategory, FrameworkSupportInModuleProvider framework) {
  FrameworkRole[] frameworkRoles = framework.getRoles();
  if (frameworkRoles.length == 0) {
    return true;
  }
  List<FrameworkRole> categoryRoles = Arrays.asList(projectCategory.getAcceptableFrameworkRoles());
  return ContainerUtil.intersects(Arrays.asList(frameworkRoles), categoryRoles);
}
/**
 * Collects templates from all {@link ProjectTemplatesFactory} extensions, grouped by
 * their {@link TemplatesGroup}; groups that produce no templates are omitted.
 */
private static MultiMap<TemplatesGroup, ProjectTemplate> getTemplatesMap(WizardContext context) {
  MultiMap<TemplatesGroup, ProjectTemplate> result = new MultiMap<>();
  for (ProjectTemplatesFactory factory : ProjectTemplatesFactory.EP_NAME.getExtensions()) {
    for (String group : factory.getGroups()) {
      List<ProjectTemplate> templates = Arrays.asList(factory.createTemplates(group, context));
      if (templates.isEmpty()) {
        continue;
      }
      TemplatesGroup templatesGroup =
        new TemplatesGroup(group, null, factory.getGroupIcon(group), factory.getGroupWeight(group),
                           factory.getParentGroup(group), group, null);
      templatesGroup.setPluginInfo(PluginInfoDetectorKt.getPluginInfo(factory.getClass()));
      result.putValues(templatesGroup, templates);
    }
  }
  return result;
}
/**
 * Returns {@code true} when the frameworks card is currently showing and the wizard
 * context's project builder is the one selected in this step.
 */
private boolean isFrameworksMode() {
  // Objects.equals avoids an NPE: getSelectedBuilder() may return null when the
  // selected group has no module builder.
  return FRAMEWORKS_CARD.equals(myCurrentCard) && Objects.equals(getSelectedBuilder(), myContext.getProjectBuilder());
}
/**
 * Populates {@link #myTemplatesMap} from all sources (module builders, template
 * factories, project categories and, for new projects, locally bundled templates)
 * and returns the list of groups sorted for display: by weight, then by module-type
 * popularity, with subgroups moved under their parents.
 */
private @NotNull List<TemplatesGroup> fillTemplatesMap(@NotNull WizardContext context) {
List<ModuleBuilder> builders = ModuleBuilder.getAllBuilders();
if (context.isCreatingNewProject()) {
builders.add(new EmptyModuleBuilder());
}
Map<String, TemplatesGroup> groupMap = new HashMap<>();
// Template-flavored builders join an existing group of the same name; others create their own group.
for (ModuleBuilder builder : builders) {
try {
BuilderBasedTemplate template = new BuilderBasedTemplate(builder);
if (builder.isTemplate()) {
TemplatesGroup group = groupMap.get(builder.getGroupName());
if (group == null) {
group = new TemplatesGroup(builder);
}
myTemplatesMap.putValue(group, template);
}
else {
TemplatesGroup group = new TemplatesGroup(builder);
groupMap.put(group.getName(), group);
myTemplatesMap.put(group, new ArrayList<>());
}
}
catch (Throwable e) {
// One broken builder must not break the whole wizard.
LOG.error(e);
}
}
myTemplatesMap.putAllValues(getTemplatesMap(context));
// Project categories replace any previously collected templates for the same group.
for (ProjectCategory category : CATEGORY_EP.getExtensionList()) {
TemplatesGroup group = new TemplatesGroup(category);
ModuleBuilder builder = group.getModuleBuilder();
if (builder == null || builder.isAvailable()) {
myTemplatesMap.remove(group);
myTemplatesMap.put(group, new ArrayList<>());
}
}
if (context.isCreatingNewProject()) {
MultiMap<String, ProjectTemplate> localTemplates = loadLocalTemplates();
for (TemplatesGroup group : myTemplatesMap.keySet()) {
myTemplatesMap.putValues(group, localTemplates.get(group.getId()));
}
}
List<TemplatesGroup> groups = new ArrayList<>(myTemplatesMap.keySet());
// sorting by module type popularity
final MultiMap<ModuleType, TemplatesGroup> moduleTypes = new MultiMap<>();
for (TemplatesGroup group : groups) {
ModuleType type = getModuleType(group);
moduleTypes.putValue(type, group);
}
// Order: explicit weight first, then how many groups share the module type, then natural order.
groups.sort((o1, o2) -> {
int i = o2.getWeight() - o1.getWeight();
if (i != 0) return i;
int i1 = moduleTypes.get(getModuleType(o2)).size() - moduleTypes.get(getModuleType(o1)).size();
if (i1 != 0) return i1;
return o1.compareTo(o2);
});
Set<String> groupNames = ContainerUtil.map2Set(groups, TemplatesGroup::getParentGroup);
// move subgroups
MultiMap<String, TemplatesGroup> subGroups = new MultiMap<>();
for (ListIterator<TemplatesGroup> iterator = groups.listIterator(); iterator.hasNext(); ) {
TemplatesGroup group = iterator.next();
String parentGroup = group.getParentGroup();
if (parentGroup != null && groupNames.contains(parentGroup) && !group.getName().equals(parentGroup) && groupMap.containsKey(parentGroup)) {
subGroups.putValue(parentGroup, group);
iterator.remove();
}
}
// Re-insert each collected subgroup directly after its parent.
for (ListIterator<TemplatesGroup> iterator = groups.listIterator(); iterator.hasNext(); ) {
TemplatesGroup group = iterator.next();
for (TemplatesGroup subGroup : subGroups.get(group.getName())) {
iterator.add(subGroup);
}
}
// remove Static Web group in IDEA Community if no specific templates found (IDEA-120593)
if (PlatformUtils.isIdeaCommunity()) {
for (ListIterator<TemplatesGroup> iterator = groups.listIterator(); iterator.hasNext(); ) {
TemplatesGroup group = iterator.next();
if (WebModuleTypeBase.WEB_MODULE.equals(group.getId()) && myTemplatesMap.get(group).isEmpty()) {
iterator.remove();
break;
}
}
}
return groups;
}
/** Exposes the builder-provided settings step; may be {@code null} (for tests). */
@TestOnly
@Nullable
ModuleWizardStep getSettingsStep() {
return mySettingsStep;
}
// new TemplatesGroup selected
/**
 * Reacts to a new group selection: persists the choice, rebuilds the header panel,
 * and shows the matching card — templates, a builder's custom options, or the
 * frameworks panel filtered by the group's project category.
 */
private void projectTypeChanged() {
TemplatesGroup group = getSelectedGroup();
if (group == null || group == myLastSelectedGroup) return;
myLastSelectedGroup = group;
// Remember the selection for the next wizard session.
PropertiesComponent.getInstance().setValue(PROJECT_WIZARD_GROUP, group.getId() );
if (LOG.isDebugEnabled()) {
LOG.debug("projectTypeChanged: " + group.getId() + " " + DebugUtil.currentStackTrace());
}
ModuleBuilder groupModuleBuilder = group.getModuleBuilder();
mySettingsStep = null;
myHeaderPanel.removeAll();
if (groupModuleBuilder != null && groupModuleBuilder.getModuleType() != null) {
mySettingsStep = groupModuleBuilder.modifyProjectTypeStep(this);
}
if (groupModuleBuilder == null || groupModuleBuilder.isTemplateBased()) {
showTemplates(group);
}
else if (!showCustomOptions(groupModuleBuilder)){
// No custom step: show the frameworks panel for this builder.
List<FrameworkSupportInModuleProvider> providers = FrameworkSupportUtil.getProviders(groupModuleBuilder);
final ProjectCategory category = group.getProjectCategory();
if (category != null) {
List<FrameworkSupportInModuleProvider> filtered = ContainerUtil.filter(providers, provider -> matchFramework(category, provider));
// add associated
Map<String, FrameworkSupportInModuleProvider> map = ContainerUtil.newMapFromValues(providers.iterator(), PROVIDER_STRING_CONVERTOR);
Set<FrameworkSupportInModuleProvider> set = new HashSet<>(filtered);
for (FrameworkSupportInModuleProvider provider : filtered) {
for (FrameworkSupportInModuleProvider.FrameworkDependency depId : provider.getDependenciesFrameworkIds()) {
FrameworkSupportInModuleProvider dependency = map.get(depId.getFrameworkId());
if (dependency == null) {
if (!depId.isOptional()) {
LOG.error("Cannot find provider '" + depId.getFrameworkId() + "' which is required for '" + provider.getId() + "'");
}
continue;
}
set.add(dependency);
}
}
myFrameworksPanel.setProviders(new ArrayList<>(set),
ContainerUtil.set(category.getAssociatedFrameworkIds()),
ContainerUtil.set(category.getPreselectedFrameworkIds()));
}
else {
myFrameworksPanel.setProviders(providers);
}
// Framework support is applied to the module when the wizard commits.
getSelectedBuilder().addModuleConfigurationUpdater(myConfigurationUpdater);
showCard(FRAMEWORKS_CARD);
}
myHeaderPanel.setVisible(myHeaderPanel.getComponentCount() > 0);
// align header labels
List<JLabel> labels = ContainerUtil.filter(UIUtil.findComponentsOfType(myHeaderPanel, JLabel.class), label ->
label.isVisible() && label.getLabelFor() != null
);
int width = 0;
for (JLabel label : labels) {
int width1 = label.getPreferredSize().width;
width = Math.max(width, width1);
}
for (JLabel label : labels) {
label.setPreferredSize(new Dimension(width, label.getPreferredSize().height));
}
myHeaderPanel.revalidate();
myHeaderPanel.repaint();
updateSelection();
}
/** Shows the given card in the options panel and remembers it as the current one. */
private void showCard(String card) {
  CardLayout layout = (CardLayout)myOptionsPanel.getLayout();
  layout.show(myOptionsPanel, card);
  myCurrentCard = card;
}
/** Switches to the templates card, populated with the given group's templates. */
private void showTemplates(TemplatesGroup group) {
  setTemplatesList(group, myTemplatesMap.get(group), false);
  showCard(TEMPLATES_CARD);
}
/**
 * Fills the templates list with {@code templates}. A builder-based template for the
 * group's own module builder is prepended, unless that builder is itself
 * template-module based.
 */
private void setTemplatesList(TemplatesGroup group, Collection<? extends ProjectTemplate> templates, boolean preserveSelection) {
  List<ProjectTemplate> items = new ArrayList<>(templates);
  ModuleBuilder groupBuilder = group.getModuleBuilder();
  boolean prependBuilder = groupBuilder != null && !(groupBuilder instanceof TemplateModuleBuilder);
  if (prependBuilder) {
    items.add(0, new BuilderBasedTemplate(groupBuilder));
  }
  myTemplatesList.setTemplates(items, preserveSelection);
}
/**
 * Shows the builder's custom options step, creating and caching it on first use.
 *
 * @return {@code false} when the builder provides no custom step
 */
private boolean showCustomOptions(@NotNull ModuleBuilder builder) {
  String card = builder.getBuilderId();
  // Steps are only cached when non-null, so a map miss means "not created yet".
  ModuleWizardStep customStep = myCustomSteps.get(card);
  if (customStep == null) {
    customStep = builder.getCustomOptionsStep(myContext, this);
    if (customStep == null) {
      return false;
    }
    customStep.updateStep();
    myCustomSteps.put(card, customStep);
    myOptionsPanel.add(customStep.getComponent(), card);
  }
  try {
    customStep._init();
  }
  catch (Throwable e) {
    // A misbehaving step must not block the wizard.
    LOG.error(e);
  }
  showCard(card);
  return true;
}
/** Returns the active custom options step (for tests). */
@TestOnly
public ModuleWizardStep getFrameworksStep() {
return getCustomStep();
}
/** Returns the cached custom step for the card currently showing, if any. */
@Nullable
private ModuleWizardStep getCustomStep() {
return myCustomSteps.get(myCurrentCard);
}
/** Returns the group selected in the left-hand list; may be {@code null}. */
private TemplatesGroup getSelectedGroup() {
return myProjectTypeList.getSelectedValue();
}
/**
 * Returns the template selected in the templates list, or {@code null} when the
 * templates card is not the one currently showing.
 */
@Nullable
private ProjectTemplate getSelectedTemplate() {
  // equals() instead of == : card ids are Strings, and identity comparison only
  // works while the exact constant reference is stored in myCurrentCard.
  return TEMPLATES_CARD.equals(myCurrentCard) ? myTemplatesList.getSelectedTemplate() : null;
}
/**
 * Returns the builder for the currently selected template, falling back to the
 * selected group's own module builder when no template is selected.
 */
private ModuleBuilder getSelectedBuilder() {
  ProjectTemplate template = getSelectedTemplate();
  return template == null ? getSelectedGroup().getModuleBuilder() : myBuilders.get(template);
}
/**
 * Returns the templates compatible with the current frameworks selection: all of the
 * selected group's templates when no framework is checked, otherwise only archived
 * templates covering every selected framework. Empty unless the frameworks card is
 * the one currently showing.
 */
public Collection<ProjectTemplate> getAvailableTemplates() {
  // equals() instead of != : card ids are Strings; identity comparison is fragile.
  if (!FRAMEWORKS_CARD.equals(myCurrentCard)) {
    return Collections.emptyList();
  }
  Collection<ProjectTemplate> templates = myTemplatesMap.get(getSelectedGroup());
  List<FrameworkSupportNode> nodes = myFrameworksPanel.getSelectedNodes();
  if (nodes.isEmpty()) return templates;
  final List<String> selectedFrameworks = ContainerUtil.map(nodes, NODE_STRING_FUNCTION);
  return ContainerUtil.filter(templates, template -> {
    // Non-archived templates are always considered compatible.
    if (!(template instanceof ArchivedProjectTemplate)) return true;
    List<String> frameworks = ((ArchivedProjectTemplate)template).getFrameworks();
    return frameworks.containsAll(selectedFrameworks);
  });
}
/**
 * On wizard completion: in frameworks mode, downloads the required libraries and
 * aborts the commit when the download fails; always reports "finish" statistics.
 */
@Override
public void onWizardFinished() throws CommitStepException {
  if (isFrameworksMode()) {
    boolean downloaded = myFrameworksPanel.downloadLibraries(myWizard.getContentComponent());
    if (!downloaded) {
      throw new CommitStepException(null);
    }
  }
  reportStatistics("finish");
}
/** Returns the step's root panel. */
@Override
public JComponent getComponent() {
return myPanel;
}
/**
 * Propagates the current selection into the wizard sequence and delegates the data
 * model update to the nested custom and settings steps, when present.
 */
@Override
public void updateDataModel() {
  ModuleBuilder selectedBuilder = getSelectedBuilder();
  if (selectedBuilder != null) {
    myWizard.getSequence().addStepsForBuilder(selectedBuilder, myContext, myModulesProvider);
  }
  ModuleWizardStep customStep = getCustomStep();
  if (customStep != null) {
    customStep.updateDataModel();
  }
  if (mySettingsStep != null) {
    mySettingsStep.updateDataModel();
  }
}
/**
 * Validates the nested settings step, the active custom step and the frameworks
 * panel, failing fast on the first invalid one.
 */
@Override
public boolean validate() throws ConfigurationException {
  if (mySettingsStep != null && !mySettingsStep.validate()) {
    return false;
  }
  ModuleWizardStep customStep = getCustomStep();
  if (customStep != null && !customStep.validate()) {
    return false;
  }
  if (isFrameworksMode() && !myFrameworksPanel.validate()) {
    return false;
  }
  return super.validate();
}
/** The group list receives initial focus. */
@Override
public JComponent getPreferredFocusedComponent() {
return myProjectTypeList;
}
/** Clears cached wizard state so heavy objects are not retained after disposal. */
@Override
public void dispose() {
myLastSelectedGroup = null;
mySettingsStep = null;
myTemplatesMap.clear();
myBuilders.clear();
myCustomSteps.clear();
}
@Override
public void disposeUIResources() {
Disposer.dispose(this);
}
/**
 * Loads templates bundled with installed plugins (via the projectTemplate extension
 * point). Category templates are added straight into {@link #myTemplatesMap}; the
 * rest are returned keyed by project type id.
 */
private @NotNull MultiMap<String, ProjectTemplate> loadLocalTemplates() {
MultiMap<String, ProjectTemplate> map = MultiMap.createConcurrent();
TEMPLATE_EP.processWithPluginDescriptor((ep, pluginDescriptor) -> {
URL url = pluginDescriptor.getPluginClassLoader().getResource(StringUtil.trimStart(ep.templatePath, "/"));
if (url == null) {
// Blame the contributing plugin, not the platform.
LOG.error(new PluginException("Can't find resource for project template: " + ep.templatePath, pluginDescriptor.getPluginId()));
return;
}
try {
LocalArchivedTemplate template = new LocalArchivedTemplate(url, pluginDescriptor.getPluginClassLoader());
if (ep.category) {
TemplateBasedCategory category = new TemplateBasedCategory(template, ep.projectType);
myTemplatesMap.putValue(new TemplatesGroup(category), template);
}
else {
map.putValue(ep.projectType, template);
}
}
catch (Exception e) {
// One broken template must not prevent loading the others.
LOG.error(new PluginException("Error loading template from URL: " + ep.templatePath, e, pluginDescriptor.getPluginId()));
}
});
return map;
}
/**
 * Loads remote templates, deferring the work until the panel is first shown; in unit
 * test mode loading starts immediately.
 */
void loadRemoteTemplates(final ChooseTemplateStep chooseTemplateStep) {
  Runnable loader = () -> startLoadingRemoteTemplates(chooseTemplateStep);
  if (ApplicationManager.getApplication().isUnitTestMode()) {
    loader.run();
  }
  else {
    UiNotifyConnector.doWhenFirstShown(myPanel, loader);
  }
}
/**
 * Fetches remote templates in a background task, merges them into matching groups of
 * {@link #myTemplatesMap}, then refreshes the lists on the EDT.
 */
private void startLoadingRemoteTemplates(ChooseTemplateStep chooseTemplateStep) {
myTemplatesList.setPaintBusy(true);
chooseTemplateStep.getTemplateList().setPaintBusy(true);
ProgressManager.getInstance().run(new Task.Backgroundable(myContext.getProject(), JavaUiBundle.message("progress.title.loading.templates")) {
@Override
public void run(@NotNull ProgressIndicator indicator) {
RemoteTemplatesFactory factory = new RemoteTemplatesFactory();
for (String group : factory.getGroups()) {
ProjectTemplate[] templates = factory.createTemplates(group, myContext);
for (ProjectTemplate template : templates) {
// A remote template joins every group whose id matches its category or whose name matches its group.
String id = ((ArchivedProjectTemplate)template).getCategory();
for (TemplatesGroup templatesGroup : myTemplatesMap.keySet()) {
if (Objects.equals(id, templatesGroup.getId()) || Objects.equals(group, templatesGroup.getName())) {
myTemplatesMap.putValue(templatesGroup, template);
}
}
}
}
}
@Override
public void onSuccess() {
super.onSuccess();
// Refresh the visible template list for the currently selected group.
TemplatesGroup group = getSelectedGroup();
if (group == null) return;
Collection<ProjectTemplate> templates = myTemplatesMap.get(group);
setTemplatesList(group, templates, true);
chooseTemplateStep.updateStep();
}
@Override
public void onFinished() {
// Always clear the busy indicators, even on failure/cancellation.
myTemplatesList.setPaintBusy(false);
chooseTemplateStep.getTemplateList().setPaintBusy(false);
}
});
}
/**
 * Pushes the current template/builder selection into the wizard context and the step
 * sequence, and refreshes the wizard buttons.
 */
private void updateSelection() {
ProjectTemplate template = getSelectedTemplate();
if (template != null) {
myContext.setProjectTemplate(template);
}
ModuleBuilder builder = getSelectedBuilder();
LOG.debug("builder=" + builder + "; template=" + template + "; group=" + getSelectedGroup() + "; groupIndex=" + myProjectTypeList.getMinSelectionIndex());
myContext.setProjectBuilder(builder);
if (builder != null) {
StepSequence sequence = myWizard.getSequence();
sequence.setType(builder.getBuilderId());
sequence.setIgnoredSteps(builder.getIgnoredSteps());
}
// Builders implementing WizardDelegate take over wizard navigation.
myWizard.setDelegate(builder instanceof WizardDelegate ? (WizardDelegate)builder : null);
myWizard.updateWizardButtons();
}
/** Returns the comma-separated names of all groups currently shown in the list (for tests). */
@TestOnly
public String availableTemplateGroupsToString() {
  ListModel<TemplatesGroup> model = myProjectTypeList.getModel();
  StringJoiner joiner = new StringJoiner(", ");
  for (int i = 0; i < model.getSize(); i++) {
    joiner.add(model.getElementAt(i).getName());
  }
  return joiner.toString();
}
// Test helper: selects the group with the given name and, optionally, a
// template inside it.
//
// Returns true if the requested group (and template, when given) was selected.
@TestOnly
public boolean setSelectedTemplate(@NotNull String group, @Nullable String name) {
    ListModel<TemplatesGroup> model = myProjectTypeList.getModel();
    for (int i = 0; i < model.getSize(); i++) {
        TemplatesGroup templatesGroup = model.getElementAt(i);
        if (group.equals(templatesGroup.getName())) {
            myProjectTypeList.setSelectedIndex(i);
            if (name == null) {
                // Guard against a null selection; elsewhere in this class
                // getSelectedGroup() is treated as nullable.
                TemplatesGroup selected = getSelectedGroup();
                return selected != null && selected.getName().equals(group);
            }
            else {
                setTemplatesList(templatesGroup, myTemplatesMap.get(templatesGroup), false);
                return myTemplatesList.setSelectedTemplate(name);
            }
        }
    }
    return false;
}
/** Clears the persisted last-used wizard group (test helper). */
public static void resetGroupForTests() {
    PropertiesComponent.getInstance().setValue(PROJECT_WIZARD_GROUP, null);
}
/** @return the frameworks panel; exposed for tests only. */
@TestOnly
public AddSupportForFrameworksPanel getFrameworksPanel() {
    return myFrameworksPanel;
}
/** @return the shared wizard context for this step. */
@Override
public WizardContext getContext() {
    return myContext;
}
@Override
public void addSettingsField(@NotNull @NlsContexts.Label String label, @NotNull JComponent field) {
    // Delegate row layout to ProjectSettingsStep's static helper.
    ProjectSettingsStep.addField(label, field, myHeaderPanel);
}
@Override
public void addSettingsComponent(@NotNull JComponent component) {
    // Intentionally a no-op: this step does not show extra settings components.
}
@Override
public void addExpertPanel(@NotNull JComponent panel) {
    // Intentionally a no-op: expert panels are not supported on this step.
}
@Override
public void addExpertField(@NotNull @NlsContexts.Label String label, @NotNull JComponent field) {
    // Intentionally a no-op: expert fields are not supported on this step.
}
@Override
public JTextField getModuleNameField() {
    // This step has no module name field.
    return null;
}
/**
 * Returns the help id of the custom step when it provides one, otherwise a
 * default id depending on whether a new project or a module is being created.
 */
@Override
public String getHelpId() {
    // Cache the custom step so the null check and the value used afterwards
    // refer to the same object (the original re-queried it for each use).
    ModuleWizardStep step = getCustomStep();
    if (step != null) {
        String helpId = step.getHelpId();
        if (helpId != null) {
            return helpId;
        }
    }
    return myContext.isCreatingNewProject() ? "Project_Category_and_Options" : "Module_Category_and_Options";
}
@Override
public void onStepLeaving() {
    // Record that the user moved past this step with the current selection.
    reportStatistics("attempt");
}
// Reports the current project-type / framework selection as a feature-usage
// event. Skips reporting when nothing is selected.
private void reportStatistics(String eventId) {
    TemplatesGroup group = myProjectTypeList.getSelectedValue();
    if (group == null) {
        // No selection (e.g. empty list): avoid an NPE and report nothing.
        return;
    }
    FeatureUsageData data = new FeatureUsageData();
    data.addData("projectType", group.getId());
    data.addPluginInfo(group.getPluginInfo());
    if (myCurrentCard.equals(FRAMEWORKS_CARD)) {
        myFrameworksPanel.reportSelectedFrameworks(eventId, data);
    }
    // Let statistics-aware custom steps contribute their own fields.
    ModuleWizardStep step = getCustomStep();
    if (step instanceof StatisticsAwareModuleWizardStep) {
        ((StatisticsAwareModuleWizardStep) step).addCustomFeatureUsageData(eventId, data);
    }
    FUCounterUsageLogger.getInstance().logEvent("new.project.wizard", eventId, data);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.ha.deploy;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import org.apache.catalina.Container;
import org.apache.catalina.Context;
import org.apache.catalina.Engine;
import org.apache.catalina.Host;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.ha.ClusterDeployer;
import org.apache.catalina.ha.ClusterListener;
import org.apache.catalina.ha.ClusterMessage;
import org.apache.catalina.tribes.Member;
import org.apache.catalina.util.ContextName;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.util.modeler.Registry;
import org.apache.tomcat.util.res.StringManager;
/**
* <p>
* A farm war deployer is a class that is able to deploy/undeploy web
* applications in WAR from within the cluster.
* </p>
* Any host can act as the admin, and will have three directories
* <ul>
* <li>watchDir - the directory where we watch for changes</li>
* <li>deployDir - the directory where we install applications</li>
 * <li>tempDir - a temporary directory to store binary data when downloading a
* war from the cluster</li>
* </ul>
* Currently we only support deployment of WAR files since they are easier to
* send across the wire.
*
* @author Peter Rossbach
*/
public class FarmWarDeployer extends ClusterListener
implements ClusterDeployer, FileChangeListener {
/*--Static Variables----------------------------------------*/
private static final Log log = LogFactory.getLog(FarmWarDeployer.class);
private static final StringManager sm = StringManager.getManager(FarmWarDeployer.class);
/*--Instance Variables--------------------------------------*/
protected boolean started = false;
protected final HashMap<String, FileMessageFactory> fileFactories =
new HashMap<>();
/**
* Deployment directory.
*/
protected String deployDir;
private File deployDirFile = null;
/**
* Temporary directory.
*/
protected String tempDir;
private File tempDirFile = null;
/**
* Watch directory.
*/
protected String watchDir;
private File watchDirFile = null;
protected boolean watchEnabled = false;
protected WarWatcher watcher = null;
/**
* Iteration count for background processing.
*/
private int count = 0;
/**
* Frequency of the Farm watchDir check. Cluster wide deployment will be
 * done once every specified number of backgroundProcess calls (i.e., the
 * lower the value, the more often the checks will occur).
*/
protected int processDeployFrequency = 2;
/**
* Path where context descriptors should be deployed.
*/
protected File configBase = null;
/**
* The associated host.
*/
protected Host host = null;
/**
* MBean server.
*/
protected MBeanServer mBeanServer = null;
/**
* The associated deployer ObjectName.
*/
protected ObjectName oname = null;
/**
* The maximum valid time(in seconds) for FileMessageFactory.
*/
protected int maxValidTime = 5 * 60;
/*--Constructor---------------------------------------------*/
public FarmWarDeployer() {
}
/*--Logic---------------------------------------------------*/
@Override
public void start() throws Exception {
    if (started) {
        return;
    }
    // The farm deployer only works when the cluster is attached to a Host.
    Container hcontainer = getCluster().getContainer();
    if(!(hcontainer instanceof Host)) {
        log.error(sm.getString("farmWarDeployer.hostOnly"));
        return ;
    }
    host = (Host) hcontainer;
    // Check to correct engine and host setup
    Container econtainer = host.getParent();
    if(!(econtainer instanceof Engine)) {
        log.error(sm.getString("farmWarDeployer.hostParentEngine",
                host.getName()));
        return ;
    }
    Engine engine = (Engine) econtainer;
    String hostname = null;
    hostname = host.getName();
    try {
        // Name of the host deployer MBean used later by check()/tryAddServiced()/removeServiced().
        oname = new ObjectName(engine.getName() + ":type=Deployer,host="
                + hostname);
    } catch (Exception e) {
        log.error(sm.getString("farmWarDeployer.mbeanNameFail",
                engine.getName(), hostname),e);
        return;
    }
    if (watchEnabled) {
        // Watch the configured directory for new/changed/removed WAR files.
        watcher = new WarWatcher(this, getWatchDirFile());
        if (log.isInfoEnabled()) {
            log.info(sm.getString(
                    "farmWarDeployer.watchDir", getWatchDir()));
        }
    }
    configBase = host.getConfigBaseFile();
    // Retrieve the MBean server
    mBeanServer = Registry.getRegistry(null, null).getMBeanServer();
    started = true;
    count = 0;
    // Start receiving cluster messages only once fully initialised.
    getCluster().addClusterListener(this);
    if (log.isInfoEnabled()) {
        log.info(sm.getString("farmWarDeployer.started"));
    }
}
/*
* stop cluster wide deployments
*
* @see org.apache.catalina.ha.ClusterDeployer#stop()
*/
@Override
public void stop() throws LifecycleException {
    started = false;
    // Stop receiving cluster messages before tearing down the watcher.
    getCluster().removeClusterListener(this);
    count = 0;
    if (watcher != null) {
        watcher.clear();
        watcher = null;
    }
    if (log.isInfoEnabled()) {
        log.info(sm.getString("farmWarDeployer.stopped"));
    }
}
/**
     * Callback from the cluster when a message is received. The cluster
     * broadcasts the message by invoking messageReceived on each receiver.
*
* @param msg
* ClusterMessage - the message received from the cluster
*/
@Override
public void messageReceived(ClusterMessage msg) {
    try {
        if (msg instanceof FileMessage) {
            // A fragment of a WAR being transferred across the cluster.
            FileMessage fmsg = (FileMessage) msg;
            if (log.isDebugEnabled()) {
                log.debug(sm.getString("farmWarDeployer.msgRxDeploy",
                        fmsg.getContextName(), fmsg.getFileName()));
            }
            FileMessageFactory factory = getFactory(fmsg);
            // TODO correct second try after app is in service!
            if (factory.writeMessage(fmsg)) {
                //last message received war file is completed
                String name = factory.getFile().getName();
                if (!name.endsWith(".war")) {
                    name = name + ".war";
                }
                File deployable = new File(getDeployDirFile(), name);
                try {
                    String contextName = fmsg.getContextName();
                    // Only deploy if we can mark the context as being serviced;
                    // otherwise another operation is in progress.
                    if (tryAddServiced(contextName)) {
                        try {
                            // Undeploy any existing version before moving the
                            // completed WAR into the deploy directory.
                            remove(contextName);
                            if (!factory.getFile().renameTo(deployable)) {
                                log.error(sm.getString(
                                        "farmWarDeployer.renameFail",
                                        factory.getFile(), deployable));
                            }
                        } finally {
                            removeServiced(contextName);
                        }
                        // Ask the host deployer to pick up the new WAR.
                        check(contextName);
                        if (log.isDebugEnabled()) {
                            log.debug(sm.getString(
                                    "farmWarDeployer.deployEnd",
                                    contextName));
                        }
                    } else {
                        log.error(sm.getString(
                                "farmWarDeployer.servicingDeploy",
                                contextName, name));
                    }
                } catch (Exception ex) {
                    log.error(sm.getString("farmWarDeployer.fileMessageError"), ex);
                } finally {
                    // The factory for this transfer is done either way.
                    removeFactory(fmsg);
                }
            }
        } else if (msg instanceof UndeployMessage) {
            try {
                UndeployMessage umsg = (UndeployMessage) msg;
                String contextName = umsg.getContextName();
                if (log.isDebugEnabled()) {
                    log.debug(sm.getString("farmWarDeployer.msgRxUndeploy",
                            contextName));
                }
                if (tryAddServiced(contextName)) {
                    try {
                        remove(contextName);
                    } finally {
                        removeServiced(contextName);
                    }
                    if (log.isDebugEnabled()) {
                        log.debug(sm.getString(
                                "farmWarDeployer.undeployEnd",
                                contextName));
                    }
                } else {
                    log.error(sm.getString(
                            "farmWarDeployer.servicingUndeploy",
                            contextName));
                }
            } catch (Exception ex) {
                log.error(sm.getString("farmWarDeployer.undeployMessageError"), ex);
            }
        }
    } catch (java.io.IOException x) {
        log.error(sm.getString("farmWarDeployer.msgIoe"), x);
    }
}
/**
 * Get (or lazily create) the factory that reassembles the WAR fragments of
 * the given file message into a file in the temp directory.
 *
 * @param msg the file message carrying a WAR fragment
 * @return the factory accumulating fragments for that file name
 * @throws java.io.FileNotFoundException if the target file cannot be opened
 * @throws java.io.IOException on any other I/O error
 */
public synchronized FileMessageFactory getFactory(FileMessage msg)
        throws java.io.FileNotFoundException, java.io.IOException {
    FileMessageFactory factory = fileFactories.get(msg.getFileName());
    if (factory == null) {
        File writeToFile = new File(getTempDirFile(), msg.getFileName());
        factory = FileMessageFactory.getInstance(writeToFile, true);
        factory.setMaxValidTime(maxValidTime);
        fileFactories.put(msg.getFileName(), factory);
    }
    return factory;
}
/**
 * Discard the fragment-reassembly factory for the given file message,
 * typically once the transfer has completed or failed.
 *
 * @param msg The file message whose factory should be removed
 */
public void removeFactory(FileMessage msg) {
    fileFactories.remove(msg.getFileName());
}
/**
     * Before the cluster invokes messageReceived, the cluster will ask the
     * receiver to accept or decline the message. In the future, when messages
     * get big, the accept method will only take a message header
*
* @param msg ClusterMessage
* @return boolean - returns true to indicate that messageReceived should be
* invoked. If false is returned, the messageReceived method will
* not be invoked.
*/
@Override
public boolean accept(ClusterMessage msg) {
    // This deployer only handles WAR fragment and undeploy messages.
    return (msg instanceof FileMessage) || (msg instanceof UndeployMessage);
}
/**
* Install a new web application, whose web application archive is at the
* specified URL, into this container and all the other members of the
* cluster with the specified context name.
* <p>
* If this application is successfully installed locally, a ContainerEvent
* of type <code>INSTALL_EVENT</code> will be sent to all registered
* listeners, with the newly created <code>Context</code> as an argument.
*
* @param contextName
* The context name to which this application should be installed
* (must be unique)
* @param webapp
* A WAR file or unpacked directory structure containing the web
* application to be installed
*
* @exception IllegalArgumentException
* if the specified context name is malformed
* @exception IllegalStateException
* if the specified context name is already deployed
* @exception IOException
* if an input/output error was encountered during
* installation
*/
@Override
public void install(String contextName, File webapp) throws IOException {
    // With no other members there is nobody to send the WAR to.
    Member[] members = getCluster().getMembers();
    if (members.length == 0) {
        return;
    }
    Member localMember = getCluster().getLocalMember();
    FileMessageFactory factory =
            FileMessageFactory.getInstance(webapp, false);
    FileMessage msg = new FileMessage(localMember, webapp.getName(),
            contextName);
    if(log.isDebugEnabled()) {
        log.debug(sm.getString("farmWarDeployer.sendStart", contextName,
                webapp));
    }
    // Stream the WAR as a sequence of fragment messages; readMessage returns
    // null once the whole file has been consumed.
    for (msg = factory.readMessage(msg); msg != null; msg = factory.readMessage(msg)) {
        for (Member member : members) {
            if (log.isDebugEnabled()) {
                log.debug(sm.getString("farmWarDeployer.sendFragment",
                        contextName, webapp, member));
            }
            getCluster().send(msg, member);
        }
    }
    if(log.isDebugEnabled()) {
        log.debug(sm.getString(
                "farmWarDeployer.sendEnd", contextName, webapp));
    }
}
/**
* Remove an existing web application, attached to the specified context
* name. If this application is successfully removed, a ContainerEvent of
* type <code>REMOVE_EVENT</code> will be sent to all registered
* listeners, with the removed <code>Context</code> as an argument.
* Deletes the web application war file and/or directory if they exist in
* the Host's appBase.
*
* @param contextName
* The context name of the application to be removed
* @param undeploy
* boolean flag to remove web application from server
*
* @exception IllegalArgumentException
* if the specified context name is malformed
* @exception IllegalArgumentException
* if the specified context name does not identify a
* currently installed web application
* @exception IOException
* if an input/output error occurs during removal
*/
@Override
public void remove(String contextName, boolean undeploy)
        throws IOException {
    // First tell the rest of the cluster to undeploy the context.
    if (getCluster().getMembers().length > 0) {
        if (log.isInfoEnabled()) {
            log.info(sm.getString("farmWarDeployer.removeStart", contextName));
        }
        Member localMember = getCluster().getLocalMember();
        UndeployMessage msg = new UndeployMessage(localMember, System
                .currentTimeMillis(), "Undeploy:" + contextName + ":"
                + System.currentTimeMillis(), contextName);
        if (log.isDebugEnabled()) {
            log.debug(sm.getString("farmWarDeployer.removeTxMsg", contextName));
        }
        cluster.send(msg);
    }
    // remove locally
    if (undeploy) {
        try {
            // Only undeploy when the context can be marked as being serviced.
            if (tryAddServiced(contextName)) {
                try {
                    remove(contextName);
                } finally {
                    removeServiced(contextName);
                }
                // Let the host deployer re-scan after the removal.
                check(contextName);
            } else {
                log.error(sm.getString("farmWarDeployer.removeFailRemote",
                        contextName));
            }
        } catch (Exception ex) {
            log.error(sm.getString("farmWarDeployer.removeFailLocal",
                    contextName), ex);
        }
    }
}
/**
* Modification from watchDir war detected!
*
* @see org.apache.catalina.ha.deploy.FileChangeListener#fileModified(File)
*/
@Override
public void fileModified(File newWar) {
    try {
        File deployWar = new File(getDeployDirFile(), newWar.getName());
        ContextName cn = new ContextName(deployWar.getName(), true);
        // Skip if the already-deployed copy is newer than the watched WAR.
        if (deployWar.exists() && deployWar.lastModified() > newWar.lastModified()) {
            if (log.isInfoEnabled()) {
                log.info(sm.getString("farmWarDeployer.alreadyDeployed", cn.getName()));
            }
            return;
        }
        if (log.isInfoEnabled()) {
            log.info(sm.getString("farmWarDeployer.modInstall",
                    cn.getName(), deployWar.getAbsolutePath()));
        }
        // install local
        if (tryAddServiced(cn.getName())) {
            try {
                copy(newWar, deployWar);
            } finally {
                removeServiced(cn.getName());
            }
            check(cn.getName());
        } else {
            log.error(sm.getString("farmWarDeployer.servicingDeploy",
                    cn.getName(), deployWar.getName()));
        }
        // NOTE(review): install() is invoked cluster-wide even when local
        // servicing failed above — presumably intentional so other nodes
        // still receive the WAR; confirm before changing.
        install(cn.getName(), deployWar);
    } catch (Exception x) {
        log.error(sm.getString("farmWarDeployer.modInstallFail"), x);
    }
}
/**
 * A WAR was removed from the watch directory: undeploy the matching
 * context locally and across the cluster.
 *
 * @see org.apache.catalina.ha.deploy.FileChangeListener#fileRemoved(File)
 */
@Override
public void fileRemoved(File removeWar) {
    try {
        ContextName contextName = new ContextName(removeWar.getName(), true);
        if (log.isInfoEnabled()) {
            log.info(sm.getString("farmWarDeployer.removeLocal",
                    contextName.getName()));
        }
        remove(contextName.getName(), true);
    } catch (Exception e) {
        log.error(sm.getString("farmWarDeployer.removeLocalFail"), e);
    }
}
/**
 * Invoke the remove method on the deployer: stop the context and delete its
 * WAR, expanded directory, or context descriptor from disk.
 *
 * @param contextName The context to remove
 * @throws Exception If an error occurs removing the context
 */
protected void remove(String contextName) throws Exception {
    // TODO Handle remove also work dir content !
    // Stop the context first to be nicer
    Context context = (Context) host.findChild(contextName);
    if (context != null) {
        if(log.isDebugEnabled()) {
            log.debug(sm.getString("farmWarDeployer.undeployLocal",
                    contextName));
        }
        context.stop();
        String baseName = context.getBaseName();
        File war = new File(host.getAppBaseFile(), baseName + ".war");
        File dir = new File(host.getAppBaseFile(), baseName);
        File xml = new File(configBase, baseName + ".xml");
        // Delete whichever artefact exists: WAR first, then the expanded
        // directory, and finally the bare context descriptor.
        if (war.exists()) {
            if (!war.delete()) {
                log.error(sm.getString("farmWarDeployer.deleteFail", war));
            }
        } else if (dir.exists()) {
            undeployDir(dir);
        } else {
            if (!xml.delete()) {
                log.error(sm.getString("farmWarDeployer.deleteFail", xml));
            }
        }
    }
}
/**
 * Recursively delete a directory and everything underneath it, logging
 * every entry that could not be removed.
 *
 * @param dir File object representing the directory to be deleted
 */
protected void undeployDir(File dir) {
    String[] entries = dir.list();
    if (entries != null) {
        for (String entry : entries) {
            File child = new File(dir, entry);
            if (child.isDirectory()) {
                undeployDir(child);
            } else if (!child.delete()) {
                log.error(sm.getString("farmWarDeployer.deleteFail", child));
            }
        }
    }
    if (!dir.delete()) {
        log.error(sm.getString("farmWarDeployer.deleteFail", dir));
    }
}
/**
 * Periodic background hook: every {@code processDeployFrequency} calls, ask
 * the watcher to check the watch directory, and always expire stale
 * file-message factories.
 *
 * @see org.apache.catalina.ha.ClusterDeployer#backgroundProcess()
 */
@Override
public void backgroundProcess() {
    if (started) {
        // Guard against a null watcher: watchEnabled may have been turned on
        // after start(), in which case no watcher was ever created.
        if (watchEnabled && watcher != null) {
            count = (count + 1) % processDeployFrequency;
            if (count == 0) {
                watcher.check();
            }
        }
        removeInvalidFileFactories();
    }
}
/*--Deployer Operations ------------------------------------*/
/**
 * Check a context for deployment operations by invoking {@code check} on
 * the host deployer MBean.
 *
 * @param name The context name
 * @throws Exception Error invoking the deployer
 */
protected void check(String name) throws Exception {
    String[] params = { name };
    String[] signature = { "java.lang.String" };
    mBeanServer.invoke(oname, "check", params, signature);
}
/**
 * Ask the host deployer (via JMX) to mark a context as being serviced.
 *
 * @param name The context name
 * @return {@code true} if the application was marked as being serviced and
 *  {@code false} if the application was already marked as being serviced
 * @throws Exception Error invoking the deployer
 */
protected boolean tryAddServiced(String name) throws Exception {
    Object serviced = mBeanServer.invoke(oname, "tryAddServiced",
            new String[] { name }, new String[] { "java.lang.String" });
    return ((Boolean) serviced).booleanValue();
}
/**
 * Ask the host deployer (via JMX) to clear the serviced mark on a context.
 *
 * @param name The context name
 * @throws Exception Error invoking the deployer
 */
protected void removeServiced(String name) throws Exception {
    mBeanServer.invoke(oname, "removeServiced",
            new String[] { name }, new String[] { "java.lang.String" });
}
/*--Instance Getters/Setters--------------------------------*/
@Override
public boolean equals(Object listener) {
    // Identity semantics inherited from the superclass; overridden only to
    // keep equals and hashCode visibly paired.
    return super.equals(listener);
}
@Override
public int hashCode() {
    // Must stay consistent with equals(), which also delegates to super.
    return super.hashCode();
}
/** @return the configured deployment directory path. */
public String getDeployDir() {
    return deployDir;
}
/** @return the deployment directory as a resolved {@link File}, cached after first use. */
public File getDeployDirFile() {
    if (deployDirFile == null) {
        deployDirFile = getAbsolutePath(getDeployDir());
    }
    return deployDirFile;
}
/** @param deployDir the deployment directory path to use. */
public void setDeployDir(String deployDir) {
    this.deployDir = deployDir;
}
/** @return the configured temporary directory path. */
public String getTempDir() {
    return tempDir;
}
/** @return the temporary directory as a resolved {@link File}, cached after first use. */
public File getTempDirFile() {
    if (tempDirFile == null) {
        tempDirFile = getAbsolutePath(getTempDir());
    }
    return tempDirFile;
}
/** @param tempDir the temporary directory path to use. */
public void setTempDir(String tempDir) {
    this.tempDir = tempDir;
}
/** @return the configured watch directory path. */
public String getWatchDir() {
    return watchDir;
}
/** @return the watch directory as a resolved {@link File}, cached after first use. */
public File getWatchDirFile() {
    if (watchDirFile == null) {
        watchDirFile = getAbsolutePath(getWatchDir());
    }
    return watchDirFile;
}
/** @param watchDir the watch directory path to use. */
public void setWatchDir(String watchDir) {
    this.watchDir = watchDir;
}
/** @return whether the watch directory is monitored for changed WARs. */
public boolean isWatchEnabled() {
    return watchEnabled;
}
/**
 * Same as {@link #isWatchEnabled()} — duplicate accessor kept, presumably
 * for property-introspection compatibility (confirm before removing).
 */
public boolean getWatchEnabled() {
    return watchEnabled;
}
/** @param watchEnabled whether to monitor the watch directory. */
public void setWatchEnabled(boolean watchEnabled) {
    this.watchEnabled = watchEnabled;
}
/**
 * @return the frequency of watcher checks, in backgroundProcess calls.
 */
public int getProcessDeployFrequency() {
    return this.processDeployFrequency;
}
/**
 * Set the watcher checks frequency. Non-positive values are silently
 * ignored and the previous frequency is kept.
 *
 * @param processExpiresFrequency
 *            the new manager checks frequency
 */
public void setProcessDeployFrequency(int processExpiresFrequency) {
    if (processExpiresFrequency <= 0) {
        return;
    }
    this.processDeployFrequency = processExpiresFrequency;
}
/** @return the maximum valid time (seconds) for a FileMessageFactory. */
public int getMaxValidTime() {
    return maxValidTime;
}
/** @param maxValidTime the maximum valid time (seconds) for a FileMessageFactory. */
public void setMaxValidTime(int maxValidTime) {
    this.maxValidTime = maxValidTime;
}
/**
 * Copy a file from the watch/temp area to the deploy target, overwriting
 * any existing content.
 *
 * @param from the source file
 * @param to the destination file (typically under the deploy directory)
 * @return {@code true} if the copy completed successfully
 */
protected boolean copy(File from, File to) {
    try {
        if (!to.exists()) {
            if (!to.createNewFile()) {
                // Fixed message key: every other key in this class uses the
                // "farmWarDeployer." prefix; the bare "fileNewFail" key would
                // not resolve in the resource bundle.
                log.error(sm.getString("farmWarDeployer.fileNewFail", to));
                return false;
            }
        }
    } catch (IOException e) {
        log.error(sm.getString("farmWarDeployer.fileCopyFail",
                from, to), e);
        return false;
    }
    // Streams are closed by try-with-resources even on error.
    try (java.io.FileInputStream is = new java.io.FileInputStream(from);
            java.io.FileOutputStream os = new java.io.FileOutputStream(to, false)) {
        byte[] buf = new byte[4096];
        int len;
        while ((len = is.read(buf)) >= 0) {
            os.write(buf, 0, len);
        }
    } catch (IOException e) {
        log.error(sm.getString("farmWarDeployer.fileCopyFail",
                from, to), e);
        return false;
    }
    return true;
}
/**
 * Drop factories whose maximum valid time has expired. Removing through the
 * values view avoids the snapshot array and per-key double lookup of the
 * original implementation.
 */
protected void removeInvalidFileFactories() {
    fileFactories.values().removeIf(factory -> !factory.isValid());
}
// Resolve a possibly-relative path against CATALINA_BASE and canonicalise it.
private File getAbsolutePath(String path) {
    File dir = new File(path);
    if (!dir.isAbsolute()) {
        dir = new File(getCluster().getContainer().getCatalinaBase(),
                dir.getPath());
    }
    try {
        dir = dir.getCanonicalFile();
    } catch (IOException e) {// ignore
        // Best effort: fall back to the non-canonical file on failure.
    }
    return dir;
}
}
| |
/*
* Copyright 2016 Providence Authors
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package net.morimekta.providence.reflect;
import com.google.common.collect.ImmutableList;
import net.morimekta.providence.model.ProgramType;
import net.morimekta.providence.reflect.contained.CProgram;
import net.morimekta.providence.reflect.parser.ParseException;
import net.morimekta.providence.reflect.parser.ProgramParser;
import net.morimekta.providence.reflect.parser.ThriftProgramParser;
import net.morimekta.providence.reflect.util.ProgramConverter;
import net.morimekta.providence.reflect.util.ProgramRegistry;
import net.morimekta.providence.reflect.util.ProgramTypeRegistry;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import static net.morimekta.providence.reflect.util.ReflectionUtils.longestCommonPrefixPath;
import static net.morimekta.providence.reflect.util.ReflectionUtils.programNameFromPath;
import static net.morimekta.providence.reflect.util.ReflectionUtils.stripCommonPrefix;
/**
 * Loads thrift programs from file, resolving and recursively loading their
 * includes, and keeping all parsed and converted programs in a shared
 * registry.
 *
 * @author Stein Eldar Johnsen
 * @since 07.09.15
 */
public class TypeLoader {
    private final ProgramRegistry programRegistry;
    private final ProgramConverter converter;
    private final ProgramParser parser;
    // Insertion-ordered map of canonical file path -> parsed program.
    private final Map<String, ProgramType> loadedDocuments;
    private final Collection<File> includes;
    /**
     * Construct a type loader for file types matches with the given parser.
     *
     * @param includes List of files with include path roots. For includes
     *                 search these in order.
     */
    public TypeLoader(Collection<File> includes) {
        this(includes, new ThriftProgramParser());
    }
    /**
     * Construct a type loader for file types matches with the given parser.
     *
     * @param includes List of files with include path roots. For includes
     *                 search these in order.
     * @param parser   The thrift file parser.
     */
    public TypeLoader(Collection<File> includes, ProgramParser parser) {
        this(includes, parser, new ProgramRegistry());
    }
    /**
     * Intermediate constructor.
     *
     * @param includes List of files with include path roots. For includes
     *                 search these in order.
     * @param parser   The thrift file parser.
     * @param registry Type registry to keep parsed types in.
     */
    private TypeLoader(Collection<File> includes, ProgramParser parser, ProgramRegistry registry) {
        this(includes, parser, registry, new ProgramConverter(registry));
    }
    /**
     * Constructor with injected functionality.
     *
     * @param includes  List of files with include path roots. For includes
     *                  search these in order.
     * @param parser    The thrift file parser.
     * @param registry  The type registry.
     * @param converter The document converter
     */
    protected TypeLoader(Collection<File> includes, ProgramParser parser, ProgramRegistry registry, ProgramConverter converter) {
        this.includes = includes;
        this.parser = parser;
        this.programRegistry = registry;
        this.converter = converter;
        this.loadedDocuments = new LinkedHashMap<>();
    }
    /**
     * @return Set of loaded documents.
     */
    public Collection<ProgramType> loadedPrograms() {
        return loadedDocuments.values();
    }
    /**
     * Load a thrift definition from file including all it's dependencies.
     *
     * @param file The file to load.
     * @return The loaded contained document.
     * @throws IOException If the file could not be read or parsed.
     */
    public ProgramTypeRegistry load(File file) throws IOException {
        return loadInternal(file, new ArrayList<>());
    }
    // Recursive worker for load(): loadStack holds the chain of canonical
    // file paths currently being loaded, used for circular-include detection.
    private ProgramTypeRegistry loadInternal(File file, List<String> loadStack) throws IOException {
        // Copy defensively; callers may pass immutable or shared lists.
        loadStack = new ArrayList<>(loadStack);
        file = file.getCanonicalFile();
        if (!file.exists()) {
            throw new IllegalArgumentException("No such file " + file);
        }
        if (!file.isFile()) {
            throw new IllegalArgumentException(
                    "Unable to load thrift program: " + file + " is not a file.");
        }
        file = file.getAbsoluteFile();
        String path = file.getPath();
        if (loadStack.contains(path)) {
            // circular includes.
            // Only show the circular includes, not the path to get there.
            while (!loadStack.get(0).equals(path)) {
                loadStack.remove(0);
            }
            loadStack.add(path);
            String prefix = longestCommonPrefixPath(loadStack);
            if (prefix.length() > 0) {
                loadStack = stripCommonPrefix(loadStack);
                throw new IllegalArgumentException(
                        "Circular includes detected: " + prefix + "... " + String.join(" -> ", loadStack));
            }
            throw new IllegalArgumentException(
                    "Circular includes detected: " + String.join(" -> ", loadStack));
        }
        loadStack.add(path);
        ProgramTypeRegistry registry = this.programRegistry.registryForPath(path);
        if (programRegistry.containsProgramPath(path)) {
            // Already loaded; nothing more to do.
            return registry;
        }
        ProgramType doc;
        // try-with-resources: the original leaked this stream on every call.
        try (InputStream in = new BufferedInputStream(new FileInputStream(file))) {
            doc = parser.parse(in, file, includes);
        }
        ArrayList<File> queue = new ArrayList<>();
        if (doc.hasIncludes()) {
            for (String include : doc.getIncludes()) {
                // Prefer paths relative to the including file, then fall back
                // to the configured include roots, in order.
                File location = new File(file.getParent(), include).getCanonicalFile();
                if (!location.exists()) {
                    if (include.startsWith(".") || include.startsWith(File.separator)) {
                        throw new ParseException("No such file \"" + include + "\" to include from " + file.getName());
                    }
                    for (File inc : includes) {
                        File i = new File(inc, include);
                        if (i.exists()) {
                            location = i.getCanonicalFile();
                            break;
                        }
                    }
                }
                if (location.exists() && !queue.contains(location)) {
                    queue.add(location.getAbsoluteFile());
                }
            }
        }
        // Load includes in reverse order, in case of serial dependencies.
        Collections.reverse(queue);
        loadedDocuments.put(path, doc);
        for (File include : queue) {
            registry.registerInclude(programNameFromPath(include.getPath()),
                                     loadInternal(include, ImmutableList.copyOf(loadStack)));
        }
        // Now everything it depends on is loaded.
        CProgram program = converter.convert(path, doc);
        programRegistry.putProgram(path, program);
        programRegistry.putProgramType(path, doc);
        return registry;
    }
    /**
     * @return The local registry.
     */
    public ProgramRegistry getProgramRegistry() {
        return programRegistry;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.server.core.authz.support;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.mycila.junit.concurrent.Concurrency;
import com.mycila.junit.concurrent.ConcurrentJunitRunner;
import org.apache.directory.api.ldap.aci.ACITuple;
import org.apache.directory.api.ldap.aci.MicroOperation;
import org.apache.directory.api.ldap.aci.ProtectedItem;
import org.apache.directory.api.ldap.aci.UserClass;
import org.apache.directory.api.ldap.aci.protectedItem.AllAttributeValuesItem;
import org.apache.directory.api.ldap.aci.protectedItem.AttributeTypeItem;
import org.apache.directory.api.ldap.aci.protectedItem.AttributeValueItem;
import org.apache.directory.api.ldap.aci.protectedItem.RangeOfValuesItem;
import org.apache.directory.api.ldap.aci.protectedItem.SelfValueItem;
import org.apache.directory.api.ldap.model.constants.AuthenticationLevel;
import org.apache.directory.api.ldap.model.entry.Attribute;
import org.apache.directory.api.ldap.model.filter.PresenceNode;
import org.apache.directory.api.ldap.model.schema.AttributeType;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
* Tests {@link MostSpecificProtectedItemFilter}.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
@RunWith(ConcurrentJunitRunner.class)
@Concurrency()
public class MostSpecificProtectedItemFilterTest
{
    // Empty building blocks shared by every tuple. The filter under test only
    // inspects the protected items, so everything else can stay empty.
    private static final Set<AttributeType> EMPTY_ATTRIBUTE_TYPE_COLLECTION = Collections
        .unmodifiableSet( new HashSet<AttributeType>() );

    private static final Set<Attribute> EMPTY_ATTRIBUTE_COLLECTION = Collections
        .unmodifiableSet( new HashSet<Attribute>() );

    private static final Collection<UserClass> EMPTY_USER_CLASS_COLLECTION = Collections
        .unmodifiableCollection( new ArrayList<UserClass>() );

    private static final Collection<ACITuple> EMPTY_ACI_TUPLE_COLLECTION = Collections
        .unmodifiableCollection( new ArrayList<ACITuple>() );

    private static final Collection<ProtectedItem> EMPTY_PROTECTED_ITEM_COLLECTION = Collections
        .unmodifiableCollection( new ArrayList<ProtectedItem>() );

    private static final Set<MicroOperation> EMPTY_MICRO_OPERATION_SET = Collections
        .unmodifiableSet( new HashSet<MicroOperation>() );

    // Tuple lists ordered from most specific protected item to least specific.
    // TUPLES_B..TUPLES_E are progressively shorter tail slices of TUPLES_A.
    private static final List<ACITuple> TUPLES_A = new ArrayList<ACITuple>();
    private static final List<ACITuple> TUPLES_B = new ArrayList<ACITuple>();
    private static final List<ACITuple> TUPLES_C = new ArrayList<ACITuple>();
    private static final List<ACITuple> TUPLES_D = new ArrayList<ACITuple>();
    private static final List<ACITuple> TUPLES_E = new ArrayList<ACITuple>();


    /** Wraps a single protected item in a fresh modifiable collection. */
    private static Collection<ProtectedItem> items( ProtectedItem item )
    {
        Collection<ProtectedItem> items = new ArrayList<ProtectedItem>();
        items.add( item );
        return items;
    }


    /** Builds a grant tuple with precedence 0 protecting the given items. */
    private static ACITuple newTuple( Collection<ProtectedItem> items )
    {
        return new ACITuple( EMPTY_USER_CLASS_COLLECTION, AuthenticationLevel.NONE, items,
            EMPTY_MICRO_OPERATION_SET, true, 0 );
    }


    @BeforeClass
    public static void init()
    {
        ACITuple attributeTypeTuple = newTuple( items( new AttributeTypeItem( EMPTY_ATTRIBUTE_TYPE_COLLECTION ) ) );
        ACITuple allAttributeValuesTuple = newTuple( items(
            new AllAttributeValuesItem( EMPTY_ATTRIBUTE_TYPE_COLLECTION ) ) );
        ACITuple selfValueTuple = newTuple( items( new SelfValueItem( EMPTY_ATTRIBUTE_TYPE_COLLECTION ) ) );
        ACITuple attributeValueTuple = newTuple( items( new AttributeValueItem( EMPTY_ATTRIBUTE_COLLECTION ) ) );
        ACITuple rangeOfValuesTuple = newTuple( items(
            new RangeOfValuesItem( new PresenceNode( ( String ) null ) ) ) );
        ACITuple allUserAttributeTypesTuple = newTuple( items( ProtectedItem.ALL_USER_ATTRIBUTE_TYPES ) );
        ACITuple allUserAttributeTypesAndValuesTuple = newTuple( items(
            ProtectedItem.ALL_USER_ATTRIBUTE_TYPES_AND_VALUES ) );

        Collections.addAll( TUPLES_A, attributeTypeTuple, allAttributeValuesTuple, selfValueTuple,
            attributeValueTuple, rangeOfValuesTuple, allUserAttributeTypesTuple, allUserAttributeTypesAndValuesTuple );

        // Each subsequent list drops the most specific remaining tuple.
        TUPLES_B.addAll( TUPLES_A.subList( 1, TUPLES_A.size() ) );
        TUPLES_C.addAll( TUPLES_A.subList( 2, TUPLES_A.size() ) );
        TUPLES_D.addAll( TUPLES_A.subList( 3, TUPLES_A.size() ) );
        TUPLES_E.addAll( TUPLES_A.subList( 5, TUPLES_A.size() ) );
    }


    /**
     * Runs the filter over a copy of the given tuples at ENTRY scope and checks
     * that exactly the first {@code expectedCount} (most specific) tuples
     * survive, in their original order.
     */
    private static void assertMostSpecificRetained( List<ACITuple> source, int expectedCount ) throws Exception
    {
        MostSpecificProtectedItemFilter filter = new MostSpecificProtectedItemFilter();
        AciContext aciContext = new AciContext( null, null );
        aciContext.setAciTuples( new ArrayList<ACITuple>( source ) );

        List<ACITuple> filtered = ( List<ACITuple> ) filter.filter( aciContext, OperationScope.ENTRY, null );

        assertEquals( expectedCount, filtered.size() );

        for ( int i = 0; i < expectedCount; i++ )
        {
            assertSame( source.get( i ), filtered.get( i ) );
        }
    }


    @Test
    public void testZeroOrOneTuple() throws Exception
    {
        MostSpecificProtectedItemFilter filter = new MostSpecificProtectedItemFilter();

        // An empty tuple collection passes through untouched.
        AciContext aciContext = new AciContext( null, null );
        aciContext.setAciTuples( EMPTY_ACI_TUPLE_COLLECTION );
        assertEquals( 0, filter.filter( aciContext, OperationScope.ATTRIBUTE_TYPE_AND_VALUE, null ).size() );

        // A single tuple is likewise returned as-is.
        Collection<ACITuple> tuples = new ArrayList<ACITuple>();
        tuples.add( new ACITuple( EMPTY_USER_CLASS_COLLECTION, AuthenticationLevel.NONE,
            EMPTY_PROTECTED_ITEM_COLLECTION, EMPTY_MICRO_OPERATION_SET, false, 0 ) );
        aciContext = new AciContext( null, null );
        aciContext.setAciTuples( tuples );
        assertEquals( 1, filter.filter( aciContext, OperationScope.ATTRIBUTE_TYPE_AND_VALUE, null ).size() );
    }


    @Test
    public void testTuplesA() throws Exception
    {
        assertMostSpecificRetained( TUPLES_A, 4 );
    }


    @Test
    public void testTuplesB() throws Exception
    {
        assertMostSpecificRetained( TUPLES_B, 3 );
    }


    @Test
    public void testTuplesC() throws Exception
    {
        assertMostSpecificRetained( TUPLES_C, 2 );
    }


    @Test
    public void testTuplesD() throws Exception
    {
        assertMostSpecificRetained( TUPLES_D, 1 );
    }


    @Test
    public void testTuplesE() throws Exception
    {
        assertMostSpecificRetained( TUPLES_E, 2 );
    }
}
| |
/*
* Copyright (C) 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thomas.view;
import java.util.Collection;
import java.util.HashSet;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.graphics.drawable.BitmapDrawable;
import android.util.AttributeSet;
import android.view.View;
import com.google.zxing.ResultPoint;
import com.ionicframework.starter.R;
import com.thomas.zxing.camera.CameraManager;
/**
* This view is overlaid on top of the camera preview. It adds the viewfinder
* rectangle and partial transparency outside it, as well as the laser scanner
* animation and result points.
*
*/
/**
 * Overlay drawn on top of the camera preview: it dims everything outside the
 * framing rectangle, draws four corner markers, animates a scan line, and
 * renders a hint text below the frame. When a barcode has been decoded, the
 * result bitmap is drawn over the frame instead of the live animation.
 */
public final class ViewfinderView extends View {

	/** Delay between animation frames, in milliseconds. */
	private static final long ANIMATION_DELAY = 10L;
	private static final int OPAQUE = 0xFF;
	/** Pixels the scan line advances per frame. */
	private static final int SPEED_DISTANCE = 5;
	/** Hint text size, in dp (scaled by density at draw time). */
	private static final int TEXT_SIZE = 16;
	/** Gap between the frame bottom and the hint text baseline, in dp. */
	private static final int TEXT_PADDING_TOP = 30;
	/** Edge length of each corner marker, in px. */
	private static final int CORNER_SIZE = 50;
	/** Height of the animated scan line, in px. */
	private static final int SCAN_LINE_HEIGHT = 3;
	/** Cap on buffered result points; nothing else ever drains the set. */
	private static final int MAX_RESULT_POINTS = 20;

	private final float density;
	private final Paint paint;
	private final int maskColor;
	private final int resultColor;

	// Corner and scan-line bitmaps are decoded once in the constructor. The
	// previous implementation decoded all five drawables on every onDraw()
	// pass, allocating per frame.
	private final Bitmap cornerLeftTop;
	private final Bitmap cornerRightTop;
	private final Bitmap cornerLeftBottom;
	private final Bitmap cornerRightBottom;
	private final Bitmap scanLine;

	/** Scratch rectangle reused for every bitmap draw to avoid per-frame allocation. */
	private final Rect drawRect = new Rect();

	private final Collection<ResultPoint> possibleResultPoints;

	private Bitmap resultBitmap;
	/** Current top edge of the animated scan line, initialized from the frame on first draw. */
	private int slideTop;
	boolean isFirst;

	public ViewfinderView(Context context, AttributeSet attrs) {
		super(context, attrs);
		density = context.getResources().getDisplayMetrics().density;
		paint = new Paint();
		Resources resources = getResources();
		maskColor = resources.getColor(R.color.viewfinder_mask);
		resultColor = resources.getColor(R.color.result_view);
		cornerLeftTop = bitmapOf(resources, R.drawable.scan_left_top);
		cornerRightTop = bitmapOf(resources, R.drawable.scan_right_top);
		cornerLeftBottom = bitmapOf(resources, R.drawable.scan_left_bottom);
		cornerRightBottom = bitmapOf(resources, R.drawable.scan_right_bottom);
		scanLine = bitmapOf(resources, R.drawable.scan_center_line);
		possibleResultPoints = new HashSet<ResultPoint>(5);
	}

	/** Decodes a drawable resource to its backing bitmap. */
	private static Bitmap bitmapOf(Resources resources, int id) {
		return ((BitmapDrawable) resources.getDrawable(id)).getBitmap();
	}

	@Override
	public void onDraw(Canvas canvas) {
		// The framing rect comes from the camera layer; it is null until the
		// camera has been opened — nothing to draw before then.
		Rect frame = CameraManager.get().getFramingRect();
		if (frame == null) {
			return;
		}
		if (!isFirst) {
			isFirst = true;
			slideTop = frame.top;
		}
		int width = canvas.getWidth();
		int height = canvas.getHeight();

		// Dim the four regions outside the framing rectangle.
		paint.setColor(resultBitmap != null ? resultColor : maskColor);
		canvas.drawRect(0, 0, width, frame.top, paint);
		canvas.drawRect(0, frame.top, frame.left, frame.bottom + 1, paint);
		canvas.drawRect(frame.right + 1, frame.top, width, frame.bottom + 1, paint);
		canvas.drawRect(0, frame.bottom + 1, width, height, paint);

		if (resultBitmap != null) {
			// Draw the opaque result bitmap over the scanning rectangle and
			// stop animating (no redraw is scheduled in this state).
			paint.setAlpha(OPAQUE);
			canvas.drawBitmap(resultBitmap, frame.left, frame.top, paint);
			return;
		}

		paint.setColor(Color.GREEN);

		// Four corner markers.
		drawInto(canvas, cornerLeftTop, frame.left, frame.top,
				frame.left + CORNER_SIZE, frame.top + CORNER_SIZE);
		drawInto(canvas, cornerRightTop, frame.right - CORNER_SIZE, frame.top,
				frame.right, frame.top + CORNER_SIZE);
		drawInto(canvas, cornerLeftBottom, frame.left, frame.bottom - CORNER_SIZE,
				frame.left + CORNER_SIZE, frame.bottom);
		drawInto(canvas, cornerRightBottom, frame.right - CORNER_SIZE, frame.bottom - CORNER_SIZE,
				frame.right, frame.bottom);

		// Advance the scan line, wrapping back to the top of the frame.
		slideTop += SPEED_DISTANCE;
		if (slideTop >= frame.bottom) {
			slideTop = frame.top;
		}
		drawInto(canvas, scanLine, frame.left, slideTop, frame.right, slideTop + SCAN_LINE_HEIGHT);

		// Hint text centered horizontally below the frame.
		paint.setColor(Color.WHITE);
		paint.setTextSize(TEXT_SIZE * density);
		paint.setAlpha(0x40);
		paint.setTypeface(Typeface.create("System", Typeface.BOLD));
		String hint = getResources().getString(R.string.scan_pile_code);
		float textWidth = paint.measureText(hint);
		canvas.drawText(hint, frame.left + (frame.right - frame.left - textWidth) / 2,
				frame.bottom + TEXT_PADDING_TOP * density, paint);

		// Only repaint the viewfinder region, on a fixed cadence.
		postInvalidateDelayed(ANIMATION_DELAY, frame.left, frame.top,
				frame.right, frame.bottom);
	}

	/** Draws {@code bitmap} stretched into the given bounds using the scratch rect. */
	private void drawInto(Canvas canvas, Bitmap bitmap, int left, int top, int right, int bottom) {
		drawRect.set(left, top, right, bottom);
		canvas.drawBitmap(bitmap, null, drawRect, paint);
	}

	/** Resets the overlay to live-scanning mode and schedules a redraw. */
	public void drawViewfinder() {
		resultBitmap = null;
		invalidate();
	}

	/**
	 * Draw a bitmap with the result points highlighted instead of the live
	 * scanning display.
	 *
	 * @param barcode An image of the decoded barcode.
	 */
	public void drawResultBitmap(Bitmap barcode) {
		resultBitmap = barcode;
		invalidate();
	}

	public void addPossibleResultPoint(ResultPoint point) {
		possibleResultPoints.add(point);
		// The drawing code that used to consume these points is gone, so the
		// set previously grew without bound; clear it once it gets large to
		// avoid a slow memory leak.
		if (possibleResultPoints.size() > MAX_RESULT_POINTS) {
			possibleResultPoints.clear();
		}
	}
}
| |
package org.opencds.cqf.ruler.cr.r4.provider;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.regex.Pattern;
import com.google.common.base.Strings;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.MutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.r4.model.Bundle.BundleType;
import org.hl7.fhir.r4.model.CanonicalType;
import org.hl7.fhir.r4.model.Composition;
import org.hl7.fhir.r4.model.DetectedIssue;
import org.hl7.fhir.r4.model.Measure;
import org.hl7.fhir.r4.model.MeasureReport;
import org.hl7.fhir.r4.model.Meta;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.Resource;
import org.opencds.cqf.ruler.behavior.ConfigurationUser;
import org.opencds.cqf.ruler.behavior.ResourceCreator;
import org.opencds.cqf.ruler.behavior.r4.MeasureReportUser;
import org.opencds.cqf.ruler.behavior.r4.ParameterUser;
import org.opencds.cqf.ruler.builder.BundleBuilder;
import org.opencds.cqf.ruler.builder.CodeableConceptSettings;
import org.opencds.cqf.ruler.builder.CompositionBuilder;
import org.opencds.cqf.ruler.builder.CompositionSectionComponentBuilder;
import org.opencds.cqf.ruler.builder.DetectedIssueBuilder;
import org.opencds.cqf.ruler.builder.NarrativeSettings;
import org.opencds.cqf.ruler.cr.CrProperties;
import org.opencds.cqf.ruler.provider.DaoRegistryOperationProvider;
import org.opencds.cqf.ruler.utility.Ids;
import org.opencds.cqf.ruler.utility.Operations;
import org.opencds.cqf.ruler.utility.Resources;
import org.opencds.cqf.ruler.utility.Searches;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
public class CareGapsProvider extends DaoRegistryOperationProvider
implements ParameterUser, ConfigurationUser, ResourceCreator, MeasureReportUser {
public static final Pattern CARE_GAPS_STATUS = Pattern
.compile("(open-gap|closed-gap|not-applicable)");
public static final String CARE_GAPS_REPORT_PROFILE = "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/indv-measurereport-deqm";
public static final String CARE_GAPS_BUNDLE_PROFILE = "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/gaps-bundle-deqm";
public static final String CARE_GAPS_COMPOSITION_PROFILE = "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/gaps-composition-deqm";
public static final String CARE_GAPS_DETECTEDISSUE_PROFILE = "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/gaps-detectedissue-deqm";
public static final String CARE_GAPS_GAP_STATUS_EXTENSION = "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/extension-gapStatus";
public static final String CARE_GAPS_GAP_STATUS_SYSTEM = "http://hl7.org/fhir/us/davinci-deqm/CodeSystem/gaps-status";
public static final String CARE_GAPS_MEASUREREPORT_REPORTER_EXTENSION = "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/extension-reporterGroup";
protected static final Map<String, CodeableConceptSettings> CARE_GAPS_CODES;
static {
CARE_GAPS_CODES = new HashMap<>();
CARE_GAPS_CODES.put("http://loinc.org/96315-7",
new CodeableConceptSettings().add("http://loinc.org", "96315-7", "Gaps in care report"));
CARE_GAPS_CODES.put("http://terminology.hl7.org/CodeSystem/v3-ActCode/CAREGAP", new CodeableConceptSettings()
.add("http://terminology.hl7.org/CodeSystem/v3-ActCode", "CAREGAP", "Care Gaps"));
}
// TODO: I guess this isn't available yet? Replace when we update to newer
// version of Java.
// = ofEntries(
// new AbstractMap.SimpleEntry<String,
// CodeableConceptSettings>("http://loinc.org/96315-7",
// new CodeableConceptSettings().add("http://loinc.org", "96315-7", "Gaps in
// care report")),
// new AbstractMap.SimpleEntry<String, CodeableConceptSettings>(
// "http://terminology.hl7.org/CodeSystem/v3-ActCode/CAREGAP", new
// CodeableConceptSettings()
// .add("http://terminology.hl7.org/CodeSystem/v3-ActCode", "CAREGAP", "Care
// Gaps")));
/** Gap status codes defined by the DEQM gaps-in-care reporting model. */
public enum CareGapsStatusCode {
	OPEN_GAP("open-gap"),
	CLOSED_GAP("closed-gap"),
	NOT_APPLICABLE("not-applicable");

	/** Wire-format code emitted by {@link #toString()}. */
	private final String myValue;

	private CareGapsStatusCode(final String theValue) {
		myValue = theValue;
	}

	@Override
	public String toString() {
		return myValue;
	}

	/** Returns the human-readable label for this status. */
	public String toDisplayString() {
		switch (this) {
			case OPEN_GAP:
				return "Open Gap";
			case CLOSED_GAP:
				return "Closed Gap";
			case NOT_APPLICABLE:
				return "Not Applicable";
			default:
				// Unreachable: every constant is handled above.
				throw new IllegalArgumentException();
		}
	}
}
static final Logger ourLog = LoggerFactory.getLogger(CareGapsProvider.class);
@Autowired
private MeasureEvaluateProvider measureEvaluateProvider;
@Autowired
private CrProperties crProperties;
/**
* Implements the <a href=
* "http://build.fhir.org/ig/HL7/davinci-deqm/OperationDefinition-care-gaps.html">$care-gaps</a>
* operation found in the
* <a href="http://build.fhir.org/ig/HL7/davinci-deqm/index.html">Da Vinci DEQM
* FHIR Implementation Guide</a> that overrides the <a href=
* "http://build.fhir.org/operation-measure-care-gaps.html">$care-gaps</a>
* operation found in the
* <a href="http://hl7.org/fhir/R4/clinicalreasoning-module.html">FHIR Clinical
* Reasoning Module</a>.
*
* The operation calculates measures describing gaps in care. For more details,
* reference the <a href=
* "http://build.fhir.org/ig/HL7/davinci-deqm/gaps-in-care-reporting.html">Gaps
* in Care Reporting</a> section of the
* <a href="http://build.fhir.org/ig/HL7/davinci-deqm/index.html">Da Vinci DEQM
* FHIR Implementation Guide</a>.
*
* A Parameters resource that includes zero to many document bundles that
* include Care Gap Measure Reports will be returned.
*
* Usage:
* URL: [base]/Measure/$care-gaps
*
* @param theRequestDetails generally auto-populated by the HAPI server
* framework.
* @param periodStart the start of the gaps through period
* @param periodEnd the end of the gaps through period
* @param topic the category of the measures that is of interest for
* the care gaps report
* @param subject a reference to either a Patient or Group for which
* the gaps in care report(s) will be generated
* @param practitioner a reference to a Practitioner for which the gaps in
* care report(s) will be generated
* @param organization a reference to an Organization for which the gaps in
* care report(s) will be generated
* @param status the status code of gaps in care reports that will be
* included in the result
* @param measureId the id of Measure(s) for which the gaps in care
* report(s) will be calculated
* @param measureIdentifier the identifier of Measure(s) for which the gaps in
* care report(s) will be calculated
* @param measureUrl the canonical URL of Measure(s) for which the gaps
* in care report(s) will be calculated
* @param program the program that a provider (either clinician or
* clinical organization) participates in
* @return Parameters of bundles of Care Gap Measure Reports
*/
@SuppressWarnings("squid:S00107") // warning for greater than 7 parameters
@Description(shortDefinition = "$care-gaps", value = "Implements the <a href=\"http://build.fhir.org/ig/HL7/davinci-deqm/OperationDefinition-care-gaps.html\">$care-gaps</a> operation found in the <a href=\"http://build.fhir.org/ig/HL7/davinci-deqm/index.html\">Da Vinci DEQM FHIR Implementation Guide</a> which is an extension of the <a href=\"http://build.fhir.org/operation-measure-care-gaps.html\">$care-gaps</a> operation found in the <a href=\"http://hl7.org/fhir/R4/clinicalreasoning-module.html\">FHIR Clinical Reasoning Module</a>.")
@Operation(name = "$care-gaps", idempotent = true, type = Measure.class)
public Parameters careGapsReport(RequestDetails theRequestDetails,
		@OperationParam(name = "periodStart") String periodStart,
		@OperationParam(name = "periodEnd") String periodEnd,
		@OperationParam(name = "topic") List<String> topic,
		@OperationParam(name = "subject") String subject,
		@OperationParam(name = "practitioner") String practitioner,
		@OperationParam(name = "organization") String organization,
		@OperationParam(name = "status") List<String> status,
		@OperationParam(name = "measureId") List<String> measureId,
		@OperationParam(name = "measureIdentifier") List<String> measureIdentifier,
		@OperationParam(name = "measureUrl") List<CanonicalType> measureUrl,
		@OperationParam(name = "program") List<String> program) {
	validateConfiguration(theRequestDetails);
	validateParameters(theRequestDetails);
	// TODO: filter by topic.
	// TODO: filter by program.
	List<Measure> measures = ensureMeasures(getMeasures(measureId, measureIdentifier, measureUrl, theRequestDetails));
	// Only subject-based scoping is currently supported.
	if (Strings.isNullOrEmpty(subject)) {
		// TODO: implement non subject parameters (practitioner and organization)
		throw new NotImplementedException("Non subject parameters have not been implemented.");
	}
	List<Patient> patients = getPatientListFromSubject(subject);
	Parameters result = initializeResult();
	// One "return" parameter per patient that produced at least one report.
	for (Patient patient : patients) {
		Parameters.ParametersParameterComponent patientParameter = patientReports(theRequestDetails,
				periodStart, periodEnd, patient, status, measures, organization);
		if (patientParameter != null) {
			result.addParameter(patientParameter);
		}
	}
	return result;
}
private Map<String, Resource> configuredResources = new HashMap<>();
/**
 * Looks up a resource by id and, when found, caches it in
 * {@code configuredResources} under the given key.
 *
 * @return the resolved resource, or {@code null} when the lookup found nothing
 */
private <T extends Resource> T putConfiguredResource(Class<T> theResourceClass, String theId,
		String theKey, RequestDetails theRequestDetails) {
	T found = search(theResourceClass, Searches.byId(theId), theRequestDetails).firstOrNull();
	if (found == null) {
		return null;
	}
	configuredResources.put(theKey, found);
	return found;
}
@Override
public void validateConfiguration(RequestDetails theRequestDetails) {
	// Fail fast when the required measure_report settings are absent.
	checkNotNull(crProperties.getMeasureReport(),
			"The measure_report setting is required for the $care-gaps operation.");
	String reporterId = crProperties.getMeasureReport().getReporter();
	String authorId = crProperties.getMeasureReport().getCompositionAuthor();
	checkArgument(!Strings.isNullOrEmpty(reporterId),
			"The measure_report.care_gaps_reporter setting is required for the $care-gaps operation.");
	checkArgument(!Strings.isNullOrEmpty(authorId),
			"The measure_report.care_gaps_composition_section_author setting is required for the $care-gaps operation.");
	// Resolve and cache both organizations so they can be bundled into each report.
	Resource reporter = putConfiguredResource(Organization.class, reporterId,
			"care_gaps_reporter", theRequestDetails);
	Resource author = putConfiguredResource(Organization.class, authorId,
			"care_gaps_composition_section_author", theRequestDetails);
	checkNotNull(reporter, String.format(
			"The %s Resource is configured as the measure_report.care_gaps_reporter but the Resource could not be read.",
			reporterId));
	checkNotNull(author, String.format(
			"The %s Resource is configured as the measure_report.care_gaps_composition_section_author but the Resource could not be read.",
			authorId));
}
/**
 * Validates the $care-gaps request parameters before any evaluation work is
 * done. Validation order matters: the first failing rule determines the
 * error returned to the client.
 */
@SuppressWarnings("squid:S1192") // warning for using the same string value more than 5 times
public void validateParameters(RequestDetails theRequestDetails) {
	// periodStart/periodEnd must be present and form a valid period.
	Operations.validatePeriod(theRequestDetails, "periodStart", "periodEnd");
	// At most one subject, and it must reference a Patient or a Group.
	Operations.validateCardinality(theRequestDetails, "subject", 0, 1);
	Operations.validateSingularPattern(theRequestDetails, "subject", Operations.PATIENT_OR_GROUP_REFERENCE);
	// At least one status, each matching the care-gaps status code pattern.
	Operations.validateCardinality(theRequestDetails, "status", 1);
	Operations.validateSingularPattern(theRequestDetails, "status", CARE_GAPS_STATUS);
	// subject is mutually exclusive with organization/practitioner scoping;
	// practitioner, when given, must be accompanied by organization.
	Operations.validateExclusive(theRequestDetails, "subject", "organization", "practitioner");
	Operations.validateExclusive(theRequestDetails, "organization", "subject");
	Operations.validateInclusive(theRequestDetails, "practitioner", "organization");
	Operations.validateExclusiveOr(theRequestDetails, "subject", "organization");
	// Measures may be identified by id, identifier, or canonical URL.
	Operations.validateAtLeastOne(theRequestDetails, "measureId", "measureIdentifier", "measureUrl");
}
/**
 * Filters the given list down to measures that can produce a care-gaps
 * report: a measure must declare both a scoring method and an improvement
 * notation. Skipped measures are logged at INFO level.
 *
 * NOTE: the previous implementation called {@code measures.remove(...)}
 * while iterating the same list with {@code forEach}, which throws
 * {@link java.util.ConcurrentModificationException} the first time a
 * measure is skipped. {@code removeIf} performs the same filtering safely.
 *
 * @param measures the candidate measures; filtered in place
 * @return the same (now filtered) list, for call chaining
 */
private List<Measure> ensureMeasures(List<Measure> measures) {
	measures.removeIf(measure -> {
		if (!measure.hasScoring()) {
			ourLog.info("Measure does not specify a scoring so skipping: {}.", measure.getId());
			return true;
		}
		if (!measure.hasImprovementNotation()) {
			ourLog.info("Measure does not specify an improvement notation so skipping: {}.", measure.getId());
			return true;
		}
		return false;
	});
	return measures;
}
/** Creates the empty Parameters shell that per-patient report bundles are added to. */
private Parameters initializeResult() {
	String resultId = "care-gaps-report-" + UUID.randomUUID();
	return newResource(Parameters.class, resultId);
}
/**
 * Builds the "return" parameter for one patient: evaluates all measures and,
 * when any report survives the status filter, wraps the composition, reports,
 * detected issues, and evaluated resources into a document bundle.
 *
 * @return the populated parameter component, or {@code null} when no report
 *         matched the requested statuses for this patient
 */
@SuppressWarnings("squid:S00107") // warning for greater than 7 parameters
private Parameters.ParametersParameterComponent patientReports(RequestDetails requestDetails, String periodStart,
		String periodEnd, Patient patient, List<String> status, List<Measure> measures, String organization) {
	// TODO: add organization to report, if it exists.
	Composition composition = getComposition(patient);
	List<DetectedIssue> detectedIssues = new ArrayList<>();
	Map<String, Resource> evaluatedResources = new HashMap<>();
	// getReports fills composition/detectedIssues/evaluatedResources as side effects.
	List<MeasureReport> reports = getReports(requestDetails, periodStart, periodEnd, patient, status, measures,
			composition, detectedIssues, evaluatedResources);
	if (reports.isEmpty()) {
		return null;
	}
	Bundle reportBundle = addBundleEntries(requestDetails.getFhirServerBase(), composition, detectedIssues,
			reports, evaluatedResources);
	return initializePatientParameter(patient).setResource(reportBundle);
}
/**
 * Evaluates each measure for one patient and returns the reports whose gap
 * status is in the requested {@code status} list.
 *
 * Side effects on the caller's accumulators for every report that is kept:
 * appends a section to {@code composition}, adds a DetectedIssue to
 * {@code detectedIssues}, and merges the report's evaluated resources into
 * {@code evaluatedResources}.
 */
@SuppressWarnings("squid:S00107") // warning for greater than 7 parameters
private List<MeasureReport> getReports(RequestDetails requestDetails, String periodStart,
		String periodEnd, Patient patient, List<String> status, List<Measure> measures, Composition composition,
		List<DetectedIssue> detectedIssues, Map<String, Resource> evaluatedResources) {
	List<MeasureReport> reports = new ArrayList<>();
	MeasureReport report = null;
	for (Measure measure : measures) {
		// Individual ("patient" reportType) evaluation for this one subject.
		report = measureEvaluateProvider.evaluateMeasure(requestDetails, measure.getIdElement(), periodStart,
				periodEnd, "patient", Ids.simple(patient), null, null, null, null);
		if (!report.hasGroup()) {
			// Nothing was scored for this subject/measure pair; skip it.
			ourLog.info("Report does not include a group so skipping.\nSubject: {}\nMeasure: {}",
					Ids.simple(patient),
					Ids.simplePart(measure));
			continue;
		}
		initializeReport(report);
		CareGapsStatusCode gapStatus = getGapStatus(measure, report);
		if (!status.contains(gapStatus.toString())) {
			// Caller did not request reports with this gap status.
			continue;
		}
		DetectedIssue detectedIssue = getDetectedIssue(patient, report, gapStatus);
		detectedIssues.add(detectedIssue);
		composition.addSection(getSection(measure, report, detectedIssue, gapStatus));
		getEvaluatedResources(report, evaluatedResources);
		reports.add(report);
	}
	return reports;
}
/**
 * Prepares a freshly evaluated report for inclusion in a care-gaps bundle:
 * assigns a random id when missing, sets the configured reporter, and tags
 * the report with the DEQM individual-report profile.
 */
private void initializeReport(MeasureReport report) {
	if (Strings.isNullOrEmpty(report.getId())) {
		report.setId(Ids.newId(MeasureReport.class, UUID.randomUUID().toString()));
	}
	// TODO: figure out what this extension is for
	// reporter.addExtension(new
	// Extension().setUrl(CARE_GAPS_MEASUREREPORT_REPORTER_EXTENSION));
	report.setReporter(new Reference().setReference(crProperties.getMeasureReport().getReporter()));
	if (!report.hasMeta()) {
		report.setMeta(new Meta().addProfile(CARE_GAPS_REPORT_PROFILE));
	} else {
		report.getMeta().addProfile(CARE_GAPS_REPORT_PROFILE);
	}
}
/** Builds the "return" parameter component, with an id keyed by the patient's id. */
private Parameters.ParametersParameterComponent initializePatientParameter(Patient patient) {
	Parameters.ParametersParameterComponent component = Resources
			.newBackboneElement(Parameters.ParametersParameterComponent.class)
			.setName("return");
	component.setId("subject-" + Ids.simplePart(patient));
	return component;
}
private Bundle addBundleEntries(String serverBase, Composition composition, List<DetectedIssue> detectedIssues,
        List<MeasureReport> reports, Map<String, Resource> evaluatedResources) {
    // Assemble the document bundle: composition first, then reports, detected issues,
    // configured resources, and finally the resources evaluated by the measures.
    Bundle bundle = getBundle();
    bundle.addEntry(getBundleEntry(serverBase, composition));
    for (MeasureReport report : reports) {
        bundle.addEntry(getBundleEntry(serverBase, report));
    }
    for (DetectedIssue issue : detectedIssues) {
        bundle.addEntry(getBundleEntry(serverBase, issue));
    }
    for (Resource resource : configuredResources.values()) {
        bundle.addEntry(getBundleEntry(serverBase, resource));
    }
    for (Resource resource : evaluatedResources.values()) {
        bundle.addEntry(getBundleEntry(serverBase, resource));
    }
    return bundle;
}
private CareGapsStatusCode getGapStatus(Measure measure, MeasureReport report) {
    // The subject is "in the numerator" when any group has a numerator population with count 1.
    boolean inNumerator = report.getGroup().stream()
            .flatMap(group -> group.getPopulation().stream())
            .anyMatch(population -> population.hasCode()
                    && population.getCode().hasCoding(MEASUREREPORT_MEASURE_POPULATION_SYSTEM, "numerator")
                    && population.getCount() == 1);
    boolean isPositive = measure.getImprovementNotation().hasCoding(MEASUREREPORT_IMPROVEMENT_NOTATION_SYSTEM,
            "increase");
    // A gap is open when numerator membership disagrees with the improvement direction:
    // positive measures expect the subject in the numerator, negative measures expect it out.
    return isPositive != inNumerator ? CareGapsStatusCode.OPEN_GAP : CareGapsStatusCode.CLOSED_GAP;
}
private BundleEntryComponent getBundleEntry(String serverBase, Resource resource) {
    // Wrap the resource in a bundle entry addressed by its full URL on this server.
    BundleEntryComponent entry = new BundleEntryComponent();
    entry.setResource(resource);
    entry.setFullUrl(Operations.getFullUrl(serverBase, resource));
    return entry;
}
private Composition.SectionComponent getSection(Measure measure, MeasureReport report, DetectedIssue detectedIssue,
        CareGapsStatusCode gapStatus) {
    // Narrative tells the reader whether issues were found and where to look for details.
    String issueText = gapStatus == CareGapsStatusCode.CLOSED_GAP
            ? "No detected issues."
            : String.format("Issues detected. See %s for details.", Ids.simple(detectedIssue));
    String narrative = String.format(HTML_DIV_PARAGRAPH_CONTENT, issueText);
    // Prefer the measure's human-readable title; fall back to its canonical URL.
    String title = measure.hasTitle() ? measure.getTitle() : measure.getUrl();
    return new CompositionSectionComponentBuilder<Composition.SectionComponent>(Composition.SectionComponent.class)
            .withTitle(title)
            .withFocus(Ids.simple(report))
            .withText(new NarrativeSettings(narrative))
            .withEntry(Ids.simple(detectedIssue))
            .build();
}
private Bundle getBundle() {
    // Care-gaps output is a FHIR "document" bundle carrying the dedicated profile.
    BundleBuilder<Bundle> builder = new BundleBuilder<Bundle>(Bundle.class);
    builder.withProfile(CARE_GAPS_BUNDLE_PROFILE);
    builder.withType(BundleType.DOCUMENT.toString());
    return builder.build();
}
private Composition getComposition(Patient patient) {
    // Final composition titled for, and subject-linked to, the patient; authored by the
    // configured section author resource.
    CompositionBuilder<Composition> builder = new CompositionBuilder<Composition>(Composition.class);
    builder.withProfile(CARE_GAPS_COMPOSITION_PROFILE);
    builder.withType(CARE_GAPS_CODES.get("http://loinc.org/96315-7"));
    builder.withStatus(Composition.CompositionStatus.FINAL.toString());
    builder.withTitle("Care Gap Report for " + Ids.simplePart(patient));
    builder.withSubject(Ids.simple(patient));
    builder.withAuthor(Ids.simple(configuredResources.get("care_gaps_composition_section_author")));
    // .withCustodian(organization) // TODO: Optional: identifies the organization
    // who is responsible for ongoing maintenance of and accessing to this gaps in
    // care report. Add as a setting and optionally read if it's there.
    return builder.build();
}
private DetectedIssue getDetectedIssue(Patient patient, MeasureReport report, CareGapsStatusCode gapStatus) {
    // The gap status travels as a modifier extension coded in the care-gaps status system.
    ImmutablePair<String, CodeableConceptSettings> gapStatusExtension = new ImmutablePair<>(
            CARE_GAPS_GAP_STATUS_EXTENSION,
            new CodeableConceptSettings().add(CARE_GAPS_GAP_STATUS_SYSTEM, gapStatus.toString(),
                    gapStatus.toDisplayString()));
    return new DetectedIssueBuilder<DetectedIssue>(DetectedIssue.class)
            .withProfile(CARE_GAPS_DETECTEDISSUE_PROFILE)
            .withStatus(DetectedIssue.DetectedIssueStatus.FINAL.toString())
            .withCode(CARE_GAPS_CODES.get("http://terminology.hl7.org/CodeSystem/v3-ActCode/CAREGAP"))
            .withPatient(Ids.simple(patient))
            .withEvidenceDetail(Ids.simple(report))
            .withModifierExtension(gapStatusExtension)
            .build();
}
}
| |
/**
* JBoss, Home of Professional Open Source.
* Copyright 2014-2020 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.coordinator.builder;
import org.jboss.pnc.common.Date.ExpiresDate;
import org.jboss.pnc.common.logging.MDCUtils;
import org.jboss.pnc.coordinator.builder.datastore.DatastoreAdapter;
import org.jboss.pnc.model.BuildConfigSetRecord;
import org.jboss.pnc.model.BuildConfiguration;
import org.jboss.pnc.model.BuildConfigurationAudited;
import org.jboss.pnc.model.BuildConfigurationSet;
import org.jboss.pnc.model.ProductMilestone;
import org.jboss.pnc.model.User;
import org.jboss.pnc.model.utils.ContentIdentityManager;
import org.jboss.pnc.spi.BuildOptions;
import org.jboss.pnc.spi.coordinator.BuildSetTask;
import org.jboss.pnc.spi.coordinator.BuildTask;
import org.jboss.pnc.spi.datastore.DatastoreException;
import org.jboss.pnc.spi.exception.CoreException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Supplier;
import java.util.stream.Collectors;
/**
* @author <a href="mailto:matejonnet@gmail.com">Matej Lazar</a>
*/
public class BuildTasksInitializer {
private final Logger log = LoggerFactory.getLogger(BuildTasksInitializer.class);
private DatastoreAdapter datastoreAdapter; // TODO remove datastore dependency
private long temporaryBuildLifespanDays;
public BuildTasksInitializer(DatastoreAdapter datastoreAdapter, long temporaryBuildLifespanDays) {
this.datastoreAdapter = datastoreAdapter;
this.temporaryBuildLifespanDays = temporaryBuildLifespanDays;
}
public BuildSetTask createBuildSetTask(
BuildConfigurationAudited buildConfigurationAudited,
User user,
BuildOptions buildOptions,
Supplier<Integer> buildTaskIdProvider,
Set<BuildTask> submittedBuildTasks) {
BuildSetTask buildSetTask = BuildSetTask.Builder.newBuilder()
.buildOptions(buildOptions)
.startTime(new Date())
.build();
Set<BuildConfigurationAudited> toBuild = new HashSet<>();
collectBuildTasks(buildConfigurationAudited, buildOptions, toBuild);
log.debug(
"Collected build tasks for the BuildConfigurationAudited: {}. Collected: {}.",
buildConfigurationAudited,
toBuild.stream().map(BuildConfigurationAudited::toString).collect(Collectors.joining(", ")));
fillBuildTaskSet(
buildSetTask,
user,
buildTaskIdProvider,
buildConfigurationAudited.getBuildConfiguration().getCurrentProductMilestone(),
toBuild,
submittedBuildTasks,
buildOptions);
return buildSetTask;
}
private void collectBuildTasks(
BuildConfigurationAudited buildConfigurationAudited,
BuildOptions buildOptions,
Set<BuildConfigurationAudited> toBuild) {
log.debug(
"will create build tasks for scope: {} and configuration: {}",
buildOptions,
buildConfigurationAudited);
Set<BuildConfiguration> visited = new HashSet<>();
if (toBuild.contains(buildConfigurationAudited)) {
return;
}
toBuild.add(buildConfigurationAudited);
if (buildOptions.isBuildDependencies()) {
Set<Integer> processedDependenciesCache = new HashSet<>();
buildConfigurationAudited.getBuildConfiguration()
.getDependencies()
.forEach(
dependencyConfiguration -> collectDependentConfigurations(
dependencyConfiguration,
datastoreAdapter.getLatestBuildConfigurationAuditedInitializeBCDependencies(
dependencyConfiguration.getId()),
toBuild,
visited,
buildOptions.isImplicitDependenciesCheck(),
buildOptions.isForceRebuild(),
buildOptions.isTemporaryBuild(),
processedDependenciesCache));
}
}
/**
* Collects all BuildConfigurationAudited entities, that needs to be built.
*
* @param buildConfiguration Current BuildConfiguration used to resolve dependencies.
* @param buildConfigurationAudited Specific revision of a BuildConfiguration (passed as first parameter) to be
* potentially built
* @param toBuild Set of BuildConfigurationAudited entities planned to be built
* @param visited Set of BuildConfigurations, which were already evaluated, if should be built
* @param checkImplicitDependencies if implicit check of dependencies needs to be done
* @param forceRebuild if force build is required
* @param temporaryBuild if build is temporary
* @param processedDependenciesCache list containing any dependency which was already processed in previous
* iterations
* @return Returns true, if the buildConfiguration should be rebuilt, otherwise returns false.
*/
private boolean collectDependentConfigurations(
BuildConfiguration buildConfiguration,
BuildConfigurationAudited buildConfigurationAudited,
Set<BuildConfigurationAudited> toBuild,
Set<BuildConfiguration> visited,
boolean checkImplicitDependencies,
boolean forceRebuild,
boolean temporaryBuild,
Set<Integer> processedDependenciesCache) {
if (visited.contains(buildConfiguration)) {
return toBuild.contains(buildConfigurationAudited);
}
visited.add(buildConfiguration);
boolean requiresRebuild = forceRebuild || datastoreAdapter.requiresRebuild(
buildConfigurationAudited,
checkImplicitDependencies,
temporaryBuild,
processedDependenciesCache);
for (BuildConfiguration dependency : buildConfiguration.getDependencies()) {
boolean dependencyRequiresRebuild = collectDependentConfigurations(
dependency,
datastoreAdapter.getLatestBuildConfigurationAuditedInitializeBCDependencies(dependency.getId()),
toBuild,
visited,
checkImplicitDependencies,
forceRebuild,
temporaryBuild,
processedDependenciesCache);
requiresRebuild = requiresRebuild || dependencyRequiresRebuild;
}
log.debug("Configuration {} requires rebuild: {}", buildConfiguration.getId(), requiresRebuild);
if (requiresRebuild) {
toBuild.add(buildConfigurationAudited);
}
return requiresRebuild;
}
/**
* Create a BuildSetTask of latest revisions of BuildConfigurations contained in the BuildConfigurationSet
*
* @param buildConfigurationSet BuildConfigurationSet to be built
* @param user A user, who triggered the build
* @param buildOptions Build options
* @param buildTaskIdProvider Provider to get build task ID
* @param submittedBuildTasks Already submitted build tasks
* @return Prepared BuildSetTask
* @throws CoreException Thrown if the BuildConfigSetRecord cannot be stored
*/
public BuildSetTask createBuildSetTask(
BuildConfigurationSet buildConfigurationSet,
User user,
BuildOptions buildOptions,
Supplier<Integer> buildTaskIdProvider,
Set<BuildTask> submittedBuildTasks) throws CoreException {
return createBuildSetTask(
buildConfigurationSet,
Collections.emptyMap(),
user,
buildOptions,
buildTaskIdProvider,
submittedBuildTasks);
}
/**
* Create a BuildSetTask of BuildConfigurations contained in the BuildConfigurationSet.
*
* A specific revision of the BuildConfigurations contained in the set is used, if it's available in the
* buildConfigurationAuditedsMap parameter. If it's not available, latest revision of the BuildConfiguration is
* used.
*
* @param buildConfigurationSet BuildConfigurationSet to be built
* @param buildConfigurationAuditedsMap A map BuildConfiguration::id:BuildConfigurationAudited of specific revisions
* of BuildConfigurations contained in the buildConfigurationSet
* @param user A user, who triggered the build
* @param buildOptions Build options
* @param buildTaskIdProvider Provider to get build task ID
* @param submittedBuildTasks Already submitted build tasks
* @return Prepared BuildSetTask
* @throws CoreException Thrown if the BuildConfigSetRecord cannot be stored
*/
public BuildSetTask createBuildSetTask(
BuildConfigurationSet buildConfigurationSet,
Map<Integer, BuildConfigurationAudited> buildConfigurationAuditedsMap,
User user,
BuildOptions buildOptions,
Supplier<Integer> buildTaskIdProvider,
Set<BuildTask> submittedBuildTasks) throws CoreException {
BuildSetTask buildSetTask = initBuildSetTask(buildConfigurationSet, user, buildOptions);
Set<BuildConfigurationAudited> buildConfigurationAuditeds = new HashSet<>();
for (BuildConfiguration buildConfiguration : datastoreAdapter.getBuildConfigurations(buildConfigurationSet)) {
BuildConfigurationAudited buildConfigurationAudited = buildConfigurationAuditedsMap
.get(buildConfiguration.getId());
if (buildConfigurationAudited == null) {
buildConfigurationAudited = datastoreAdapter
.getLatestBuildConfigurationAuditedInitializeBCDependencies(buildConfiguration.getId());
}
buildConfigurationAuditeds.add(buildConfigurationAudited);
}
// initializeBuildTasksInSet
log.debug(
"Initializing BuildTasks In Set for BuildConfigurationAuditeds: {}.",
buildConfigurationAuditeds.stream()
.map(BuildConfigurationAudited::toString)
.collect(Collectors.joining("; ")));
fillBuildTaskSet(
buildSetTask,
user,
buildTaskIdProvider,
buildConfigurationSet.getCurrentProductMilestone(),
buildConfigurationAuditeds,
submittedBuildTasks,
buildOptions);
return buildSetTask;
}
private BuildSetTask initBuildSetTask(
BuildConfigurationSet buildConfigurationSet,
User user,
BuildOptions buildOptions) throws CoreException {
BuildConfigSetRecord buildConfigSetRecord = BuildConfigSetRecord.Builder.newBuilder()
.buildConfigurationSet(buildConfigurationSet)
.user(user)
.startTime(new Date())
.status(org.jboss.pnc.enums.BuildStatus.BUILDING)
.temporaryBuild(buildOptions.isTemporaryBuild())
.build();
final BuildConfigSetRecord configSetRecord;
try {
configSetRecord = saveBuildConfigSetRecord(buildConfigSetRecord);
} catch (DatastoreException e) {
log.error("Failed to store build config set record: " + e);
throw new CoreException(e);
}
return BuildSetTask.Builder.newBuilder()
.buildConfigSetRecord(configSetRecord)
.buildOptions(buildOptions)
.build();
}
/**
* Creates build tasks and sets up the appropriate dependency relations
*
* @param buildSetTask The build set task which will contain the build tasks. This must already have initialized the
* BuildConfigSet, BuildConfigSetRecord, Milestone, etc.
*/
private void fillBuildTaskSet(
BuildSetTask buildSetTask,
User user,
Supplier<Integer> buildTaskIdProvider,
ProductMilestone productMilestone,
Set<BuildConfigurationAudited> toBuild,
Set<BuildTask> alreadySubmittedBuildTasks,
BuildOptions buildOptions) {
for (BuildConfigurationAudited buildConfigAudited : toBuild) {
Optional<BuildTask> taskOptional = alreadySubmittedBuildTasks.stream()
.filter(bt -> bt.getBuildConfigurationAudited().equals(buildConfigAudited))
.findAny();
BuildTask buildTask;
if (taskOptional.isPresent()) {
buildTask = taskOptional.get();
log.debug("Linking BuildConfigurationAudited {} to existing task {}.", buildConfigAudited, buildTask);
} else {
int buildId = buildTaskIdProvider.get();
String buildContentId = ContentIdentityManager.getBuildContentId(buildId);
// Used only for this operation inside the loop
MDCUtils.addBuildContext(
buildContentId,
buildOptions.isTemporaryBuild(),
ExpiresDate.getTemporaryBuildExpireDate(
temporaryBuildLifespanDays,
buildOptions.isTemporaryBuild()),
user.getId().toString());
try {
Optional<String> requestContext = MDCUtils.getRequestContext();
buildTask = BuildTask.build(
buildConfigAudited,
buildSetTask.getBuildOptions(),
user,
buildId,
buildSetTask,
buildSetTask.getStartTime(),
productMilestone,
buildContentId,
requestContext);
log.debug(
"Created new buildTask {} for BuildConfigurationAudited {}.",
buildTask,
buildConfigAudited);
} finally {
MDCUtils.removeBuildContext();
}
}
buildSetTask.addBuildTask(buildTask);
}
// Loop again to set dependencies
for (BuildTask buildTask : buildSetTask.getBuildTasks()) {
for (BuildTask checkDepBuildTask : buildSetTask.getBuildTasks()) {
if (buildTask.hasDirectConfigDependencyOn(checkDepBuildTask)) {
buildTask.addDependency(checkDepBuildTask);
}
}
}
}
/**
* Save the build config set record using a single thread for all db operations. This ensures that database
* operations are done in the correct sequence, for example in the case of a build config set.
*
* @param buildConfigSetRecord The bcs record to save
* @return The build config set record which has been saved to the db
* @throws org.jboss.pnc.spi.datastore.DatastoreException if there is a db problem which prevents this record being
* stored
*/
private BuildConfigSetRecord saveBuildConfigSetRecord(BuildConfigSetRecord buildConfigSetRecord)
throws DatastoreException {
return datastoreAdapter.saveBuildConfigSetRecord(buildConfigSetRecord);
}
}
| |
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
package com.google.crypto.tink.jwt;
import static com.google.common.truth.Truth.assertThat;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.Assert.assertThrows;
import com.google.crypto.tink.proto.OutputPrefixType;
import com.google.crypto.tink.subtle.Base64;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import java.security.InvalidAlgorithmParameterException;
import java.util.Optional;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Unit tests for JwtFormat */
@RunWith(JUnit4.class)
public final class JwtFormatTest {
// Round-trip tests: headers produced by createHeader must decode back to the
// exact canonical JSON (member order is part of the golden value).
@Test
public void createDecodeHeader_success() throws Exception {
String header =
JwtFormat.decodeHeader(JwtFormat.createHeader("RS256", Optional.empty(), Optional.empty()));
assertThat(header).isEqualTo("{\"alg\":\"RS256\"}");
}
@Test
public void createDecodeHeaderWithTyp_success() throws Exception {
// "typ" is appended after "alg" when a type header is supplied.
String header =
JwtFormat.decodeHeader(
JwtFormat.createHeader("RS256", Optional.of("JWT"), Optional.empty()));
assertThat(header).isEqualTo("{\"alg\":\"RS256\",\"typ\":\"JWT\"}");
}
@Test
public void createDecodeHeaderWithKidAndTyp_success() throws Exception {
// "kid" leads the object when both kid and typ are present.
String header =
JwtFormat.decodeHeader(
JwtFormat.createHeader("RS256", Optional.of("JWT"), Optional.of("GsapRA")));
assertThat(header).isEqualTo("{\"kid\":\"GsapRA\",\"alg\":\"RS256\",\"typ\":\"JWT\"}");
}
@Test
public void createDecodeHeaderWithInvalidUtf8_fails() throws Exception {
// Base64 payload decodes to bytes that are not valid UTF-8 -> must be rejected.
assertThrows(
JwtInvalidException.class,
() -> JwtFormat.decodeHeader("eyJhbGciOiJIUzI1NiIsICJhIjoiwiJ9"));
}
// Tests for the keyId <-> kid mapping derived from the key's OutputPrefixType.
@Test
public void getKidFromTinkOutputPrefixType_success() throws Exception {
// TINK-prefixed keys get a deterministic base64url kid derived from the 32-bit key id.
int keyId = 0x1ac6a944;
Optional<String> kid = JwtFormat.getKid(keyId, OutputPrefixType.TINK);
assertThat(kid.get()).isEqualTo("GsapRA");
assertThat(JwtFormat.getKeyId(kid.get()).get()).isEqualTo(0x1ac6a944);
}
@Test
public void getKidFromRawOutputPrefixType_success() throws Exception {
// RAW keys carry no kid at all.
int keyId = 0x1ac6a944;
Optional<String> kid = JwtFormat.getKid(keyId, OutputPrefixType.RAW);
assertThat(kid.isPresent()).isFalse();
}
@Test
public void keyIdKidConversion_outputIsEqual() throws Exception {
// The mapping must round-trip across the whole signed 32-bit range, including extremes.
assertThat(JwtFormat.getKeyId(JwtFormat.getKid(0x12345678, OutputPrefixType.TINK).get()).get())
.isEqualTo(0x12345678);
assertThat(JwtFormat.getKeyId(JwtFormat.getKid(-2147483648, OutputPrefixType.TINK).get()).get())
.isEqualTo(-2147483648);
assertThat(JwtFormat.getKeyId(JwtFormat.getKid(-100, OutputPrefixType.TINK).get()).get())
.isEqualTo(-100);
assertThat(JwtFormat.getKeyId(JwtFormat.getKid(0, OutputPrefixType.TINK).get()).get())
.isEqualTo(0);
assertThat(JwtFormat.getKeyId(JwtFormat.getKid(100, OutputPrefixType.TINK).get()).get())
.isEqualTo(100);
assertThat(JwtFormat.getKeyId(JwtFormat.getKid(2147483647, OutputPrefixType.TINK).get()).get())
.isEqualTo(2147483647);
}
@Test
public void getKeyId_wrongFormat_isNotPresent() throws Exception {
// kids with the wrong decoded length (not exactly 4 bytes) yield no key id.
assertThat(JwtFormat.getKeyId("GsapRAA").isPresent()).isFalse();
assertThat(JwtFormat.getKeyId("Gsap").isPresent()).isFalse();
assertThat(JwtFormat.getKeyId("").isPresent()).isFalse();
assertThat(JwtFormat.getKeyId("dBjftJeZ4CVP-mB92K27uhbUJU1p1r").isPresent()).isFalse();
}
@Test
public void getKidFromUnsupportedOutputPrefixType_fails() throws Exception {
// LEGACY prefix type is not supported for JWT keys.
int keyId = 0x1ac6a944;
assertThrows(JwtInvalidException.class, () -> JwtFormat.getKid(keyId, OutputPrefixType.LEGACY));
}
@Test
public void createHeaderWithUnknownAlgorithm_fails() throws Exception {
// Unknown algorithm names are rejected at header creation time.
assertThrows(
InvalidAlgorithmParameterException.class,
() -> JwtFormat.createHeader("UnknownAlgorithm", Optional.empty(), Optional.empty()));
}
// Header decoding tests, including the RFC 7515 appendix examples and
// rejection of base64url strings containing illegal characters.
@Test
public void decodeHeaderA1_success() throws Exception {
// Example from https://tools.ietf.org/html/rfc7515#appendix-A.1
String header = JwtFormat.decodeHeader("eyJ0eXAiOiJKV1QiLA0KICJhbGciOiJIUzI1NiJ9");
assertThat(header).isEqualTo("{\"typ\":\"JWT\",\r\n \"alg\":\"HS256\"}");
}
@Test
public void decodeHeaderA2_success() throws Exception {
// Example from https://tools.ietf.org/html/rfc7515#appendix-A.2
String header = JwtFormat.decodeHeader("eyJhbGciOiJSUzI1NiJ9");
assertThat(header).isEqualTo("{\"alg\":\"RS256\"}");
}
@Test
public void decodeModifiedHeader_success() throws Exception {
// Any non-base64url character ('?', space, CR/LF) invalidates the header.
assertThrows(JwtInvalidException.class, () -> JwtFormat.decodeHeader("eyJhbGciOiJSUzI1NiJ9?"));
assertThrows(JwtInvalidException.class, () -> JwtFormat.decodeHeader("eyJhbGciOiJ SUzI1NiJ9"));
assertThrows(
JwtInvalidException.class, () -> JwtFormat.decodeHeader("eyJhbGci\r\nOiJSUzI1NiJ9"));
}
@Test
public void decodeHeader_success() throws Exception {
// A freshly encoded header decodes back to the original JSON text.
String headerStr = Base64.urlSafeEncode("{\"alg\":\"RS256\"}".getBytes(UTF_8));
String header = JwtFormat.decodeHeader(headerStr);
assertThat(header).isEqualTo("{\"alg\":\"RS256\"}");
}
@Test
public void decodeInvalidHeader_fails() throws Exception {
assertThrows(JwtInvalidException.class, () -> JwtFormat.decodeHeader("?="));
}
// validateHeader tests: algorithm matching, unknown/extra entries, and the
// "crit" header, which Tink rejects unconditionally.
@Test
public void createDecodeValidateHeader_success() throws Exception {
// Headers created for every supported algorithm must validate against that algorithm.
JwtFormat.validateHeader(
"HS256",
Optional.empty(), Optional.empty(),
JsonUtil.parseJson(
JwtFormat.decodeHeader(
JwtFormat.createHeader("HS256", Optional.empty(), Optional.empty()))));
JwtFormat.validateHeader(
"HS384",
Optional.empty(), Optional.empty(),
JsonUtil.parseJson(
JwtFormat.decodeHeader(
JwtFormat.createHeader("HS384", Optional.empty(), Optional.empty()))));
JwtFormat.validateHeader(
"HS512",
Optional.empty(), Optional.empty(),
JsonUtil.parseJson(
JwtFormat.decodeHeader(
JwtFormat.createHeader("HS512", Optional.empty(), Optional.empty()))));
JwtFormat.validateHeader(
"ES256",
Optional.empty(), Optional.empty(),
JsonUtil.parseJson(
JwtFormat.decodeHeader(
JwtFormat.createHeader("ES256", Optional.empty(), Optional.empty()))));
JwtFormat.validateHeader(
"RS256",
Optional.empty(), Optional.empty(),
JsonUtil.parseJson(
JwtFormat.decodeHeader(
JwtFormat.createHeader("RS256", Optional.empty(), Optional.empty()))));
}
@Test
public void validateHeaderWithWrongAlgorithm_fails() throws Exception {
// The expected algorithm must match the header's "alg" exactly.
String header =
JwtFormat.decodeHeader(JwtFormat.createHeader("HS256", Optional.empty(), Optional.empty()));
assertThrows(
InvalidAlgorithmParameterException.class,
() ->
JwtFormat.validateHeader(
"HS384", Optional.empty(), Optional.empty(), JsonUtil.parseJson(header)));
}
@Test
public void validateHeaderWithUnknownAlgorithm_fails() throws Exception {
assertThrows(
InvalidAlgorithmParameterException.class,
() ->
JwtFormat.validateHeader(
"UnknownAlgorithm",
Optional.empty(), Optional.empty(),
JsonUtil.parseJson("{\"alg\": \"UnknownAlgorithm\"}")));
}
@Test
public void validateHeaderIgnoresTyp() throws Exception {
// Unrecognized "typ" values do not fail validation.
JwtFormat.validateHeader(
"HS256",
Optional.empty(), Optional.empty(),
JsonUtil.parseJson("{\"alg\": \"HS256\", \"typ\": \"unknown\"}"));
}
@Test
public void validateHeaderRejectsCrit() throws Exception {
// RFC 7515 section 4.1.11: "crit" demands support for listed extensions; Tink rejects it.
assertThrows(
JwtInvalidException.class,
() ->
JwtFormat.validateHeader(
"HS256",
Optional.empty(), Optional.empty(),
JsonUtil.parseJson(
"{\"alg\": \"HS256\", \"crit\":[\"http://example.invalid/UNDEFINED\"], "
+ "\"http://example.invalid/UNDEFINED\":true}")));
}
@Test
public void validateHeaderWithUnknownEntry_success() throws Exception {
// Unknown non-critical header members are tolerated.
JwtFormat.validateHeader(
"HS256",
Optional.empty(), Optional.empty(),
JsonUtil.parseJson("{\"alg\": \"HS256\", \"unknown\": \"header\"}"));
}
@Test
public void validateEmptyHeader_fails() throws Exception {
// "alg" is mandatory; an empty header object is invalid.
assertThrows(
JwtInvalidException.class,
() ->
JwtFormat.validateHeader(
"HS256", Optional.empty(), Optional.empty(), JsonUtil.parseJson("{}")));
}
// kid validation: tink-derived kids are mandatory in the header, custom kids
// are optional, and supplying both at once is an error.
@Test
public void validateHeaderWithTinkKid() throws Exception {
JwtFormat.validateHeader(
"HS256",
Optional.of("kid123"), Optional.empty(),
JsonUtil.parseJson("{\"alg\": \"HS256\", \"kid\": \"kid123\"}"));
assertThrows(
JwtInvalidException.class,
() ->
JwtFormat.validateHeader(
"HS256",
Optional.of("kid123"),
Optional.empty(),
JsonUtil.parseJson("{\"alg\": \"HS256\", \"kid\": \"wrongKid\"}")));
// If tinkKid is set, then the kid is required in the header.
assertThrows(
JwtInvalidException.class,
() ->
JwtFormat.validateHeader(
"HS256",
Optional.of("kid123"),
Optional.empty(),
JsonUtil.parseJson("{\"alg\": \"HS256\"}")));
}
@Test
public void validateHeaderWithCustomKid() throws Exception {
JwtFormat.validateHeader(
"HS256",
Optional.empty(), Optional.of("kid123"),
JsonUtil.parseJson("{\"alg\": \"HS256\", \"kid\": \"kid123\"}"));
assertThrows(
JwtInvalidException.class,
() ->
JwtFormat.validateHeader(
"HS256",
Optional.empty(),
Optional.of("kid123"),
JsonUtil.parseJson("{\"alg\": \"HS256\", \"kid\": \"wrongKid\"}")));
// If customKid is set, then the kid is not required in the header.
JwtFormat.validateHeader(
"HS256",
Optional.empty(), Optional.of("kid123"),
JsonUtil.parseJson("{\"alg\": \"HS256\"}"));
}
@Test
public void validateHeaderWithBothTinkAndCustomKid_fails() throws Exception {
// A key cannot simultaneously carry a tink-derived and a custom kid.
assertThrows(
JwtInvalidException.class,
() ->
JwtFormat.validateHeader(
"HS256",
Optional.of("kid123"),
Optional.of("kid123"),
JsonUtil.parseJson("{\"alg\": \"HS256\", \"kid\": \"kid123\"}")));
}
// Payload encode/decode tests, mirroring the header tests above.
@Test
public void encodeDecodePayload_equal() throws Exception {
// encodePayload/decodePayload round-trip an arbitrary JSON object unchanged.
JsonObject payload = new JsonObject();
payload.addProperty("iss", "joe");
payload.addProperty("exp", 1300819380);
payload.addProperty("http://example.com/is_root", true);
String jsonPayload = payload.toString();
String encodedPayload = JwtFormat.encodePayload(jsonPayload);
String decodedPayload = JwtFormat.decodePayload(encodedPayload);
assertThat(decodedPayload).isEqualTo(jsonPayload);
}
@Test
public void decodePayload_success() throws Exception {
// Example from https://tools.ietf.org/html/rfc7515#appendix-A.1
JsonObject payload =
JsonParser.parseString(
JwtFormat.decodePayload(
"eyJpc3MiOiJqb2UiLA0KICJleHAiOjEzMDA4MTkzODAsDQogImh0dHA6Ly9leGFt"
+ "cGxlLmNvbS9pc19yb290Ijp0cnVlfQ"))
.getAsJsonObject();
assertThat(payload.get("iss").getAsString()).isEqualTo("joe");
assertThat(payload.get("exp").getAsInt()).isEqualTo(1300819380);
assertThat(payload.get("http://example.com/is_root").getAsBoolean()).isTrue();
}
@Test
public void decodeInvalidPayload_fails() throws Exception {
assertThrows(JwtInvalidException.class, () -> JwtFormat.decodePayload("?="));
}
@Test
public void createDecodePayloadWithInvalidUtf8_fails() throws Exception {
// Decoded bytes that are not valid UTF-8 must be rejected.
assertThrows(JwtInvalidException.class, () -> JwtFormat.decodePayload("eyJpc3MiOiJqb2XCIn0"));
}
// End-to-end compact serialization tests: build an unsigned compact, attach a
// signature, split it back, and verify each golden part.
@Test
public void signedCompactCreateSplit_success() throws Exception {
RawJwt rawJwt = RawJwt.newBuilder().setIssuer("joe").withoutExpiration().build();
String encodedSignature = "dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk";
byte[] signature = JwtFormat.decodeSignature(encodedSignature);
String unsignedCompact = JwtFormat.createUnsignedCompact("RS256", Optional.empty(), rawJwt);
String signedCompact = JwtFormat.createSignedCompact(unsignedCompact, signature);
JwtFormat.Parts parts = JwtFormat.splitSignedCompact(signedCompact);
JwtFormat.validateHeader(
"RS256", Optional.empty(), Optional.empty(), JsonUtil.parseJson(parts.header));
assertThat(unsignedCompact).isEqualTo("eyJhbGciOiJSUzI1NiJ9.eyJpc3MiOiJqb2UifQ");
assertThat(signedCompact).isEqualTo(
"eyJhbGciOiJSUzI1NiJ9.eyJpc3MiOiJqb2UifQ.dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk");
assertThat(parts.unsignedCompact).isEqualTo(unsignedCompact);
assertThat(parts.signatureOrMac).isEqualTo(signature);
assertThat(parts.header).isEqualTo("{\"alg\":\"RS256\"}");
assertThat(parts.payload).isEqualTo("{\"iss\":\"joe\"}");
}
@Test
public void signedCompactCreateSplitWithTypeHeader_success() throws Exception {
// Same flow with a "typ" header carried through serialization and back.
RawJwt rawJwt =
RawJwt.newBuilder().setTypeHeader("JWT").setIssuer("joe").withoutExpiration().build();
String encodedSignature = "dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk";
byte[] signature = JwtFormat.decodeSignature(encodedSignature);
String unsignedCompact = JwtFormat.createUnsignedCompact("RS256", Optional.empty(), rawJwt);
String signedCompact = JwtFormat.createSignedCompact(unsignedCompact, signature);
JwtFormat.Parts parts = JwtFormat.splitSignedCompact(signedCompact);
JsonObject parsedHeader = JsonUtil.parseJson(parts.header);
JwtFormat.validateHeader("RS256", Optional.empty(), Optional.empty(), parsedHeader);
assertThat(parsedHeader.getAsJsonPrimitive("typ").getAsString()).isEqualTo("JWT");
assertThat(unsignedCompact)
.isEqualTo("eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJqb2UifQ");
assertThat(parts.unsignedCompact).isEqualTo(unsignedCompact);
assertThat(parts.signatureOrMac).isEqualTo(signature);
assertThat(parts.header).isEqualTo("{\"alg\":\"RS256\",\"typ\":\"JWT\"}");
assertThat(parts.payload).isEqualTo("{\"iss\":\"joe\"}");
}
@Test
public void signedCompactCreateSplitWithKeyIdentifier_success() throws Exception {
// Same flow with a "kid" header; kid sorts first in the encoded header.
String kid = "AZxkm2U";
RawJwt rawJwt = RawJwt.newBuilder().setIssuer("joe").withoutExpiration().build();
String encodedSignature = "dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk";
byte[] signature = JwtFormat.decodeSignature(encodedSignature);
String unsignedCompact = JwtFormat.createUnsignedCompact("RS256", Optional.of(kid), rawJwt);
String signedCompact = JwtFormat.createSignedCompact(unsignedCompact, signature);
JwtFormat.Parts parts = JwtFormat.splitSignedCompact(signedCompact);
JsonObject parsedHeader = JsonUtil.parseJson(parts.header);
JwtFormat.validateHeader("RS256", Optional.empty(), Optional.empty(), parsedHeader);
assertThat(unsignedCompact)
.isEqualTo("eyJraWQiOiJBWnhrbTJVIiwiYWxnIjoiUlMyNTYifQ.eyJpc3MiOiJqb2UifQ");
assertThat(signedCompact)
.isEqualTo(
"eyJraWQiOiJBWnhrbTJVIiwiYWxnIjoiUlMyNTYifQ.eyJpc3MiOiJqb2UifQ"
+ ".dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk");
assertThat(parts.unsignedCompact).isEqualTo(unsignedCompact);
assertThat(parts.signatureOrMac).isEqualTo(signature);
assertThat(parts.header).isEqualTo("{\"kid\":\"AZxkm2U\",\"alg\":\"RS256\"}");
assertThat(parts.payload).isEqualTo("{\"iss\":\"joe\"}");
}
// splitSignedCompact edge cases: the degenerate empty token, malformed dot
// structure, and illegal characters anywhere in the three segments.
@Test
public void splitEmptySignedCompact_success() throws Exception {
// ".." is structurally valid: empty header, payload, and signature.
JwtFormat.Parts parts = JwtFormat.splitSignedCompact("..");
assertThat(parts.unsignedCompact).isEqualTo(".");
assertThat(parts.signatureOrMac).isEmpty();
assertThat(parts.header).isEmpty();
assertThat(parts.payload).isEmpty();
}
@Test
public void splitSignedCompactWithBadFormat_fails() throws Exception {
// A compact must have exactly three dot-separated segments with no leading/trailing dot.
assertThrows(
JwtInvalidException.class,
() -> JwtFormat.splitSignedCompact("e30.e30.YWJj.abc"));
assertThrows(
JwtInvalidException.class,
() -> JwtFormat.splitSignedCompact("e30.e30.YWJj."));
assertThrows(
JwtInvalidException.class,
() -> JwtFormat.splitSignedCompact(".e30.e30.YWJj"));
assertThrows(
JwtInvalidException.class,
() -> JwtFormat.splitSignedCompact(".e30.e30."));
assertThrows(
JwtInvalidException.class,
() -> JwtFormat.splitSignedCompact("e30.e30"));
assertThrows(
JwtInvalidException.class,
() -> JwtFormat.splitSignedCompact("e30"));
assertThrows(
JwtInvalidException.class,
() -> JwtFormat.splitSignedCompact(""));
}
@Test
public void splitSignedCompactWithBadCharacters_fails() throws Exception {
// check that unmodified token works
JwtFormat.Parts parts = JwtFormat.splitSignedCompact("e30.e30.YWJj");
assertThat(parts.unsignedCompact).isEqualTo("e30.e30");
// add bad characters
assertThrows(JwtInvalidException.class, () -> JwtFormat.splitSignedCompact("{e30.e30.YWJj"));
assertThrows(JwtInvalidException.class, () -> JwtFormat.splitSignedCompact(" e30.e30.YWJj"));
assertThrows(JwtInvalidException.class, () -> JwtFormat.splitSignedCompact("e30. e30.YWJj"));
assertThrows(JwtInvalidException.class, () -> JwtFormat.splitSignedCompact("e30.e30.YWJj "));
assertThrows(JwtInvalidException.class, () -> JwtFormat.splitSignedCompact("e30.e30.\nYWJj"));
assertThrows(JwtInvalidException.class, () -> JwtFormat.splitSignedCompact("e30.\re30.YWJj"));
assertThrows(JwtInvalidException.class, () -> JwtFormat.splitSignedCompact("e30$.e30.YWJj"));
assertThrows(JwtInvalidException.class, () -> JwtFormat.splitSignedCompact("e30.$e30.YWJj"));
assertThrows(JwtInvalidException.class, () -> JwtFormat.splitSignedCompact("e30.e30.YWJj$"));
assertThrows(JwtInvalidException.class, () -> JwtFormat.splitSignedCompact("e30.e30.YWJj$"));
assertThrows(
JwtInvalidException.class, () -> JwtFormat.splitSignedCompact("e30.e30.YWJj\ud83c"));
}
@Test
public void encodeDecodeSignature_success() throws Exception {
// Example from https://tools.ietf.org/html/rfc7515#appendix-A.1
byte[] signatureBytes =
new byte[] {
(byte) 116,
(byte) 24,
(byte) 223,
(byte) 180,
(byte) 151,
(byte) 153,
(byte) 224,
(byte) 37,
(byte) 79,
(byte) 250,
(byte) 96,
(byte) 125,
(byte) 216,
(byte) 173,
(byte) 187,
(byte) 186,
(byte) 22,
(byte) 212,
(byte) 37,
(byte) 77,
(byte) 105,
(byte) 214,
(byte) 191,
(byte) 240,
(byte) 91,
(byte) 88,
(byte) 5,
(byte) 88,
(byte) 83,
(byte) 132,
(byte) 141,
(byte) 121
};
String encodeSignature = JwtFormat.encodeSignature(signatureBytes);
assertThat(encodeSignature)
.isEqualTo("dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk");
assertThat(JwtFormat.decodeSignature(encodeSignature))
.isEqualTo(signatureBytes);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io.kudu;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkNotNull;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkState;
import java.io.IOException;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.Callable;
import java.util.stream.Collectors;
import org.apache.beam.sdk.io.BoundedSource;
import org.apache.kudu.Common;
import org.apache.kudu.Schema;
import org.apache.kudu.client.AbstractKuduScannerBuilder;
import org.apache.kudu.client.AsyncKuduClient;
import org.apache.kudu.client.KuduClient;
import org.apache.kudu.client.KuduException;
import org.apache.kudu.client.KuduPredicate;
import org.apache.kudu.client.KuduScanToken;
import org.apache.kudu.client.KuduScanner;
import org.apache.kudu.client.KuduSession;
import org.apache.kudu.client.KuduTable;
import org.apache.kudu.client.RowError;
import org.apache.kudu.client.RowResult;
import org.apache.kudu.client.RowResultIterator;
import org.apache.kudu.client.SessionConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** An implementation of the {@link KuduService} that uses a Kudu instance. */
class KuduServiceImpl<T> implements KuduService<T> {
private static final Logger LOG = LoggerFactory.getLogger(KuduServiceImpl.class);
@Override
public Writer createWriter(KuduIO.Write<T> spec) throws KuduException {
return new WriterImpl(spec);
}
@Override
public BoundedSource.BoundedReader createReader(KuduIO.KuduSource source) {
return new ReaderImpl(source);
}
@Override
public List<byte[]> createTabletScanners(KuduIO.Read spec) throws KuduException {
try (KuduClient client = getKuduClient(spec.getMasterAddresses())) {
KuduTable table = client.openTable(spec.getTable());
KuduScanToken.KuduScanTokenBuilder builder = client.newScanTokenBuilder(table);
configureBuilder(spec, table.getSchema(), builder);
List<KuduScanToken> tokens = builder.build();
return tokens.stream().map(t -> uncheckCall(t::serialize)).collect(Collectors.toList());
}
}
/** Writer storing an entity into Apache Kudu table. */
class WriterImpl implements Writer<T> {
private final KuduIO.FormatFunction<T> formatFunction;
private KuduClient client;
private KuduSession session;
private KuduTable table;
WriterImpl(KuduIO.Write<T> spec) throws KuduException {
checkNotNull(spec.masterAddresses(), "masterAddresses cannot be null");
checkNotNull(spec.table(), "table cannot be null");
this.formatFunction = checkNotNull(spec.formatFn(), "formatFn cannot be null");
client =
new AsyncKuduClient.AsyncKuduClientBuilder(spec.masterAddresses()).build().syncClient();
table = client.openTable(spec.table());
}
@Override
public void openSession() throws KuduException {
// errors are collected per session so we align session with the bundle
session = client.newSession();
// async flushing as per the official kudu-spark approach
session.setFlushMode(SessionConfiguration.FlushMode.AUTO_FLUSH_BACKGROUND);
}
@Override
public void write(T entity) throws KuduException {
checkState(session != null, "must call openSession() before writing");
session.apply(formatFunction.apply(new TableAndRecord(table, entity)));
}
@Override
public void closeSession() throws Exception {
try {
session.close();
if (session.countPendingErrors() > 0) {
LOG.error("At least {} errors occurred writing to Kudu", session.countPendingErrors());
RowError[] errors = session.getPendingErrors().getRowErrors();
for (int i = 0; errors != null && i < 3 && i < errors.length; i++) {
LOG.error("Sample error: {}", errors[i]);
}
throw new Exception(
"At least " + session.countPendingErrors() + " error(s) occurred writing to Kudu");
}
} finally {
session = null;
}
}
@Override
public void close() throws Exception {
client.close();
client = null;
}
}
/** Bounded reader of an Apache Kudu table. */
class ReaderImpl extends BoundedSource.BoundedReader<T> {
private final KuduIO.KuduSource<T> source;
private KuduClient client;
private KuduScanner scanner;
private RowResultIterator iter;
private RowResult current;
private long recordsReturned;
ReaderImpl(KuduIO.KuduSource<T> source) {
this.source = source;
}
@Override
public boolean start() throws IOException {
LOG.debug("Starting Kudu reader");
client =
new AsyncKuduClient.AsyncKuduClientBuilder(source.spec.getMasterAddresses())
.build()
.syncClient();
if (source.serializedToken != null) {
// tokens available if the source is already split
scanner = KuduScanToken.deserializeIntoScanner(source.serializedToken, client);
} else {
KuduTable table = client.openTable(source.spec.getTable());
KuduScanner.KuduScannerBuilder builder =
table.getAsyncClient().syncClient().newScannerBuilder(table);
configureBuilder(source.spec, table.getSchema(), builder);
scanner = builder.build();
}
return advance();
}
/**
* Returns the current record transformed into the desired type.
*
* @return the current record
* @throws NoSuchElementException If the current does not exist
*/
@Override
public T getCurrent() throws NoSuchElementException {
if (current != null) {
return source.spec.getParseFn().apply(current);
} else {
throw new NoSuchElementException(
"No current record (Indicates misuse. Perhaps advance() was not called?)");
}
}
@Override
public boolean advance() throws KuduException {
// scanner pages over results, with each page holding an iterator of records
if (iter == null || (!iter.hasNext() && scanner.hasMoreRows())) {
iter = scanner.nextRows();
}
if (iter != null && iter.hasNext()) {
current = iter.next();
++recordsReturned;
return true;
}
return false;
}
@Override
public void close() throws IOException {
LOG.debug("Closing reader after reading {} records.", recordsReturned);
if (scanner != null) {
scanner.close();
scanner = null;
}
if (client != null) {
client.close();
client = null;
}
}
@Override
public synchronized KuduIO.KuduSource getCurrentSource() {
return source;
}
}
/** Creates a new synchronous client. */
private synchronized KuduClient getKuduClient(List<String> masterAddresses) {
return new AsyncKuduClient.AsyncKuduClientBuilder(masterAddresses).build().syncClient();
}
/** Configures the scanner builder to conform to the spec. */
private static <T2> void configureBuilder(
KuduIO.Read<T2> spec, Schema schema, AbstractKuduScannerBuilder builder) {
builder.cacheBlocks(true); // as per kudu-spark
if (spec.getBatchSize() != null) {
builder.batchSizeBytes(spec.getBatchSize());
}
if (spec.getProjectedColumns() != null) {
builder.setProjectedColumnNames(spec.getProjectedColumns());
}
if (spec.getFaultTolerent() != null) {
builder.setFaultTolerant(spec.getFaultTolerent());
}
if (spec.getSerializablePredicates() != null) {
for (Common.ColumnPredicatePB predicate : spec.getSerializablePredicates()) {
builder.addPredicate(KuduPredicate.fromPB(schema, predicate));
}
}
}
/** Wraps the callable converting checked to RuntimeExceptions. */
private static <T> T uncheckCall(Callable<T> callable) {
try {
return callable.call();
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
| |
/**
* Copyright 2015-2017 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.swarm.cli;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.StringTokenizer;
import javax.enterprise.inject.Vetoed;
import org.jboss.modules.Module;
import org.jboss.modules.ModuleClassLoader;
import org.jboss.modules.ModuleLoadException;
import org.wildfly.swarm.Swarm;
import org.wildfly.swarm.SwarmInfo;
import org.wildfly.swarm.spi.api.SwarmProperties;
/**
 * A parsed command-line.
 *
 * @author Bob McWhirter
 */
@Vetoed
public class CommandLine {

    private static final String CONFIG_ELEMENT = "<config>";

    private static final String FRACTION = "fraction";

    private static final String ALL = "all";

    /**
     * Default option for parsing -h and --help
     */
    public static final Option<Boolean> HELP = new Option<Boolean>()
            .withLong("help")
            .withShort('h')
            .withDescription("Display this help")
            .withDefault(() -> false)
            .then((cmd, opt, value) -> cmd.put(opt, true));

    public static final Option<String> CONFIG_HELP = new Option<String>()
            .withLong("config-help")
            .hasValue("<fraction>")
            .withDescription("Display configuration help by fraction, or 'all' for all")
            .then((cmd, opt, value) -> cmd.put(opt, value));

    public static final Option<String> YAML_HELP = new Option<String>()
            .withLong("yaml-help")
            .hasValue("<fraction>")
            .withDescription("Display example YAML configuration by fraction, or 'all' for all")
            .then((cmd, opt, value) -> cmd.put(opt, value));

    /**
     * Default option for parsing -v and --version
     */
    public static final Option<Boolean> VERSION = new Option<Boolean>()
            .withLong("version")
            .withShort('v')
            .withDescription("Display the version of Thorntail")
            .withDefault(() -> false)
            .then((cmd, opt, value) -> cmd.put(opt, true));

    /**
     * Default option for parsing -Dname and -Dname=value
     */
    public static final Option<Properties> PROPERTY = new Option<Properties>()
            .withShort('D')
            .hasValue("<name>[=<value>]")
            .valueMayBeSeparate(false)
            .withDescription("Set a system property")
            .withDefault(Properties::new)
            .then((cmd, opt, value) -> {
                // "-Dname" (no '=') means the property is set to "true"
                String[] nameValue = value.split("=", 2);
                Properties props = cmd.get(opt);
                String propName = nameValue[0];
                String propValue = "true";
                if (nameValue.length > 1) {
                    propValue = nameValue[1];
                }
                props.setProperty(propName, propValue);
            });

    /**
     * Default option for parsing -P
     */
    public static final Option<URL> PROPERTIES_URL = new Option<URL>()
            .withShort('P')
            .withLong("properties")
            .hasValue("<url>")
            .withDescription("Load system properties from the given URL")
            .then((cmd, opt, value) -> cmd.put(opt, Option.toURL(value)));

    /**
     * Default option for parsing -c and --server-config
     */
    public static final Option<URL> SERVER_CONFIG = new Option<URL>()
            .withShort('c')
            .withLong("server-config")
            .hasValue(CONFIG_ELEMENT)
            .valueMayBeSeparate(true)
            .withDescription("URL of the server configuration (e.g. standalone.xml)")
            .withDefault(() -> resolveResource("standalone.xml"))
            .then((cmd, opt, value) -> cmd.put(opt, Option.toURL(value)));

    /**
     * Default option for parsing -s and --config; repeatable, values accumulate.
     */
    public static final Option<List<URL>> CONFIG = new Option<List<URL>>()
            .withShort('s')
            .withLong("config")
            .hasValue(CONFIG_ELEMENT)
            .valueMayBeSeparate(true)
            .withDescription("URL to configuration YAML to use")
            .then((cmd, opt, value) -> {
                List<URL> configs = cmd.get(opt);
                if (configs == null) {
                    configs = new ArrayList<>();
                    cmd.put(opt, configs);
                }
                configs.add(Option.toURL(value));
            });

    /**
     * Default option for parsing -S and --profile; repeatable, values accumulate.
     */
    public static final Option<List<String>> PROFILES = new Option<List<String>>()
            .withShort('S')
            .withLong("profile")
            .hasValue("<profile>")
            .valueMayBeSeparate(true)
            .withDescription("Selected profiles")
            .then((cmd, opt, value) -> {
                List<String> profiles = cmd.get(opt);
                if (profiles == null) {
                    profiles = new ArrayList<>();
                    cmd.put(opt, profiles);
                }
                profiles.add(value);
            });

    /**
     * Default option for parsing -b
     */
    public static final Option<String> BIND = new Option<String>()
            .withShort('b')
            .hasValue("<value>")
            .valueMayBeSeparate(true)
            .withDescription("Set the property " + SwarmProperties.BIND_ADDRESS + " to <value>")
            .then(CommandLine::put);

    /**
     * Default set of options
     */
    public static Options defaultOptions() {
        return new Options(
                HELP,
                CONFIG_HELP,
                YAML_HELP,
                VERSION,
                PROPERTY,
                PROPERTIES_URL,
                SERVER_CONFIG,
                CONFIG,
                PROFILES,
                BIND
        );
    }

    CommandLine(Options options) {
        this.options = options;
    }

    /**
     * Put a value under a given key.
     *
     * @param key The key.
     * @param value The value.
     * @param <T> The type of the value.
     */
    public <T> void put(Option<T> key, T value) {
        this.values.put(key, value);
    }

    /**
     * Retrieve a value under a given key.
     *
     * @param key The key.
     * @param <T> The type of the value.
     * @return The previously stored value, or the default provided by key if none has been previously stored. The default will then be stored.
     */
    @SuppressWarnings("unchecked")
    public <T> T get(Option<T> key) {
        T v = (T) this.values.get(key);
        if (v == null) {
            // lazily materialize and cache the option's default
            v = key.defaultValue();
            this.values.put(key, v);
        }
        return v;
    }

    /**
     * Display help for the options associated with the creation of this command-line.
     *
     * @param out The output stream to display help upon.
     */
    public void displayHelp(PrintStream out) {
        this.options.displayHelp(out);
    }

    /**
     * Display configuration-item documentation for a fraction (or all fractions).
     *
     * @param out The output stream to display help upon.
     * @param fraction The fraction name, or 'all'.
     * @throws IOException If a documentation resource cannot be read.
     * @throws ModuleLoadException If the application module cannot be loaded.
     */
    public void displayConfigHelp(PrintStream out, String fraction) throws IOException, ModuleLoadException {
        Properties props = loadFractionDocs(fraction);
        props.stringPropertyNames().stream()
                .sorted()
                .forEach(key -> {
                    out.println("# " + key);
                    out.println();
                    out.println(formatDocs(" ", props.getProperty(key)));
                    out.println();
                });
    }

    /**
     * Dump example YAML configuration for a fraction (or all fractions).
     *
     * @param out The output stream to write the YAML to.
     * @param fraction The fraction name, or 'all'.
     * @throws IOException If a documentation resource cannot be read.
     * @throws ModuleLoadException If the application module cannot be loaded.
     */
    public void dumpYaml(PrintStream out, String fraction) throws IOException, ModuleLoadException {
        YamlDumper.dump(out, loadFractionDocs(fraction));
    }

    /**
     * Load and merge the configuration documentation for the requested fraction.
     *
     * <p>Scans every {@code META-INF/configuration-meta.properties} resource visible from the
     * application module, keeping entries whose {@code fraction} property matches (or all
     * entries when {@code "all"} is requested). Shared by {@link #displayConfigHelp} and
     * {@link #dumpYaml}.</p>
     *
     * @param fraction The fraction name, or 'all'.
     * @return The merged documentation properties.
     * @throws IOException If a resource cannot be read.
     * @throws ModuleLoadException If the application module cannot be loaded.
     */
    private Properties loadFractionDocs(String fraction) throws IOException, ModuleLoadException {
        ModuleClassLoader cl = Module.getBootModuleLoader().loadModule("thorntail.application").getClassLoader();
        Enumeration<URL> docs = cl.getResources("META-INF/configuration-meta.properties");
        Properties props = new Properties();
        while (docs.hasMoreElements()) {
            URL each = docs.nextElement();
            Properties fractionDocs = new Properties();
            // Properties.load() does not close the stream; use try-with-resources so we don't
            // leak one open stream per documentation resource.
            try (InputStream in = each.openStream()) {
                fractionDocs.load(in);
            }
            if (fraction.equals(ALL) || fraction.equals(fractionDocs.getProperty(FRACTION))) {
                fractionDocs.remove(FRACTION);
                props.putAll(fractionDocs);
            }
        }
        return props;
    }

    /**
     * Word-wrap documentation text to roughly 80 columns, prefixing each line with the indent.
     *
     * @param indent The per-line indent prefix.
     * @param docs The raw documentation text.
     * @return The wrapped, indented text.
     */
    private String formatDocs(String indent, String docs) {
        StringTokenizer tokens = new StringTokenizer(docs);
        StringBuilder formatted = new StringBuilder();
        int lineLength = indent.length();
        boolean freshLine = true;
        formatted.append(indent);
        while (tokens.hasMoreElements()) {
            String next = tokens.nextToken();
            if ((lineLength + 1 + next.length()) > 80) {
                formatted.append("\n");
                formatted.append(indent);
                lineLength = indent.length();
                freshLine = true;
            }
            if (freshLine) {
                freshLine = false;
            } else {
                formatted.append(" ");
            }
            lineLength += next.length();
            formatted.append(next);
        }
        return formatted.toString();
    }

    /**
     * Display the version.
     *
     * @param out The output stream to display help upon.
     */
    public void displayVersion(PrintStream out) {
        out.println("Thorntail version " + SwarmInfo.VERSION);
    }

    /**
     * Apply properties to the system properties.
     *
     * <p>Applies values stored through the <code>Key.PROPERTIES</code>,
     * <code>Key.PROPERTIES_URL</code> or <code>Key.BIND</code> options.
     *
     * @throws IOException If a URL is attempted to be read and fails.
     */
    public void applyProperties(Swarm swarm) throws IOException {
        URL propsUrl = get(PROPERTIES_URL);
        if (propsUrl != null) {
            Properties urlProps = new Properties();
            // close the URL stream after loading; Properties.load() leaves it open
            try (InputStream in = propsUrl.openStream()) {
                urlProps.load(in);
            }
            for (String name : urlProps.stringPropertyNames()) {
                swarm.withProperty(name, urlProps.getProperty(name));
            }
        }
        Properties props = get(PROPERTY);
        for (String name : props.stringPropertyNames()) {
            swarm.withProperty(name, props.getProperty(name));
        }
        if (get(BIND) != null) {
            swarm.withProperty(SwarmProperties.BIND_ADDRESS, get(BIND));
        }
    }

    /**
     * Apply configuration to the container.
     *
     * <p>Applies configuration from <code>Key.SERVER_CONFIG</code> and <code>Key.STAGE_CONFIG</code>.</p>
     *
     * @param swarm Swarm instance to configure.
     * @throws MalformedURLException If a URL is attempted to be read and fails.
     */
    public void applyConfigurations(Swarm swarm) throws IOException {
        if (get(SERVER_CONFIG) != null) {
            swarm.withXmlConfig(get(SERVER_CONFIG));
        }
        if (get(CONFIG) != null) {
            List<URL> configs = get(CONFIG);
            for (URL config : configs) {
                swarm.withConfig(config);
            }
        }
        if (get(PROFILES) != null) {
            List<String> profiles = get(PROFILES);
            for (String profile : profiles) {
                swarm.withProfile(profile);
            }
        }
    }

    /**
     * Apply properties and configuration from the parsed commandline to a container.
     *
     * <p>Note: --help, --config-help and --yaml-help terminate the JVM via System.exit(0).</p>
     *
     * @param swarm The Swarm instance to apply configuration to.
     * @throws IOException If an error occurs resolving any URL.
     */
    public void apply(Swarm swarm) throws IOException, ModuleLoadException {
        applyProperties(swarm);
        applyConfigurations(swarm);
        if (get(HELP)) {
            displayVersion(System.err);
            System.err.println();
            displayHelp(System.err);
            System.exit(0);
        }
        if (get(CONFIG_HELP) != null) {
            displayConfigHelp(System.err, get(CONFIG_HELP));
            System.exit(0);
        }
        if (get(YAML_HELP) != null) {
            dumpYaml(System.err, get(YAML_HELP));
            System.exit(0);
        }
        if (get(VERSION)) {
            displayVersion(System.err);
        }
    }

    void extraArgument(String arg) {
        this.extraArguments.add(arg);
    }

    /**
     * Any un-parsed non-option arguments.
     *
     * @return The list of unparsed arguments.
     */
    public List<String> extraArguments() {
        return this.extraArguments;
    }

    /**
     * Any un-parsed non-option arguments.
     *
     * @return The array of unparsed arguments.
     */
    public String[] extraArgumentsArray() {
        return this.extraArguments.toArray(new String[this.extraArguments.size()]);
    }

    void invalidArgument(String arg) {
        this.invalidArguments.add(arg);
    }

    /**
     * Any invalid options seen during parsing.
     *
     * @return The list of invalid arguments.
     */
    public List<String> invalidArguments() {
        return this.invalidArguments;
    }

    /**
     * Determine if any invalid arguments were seen during the parse.
     *
     * @return <code>true</code> if {@link #invalidArguments()} is not empty, otherwise <code>false</code>.
     */
    public boolean hasInvalidArguments() {
        return !this.invalidArguments.isEmpty();
    }

    /**
     * Parse an array of arguments using the default options.
     *
     * @param args The args to parse.
     * @return The parsed <code>CommandLine</code>.
     */
    public static CommandLine parse(String... args) throws Exception {
        return CommandLineParser.parse(defaultOptions(), args);
    }

    /**
     * Parse an array of arguments using specific options.
     *
     * @param options The options to use.
     * @param args The args to parse.
     * @return The parsed <code>CommandLine</code>.
     */
    public static CommandLine parse(Options options, String... args) throws Exception {
        return CommandLineParser.parse(options, args);
    }

    /**
     * Resolve a resource by name: first the filesystem, then the application module's
     * class-loader, finally the system class-loader.
     *
     * @param path The resource path.
     * @return The resolved URL, or {@code null} if not found anywhere.
     */
    private static URL resolveResource(String path) {
        Path candidate = Paths.get(path);
        if (Files.exists(candidate)) {
            try {
                return candidate.toUri().toURL();
            } catch (MalformedURLException e) {
                // ignore
            }
        }
        URL yml = null;
        try {
            Module appModule = Module.getBootModuleLoader().loadModule("thorntail.application");
            yml = appModule.getClassLoader().getResource(path);
            if (yml != null) {
                return yml;
            }
        } catch (ModuleLoadException e) {
            // ignore;
        }
        yml = ClassLoader.getSystemClassLoader().getResource(path);
        return yml;
    }

    private final Options options;

    private final Map<Option<?>, Object> values = new HashMap<>();

    private final List<String> extraArguments = new ArrayList<>();

    private final List<String> invalidArguments = new ArrayList<>();
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job.entries.deletefiles;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.job.entry.validator.AbstractFileValidator;
import org.pentaho.di.job.entry.validator.AndValidator;
import org.pentaho.di.job.entry.validator.JobEntryValidatorUtils;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSelectInfo;
import org.apache.commons.vfs2.FileSelector;
import org.apache.commons.vfs2.FileType;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.job.entry.validator.ValidatorContext;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.resource.ResourceEntry;
import org.pentaho.di.resource.ResourceEntry.ResourceType;
import org.pentaho.di.resource.ResourceReference;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
/**
* This defines a 'delete files' job entry.
*
* @author Samatar Hassan
* @since 06-05-2007
*/
public class JobEntryDeleteFiles extends JobEntryBase implements Cloneable, JobEntryInterface {
private static Class<?> PKG = JobEntryDeleteFiles.class; // for i18n purposes, needed by Translator2!!
private boolean argFromPrevious;
private boolean includeSubfolders;
private String[] arguments;
private String[] filemasks;
  /**
   * Creates a named 'delete files' job entry with no path/mask arguments and
   * subfolder recursion disabled.
   *
   * @param jobName the name of the job entry
   */
  public JobEntryDeleteFiles( String jobName ) {
    super( jobName, "" );
    argFromPrevious = false;
    arguments = null;
    includeSubfolders = false;
  }
  /** Creates an unnamed 'delete files' job entry. */
  public JobEntryDeleteFiles() {
    this( "" );
  }
  /**
   * Sizes the parallel {@code arguments}/{@code filemasks} arrays to hold the
   * given number of path/mask pairs; any previous content is discarded.
   *
   * @param numberOfFields the number of path/mask pairs
   */
  public void allocate( int numberOfFields ) {
    arguments = new String[ numberOfFields ];
    filemasks = new String[ numberOfFields ];
  }
public Object clone() {
JobEntryDeleteFiles jobEntry = (JobEntryDeleteFiles) super.clone();
if ( arguments != null ) {
int nrFields = arguments.length;
jobEntry.allocate( nrFields );
System.arraycopy( arguments, 0, jobEntry.arguments, 0, nrFields );
System.arraycopy( filemasks, 0, jobEntry.filemasks, 0, nrFields );
}
return jobEntry;
}
  /**
   * Serializes this entry's settings (flags plus the path/mask field pairs) into
   * the XML fragment used when a job is saved to a file.
   *
   * @return the XML representation of this job entry
   */
  public String getXML() {
    StringBuilder retval = new StringBuilder( 300 );
    retval.append( super.getXML() );
    retval.append( " " ).append( XMLHandler.addTagValue( "arg_from_previous", argFromPrevious ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "include_subfolders", includeSubfolders ) );
    retval.append( " <fields>" ).append( Const.CR );
    if ( arguments != null ) {
      // one <field> element per path/mask pair
      for ( int i = 0; i < arguments.length; i++ ) {
        retval.append( " <field>" ).append( Const.CR );
        retval.append( " " ).append( XMLHandler.addTagValue( "name", arguments[i] ) );
        retval.append( " " ).append( XMLHandler.addTagValue( "filemask", filemasks[i] ) );
        retval.append( " </field>" ).append( Const.CR );
      }
    }
    retval.append( " </fields>" ).append( Const.CR );
    return retval.toString();
  }
  /**
   * Restores this entry's settings from the XML fragment produced by
   * {@link #getXML()}.
   *
   * @throws KettleXMLException if the XML node cannot be parsed
   */
  public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
    Repository rep, IMetaStore metaStore ) throws KettleXMLException {
    try {
      super.loadXML( entrynode, databases, slaveServers );
      // "Y"/"N" flags as written by XMLHandler.addTagValue
      argFromPrevious = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "arg_from_previous" ) );
      includeSubfolders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "include_subfolders" ) );
      Node fields = XMLHandler.getSubNode( entrynode, "fields" );
      int numberOfFields = XMLHandler.countNodes( fields, "field" );
      allocate( numberOfFields );
      for ( int i = 0; i < numberOfFields; i++ ) {
        Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
        arguments[i] = XMLHandler.getTagValue( fnode, "name" );
        filemasks[i] = XMLHandler.getTagValue( fnode, "filemask" );
      }
    } catch ( KettleXMLException xe ) {
      throw new KettleXMLException( BaseMessages.getString( PKG, "JobEntryDeleteFiles.UnableToLoadFromXml" ), xe );
    }
  }
  /**
   * Restores this entry's settings from the repository.
   *
   * @param id_jobentry the object id of this job entry in the repository
   * @throws KettleException if the attributes cannot be read
   */
  public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
    try {
      argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
      includeSubfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );
      // the number of "name" attributes determines how many path/mask pairs exist
      int numberOfArgs = rep.countNrJobEntryAttributes( id_jobentry, "name" );
      allocate( numberOfArgs );
      for ( int i = 0; i < numberOfArgs; i++ ) {
        arguments[i] = rep.getJobEntryAttributeString( id_jobentry, i, "name" );
        filemasks[i] = rep.getJobEntryAttributeString( id_jobentry, i, "filemask" );
      }
    } catch ( KettleException dbe ) {
      throw new KettleException( BaseMessages.getString( PKG, "JobEntryDeleteFiles.UnableToLoadFromRepo", String
        .valueOf( id_jobentry ) ), dbe );
    }
  }
  /**
   * Persists this entry's settings to the repository, mirroring
   * {@link #loadRep}.
   *
   * @param id_job the object id of the owning job
   * @throws KettleException if the attributes cannot be written
   */
  public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
    try {
      rep.saveJobEntryAttribute( id_job, getObjectId(), "arg_from_previous", argFromPrevious );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "include_subfolders", includeSubfolders );
      // save the arguments...
      if ( arguments != null ) {
        for ( int i = 0; i < arguments.length; i++ ) {
          rep.saveJobEntryAttribute( id_job, getObjectId(), i, "name", arguments[i] );
          rep.saveJobEntryAttribute( id_job, getObjectId(), i, "filemask", filemasks[i] );
        }
      }
    } catch ( KettleDatabaseException dbe ) {
      throw new KettleException( BaseMessages.getString( PKG, "JobEntryDeleteFiles.UnableToSaveToRepo", String
        .valueOf( id_job ) ), dbe );
    }
  }
  /**
   * Deletes the configured files/folders and records the outcome in the result.
   *
   * <p>Path/mask pairs come either from the previous entry's result rows (when
   * {@code argFromPrevious} is set) or from this entry's own argument arrays.</p>
   *
   * @param result the result of the previous execution; updated in place
   * @param nr the job entry number (unused here)
   * @return the updated result
   */
  public Result execute( Result result, int nr ) throws KettleException {
    List<RowMetaAndData> resultRows = result.getRows();
    int numberOfErrFiles = 0;
    // pessimistic defaults: overwritten below once all deletions are attempted
    result.setResult( false );
    result.setNrErrors( 1 );
    if ( argFromPrevious && log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFiles.FoundPreviousRows", String
        .valueOf( ( resultRows != null ? resultRows.size() : 0 ) ) ) );
    }
    Multimap<String, String> pathToMaskMap = populateDataForJobExecution( resultRows );
    for ( Map.Entry<String, String> pathToMask : pathToMaskMap.entries() ) {
      final String filePath = environmentSubstitute( pathToMask.getKey() );
      if ( filePath.trim().isEmpty() ) {
        // Relative paths are permitted, and providing an empty path means deleting all files inside a root pdi-folder.
        // It is much more likely to be a mistake than a desirable action, so we don't delete anything (see PDI-15181)
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFiles.NoPathProvided" ) );
        }
      } else {
        final String fileMask = environmentSubstitute( pathToMask.getValue() );
        // honor a stop request from the parent job between deletions
        if ( parentJob.isStopped() ) {
          break;
        }
        if ( !processFile( filePath, fileMask, parentJob ) ) {
          numberOfErrFiles++;
        }
      }
    }
    if ( numberOfErrFiles == 0 ) {
      result.setResult( true );
      result.setNrErrors( 0 );
    } else {
      result.setNrErrors( numberOfErrFiles );
      result.setResult( false );
    }
    return result;
  }
  /**
   * For job execution, paths to files and file masks should be provided.
   * These values can be obtained in two ways:
   * 1. As an argument of the current job entry
   * 2. As a table that comes as a result of executing the previous job/transformation.
   *
   * As the logic of processing this data is the same for both of these cases, we first
   * populate this data (in this method) and then process it.
   *
   * We are using a guava multimap here, because it allows key duplication and there could be a
   * situation where two paths to one folder with different wildcards are provided.
   */
  private Multimap<String, String> populateDataForJobExecution( List<RowMetaAndData> rowsFromPreviousMeta ) throws KettleValueException {
    Multimap<String, String> pathToMaskMap = ArrayListMultimap.create();
    if ( argFromPrevious && rowsFromPreviousMeta != null ) {
      for ( RowMetaAndData resultRow : rowsFromPreviousMeta ) {
        // each incoming row must carry at least [path, mask]
        if ( resultRow.size() < 2 ) {
          logError( BaseMessages.getString(
            PKG, "JobDeleteFiles.Error.InvalidNumberOfRowsFromPrevMeta", resultRow.size() ) );
          return pathToMaskMap;
        }
        String pathToFile = resultRow.getString( 0, null );
        String fileMask = resultRow.getString( 1, null );
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString(
            PKG, "JobEntryDeleteFiles.ProcessingRow", pathToFile, fileMask ) );
        }
        pathToMaskMap.put( pathToFile, fileMask );
      }
    } else if ( arguments != null ) {
      // fall back to this entry's own configured path/mask pairs
      for ( int i = 0; i < arguments.length; i++ ) {
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString(
            PKG, "JobEntryDeleteFiles.ProcessingArg", arguments[ i ], filemasks[ i ] ) );
        }
        pathToMaskMap.put( arguments[ i ], filemasks[ i ] );
      }
    }
    return pathToMaskMap;
  }
/**
 * Deletes a single target: a missing file counts as success, a folder has its matching
 * contents removed via {@link TextFileSelector}, and a plain file is deleted directly.
 *
 * @param path      VFS path to the file or folder
 * @param wildcard  regex applied to file names when the target is a folder
 * @param parentJob owning job, consulted for stop requests during folder traversal
 * @return true when the target was handled successfully
 */
boolean processFile( String path, String wildcard, Job parentJob ) {
  boolean succeeded = false;
  FileObject target = null;
  try {
    target = KettleVFS.getFileObject( path, this );
    if ( !target.exists() ) {
      // Already gone - nothing to do, treat as success.
      if ( log.isBasic() ) {
        logBasic( BaseMessages.getString( PKG, "JobEntryDeleteFiles.FileAlreadyDeleted", path ) );
      }
      succeeded = true;
    } else if ( target.getType() == FileType.FOLDER ) {
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFiles.ProcessingFolder", path ) );
      }
      // The selector filters by wildcard and honors job stop requests.
      int removedCount = target.delete( new TextFileSelector( target.toString(), wildcard, parentJob ) );
      if ( log.isDetailed() ) {
        logDetailed(
          BaseMessages.getString( PKG, "JobEntryDeleteFiles.TotalDeleted", String.valueOf( removedCount ) ) );
      }
      succeeded = true;
    } else {
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFiles.ProcessingFile", path ) );
      }
      succeeded = target.delete();
      if ( succeeded ) {
        if ( log.isBasic() ) {
          logBasic( BaseMessages.getString( PKG, "JobEntryDeleteFiles.FileDeleted", path ) );
        }
      } else {
        logError( BaseMessages.getString( PKG, "JobEntryDeleteFiles.CouldNotDeleteFile", path ) );
      }
    }
  } catch ( Exception e ) {
    logError( BaseMessages.getString( PKG, "JobEntryDeleteFiles.CouldNotProcess", path, e.getMessage() ), e );
  } finally {
    if ( target != null ) {
      try {
        target.close();
      } catch ( IOException ignored ) {
        // Best-effort close; nothing useful to do here.
      }
    }
  }
  return succeeded;
}
/**
 * VFS selector that picks the files to delete inside a folder: regular files whose base
 * name matches the wildcard, restricted to the base folder unless {@code includeSubfolders}
 * is set. Selection stops as soon as the parent job is stopped.
 */
private class TextFileSelector implements FileSelector {
  String fileWildcard = null;
  String sourceFolder = null;
  Job parentjob;

  public TextFileSelector( String sourcefolderin, String filewildcard, Job parentJob ) {
    if ( !Utils.isEmpty( sourcefolderin ) ) {
      sourceFolder = sourcefolderin;
    }
    if ( !Utils.isEmpty( filewildcard ) ) {
      fileWildcard = filewildcard;
    }
    parentjob = parentJob;
  }

  public boolean includeFile( FileSelectInfo info ) {
    boolean selected = false;
    try {
      FileObject candidate = info.getFile();
      if ( !candidate.toString().equals( sourceFolder ) && !parentjob.isStopped() ) {
        // Never select the base folder itself; stop selecting once the job stops.
        // Files outside the base folder are only eligible when sub folders are included.
        boolean inBaseFolder = candidate.getParent().equals( info.getBaseFolder() );
        if ( ( inBaseFolder || includeSubfolders )
          && candidate.getType() == FileType.FILE
          && GetFileWildcard( candidate.getName().getBaseName(), fileWildcard ) ) {
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFiles.DeletingFile", candidate.toString() ) );
          }
          selected = true;
        }
      }
    } catch ( Exception e ) {
      log.logError(
        BaseMessages.getString( PKG, "JobDeleteFiles.Error.Exception.DeleteProcessError" ), BaseMessages
          .getString( PKG, "JobDeleteFiles.Error.Exception.DeleteProcess", info.getFile().toString(), e
            .getMessage() ) );
      selected = false;
    }
    return selected;
  }

  public boolean traverseDescendents( FileSelectInfo info ) {
    // Always descend; per-file filtering happens in includeFile().
    return true;
  }
}
/**
 * Tests whether a file name matches the configured wildcard, which is interpreted
 * as a Java regular expression. An empty or null wildcard matches every file.
 *
 * @param selectedfile the file base name to test
 * @param wildcard     the regular expression, may be null/empty
 * @return true if the selectedfile matches the wildcard
 */
private boolean GetFileWildcard( String selectedfile, String wildcard ) {
  if ( Utils.isEmpty( wildcard ) ) {
    return true;
  }
  return Pattern.compile( wildcard ).matcher( selectedfile ).matches();
}
/** @param includeSubfolders whether files in sub folders are also candidates for deletion */
public void setIncludeSubfolders( boolean includeSubfolders ) {
this.includeSubfolders = includeSubfolders;
}
/**
 * Sets whether the (path, mask) arguments come from the previous entry's result rows.
 * NOTE(review): duplicates setArgFromPrevious(boolean) below — presumably kept for
 * backward compatibility with existing callers; confirm before removing either.
 */
public void setPrevious( boolean argFromPrevious ) {
this.argFromPrevious = argFromPrevious;
}
/** This entry evaluates to a boolean result, so it can drive conditional job hops. */
public boolean evaluates() {
return true;
}
/**
 * Design-time validation: the argument array must be present, and each individual
 * argument must be non-null and point at an existing file. Findings are appended
 * to {@code remarks}.
 */
public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space,
  Repository repository, IMetaStore metaStore ) {
  boolean argumentsPresent = JobEntryValidatorUtils.andValidator().validate( this, "arguments", remarks,
    AndValidator.putValidators( JobEntryValidatorUtils.notNullValidator() ) );
  if ( !argumentsPresent ) {
    // Without the array there is nothing element-wise to validate.
    return;
  }
  ValidatorContext context = new ValidatorContext();
  AbstractFileValidator.putVariableSpace( context, getVariables() );
  AndValidator.putValidators( context, JobEntryValidatorUtils.notNullValidator(),
    JobEntryValidatorUtils.fileExistsValidator() );
  for ( int idx = 0; idx < arguments.length; idx++ ) {
    JobEntryValidatorUtils.andValidator().validate( this, "arguments[" + idx + "]", remarks, context );
  }
}
/**
 * Reports every configured (environment-substituted) file argument as a FILE resource
 * this entry depends on, grouped under a single {@link ResourceReference}.
 */
public List<ResourceReference> getResourceDependencies( JobMeta jobMeta ) {
  List<ResourceReference> references = super.getResourceDependencies( jobMeta );
  if ( arguments != null && arguments.length > 0 ) {
    ResourceReference reference = new ResourceReference( this );
    references.add( reference );
    for ( String argument : arguments ) {
      String filename = jobMeta.environmentSubstitute( argument );
      reference.getEntries().add( new ResourceEntry( filename, ResourceType.FILE ) );
    }
  }
  return references;
}
/** @param arguments the paths (files or folders) to delete; parallel to the file masks */
public void setArguments( String[] arguments ) {
this.arguments = arguments;
}
/** @param filemasks regex masks applied per path; parallel to the arguments array */
public void setFilemasks( String[] filemasks ) {
this.filemasks = filemasks;
}
/** @param argFromPrevious whether arguments are taken from the previous entry's result rows */
public void setArgFromPrevious( boolean argFromPrevious ) {
this.argFromPrevious = argFromPrevious;
}
/** @return true when arguments are taken from the previous entry's result rows */
public boolean isArgFromPrevious() {
return argFromPrevious;
}
/** @return the configured paths to delete (may be null) */
public String[] getArguments() {
return arguments;
}
/** @return the configured file masks, parallel to the arguments array (may be null) */
public String[] getFilemasks() {
return filemasks;
}
/** @return true when files in sub folders are also candidates for deletion */
public boolean isIncludeSubfolders() {
return includeSubfolders;
}
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
* Copyright (C) 2011-2012 Eugene Fradkin (eugene.fradkin@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui.editors.sql.preferences;
import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.*;
import org.eclipse.ui.dialogs.PreferenceLinkArea;
import org.eclipse.ui.preferences.IWorkbenchPreferenceContainer;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.DBPDataSourceContainer;
import org.jkiss.dbeaver.model.preferences.DBPPreferenceStore;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.editors.sql.SQLEditor;
import org.jkiss.dbeaver.ui.editors.sql.SQLPreferenceConstants;
import org.jkiss.dbeaver.ui.editors.sql.internal.SQLEditorMessages;
import org.jkiss.dbeaver.ui.preferences.TargetPrefPage;
import org.jkiss.dbeaver.utils.PrefUtils;
/**
* PrefPageSQLEditor
*/
/**
 * PrefPageSQLEditor
 *
 * Preference page for the SQL editor: connection handling, auto-save behavior,
 * folding / occurrence marking, and results-panel layout. Via TargetPrefPage the
 * same page edits either the global preference store or a data-source specific one.
 */
public class PrefPageSQLEditor extends TargetPrefPage
{
private static final Log log = Log.getLog(PrefPageSQLEditor.class);
public static final String PAGE_ID = "org.jkiss.dbeaver.preferences.main.sqleditor"; //$NON-NLS-1$
// ID of the standard Eclipse text-editor preference page, linked from the bottom of this page.
private static final String TEXT_EDITOR_PAGE_ID = "org.eclipse.ui.preferencePages.GeneralTextEditor"; //$NON-NLS-1$
// "Connections" group controls
private Button editorSeparateConnectionCheck;
private Button connectOnActivationCheck;
private Button connectOnExecuteCheck;
// "Auto-save" group controls
private Button saveOnQueryExecution;
private Button autoSaveOnClose;
// Text-feature controls (folding, occurrence highlighting)
private Button csFoldingEnabled;
private Button csMarkOccurrencesUnderCursor;
private Button csMarkOccurrencesForSelection;
// "Result view" group controls
private Button closeTabOnErrorCheck;
private Combo resultsOrientationCombo;
public PrefPageSQLEditor()
{
super();
}
/**
 * True when the data source overrides at least one of the preferences shown on this page.
 * NOTE(review): MARK_OCCURRENCES_FOR_SELECTION is loaded/saved below but not checked
 * here — confirm whether that omission is intentional.
 */
@Override
protected boolean hasDataSourceSpecificOptions(DBPDataSourceContainer dataSourceDescriptor)
{
DBPPreferenceStore store = dataSourceDescriptor.getPreferenceStore();
return
store.contains(SQLPreferenceConstants.EDITOR_SEPARATE_CONNECTION) ||
store.contains(SQLPreferenceConstants.EDITOR_CONNECT_ON_ACTIVATE) ||
store.contains(SQLPreferenceConstants.EDITOR_CONNECT_ON_EXECUTE) ||
store.contains(SQLPreferenceConstants.AUTO_SAVE_ON_CLOSE) ||
store.contains(SQLPreferenceConstants.AUTO_SAVE_ON_EXECUTE) ||
store.contains(SQLPreferenceConstants.FOLDING_ENABLED) ||
store.contains(SQLPreferenceConstants.MARK_OCCURRENCES_UNDER_CURSOR) ||
store.contains(SQLPreferenceConstants.RESULT_SET_CLOSE_ON_ERROR) ||
store.contains(SQLPreferenceConstants.RESULT_SET_ORIENTATION)
;
}
@Override
protected boolean supportsDataSourceSpecificOptions()
{
return true;
}
/**
 * Builds the page UI: four groups (connections, auto-save, text features, result view)
 * plus a link to the generic Eclipse text-editor preferences.
 */
@Override
protected Control createPreferenceContent(Composite parent)
{
Composite composite = UIUtils.createPlaceholder(parent, 2, 5);
// Connections group (spans both columns)
{
Group connectionsGroup = UIUtils.createControlGroup(composite, SQLEditorMessages.pref_page_sql_editor_group_connections, 1, GridData.VERTICAL_ALIGN_BEGINNING | GridData.FILL_HORIZONTAL, 0);
((GridData)connectionsGroup.getLayoutData()).horizontalSpan = 2;
editorSeparateConnectionCheck = UIUtils.createCheckbox(connectionsGroup, SQLEditorMessages.pref_page_sql_editor_label_separate_connection_each_editor, false);
connectOnActivationCheck = UIUtils.createCheckbox(connectionsGroup, SQLEditorMessages.pref_page_sql_editor_label_connect_on_editor_activation, false);
connectOnExecuteCheck = UIUtils.createCheckbox(connectionsGroup, SQLEditorMessages.pref_page_sql_editor_label_connect_on_query_execute, false);
}
// Auto-save group
{
Group autoSaveGroup = UIUtils.createControlGroup(composite, SQLEditorMessages.pref_page_sql_editor_group_auto_save, 1, GridData.VERTICAL_ALIGN_BEGINNING | GridData.FILL_HORIZONTAL, 0);
autoSaveOnClose = UIUtils.createCheckbox(autoSaveGroup, SQLEditorMessages.pref_page_sql_editor_label_auto_save_on_close, false);
saveOnQueryExecution = UIUtils.createCheckbox(autoSaveGroup, SQLEditorMessages.pref_page_sql_editor_label_save_on_query_execute, false);
}
// Folding
{
Composite foldingGroup = UIUtils.createControlGroup(composite, SQLEditorMessages.pref_page_sql_completion_group_misc, 2, GridData.VERTICAL_ALIGN_BEGINNING | GridData.FILL_HORIZONTAL, 0);
csMarkOccurrencesUnderCursor = UIUtils.createCheckbox(foldingGroup, SQLEditorMessages.pref_page_sql_completion_label_mark_occurrences, SQLEditorMessages.pref_page_sql_completion_label_mark_occurrences_tip, false, 2);
csMarkOccurrencesForSelection = UIUtils.createCheckbox(foldingGroup, SQLEditorMessages.pref_page_sql_completion_label_mark_occurrences_for_selections, SQLEditorMessages.pref_page_sql_completion_label_mark_occurrences_for_selections_tip, false, 2);
csFoldingEnabled = UIUtils.createCheckbox(foldingGroup, SQLEditorMessages.pref_page_sql_completion_label_folding_enabled, SQLEditorMessages.pref_page_sql_completion_label_folding_enabled_tip, false, 2);
}
// Result view group (spans both columns); only supported orientations are offered
{
Composite layoutGroup = UIUtils.createControlGroup(composite, SQLEditorMessages.pref_page_sql_editor_group_result_view, 2, GridData.FILL_HORIZONTAL, 0);
((GridData)layoutGroup.getLayoutData()).horizontalSpan = 2;
closeTabOnErrorCheck = UIUtils.createCheckbox(layoutGroup, SQLEditorMessages.pref_page_sql_editor_label_close_results_tab_on_error, null, false, 2);
resultsOrientationCombo = UIUtils.createLabelCombo(layoutGroup, SQLEditorMessages.pref_page_sql_editor_label_results_orientation, SQLEditorMessages.pref_page_sql_editor_label_results_orientation_tip, SWT.READ_ONLY | SWT.DROP_DOWN);
((GridData)resultsOrientationCombo.getLayoutData()).grabExcessHorizontalSpace = false;
for (SQLEditor.ResultSetOrientation orientation : SQLEditor.ResultSetOrientation.values()) {
if (orientation.isSupported()) {
resultsOrientationCombo.add(orientation.getLabel());
}
}
}
// Cross-link to the generic Eclipse text-editor preference page
{
new PreferenceLinkArea(composite, SWT.NONE,
PrefPageSQLEditor.TEXT_EDITOR_PAGE_ID,
"<a>''{0}''</a>" + SQLEditorMessages.pref_page_sql_editor_link_text_editor,
(IWorkbenchPreferenceContainer) getContainer(), null); //$NON-NLS-1$
}
return composite;
}
/** Populates the controls from the given store; any failure is logged, not surfaced. */
@Override
protected void loadPreferences(DBPPreferenceStore store)
{
try {
editorSeparateConnectionCheck.setSelection(store.getBoolean(SQLPreferenceConstants.EDITOR_SEPARATE_CONNECTION));
connectOnActivationCheck.setSelection(store.getBoolean(SQLPreferenceConstants.EDITOR_CONNECT_ON_ACTIVATE));
connectOnExecuteCheck.setSelection(store.getBoolean(SQLPreferenceConstants.EDITOR_CONNECT_ON_EXECUTE));
autoSaveOnClose.setSelection(store.getBoolean(SQLPreferenceConstants.AUTO_SAVE_ON_CLOSE));
saveOnQueryExecution.setSelection(store.getBoolean(SQLPreferenceConstants.AUTO_SAVE_ON_EXECUTE));
csFoldingEnabled.setSelection(store.getBoolean(SQLPreferenceConstants.FOLDING_ENABLED));
csMarkOccurrencesUnderCursor.setSelection(store.getBoolean(SQLPreferenceConstants.MARK_OCCURRENCES_UNDER_CURSOR));
csMarkOccurrencesForSelection.setSelection(store.getBoolean(SQLPreferenceConstants.MARK_OCCURRENCES_FOR_SELECTION));
closeTabOnErrorCheck.setSelection(store.getBoolean(SQLPreferenceConstants.RESULT_SET_CLOSE_ON_ERROR));
// valueOf may throw if the stored value is stale; caught and logged below
SQLEditor.ResultSetOrientation orientation = SQLEditor.ResultSetOrientation.valueOf(store.getString(SQLPreferenceConstants.RESULT_SET_ORIENTATION));
resultsOrientationCombo.setText(orientation.getLabel());
} catch (Exception e) {
log.warn(e);
}
}
/** Writes the control states back into the store, then persists the store. */
@Override
protected void savePreferences(DBPPreferenceStore store)
{
try {
store.setValue(SQLPreferenceConstants.EDITOR_SEPARATE_CONNECTION, editorSeparateConnectionCheck.getSelection());
store.setValue(SQLPreferenceConstants.EDITOR_CONNECT_ON_ACTIVATE, connectOnActivationCheck.getSelection());
store.setValue(SQLPreferenceConstants.EDITOR_CONNECT_ON_EXECUTE, connectOnExecuteCheck.getSelection());
store.setValue(SQLPreferenceConstants.AUTO_SAVE_ON_CLOSE, autoSaveOnClose.getSelection());
store.setValue(SQLPreferenceConstants.AUTO_SAVE_ON_EXECUTE, saveOnQueryExecution.getSelection());
store.setValue(SQLPreferenceConstants.FOLDING_ENABLED, csFoldingEnabled.getSelection());
store.setValue(SQLPreferenceConstants.MARK_OCCURRENCES_UNDER_CURSOR, csMarkOccurrencesUnderCursor.getSelection());
store.setValue(SQLPreferenceConstants.MARK_OCCURRENCES_FOR_SELECTION, csMarkOccurrencesForSelection.getSelection());
store.setValue(SQLPreferenceConstants.RESULT_SET_CLOSE_ON_ERROR, closeTabOnErrorCheck.getSelection());
// Map the combo's display label back to the enum constant name
String orientationLabel = resultsOrientationCombo.getText();
for (SQLEditor.ResultSetOrientation orientation : SQLEditor.ResultSetOrientation.values()) {
if (orientationLabel.equals(orientation.getLabel())) {
store.setValue(SQLPreferenceConstants.RESULT_SET_ORIENTATION, orientation.name());
break;
}
}
} catch (Exception e) {
log.warn(e);
}
PrefUtils.savePreferenceStore(store);
}
/** Removes all data-source specific overrides managed by this page. */
@Override
protected void clearPreferences(DBPPreferenceStore store)
{
store.setToDefault(SQLPreferenceConstants.EDITOR_SEPARATE_CONNECTION);
store.setToDefault(SQLPreferenceConstants.EDITOR_CONNECT_ON_ACTIVATE);
store.setToDefault(SQLPreferenceConstants.EDITOR_CONNECT_ON_EXECUTE);
store.setToDefault(SQLPreferenceConstants.AUTO_SAVE_ON_CLOSE);
store.setToDefault(SQLPreferenceConstants.AUTO_SAVE_ON_EXECUTE);
store.setToDefault(SQLPreferenceConstants.FOLDING_ENABLED);
store.setToDefault(SQLPreferenceConstants.MARK_OCCURRENCES_UNDER_CURSOR);
store.setToDefault(SQLPreferenceConstants.MARK_OCCURRENCES_FOR_SELECTION);
store.setToDefault(SQLPreferenceConstants.RESULT_SET_CLOSE_ON_ERROR);
store.setToDefault(SQLPreferenceConstants.RESULT_SET_ORIENTATION);
}
@Override
protected String getPropertyPageID()
{
return PAGE_ID;
}
}
| |
/*
* Copyright 2003-2015 Dave Griffith, Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.ig.psiutils;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInsight.TestFrameworks;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.testIntegration.TestFramework;
import com.intellij.util.ObjectUtils;
import com.siyeh.ig.callMatcher.CallMatcher;
import com.siyeh.ig.junit.JUnitCommonClassNames;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Set;
import static com.intellij.codeInsight.AnnotationUtil.CHECK_HIERARCHY;
/**
 * Static helpers for recognizing test classes, test methods and test-source context
 * across the test frameworks known to the IDE (JUnit 3/4/5, TestNG).
 */
public class TestUtils {
public static final String RUN_WITH = "org.junit.runner.RunWith";
// JUnit 5's Assertions.assertThrows; used to spot lambdas where an exception is expected.
private static final CallMatcher ASSERT_THROWS =
CallMatcher.staticCall(JUnitCommonClassNames.ORG_JUNIT_JUPITER_API_ASSERTIONS, "assertThrows");
// Utility class; not instantiable.
private TestUtils() { }
/** Returns true if the element's file lives under a test source root of its project. */
public static boolean isInTestSourceContent(@Nullable PsiElement element) {
if (element == null) {
return false;
}
final PsiFile file = element.getContainingFile();
final PsiFile virtualFile = file == null ? null : file.getVirtualFile();
return virtualFile != null && ProjectRootManager.getInstance(file.getProject()).getFileIndex().isInTestSourceContent(virtualFile);
}
/** Returns true if the element sits inside a method recognized as a JUnit test method. */
public static boolean isPartOfJUnitTestMethod(@NotNull PsiElement element) {
final PsiMethod method = PsiTreeUtil.getParentOfType(element, PsiMethod.class, false);
return method != null && isJUnitTestMethod(method);
}
/** Returns true if the method carries JUnit 4's @Before or @After (hierarchy-aware). */
public static boolean isJUnit4BeforeOrAfterMethod(@NotNull PsiMethod method) {
return AnnotationUtil.isAnnotated(method, "org.junit.Before", CHECK_HIERARCHY) ||
AnnotationUtil.isAnnotated(method, "org.junit.After", CHECK_HIERARCHY);
}
/**
 * Returns true if any JUnit-family framework applicable to the containing class
 * considers this method a test method.
 */
public static boolean isJUnitTestMethod(@Nullable PsiMethod method) {
if (method == null) return false;
final PsiClass containingClass = method.getContainingClass();
if (containingClass == null) return false;
final Set<TestFramework> frameworks = TestFrameworks.detectApplicableFrameworks(containingClass);
return frameworks.stream().anyMatch(framework -> framework.getName().startsWith("JUnit") && framework.isTestMethod(method, false));
}
/** Returns true for a public, non-static, non-abstract, void, zero-argument method. */
public static boolean isRunnable(PsiMethod method) {
if (method == null) {
return false;
}
if (method.hasModifierProperty(PsiModifier.ABSTRACT) ||
method.hasModifierProperty(PsiModifier.STATIC) ||
!method.hasModifierProperty(PsiModifier.PUBLIC)) {
return false;
}
final PsiType returnType = method.getReturnType();
if (!PsiType.VOID.equals(returnType)) {
return false;
}
final PsiParameterList parameterList = method.getParameterList();
return parameterList.getParametersCount() == 0;
}
/**
 * Returns true for a JUnit 3 style test method: named test*, declared in a
 * junit.framework.TestCase subclass.
 * NOTE(review): because && binds tighter than ||, the condition rejects only methods
 * that are both non-public AND have parameters; a non-public zero-argument test* method
 * passes. Confirm whether "(!public || params > 0)" was the intent.
 */
public static boolean isJUnit3TestMethod(@Nullable PsiMethod method) {
if (method == null) {
return false;
}
final String methodName = method.getName();
@NonNls final String test = "test";
if (!methodName.startsWith(test) ||
!method.hasModifierProperty(PsiModifier.PUBLIC) && method.getParameterList().getParametersCount() > 0) {
return false;
}
final PsiClass containingClass = method.getContainingClass();
return isJUnitTestClass(containingClass);
}
/** Returns true if the method is annotated with JUnit 4's @Test (hierarchy-aware). */
public static boolean isJUnit4TestMethod(@Nullable PsiMethod method) {
return method != null && AnnotationUtil.isAnnotated(method, JUnitCommonClassNames.ORG_JUNIT_TEST, CHECK_HIERARCHY);
}
/**
 * Returns true if the single framework detected for the containing class considers
 * this a test method AND that framework is JUnit 4 or JUnit 5.
 */
public static boolean isAnnotatedTestMethod(@Nullable PsiMethod method) {
if (method == null) return false;
final PsiClass containingClass = method.getContainingClass();
if (containingClass == null) return false;
final TestFramework testFramework = TestFrameworks.detectFramework(containingClass);
if (testFramework == null) return false;
if (testFramework.isTestMethod(method, false)) {
final String testFrameworkName = testFramework.getName();
return testFrameworkName.equals("JUnit4") || testFrameworkName.equals("JUnit5");
}
return false;
}
/** Returns true if the class inherits from junit.framework.TestCase (JUnit 3). */
public static boolean isJUnitTestClass(@Nullable PsiClass targetClass) {
return targetClass != null && InheritanceUtil.isInheritor(targetClass, JUnitCommonClassNames.JUNIT_FRAMEWORK_TEST_CASE);
}
/**
 * Returns true if the class is a JUnit 4 test class: either annotated with @RunWith
 * (in which case {@code runWithIsTestClass} decides) or containing a @Test method.
 */
public static boolean isJUnit4TestClass(@Nullable PsiClass aClass, boolean runWithIsTestClass) {
if (aClass == null) return false;
if (AnnotationUtil.isAnnotated(aClass, RUN_WITH, CHECK_HIERARCHY)) return runWithIsTestClass;
for (final PsiMethod method : aClass.getAllMethods()) {
if (isJUnit4TestMethod(method)) return true;
}
return false;
}
/**
 * Returns true if the element belongs to test code: inside a JUnit test method,
 * inside a test/config class, or anywhere under a test source root.
 */
public static boolean isInTestCode(PsiElement element) {
if (isPartOfJUnitTestMethod(element)) {
return true;
}
final PsiClass containingClass = PsiTreeUtil.getParentOfType(element, PsiClass.class);
if (containingClass != null && TestFrameworks.getInstance().isTestOrConfig(containingClass)) {
return true;
}
return isInTestSourceContent(element);
}
/**
 * @return true if class is annotated with {@code @TestInstance(TestInstance.Lifecycle.PER_CLASS)}
 */
public static boolean testInstancePerClass(@NotNull PsiClass containingClass) {
PsiAnnotation annotation = AnnotationUtil.findAnnotation(containingClass, JUnitCommonClassNames.ORG_JUNIT_JUPITER_API_TEST_INSTANCE);
if (annotation != null) {
// Textual check of the attribute value; tolerates both qualified and imported forms.
PsiAnnotationMemberValue value = annotation.findDeclaredAttributeValue(PsiAnnotation.DEFAULT_REFERENCED_METHOD_NAME);
if (value != null && value.getText().contains("PER_CLASS")) {
return true;
}
}
return false;
}
/**
 * Tries to determine whether exception is expected at given element (e.g. element is a part of method annotated with
 * {@code @Test(expected = ...)} or part of lambda passed to {@code Assertions.assertThrows()}.
 *
 * Note that the test is not exhaustive: false positives and false negatives are possible.
 *
 * @param element to check
 * @return true if it's likely that exception is expected at this point.
 */
public static boolean isExceptionExpected(PsiElement element) {
if (!isInTestSourceContent(element)) return false;
// Walk up the tree until the file level, looking for an "exception expected" context.
for(; element != null && !(element instanceof PsiFile); element = element.getParent()) {
if (element instanceof PsiMethod) {
return hasExpectedExceptionAnnotation((PsiMethod)element);
}
if (element instanceof PsiLambdaExpression) {
// A lambda that is an argument of Assertions.assertThrows(...) expects an exception.
PsiExpressionList expressionList =
ObjectUtils.tryCast(PsiUtil.skipParenthesizedExprUp(element.getParent()), PsiExpressionList.class);
if (expressionList != null) {
PsiElement parent = expressionList.getParent();
if (parent instanceof PsiMethodCallExpression && ASSERT_THROWS.test((PsiMethodCallExpression)parent)) return true;
}
}
if (element instanceof PsiTryStatement && ((PsiTryStatement)element).getCatchBlocks().length > 0) {
return true;
}
}
return false;
}
/** Returns true for @Test(expected=...) (JUnit 4) or @Test(expectedExceptions=...) (TestNG). */
public static boolean hasExpectedExceptionAnnotation(PsiMethod method) {
final PsiModifierList modifierList = method.getModifierList();
return hasAnnotationWithParameter(modifierList, "org.junit.Test", "expected") ||
hasAnnotationWithParameter(modifierList, "org.testng.annotations.Test", "expectedExceptions");
}
/** Returns true if the given annotation is present and declares the named attribute. */
private static boolean hasAnnotationWithParameter(PsiModifierList modifierList, String annotationName, String expectedParameterName) {
final PsiAnnotation testAnnotation = modifierList.findAnnotation(annotationName);
if (testAnnotation == null) {
return false;
}
final PsiAnnotationParameterList parameterList = testAnnotation.getParameterList();
final PsiNameValuePair[] nameValuePairs = parameterList.getAttributes();
for (PsiNameValuePair nameValuePair : nameValuePairs) {
@NonNls final String parameterName = nameValuePair.getName();
if (expectedParameterName.equals(parameterName)) {
return true;
}
}
return false;
}
}
| |
package de.fhg.iais.cortex.search.utils;
import java.text.Normalizer;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrQuery.SortClause;
import org.apache.solr.client.solrj.util.ClientUtils;

import com.google.common.base.Strings;

import de.fhg.iais.cortex.search.SolrFields;
import de.fhg.iais.cortex.search.types.Facet;
import de.fhg.iais.cortex.search.types.FacetValue;
import de.fhg.iais.cortex.search.types.SortCriterion;
/**
* @author fschulz
*/
public class SolrQueryBuilder {
protected static final String SORT_BY_SCORE = "score";
protected static final String SORT_RANDOM = "random_";
protected final SolrQuery solrQuery;
private static final String AUTOCOMPLETE_SUFFIX = "_autocomplete";
private String autoCompleteFacetname;
/**
* Create a new SolrQuery Builder with <code>query</code> as search query
* that returns the portion <code>offset</code> to <code>offset+rows</code> from the search results.
*
* @param query
* The search query
* @param offset
* The offset into the result documents.
* @param rows
* The number of result documents returned.
* @return this SolrQueryBuilder.
*/
public SolrQueryBuilder(String query, int offset, int rows) {
this.solrQuery = new SolrQuery(query).setStart(offset).setRows(rows);
}
public SolrQueryBuilder setAutocompleteQuery(String facetName, String prefixQuery) {
this.autoCompleteFacetname = facetName + AUTOCOMPLETE_SUFFIX;
this.solrQuery.addFilterQuery(this.autoCompleteFacetname + ":\"" + Normalizer.normalize(prefixQuery, Normalizer.Form.NFC) + "\"");
return this;
}
/**
* Set that the search results should return the query terms highlighted in
* view and preview.
*
* @param numberOfSnippets
* The number of snippets to highlight in the view field.
* @param highlightFragSize
* The number of characters that should surround the hghlights in
* the view field.
* @return this SolrQueryBuilder.
*/
public SolrQueryBuilder addHighlightingCapabilities(int numberOfSnippets, int highlightFragSize) {
this.solrQuery.setParam("hl.q", solrQuery().getQuery());
this.solrQuery.addHighlightField(SolrFields.PREVIEW_STORE);
this.solrQuery.setParam("f." + SolrFields.PREVIEW_STORE + ".hl.fragsize", "0");
this.solrQuery.addHighlightField(SolrFields.VIEW);
this.solrQuery.setParam("f." + SolrFields.VIEW + ".hl.bs.type", "WORD");
this.solrQuery.setHighlight(true);
this.solrQuery.setParam("f." + SolrFields.VIEW + ".hl.fragsize", Integer.toString(highlightFragSize));
this.solrQuery.setHighlightSimplePre("<match>");
this.solrQuery.setHighlightSimplePost("</match>");
this.solrQuery.setHighlightSnippets(numberOfSnippets);
return this;
}
public SolrQueryBuilder setSortCapabilities(SortCriterion sortCriterion) {
if ( sortCriterion.equals(SortCriterion.RELEVANCE) ) {
this.solrQuery.setSort(SortClause.create(SORT_BY_SCORE, SolrQuery.ORDER.desc));
} else if ( sortCriterion.equals(SortCriterion.ALPHA_ASC) ) {
this.solrQuery.setSort(SortClause.create(SolrFields.SORT, SolrQuery.ORDER.asc));
} else if ( sortCriterion.equals(SortCriterion.ALPHA_DESC) ) {
this.solrQuery.setSort(SortClause.create(SolrFields.SORT, SolrQuery.ORDER.desc));
} else if ( sortCriterion.equals(SortCriterion.APD_XML) ) {
this.solrQuery.setSort(SortClause.create(SolrFields.APD_XML_SORT, SolrQuery.ORDER.asc));
} else if ( sortCriterion.isRandom() ) {
long seed = sortCriterion.getSeed();
this.solrQuery.setSort(SortClause.create(SORT_RANDOM + seed, SolrQuery.ORDER.desc));
}
return this;
}
public SolrQueryBuilder enableSpellchecking() {
this.solrQuery.setParam("spellcheck", true);
return this;
}
public SolrQueryBuilder addFacetFilterQuery(Facet facet, String facetField) {
List<FacetValue> facetValues = facet.getFacetValues();
String[] filterQueries = new String[facet.getFacetValues().size()];
int i = 0;
for ( FacetValue facetValue : facetValues ) {
if ( isRangeQuery(facetValue.getValue()) ) {
filterQueries[i] = facetField + ":" + rangeQueryEscape(facetValue.getValue());
} else {
filterQueries[i] = facetField + ":" + ClientUtils.escapeQueryChars(facetValue.getValue());
}
i++;
}
String filterQuery = StringUtils.join(filterQueries, " OR ");
if ( filterQuery.length() != 0 ) {
this.solrQuery.addFilterQuery(filterQuery.trim());
}
return this;
}
private boolean isRangeQuery(String facetValue) {
return ((facetValue.startsWith("[") || facetValue.startsWith("{")) && facetValue.contains(" TO ") && (facetValue.endsWith("]") || facetValue
.endsWith("}"))) ? true : false;
}
private String rangeQueryEscape(String value) {
String result = value;
try {
int midIdx = value.indexOf(" TO ");
String fromValue = value.substring(1, midIdx);
String toValue = value.substring(midIdx + 4, value.length() - 1);
StringBuilder resultBuilder = new StringBuilder().append(value.charAt(0));
if ( "*".equals(fromValue) ) {
resultBuilder.append(fromValue);
} else {
resultBuilder.append(ClientUtils.escapeQueryChars(fromValue));
}
resultBuilder.append(" TO ");
if ( "*".equals(toValue) ) {
resultBuilder.append(toValue);
} else {
resultBuilder.append(ClientUtils.escapeQueryChars(toValue));
}
result = resultBuilder.append(value.charAt(value.length() - 1)).toString();
} catch ( IndexOutOfBoundsException e ) {
return value;
}
return result;
}
public SolrQueryBuilder addExcludingFilterQueries(Map<String, List<String>> exclusions) {
for ( String fieldId : exclusions.keySet() ) {
for ( String fieldValue : exclusions.get(fieldId) ) {
StringBuilder filterQuery = new StringBuilder("-").append(fieldId).append(":").append(fieldValue);
this.solrQuery.addFilterQuery(filterQuery.toString().trim());
}
}
return this;
}
public SolrQueryBuilder setFacetSort(Facet.Sort sort) {
this.solrQuery.setFacetSort(sort.toString().toLowerCase());
return this;
}
public SolrQuery solrQuery() {
return this.solrQuery;
}
public String autoCompleteFacetname() {
return this.autoCompleteFacetname;
}
public SolrQueryBuilder addFacets(int minDocs, int facetLimit, Facet... facets) {
return addFacets(minDocs, 0, facetLimit, facets);
}
public SolrQueryBuilder addFacets(int minDocs, int offset, int rows, Facet... facets) {
this.solrQuery.setFacet(true);
this.solrQuery.setParam("facet.offset", "" + offset);
this.solrQuery.setFacetMinCount(minDocs);
this.solrQuery.setFacetLimit(rows);
if ( facets == null ) {
facets = new Facet[0];
}
for ( Facet facetList : facets ) {
String facetField = facetList.getField();
this.solrQuery.addFacetField(facetField);
addFacetFilterQuery(facetList, facetField);
}
return this;
}
public SolrQueryBuilder addFacetsUnlimited(int minDocs, int offset, Facet... facets) {
this.solrQuery.setFacet(true);
this.solrQuery.setParam("facet.offset", "" + offset);
this.solrQuery.setFacetMinCount(minDocs);
if ( facets == null ) {
facets = new Facet[0];
}
for ( Facet facetList : facets ) {
String facetField = facetList.getField();
this.solrQuery.addFacetField(facetField);
addFacetFilterQuery(facetList, facetField);
}
return this;
}
/**
 * Sets the minimum number of documents a facet value must match to be returned.
 * Non-positive values are clamped to 1 so the facet min count is always at least one.
 *
 * @param minDocs requested minimum document count per facet value
 * @return this builder, for chaining
 */
public SolrQueryBuilder setMinimumDocumentsPerFacet(int minDocs) {
    solrQuery().setFacetMinCount(minDocs <= 0 ? 1 : minDocs);
    return this;
}
/**
 * Apply specific sort criterion on the given sort field.
 *
 * @param sortCriterion the criterion determining direction (or randomness)
 * @param sortField     the field to sort on; when null/empty the criterion-only
 *                      overload is used instead
 * @return this builder, for chaining
 */
public SolrQueryBuilder setSortCapabilities(SortCriterion sortCriterion, String sortField) {
    if ( Strings.isNullOrEmpty(sortField) ) {
        // No explicit field supplied: defer to the criterion-only overload.
        this.setSortCapabilities(sortCriterion);
        return this;
    }
    // ALPHA_* and TIME_* criteria map to the same ascending/descending sort on the field.
    boolean descending = sortCriterion.equals(SortCriterion.ALPHA_DESC)
            || sortCriterion.equals(SortCriterion.TIME_DESC);
    boolean ascending = sortCriterion.equals(SortCriterion.ALPHA_ASC)
            || sortCriterion.equals(SortCriterion.TIME_ASC);
    if ( descending ) {
        this.solrQuery.setSort(SortClause.create(sortField, SolrQuery.ORDER.desc));
    } else if ( ascending ) {
        this.solrQuery.setSort(SortClause.create(sortField, SolrQuery.ORDER.asc));
    } else if ( sortCriterion.isRandom() ) {
        // Random ordering sorts on the seeded dynamic random field.
        long seed = sortCriterion.getSeed();
        this.solrQuery.setSort(SortClause.create(SORT_RANDOM + seed, SolrQuery.ORDER.desc));
    }
    return this;
}
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package tonelitosmovil;
/**
 * Adjacency-list graph built on the project's custom {@code List}, {@code Node}
 * and {@code Arista} (edge) classes. Vertices are matched by {@code getID()};
 * several methods assume node IDs start at 1 and map ID {@code n} to list
 * position {@code n - 1}.
 *
 * @author rick
 */
public class Grafo {
    // Vertex set: a custom (non java.util) list of Node objects.
    private List nodos;
    public Grafo(List nodos) {
        this.nodos = nodos;
    }
    public Grafo() {
        nodos = new List();
    }
    public List getNodos() {
        return nodos;
    }
    public void setNodos(List nodos) {
        this.nodos = nodos;
    }
    /**
     * Returns the node at the far end of the FIRST edge leaving {@code vertex},
     * or null (with an error message) when the vertex is not in the graph.
     */
    public Node First(Node vertex) {
        int nodeIndex = -1;
        for (int i = 0; i < nodos.size(); i++) {
            if (vertex.getID() == nodos.elementAt(i).getID()) {
                // ID of the destination node of the vertex's first edge.
                nodeIndex = ((Arista) nodos.elementAt(i).getAristas().elementAt(0).getValue()).getNodoFinal().getID();
                break;
            }
        }
        if (nodeIndex == -1) {
            System.err.println("Index out of bounds");
            return null;
        }
        // The -1 offset applies because node IDs start at 1 while list positions start at 0.
        return nodos.elementAt(nodeIndex - 1);
    }
    /**
     * Returns the node at the far end of the edge at index {@code posicion} of
     * {@code vertex}'s edge list, or null (with an error message) when the
     * vertex is not in the graph.
     */
    public Node getVertex(Node vertex, int posicion) {
        int nodeIndex = -1;
        for (int i = 0; i < nodos.size(); i++) {
            if (vertex.getID() == nodos.elementAt(i).getID()) {
                nodeIndex = ((Arista) nodos.elementAt(i).getAristas().elementAt(posicion).getValue()).getNodoFinal().getID();
                break;
            }
        }
        if (nodeIndex == -1) {
            System.err.println("Index out of bounds");
            return null;
        }
        // The -1 offset applies because node IDs start at 1 while list positions start at 0.
        return nodos.elementAt(nodeIndex - 1);
    }
    /**
     * Appends a vertex by linking it after the current last node.
     * NOTE(review): this bypasses the list's own insertion API (setNext on the
     * last element) — confirm the custom List updates its size/last pointers.
     */
    public void addVertex(Node vertex) {
        nodos.elementAt(nodos.size() - 1).setNext(vertex);
    }
    /** Adds {@code edge} to the edge list of the node matching {@code vertex}'s ID. */
    public void addEdge(Node vertex, Arista edge) {
        for (int i = 0; i < nodos.size(); i++) {
            if (vertex.getID() == nodos.elementAt(i).getID()) {
                nodos.elementAt(i).getAristas().push_back(edge);
                break;
            }
        }
    }
    /**
     * Removes the vertex at {@code posicion} together with every edge incident
     * to it, then renumbers all remaining node IDs to their list indices.
     * NOTE(review): the renumbering assigns 0-based IDs (setID(i)), which
     * conflicts with the 1-based "ID - 1" lookups used elsewhere — verify.
     */
    public void removeVertex(int posicion) {
        int nodeIndex = -1;
        for (int i = 0; i < nodos.size(); i++) {
            if (nodos.elementAt(posicion).getID() == nodos.elementAt(i).getID()) {
                nodeIndex = i;
                break;
            }
        }
        if (nodeIndex != -1) {
            // Drop every edge that starts or ends at the removed node.
            for (int i = 0; i < nodos.size(); i++) {
                for (int j = 0; j < nodos.elementAt(i).getAristas().size(); j++) {
                    if (((Arista) nodos.elementAt(i).getAristas().elementAt(j).getValue()).getNodoFinal().getID()
                            == nodos.elementAt(nodeIndex).getID()
                            || ((Arista) nodos.elementAt(i).getAristas().elementAt(j).getValue()).getNodoInicial().getID()
                            == nodos.elementAt(nodeIndex).getID()) {
                        removeEdge(nodos.elementAt(nodeIndex), ((Arista) nodos.elementAt(i).getAristas().elementAt(j).getValue()));
                    }
                }
            }
            nodos.remove(nodeIndex);
            for (int i = 0; i < nodos.size(); i++) {
                nodos.elementAt(i).setID(i);
            }
        } else {
            System.err.println("Index out of bounds");
        }
    }
    /**
     * Removes from {@code vertex}'s node the first edge whose start AND end
     * node IDs match those of {@code edge}.
     */
    public void removeEdge(Node vertex, Arista edge) {
        for (int i = 0; i < nodos.size(); i++) {
            if (vertex.getID() == nodos.elementAt(i).getID()) {
                for (int j = 0; j < nodos.elementAt(i).getAristas().size(); j++) {
                    if (((Arista) nodos.elementAt(i).getAristas().elementAt(j).getValue()).getNodoInicial().getID()
                            == edge.getNodoInicial().getID()
                            && ((Arista) nodos.elementAt(i).getAristas().elementAt(j).getValue()).getNodoFinal().getID()
                            == edge.getNodoFinal().getID()) {
                        nodos.elementAt(i).getAristas().remove(j);
                        break;
                    }
                }
            }
        }
    }
    /**
     * Dijkstra shortest path from {@code origin}. Distances and per-node paths
     * are stored on the Node objects themselves (setDijkstraNum/setDijkstraPath);
     * the accumulated edge path of the origin is returned.
     * NOTE(review): relaxation only considers edges of the node most recently
     * made permanent, and node identity is compared with != rather than by ID —
     * confirm this matches the intended algorithm before relying on it.
     */
    public List Dijkstra(Node origin, Node destiny/*, List nodes*/) {
        List permanentes = new List();
        List notPermanents = new List();
        // Initialise every node: infinite distance, empty path; all but origin start non-permanent.
        for (int i = 0; i < nodos.size(); i++) {
            nodos.elementAt(i).setDijkstraNum(Integer.MAX_VALUE);
            nodos.elementAt(i).setDijkstraPath(new List());
            if (nodos.elementAt(i) != origin) {
                notPermanents.push_back(nodos.elementAt(i));
            }
        }
        origin.setDijkstraNum(0);
        permanentes.push_back(origin);
        while (nodos.size() != permanentes.size()) {
            // Relax the outgoing edges of the most recently fixed node.
            Node temp = permanentes.last();
            for (int i = 0; i < temp.getAristas().size(); i++) {
                for (int j = 0; j < permanentes.size(); j++) {
                    if (((Arista) temp.getAristas().elementAt(i).getValue()).getNodoFinal() != permanentes.elementAt(j)) {
                        if (((Arista) temp.getAristas().elementAt(i).getValue()).getDistancia() + temp.getDijkstraNum() < ((Arista) temp.getAristas().elementAt(i).getValue()).getNodoFinal().getDijkstraNum()) {
                            ((Arista) temp.getAristas().elementAt(i).getValue()).getNodoFinal().setDijkstraNum(((Arista) temp.getAristas().elementAt(i).getValue()).getDistancia() + temp.getDijkstraNum());
                            ((Arista) temp.getAristas().elementAt(i).getValue()).getNodoFinal().setDijkstraPath(temp.getDijkstraPath());
                            ((Arista) temp.getAristas().elementAt(i).getValue()).getNodoFinal().addToPath(((Arista) temp.getAristas().elementAt(i).getValue()));
                        }
                    }
                }
            }
            // Pick the non-permanent node with the smallest tentative distance and fix it.
            long min = Integer.MAX_VALUE;
            int delete = -1;
            for (int i = 0; i < notPermanents.size(); i++) {
                System.out.println(notPermanents.elementAt(i).getDijkstraNum());
                if (notPermanents.elementAt(i).getDijkstraNum() < min) {
                    min = notPermanents.elementAt(i).getDijkstraNum();
                    temp = notPermanents.elementAt(i);
                    delete = i;
                }
            }
            System.out.println(delete);
            notPermanents.remove(delete);
            permanentes.push_back(temp);
        }
        return origin.getDijkstraPath();
    }
    /**
     * Floyd–Warshall shortest path between {@code origin} and {@code destiny}.
     * NOTE(review): every row inserted into "ponderaciones" is the SAME List
     * instance ("rows"), so a write to one row is visible in all rows — verify
     * this aliasing is intentional before trusting the result.
     */
    public List FLoyd(Node origin, Node destiny) {
        List ruta = new List();
        List lista;
        List ponderaciones = new List();
        List rows = new List();
        Node temp = new Node();
        for (int i = 0; i < this.nodos.size(); i++) {
            rows.insert(i, temp);
        }
        for (int i = 0; i < this.nodos.size(); i++) {
            ponderaciones.insert(i, rows);
        }
        // The loops above build the (size x size) matrix.
        List caminos = ponderaciones;
        for (int i = 0; i < caminos.size(); i++) {
            for (int j = 0; j < caminos.size(); j++) {
                ((List) caminos.elementAt(i).getValue()).elementAt(j).setValue(nodos.elementAt(j).getID());
                // ((List) caminos.elementAt(i).getValue()).elementAt(i).setValue(0);
            }
        }
        ponderaciones = llenar(ponderaciones);
        lista = Camino(caminos, ponderaciones);
        // Fill the path matrix ("caminos") and the weight matrix ("ponderaciones").
        int filas = nodos.find(origin);
        int columnas = nodos.find(destiny);
        // Walk the predecessor entries backwards from destiny to origin.
        ruta.push_back(destiny);
        while(((List)caminos.elementAt(filas).getValue()).elementAt(columnas) != nodos.elementAt(columnas)){
            ruta.push_back(((List)caminos.elementAt(filas).getValue()).elementAt(columnas));
            columnas = nodos.find(((List)caminos.elementAt(filas).getValue()).elementAt(columnas));
        }
        ruta.push_back(origin);
        return ruta.flip(lista);
    }
    /**
     * Floyd–Warshall relaxation: for each intermediate node k, shorten i→j via
     * i→k→j when cheaper, recording k in the path matrix. Returns a 2-element
     * list holding the updated path matrix and weight matrix.
     */
    private List Camino(List caminos, List ponderaciones) {
        List lista = new List();
        for (int i = 0; i < caminos.size(); i++) {
            for (int j = 0; j < caminos.size(); j++) {
                for (int k = 0; k < caminos.size(); k++) {
                    if (((int) ((List) ponderaciones.elementAt(i).getValue()).elementAt(k).getValue())
                            + ((int) ((List) ponderaciones.elementAt(k).getValue()).elementAt(j).getValue())
                            < ((int) ((List) ponderaciones.elementAt(i).getValue()).elementAt(j).getValue())) {
                        ((List) ponderaciones.elementAt(i).getValue()).elementAt(j).setValue(
                                ((int) ((List) ponderaciones.elementAt(i).getValue()).elementAt(k).getValue())
                                + ((int) ((List) ponderaciones.elementAt(k).getValue()).elementAt(j).getValue()));
                        ((List) caminos.elementAt(i).getValue()).elementAt(j).setValue(k);
                    }
                }
            }
        }
        lista.push_back(caminos);
        lista.push_back(ponderaciones);
        return lista;
    }
    /**
     * Fills the weight matrix from the adjacency lists: cell (i, m) becomes the
     * edge distance when node j has an edge ending at node ID m, MAX_VALUE
     * otherwise; the diagonal is zeroed.
     * NOTE(review): the middle loop bound is hard-coded to 10 (columns 0..9),
     * not nodos.size() — confirm the graph never exceeds 10 nodes.
     */
    private List llenar(List ponderaciones) {
        /*for (int i = 0; i < ponderaciones.size(); i++) {
        for (int j = 0; j < ponderaciones.size(); j++) {
        ((List) ponderaciones.elementAt(i).getValue()).elementAt(i).setValue(0);
        for (int k = 0; k < ponderaciones.size(); k++) {
        if ((int) ((List) ponderaciones.elementAt(i).getValue()).elementAt(j).getValue() != 0
        && ((Arista) nodos.elementAt(j).getAristas().elementAt(k).getValue()).getNodoFinal().getID() == j) {
        ((List) ponderaciones.elementAt(i).getValue()).elementAt(i).setValue(((Arista) nodos.elementAt(j).getAristas().elementAt(k).getValue()).getDistancia());
        } else {
        ((List) ponderaciones.elementAt(i).getValue()).elementAt(i).setValue(Integer.MAX_VALUE);
        }
        }
        }
        }
        */
        // List row = new List();
        for (int i = 0; i < ponderaciones.size(); i++) {
            for (int m = 0; m < 10; m++) {
                for (int j = 0; j < nodos.size(); j++) {
                    for (int k = 0; k < nodos.elementAt(j).getAristas().size(); k++) {
                        ((List) ponderaciones.elementAt(i).getValue()).elementAt(i).setValue(0);
                        if (((Arista) nodos.elementAt(j).getAristas().elementAt(k).getValue()).getNodoFinal().getID() == m) {
                            ((List) ponderaciones.elementAt(i).getValue()).elementAt(m).setValue(
                                    ((Arista) nodos.elementAt(j).getAristas().elementAt(k).getValue()).getDistancia());
                        } else {
                            ((List) ponderaciones.elementAt(i).getValue()).elementAt(m).setValue(Integer.MAX_VALUE);
                        }
                    }
                }
            }
        }
        return ponderaciones;
    }
    // Unimplemented stub: currently returns an empty list.
    private List optimos() {
        List optimos = new List();
        return optimos;
    }
}
| |
package org.squiddev.plethora.gameplay.minecart;
import com.mojang.authlib.GameProfile;
import dan200.computercraft.ComputerCraft;
import dan200.computercraft.api.peripheral.IPeripheral;
import dan200.computercraft.core.computer.ComputerSide;
import dan200.computercraft.shared.computer.blocks.BlockCommandComputer;
import dan200.computercraft.shared.computer.blocks.BlockComputer;
import dan200.computercraft.shared.computer.blocks.ComputerState;
import dan200.computercraft.shared.computer.core.*;
import dan200.computercraft.shared.computer.items.ComputerItemFactory;
import dan200.computercraft.shared.computer.items.IComputerItem;
import net.minecraft.block.state.IBlockState;
import net.minecraft.client.Minecraft;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.entity.Entity;
import net.minecraft.entity.item.EntityMinecart;
import net.minecraft.entity.item.EntityMinecartEmpty;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.init.Items;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.datasync.DataParameter;
import net.minecraft.network.datasync.DataSerializers;
import net.minecraft.network.datasync.EntityDataManager;
import net.minecraft.server.MinecraftServer;
import net.minecraft.util.DamageSource;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumHand;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.util.math.MathHelper;
import net.minecraft.util.math.RayTraceResult;
import net.minecraft.util.math.Vec3d;
import net.minecraft.util.text.TextComponentTranslation;
import net.minecraft.world.World;
import net.minecraft.world.WorldServer;
import net.minecraftforge.client.ForgeHooksClient;
import net.minecraftforge.client.event.DrawBlockHighlightEvent;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.common.capabilities.Capability;
import net.minecraftforge.common.util.Constants;
import net.minecraftforge.event.entity.minecart.MinecartInteractEvent;
import net.minecraftforge.event.entity.player.PlayerEvent;
import net.minecraftforge.event.entity.player.PlayerInteractEvent;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import net.minecraftforge.items.CapabilityItemHandler;
import net.minecraftforge.items.ItemStackHandler;
import org.squiddev.plethora.api.IPlayerOwnable;
import org.squiddev.plethora.api.vehicle.IVehicleAccess;
import org.squiddev.plethora.api.vehicle.IVehicleUpgradeHandler;
import org.squiddev.plethora.gameplay.GuiHandler;
import org.squiddev.plethora.gameplay.Plethora;
import org.squiddev.plethora.utils.Helpers;
import org.squiddev.plethora.utils.PlayerHelpers;
import org.squiddev.plethora.utils.RenderHelper;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.vecmath.Matrix4f;
import javax.vecmath.Vector3f;
import javax.vecmath.Vector4f;
import static org.squiddev.plethora.api.Constants.VEHICLE_UPGRADE_HANDLER_CAPABILITY;
/**
 * A minecart entity carrying a ComputerCraft computer plus up to {@link #SLOTS}
 * vehicle upgrades. Handles server-side computer ticking and peripheral sync,
 * item-based upgrade insertion/removal via right-click, NBT persistence, and
 * the Forge event hooks used to convert an empty minecart and to render/track
 * the cart on the client.
 */
@Mod.EventBusSubscriber(modid = Plethora.ID)
public class EntityMinecartComputer extends EntityMinecart implements IPlayerOwnable {
    // Cached enum values, indexed by the byte ordinals stored in the data manager.
    private static final ComputerFamily[] FAMILIES = ComputerFamily.values();
    private static final ComputerState[] STATES = ComputerState.values();
    // Which computer side each upgrade slot is wired to.
    private static final ComputerSide[] PERIPHERAL_MAPPINGS = new ComputerSide[]{
        ComputerSide.TOP,
        ComputerSide.LEFT,
        ComputerSide.RIGHT,
        ComputerSide.BACK
    };
    // Hit boxes in "minecart space": index 0 is the computer block itself,
    // indices 1..4 are the upgrade slots (see getIntersectSlot, which returns i - 1).
    private static final AxisAlignedBB[] BOUNDS = new AxisAlignedBB[]{
        // The main block: simply there to avoid reaching "through" the block.
        new AxisAlignedBB(0, 0, -1, 1, 1, 0),
        new AxisAlignedBB(0.125, 1, -0.875, 0.875, 1.125, -0.125),
        new AxisAlignedBB(0.125, 0.125, -1.125, 0.875, 0.875, -1),
        new AxisAlignedBB(0.125, 0.125, 0, 0.875, 0.875, 0.125),
        new AxisAlignedBB(1, 0.125, -0.875, 1.125, 0.875, -0.125),
    };
    // Synced entity data: computer instance/session ids plus family and visual state ordinals.
    private static final DataParameter<Integer> INSTANCE_SLOT = EntityDataManager.createKey(EntityMinecartComputer.class, DataSerializers.VARINT);
    private static final DataParameter<Integer> SESSION_SLOT = EntityDataManager.createKey(EntityMinecartComputer.class, DataSerializers.VARINT);
    private static final DataParameter<Byte> FAMILY_SLOT = EntityDataManager.createKey(EntityMinecartComputer.class, DataSerializers.BYTE);
    private static final DataParameter<Byte> STATE_SLOT = EntityDataManager.createKey(EntityMinecartComputer.class, DataSerializers.BYTE);
    /** Number of upgrade slots on the cart. */
    private static final int SLOTS = 4;
    /** Persistent ComputerCraft computer id. */
    private int id;
    private boolean on;
    /** Set when the computer should be turned on during the next server tick. */
    private boolean startOn;
    /** Profile of the player who created this cart, if known. */
    private GameProfile profile;
    /**
     * The item handler, representing all upgrades in the minecart
     */
    final UpgradeItemHandler itemHandler = new UpgradeItemHandler(SLOTS);
    /**
     * All peripherals provided by the items in {@link #itemHandler}.
     */
    private final IPeripheral[] peripherals = new IPeripheral[SLOTS];
    /**
     * The minecart access object for each peripheral slot.
     */
    final VehicleAccess[] accesses = new VehicleAccess[SLOTS];
    /** Last instance id we attempted to create a client computer for (avoids re-creation). */
    @SideOnly(Side.CLIENT)
    private Integer lastClientId;
    public EntityMinecartComputer(World worldIn) {
        super(worldIn);
        setSize(0.98F, 0.98F);
        // Initialise the upgrades
        for (int i = 0; i < SLOTS; i++) accesses[i] = new VehicleAccess(this);
    }
    /**
     * Creates a computer cart in place of an existing empty minecart, copying
     * its position, motion and physics parameters.
     */
    public EntityMinecartComputer(EntityMinecartEmpty minecart, int id, String label, ComputerFamily family, GameProfile profile) {
        this(minecart.getEntityWorld());
        setPositionAndRotation(minecart.posX, minecart.posY, minecart.posZ, minecart.rotationYaw, minecart.rotationPitch);
        motionX = minecart.motionX;
        motionY = minecart.motionY;
        motionZ = minecart.motionZ;
        setCurrentCartSpeedCapOnRail(minecart.getCurrentCartSpeedCapOnRail());
        setMaxSpeedAirLateral(minecart.getMaxSpeedAirLateral());
        setMaxSpeedAirVertical(minecart.getMaxSpeedAirVertical());
        setDragAir(minecart.getDragAir());
        setRollingAmplitude(minecart.getRollingAmplitude());
        setRollingDirection(minecart.getRollingDirection());
        this.id = id;
        setFamily(family);
        setCustomNameTag(label == null ? "" : label);
        this.profile = profile;
    }
    @Override
    protected void entityInit() {
        super.entityInit();
        dataManager.register(INSTANCE_SLOT, -1);
        dataManager.register(SESSION_SLOT, -1);
        dataManager.register(FAMILY_SLOT, (byte) 0);
        dataManager.register(STATE_SLOT, (byte) 0);
    }
    private int getInstanceId() {
        return dataManager.get(INSTANCE_SLOT);
    }
    private int getSessionId() {
        return dataManager.get(SESSION_SLOT);
    }
    public ComputerFamily getFamily() {
        return FAMILIES[dataManager.get(FAMILY_SLOT)];
    }
    private void setFamily(ComputerFamily family) {
        dataManager.set(FAMILY_SLOT, (byte) family.ordinal());
    }
    public IVehicleAccess getAccess(int slot) {
        return accesses[slot];
    }
    private ComputerState getState() {
        return STATES[dataManager.get(STATE_SLOT)];
    }
    private void setState(ComputerState state) {
        dataManager.set(STATE_SLOT, (byte) state.ordinal());
    }
    /**
     * Server-side tick: keeps the computer alive, mirrors its label/state onto
     * the entity, and (re)creates or updates slot peripherals, syncing changed
     * slot data to all tracking players.
     */
    @Override
    public void onUpdate() {
        super.onUpdate();
        if (getEntityWorld().isRemote) return;
        ServerComputer computer = getServerComputer();
        computer.setWorld(getEntityWorld());
        computer.setPosition(getPosition());
        if (startOn) {
            startOn = false;
            computer.turnOn();
        }
        computer.keepAlive();
        String label = computer.getLabel();
        setCustomNameTag(label == null ? "" : label);
        on = computer.isOn();
        ComputerState state = ComputerState.Off;
        if (computer.isCursorDisplayed()) {
            state = ComputerState.Blinking;
        } else if (computer.isOn()) {
            state = ComputerState.On;
        }
        setState(state);
        WorldServer server = (WorldServer) getEntityWorld();
        // Bit i of stackDirty marks slot i's stack as changed since last tick.
        int stackDirty = itemHandler.getDirty();
        itemHandler.clearDirty();
        for (int slot = 0; slot < SLOTS; slot++) {
            VehicleAccess access = accesses[slot];
            boolean stackChanged = (stackDirty & (1 << slot)) != 0;
            boolean accessChanged = access.dirty;
            if (stackChanged) {
                // New stack: rebuild the peripheral for this slot from scratch.
                accesses[slot].reset();
                IVehicleUpgradeHandler upgrade = itemHandler.getUpgrade(slot);
                IPeripheral peripheral = peripherals[slot] = upgrade == null ? null : upgrade.create(accesses[slot]);
                computer.setPeripheral(PERIPHERAL_MAPPINGS[slot], peripheral);
            }
            {
                IVehicleUpgradeHandler upgrade = itemHandler.getUpgrade(slot);
                if (upgrade != null) {
                    upgrade.update(access, peripherals[slot]);
                    accessChanged |= access.dirty;
                }
            }
            access.dirty = false;
            if (stackChanged || accessChanged) {
                // Gather the appropriate data for this packet
                MessageMinecartSlot message = new MessageMinecartSlot(this, slot);
                message.setTag(access.compound);
                if (stackChanged) message.setStack(itemHandler.getStackInSlot(slot));
                // And send it to all players.
                for (EntityPlayer player : server.getEntityTracker().getTrackingPlayers(this)) {
                    Plethora.network.sendTo(message, (EntityPlayerMP) player);
                }
            }
        }
    }
    /**
     * Right-click handling: if the player targets an upgrade slot, swap items
     * in/out of it; otherwise open the computer GUI (when usable).
     */
    @Override
    public boolean processInitialInteract(@Nonnull EntityPlayer player, @Nonnull EnumHand hand) {
        if (MinecraftForge.EVENT_BUS.post(new MinecartInteractEvent(this, player, hand))) return true;
        if (!getEntityWorld().isRemote) {
            // Ray-trace the player's look vector against the slot hit boxes.
            Matrix4f trans = getTranslationMatrix(1);
            Vec3d from = new Vec3d(player.posX, player.posY + player.getEyeHeight(), player.posZ);
            Vec3d look = player.getLook(1.0f);
            double reach = 5;
            if (player instanceof EntityPlayerMP) {
                reach = player.getEntityAttribute(EntityPlayer.REACH_DISTANCE).getAttributeValue();
            }
            Vec3d to = new Vec3d(from.x + look.x * reach, from.y + look.y * reach, from.z + look.z * reach);
            int slot = getIntersectSlot(from, to, trans);
            if (slot >= 0) {
                ItemStack heldStack = player.getHeldItem(hand);
                ItemStack currentStack = itemHandler.getStackInSlot(slot);
                if (heldStack.isEmpty() && !currentStack.isEmpty()) {
                    // Empty hand on a filled slot: pop the upgrade out into the world.
                    currentStack = itemHandler.extractItem(slot, 1, false);
                    if (!player.capabilities.isCreativeMode) {
                        Helpers.spawnItemStack(getEntityWorld(), posX, posY, posZ, currentStack);
                    }
                } else if (!heldStack.isEmpty() && currentStack.isEmpty()) {
                    // Held item on an empty slot: insert one item, consuming it in survival.
                    ItemStack copy = heldStack.copy();
                    copy.setCount(1);
                    if (itemHandler.insertItem(slot, copy, false).isEmpty() && !player.capabilities.isCreativeMode) {
                        heldStack.grow(-1);
                        if (heldStack.isEmpty()) player.setHeldItem(hand, ItemStack.EMPTY);
                    }
                }
                return true;
            }
            if (isUsable(player)) {
                ServerComputer computer = getServerComputer();
                computer.turnOn();
                // computer.sendState(player); // We manually send the state as sometimes it doesn't sync correctly
                GuiHandler.openMinecart(player, player.getEntityWorld(), this);
            }
        }
        return true;
    }
    @Override
    public void writeEntityToNBT(@Nonnull NBTTagCompound tag) {
        super.writeEntityToNBT(tag);
        tag.setInteger("computerId", id);
        tag.setByte("family", (byte) getFamily().ordinal());
        tag.setBoolean("on", startOn || on);
        tag.setTag("items", itemHandler.serializeNBT());
        PlayerHelpers.writeProfile(tag, profile);
    }
    @Override
    protected void readEntityFromNBT(NBTTagCompound tag) {
        super.readEntityFromNBT(tag);
        id = tag.getInteger("computerId");
        setFamily(FAMILIES[tag.getByte("family")]);
        // A computer that was on when saved is turned back on next tick.
        startOn |= tag.getBoolean("on");
        if (tag.hasKey("items", Constants.NBT.TAG_COMPOUND)) {
            itemHandler.deserializeNBT(tag.getCompoundTag("items"));
        }
        profile = PlayerHelpers.readProfile(tag);
    }
    /**
     * Fetches this cart's server computer from the registry, creating and
     * registering a fresh one (with peripherals attached) when the stored
     * instance/session ids are stale or missing.
     */
    @Nonnull
    public ServerComputer getServerComputer() {
        final ServerComputerRegistry manager = ComputerCraft.serverComputerRegistry;
        final int sessionId = manager.getSessionID();
        int instanceId = getInstanceId();
        ServerComputer computer = null;
        if (instanceId >= 0 && getSessionId() == sessionId) computer = manager.get(instanceId);
        if (computer == null) {
            instanceId = manager.getUnusedInstanceID();
            computer = new ServerComputer(getEntityWorld(), id, getCustomNameTag(), instanceId, getFamily(), 51, 19);
            computer.setWorld(getEntityWorld());
            computer.setPosition(getPosition());
            for (int slot = 0; slot < SLOTS; slot++) {
                IVehicleUpgradeHandler upgrade = itemHandler.getUpgrade(slot);
                IPeripheral peripheral = peripherals[slot] = upgrade == null ? null : upgrade.create(accesses[slot]);
                computer.setPeripheral(PERIPHERAL_MAPPINGS[slot], peripheral);
            }
            if (getFamily() == ComputerFamily.Command) {
                computer.addAPI(new CommandAPI(this));
            }
            manager.add(instanceId, computer);
            dataManager.set(SESSION_SLOT, sessionId);
            dataManager.set(INSTANCE_SLOT, instanceId);
        }
        return computer;
    }
    @Nullable
    @SideOnly(Side.CLIENT)
    public ClientComputer getClientComputer() {
        final ClientComputerRegistry manager = ComputerCraft.clientComputerRegistry;
        int instanceId = getInstanceId();
        ClientComputer computer = null;
        if (instanceId >= 0) computer = manager.get(instanceId);
        if (computer == null && (lastClientId == null || lastClientId != instanceId)) {
            // Sometimes the computer doesn't exist, so if we haven't attempted this before, try to fetch it.
            // It is possible that the computer has been deleted on the server but not on the client yet so we
            // store the last computer ID: ensuring that we don't re-create computers.
            computer = new ClientComputer(instanceId);
            manager.add(instanceId, computer);
        }
        if (computer != null) {
            lastClientId = instanceId;
        }
        return computer;
    }
    @Nonnull
    @Override
    public Type getType() {
        return Type.RIDEABLE;
    }
    @Override
    public boolean canBeRidden() {
        return false;
    }
    /** The block state rendered inside the cart: a (command) computer matching family and on/off state. */
    @Nonnull
    @Override
    public IBlockState getDisplayTile() {
        switch (getFamily()) {
            case Advanced:
            case Normal:
            default:
                return ComputerCraft.Blocks.computer
                    .getDefaultState()
                    .withProperty(BlockComputer.Properties.ADVANCED, getFamily() == ComputerFamily.Advanced)
                    .withProperty(BlockComputer.Properties.STATE, getState());
            case Command:
                return ComputerCraft.Blocks.commandComputer
                    .getDefaultState()
                    .withProperty(BlockCommandComputer.Properties.STATE, getState());
        }
    }
    @Override
    public void killMinecart(DamageSource source) {
        setDead();
        if (getEntityWorld().getGameRules().getBoolean("doEntityDrops")) {
            // Drop the cart, the computer item, and every installed upgrade.
            entityDropItem(new ItemStack(Items.MINECART, 1), 0);
            ItemStack stack = ComputerItemFactory.create(id, getCustomNameTag(), getFamily());
            entityDropItem(stack, 0);
            for (int i = 0; i < SLOTS; i++) {
                ItemStack child = itemHandler.getStackInSlot(i);
                if (!child.isEmpty()) entityDropItem(child, 0);
            }
        }
    }
    /**
     * Whether this player may open/use the computer: range check always;
     * command computers additionally require command blocks enabled and a
     * creative, command-capable player.
     */
    public boolean isUsable(EntityPlayer player) {
        if (isDead || player.getDistanceSq(this) > 64.0D) return false;
        if (getFamily() != ComputerFamily.Command) return true;
        if (getEntityWorld().isRemote) return true;
        MinecraftServer server = player instanceof EntityPlayerMP ? ((EntityPlayerMP) player).server : null;
        if (server == null || !server.isCommandBlockEnabled()) {
            player.sendMessage(new TextComponentTranslation("advMode.notEnabled"));
            return false;
        }
        if (!player.canUseCommandBlock() || !player.capabilities.isCreativeMode) {
            player.sendMessage(new TextComponentTranslation("advMode.notAllowed"));
            return false;
        }
        return true;
    }
    @Nullable
    @Override
    @SuppressWarnings("unchecked")
    public <T> T getCapability(@Nonnull Capability<T> capability, @Nullable EnumFacing facing) {
        return capability == CapabilityItemHandler.ITEM_HANDLER_CAPABILITY ? (T) itemHandler : super.getCapability(capability, facing);
    }
    @Override
    public boolean hasCapability(@Nonnull Capability<?> capability, @Nullable EnumFacing facing) {
        return capability == CapabilityItemHandler.ITEM_HANDLER_CAPABILITY || super.hasCapability(capability, facing);
    }
    /**
     * Builds the world-space transform of the displayed computer block for the
     * given partial tick, including rail alignment, damage wobble and the
     * small per-entity render jitter offset.
     */
    private Matrix4f getTranslationMatrix(float partialTicks) {
        // Tiny bit of random offset
        long id = (long) getEntityId() * 493286711L;
        id = id * id * 4392167121L + id * 98761L;
        float ox = (((float) (id >> 16 & 7L) + 0.5F) / 8.0F - 0.5F) * 0.004F;
        float oy = (((float) (id >> 20 & 7L) + 0.5F) / 8.0F - 0.5F) * 0.004F;
        float oz = (((float) (id >> 24 & 7L) + 0.5F) / 8.0F - 0.5F) * 0.004F;
        double x = lastTickPosX + (posX - lastTickPosX) * partialTicks;
        double y = lastTickPosY + (posY - lastTickPosY) * partialTicks;
        double z = lastTickPosZ + (posZ - lastTickPosZ) * partialTicks;
        float pitch = prevRotationPitch + (rotationPitch - prevRotationPitch) * partialTicks;
        float yaw = prevRotationYaw + (rotationYaw - prevRotationYaw) * partialTicks;
        Vec3d offsetPos = getPos(x, y, z);
        if (offsetPos != null) {
            // On a rail: derive yaw/pitch from positions slightly ahead and behind on the track.
            final double offset = 0.3;
            Vec3d posOff = MinecartHelpers.getPosOffset(this, x, y, z, offset);
            Vec3d negOff = MinecartHelpers.getPosOffset(this, x, y, z, -offset);
            if (posOff == null) posOff = offsetPos;
            if (negOff == null) negOff = offsetPos;
            x = offsetPos.x;
            y = (posOff.y + negOff.y) / 2.0D;
            z = offsetPos.z;
            Vec3d invoff = negOff.add(-posOff.x, -posOff.y, -posOff.z);
            if (invoff.lengthSquared() != 0.0D) {
                invoff = invoff.normalize();
                yaw = (float) (Math.atan2(invoff.z, invoff.x) * 180.0D / Math.PI);
                pitch = (float) (Math.atan(invoff.y) * 73.0D);
            }
        }
        // Set up the translation matrix.
        // This could probably be "inlined" but it'll do.
        Matrix4f temp = new Matrix4f();
        Matrix4f trans = new Matrix4f();
        trans.setIdentity();
        temp.setIdentity();
        temp.setTranslation(new Vector3f((float) x + ox, (float) y + 0.375f + oy, (float) z + oz));
        trans.mul(temp);
        temp.setIdentity();
        temp.rotY((float) Math.toRadians(180 - yaw));
        trans.mul(temp);
        temp.setIdentity();
        temp.rotZ((float) Math.toRadians(-pitch));
        trans.mul(temp);
        // Damage wobble, matching vanilla minecart rendering.
        float amplitude = getRollingAmplitude() - partialTicks;
        float roll = getDamage() - partialTicks;
        if (roll < 0.0F) roll = 0.0F;
        if (amplitude > 0.0F) {
            temp.setIdentity();
            temp.rotX((float) Math.toRadians(MathHelper.sin(amplitude) * amplitude * roll / 10.0F * (float) getRollingDirection()));
            trans.mul(temp);
        }
        trans.setScale(0.75F);
        int offset = getDisplayTileOffset();
        temp.setIdentity();
        temp.setTranslation(new Vector3f(-0.5F, (offset - 8) / 16.0F, 0.5F));
        trans.mul(temp);
        return trans;
    }
    /**
     * Marches 100 steps along the from→to segment (in minecart space) and
     * returns the index of the first upgrade slot box hit, -1 for the main
     * block or no hit at all.
     */
    private static int getIntersectSlot(Vec3d fromVec, Vec3d toVec, Matrix4f transform) {
        Matrix4f inv = new Matrix4f();
        inv.invert(transform);
        // Convert the vectors into "minecart" space
        Vector4f to = new Vector4f((float) toVec.x, (float) toVec.y, (float) toVec.z, 1);
        inv.transform(to);
        Vector4f from = new Vector4f((float) fromVec.x, (float) fromVec.y, (float) fromVec.z, 1);
        inv.transform(from);
        Vector4f step = new Vector4f();
        step.sub(to, from);
        step.scale(1 / 100.0f);
        // Now ray-trace to find where they intersect with the bounding box.
        for (int offset = 0; offset <= 100; offset++) {
            for (int i = 0; i < BOUNDS.length; i++) {
                AxisAlignedBB bb = BOUNDS[i];
                if (bb.contains(new Vec3d(from.getX(), from.getY(), from.getZ()))) {
                    // If we got the actual block itself then pretend nothing happened.
                    return i - 1;
                }
            }
            from.add(step);
        }
        return -1;
    }
    @Nullable
    @Override
    public GameProfile getOwningProfile() {
        return profile;
    }
    /**
     * Converts an empty minecart into a computer cart when right-clicked with
     * a (command) computer item, consuming the item in survival mode.
     */
    @SubscribeEvent
    public static void onEntityInteraction(PlayerInteractEvent.EntityInteract event) {
        EntityPlayer player = event.getEntityPlayer();
        ItemStack stack = event.getItemStack();
        if (stack.isEmpty()) return;
        Item item = stack.getItem();
        if (item != Item.getItemFromBlock(ComputerCraft.Blocks.commandComputer) && item != Item.getItemFromBlock(ComputerCraft.Blocks.computer)) {
            return;
        }
        Entity target = event.getTarget();
        if (!(target instanceof EntityMinecartEmpty)) return;
        EntityMinecartEmpty minecart = (EntityMinecartEmpty) target;
        if (minecart.hasDisplayTile()) return;
        IComputerItem computerItem = (IComputerItem) item;
        int id = computerItem.getComputerID(stack);
        String label = computerItem.getLabel(stack);
        ComputerFamily family = computerItem.getFamily(stack);
        player.swingArm(event.getHand());
        if (minecart.getEntityWorld().isRemote) return;
        event.setCanceled(true);
        // Replace the empty cart with a computer cart carrying over its motion/physics.
        minecart.setDead();
        minecart.getEntityWorld().spawnEntity(new EntityMinecartComputer(minecart, id, label, family, player.getGameProfile()));
        if (!player.capabilities.isCreativeMode) {
            stack.grow(-1);
            if (stack.isEmpty()) player.setHeldItem(event.getHand(), ItemStack.EMPTY);
        }
    }
    /** Pushes all non-empty slot stacks/tags to a player who starts tracking the cart. */
    @SubscribeEvent
    public static void startTracking(PlayerEvent.StartTracking event) {
        Entity entity = event.getTarget();
        if (entity instanceof EntityMinecartComputer) {
            EntityMinecartComputer minecart = (EntityMinecartComputer) entity;
            for (int slot = 0; slot < SLOTS; slot++) {
                ItemStack stack = minecart.itemHandler.getStackInSlot(slot);
                NBTTagCompound tag = minecart.accesses[slot].compound;
                if (!stack.isEmpty() || tag != null) {
                    MessageMinecartSlot message = new MessageMinecartSlot(minecart, slot);
                    message.setStack(stack);
                    message.setTag(tag);
                    Plethora.network.sendTo(message, (EntityPlayerMP) event.getEntityPlayer());
                }
            }
        }
    }
    /** Replaces the vanilla block highlight with an outline of the hovered upgrade slot. */
    @SubscribeEvent
    @SideOnly(Side.CLIENT)
    public static void drawHighlight(DrawBlockHighlightEvent event) {
        if (event.getTarget().typeOfHit != RayTraceResult.Type.ENTITY) return;
        if (!(event.getTarget().entityHit instanceof EntityMinecartComputer)) return;
        EntityMinecartComputer minecart = (EntityMinecartComputer) event.getTarget().entityHit;
        float partialTicks = event.getPartialTicks();
        GlStateManager.pushMatrix();
        Matrix4f trans = minecart.getTranslationMatrix(partialTicks);
        Matrix4f inv = new Matrix4f();
        inv.invert(trans);
        Entity player = Minecraft.getMinecraft().getRenderViewEntity();
        Vec3d from = player.getPositionEyes(partialTicks);
        Vec3d look = player.getLook(partialTicks);
        double reach = 5;
        // NOTE(review): on the client the view entity is normally not an EntityPlayerMP,
        // so this branch looks dead and reach stays at 5 — confirm intent.
        if (player instanceof EntityPlayerMP) {
            reach = ((EntityPlayerMP) player).getEntityAttribute(EntityPlayer.REACH_DISTANCE).getAttributeValue();
        }
        Vec3d to = new Vec3d(from.x + look.x * reach, from.y + look.y * reach, from.z + look.z * reach);
        int slot = getIntersectSlot(from, to, trans);
        // Shift everything back to be relative to the player
        GlStateManager.translate(
            -(player.lastTickPosX + (player.posX - player.lastTickPosX) * partialTicks),
            -(player.lastTickPosY + (player.posY - player.lastTickPosY) * partialTicks),
            -(player.lastTickPosZ + (player.posZ - player.lastTickPosZ) * partialTicks)
        );
        ForgeHooksClient.multiplyCurrentGlMatrix(trans);
        if (slot >= 0) {
            RenderHelper.renderBoundingBox(BOUNDS[slot + 1]);
        }
        GlStateManager.popMatrix();
        event.setCanceled(true);
    }
    /**
     * Item handler that tracks per-slot dirtiness (bit per slot) and caches the
     * vehicle-upgrade capability of each stored stack. Only stacks exposing the
     * vehicle upgrade capability may be inserted.
     */
    static final class UpgradeItemHandler extends ItemStackHandler {
        private int dirty = 0;
        private final IVehicleUpgradeHandler[] handlers;
        public UpgradeItemHandler(int slots) {
            super(slots);
            // Size the cache from the requested slot count. The previous hard-coded
            // size (6) only worked because it exceeded SLOTS, and would have been an
            // out-of-bounds hazard for any larger slot count.
            handlers = new IVehicleUpgradeHandler[slots];
        }
        @Override
        protected void onContentsChanged(int slot) {
            dirty |= 1 << slot;
            ItemStack stack = getStackInSlot(slot);
            handlers[slot] = stack.isEmpty() ? null : stack.getCapability(VEHICLE_UPGRADE_HANDLER_CAPABILITY, null);
        }
        @Override
        protected void onLoad() {
            // Rebuild the capability cache after NBT deserialization.
            for (int i = 0; i < getSlots(); i++) {
                ItemStack stack = getStackInSlot(i);
                handlers[i] = stack.isEmpty() ? null : stack.getCapability(VEHICLE_UPGRADE_HANDLER_CAPABILITY, null);
            }
        }
        public IVehicleUpgradeHandler getUpgrade(int slot) {
            validateSlotIndex(slot);
            return handlers[slot];
        }
        public int getDirty() {
            return dirty;
        }
        public void clearDirty() {
            dirty = 0;
        }
        @Nonnull
        @Override
        public ItemStack insertItem(int slot, @Nonnull ItemStack stack, boolean simulate) {
            // Reject anything that isn't a vehicle upgrade.
            if (!stack.hasCapability(VEHICLE_UPGRADE_HANDLER_CAPABILITY, null)) {
                return stack;
            }
            return super.insertItem(slot, stack, simulate);
        }
        @Override
        public String toString() {
            return stacks.toString();
        }
    }
    /** Per-slot {@link IVehicleAccess}: exposes the cart and a lazily-created, dirty-tracked NBT blob. */
    static final class VehicleAccess implements IVehicleAccess {
        private final EntityMinecart minecart;
        NBTTagCompound compound;
        boolean dirty = false;
        private VehicleAccess(EntityMinecart minecart) {
            this.minecart = minecart;
        }
        @Nonnull
        @Override
        public EntityMinecart getVehicle() {
            return minecart;
        }
        @Nonnull
        @Override
        public NBTTagCompound getData() {
            // Create the compound on first use so empty data never syncs.
            NBTTagCompound tag = compound;
            if (tag == null) tag = compound = new NBTTagCompound();
            return tag;
        }
        @Override
        public void markDataDirty() {
            dirty = true;
        }
        public void reset() {
            compound = null;
            dirty = false;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. The ASF licenses this file to You
* under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. For additional information regarding
* copyright in this work, please see the NOTICE file in the top level
* directory of this distribution.
*/
package org.apache.roller.util;
import java.text.ParseException;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
import org.apache.commons.lang.StringUtils;
/**
 * General purpose date utilities.
 *
 * <p>All {@code SimpleDateFormat} factory methods return a fresh instance per
 * call because {@code SimpleDateFormat} is not thread-safe; {@link #format}
 * and {@link #parse} additionally synchronize on the formatter they are given
 * in case a caller shares one.
 */
public abstract class DateUtil {

    /** Number of milliseconds in one day. */
    public static final long millisInDay = 86400000;

    // a bunch of date format patterns
    private static final String formatDefaultDate = "dd.MM.yyyy";
    private static final String formatDefaultDateMinimal = "d.M.yy";
    private static final String formatDefaultTimestamp = "yyyy-MM-dd HH:mm:ss.SSS";
    private static final String formatFriendlyTimestamp = "dd.MM.yyyy HH:mm:ss";
    private static final String format6chars = "yyyyMM";
    private static final String format8chars = "yyyyMMdd";
    private static final String formatIso8601 = "yyyy-MM-dd'T'HH:mm:ssZ";
    private static final String formatIso8601Day = "yyyy-MM-dd";
    private static final String formatRfc822 = "EEE, d MMM yyyy HH:mm:ss Z";

    /**
     * Returns a Date set to the last possible millisecond of the day, just
     * before midnight. If a null day is passed in, a new Date is created.
     */
    public static Date getEndOfDay(Date day) {
        return getEndOfDay(day, Calendar.getInstance());
    }

    public static Date getEndOfDay(Date day, Calendar cal) {
        if (day == null) day = new Date();
        cal.setTime(day);
        cal.set(Calendar.HOUR_OF_DAY, cal.getMaximum(Calendar.HOUR_OF_DAY));
        cal.set(Calendar.MINUTE, cal.getMaximum(Calendar.MINUTE));
        cal.set(Calendar.SECOND, cal.getMaximum(Calendar.SECOND));
        cal.set(Calendar.MILLISECOND, cal.getMaximum(Calendar.MILLISECOND));
        return cal.getTime();
    }

    /**
     * Returns a Date set to the first possible millisecond of the month, just
     * after midnight (00h 00m 00s). If a null day is passed in, a new Date is
     * created.
     */
    public static Date getStartOfMonth(Date day) {
        return getStartOfMonth(day, Calendar.getInstance());
    }

    public static Date getStartOfMonth(Date day, Calendar cal) {
        if (day == null) day = new Date();
        cal.setTime(day);
        // set time to start of day
        cal.set(Calendar.HOUR_OF_DAY, cal.getMinimum(Calendar.HOUR_OF_DAY));
        cal.set(Calendar.MINUTE, cal.getMinimum(Calendar.MINUTE));
        cal.set(Calendar.SECOND, cal.getMinimum(Calendar.SECOND));
        cal.set(Calendar.MILLISECOND, cal.getMinimum(Calendar.MILLISECOND));
        // set time to first day of month
        cal.set(Calendar.DAY_OF_MONTH, 1);
        return cal.getTime();
    }

    /**
     * Returns a Date set to the last possible millisecond of the month, just
     * before midnight. If a null day is passed in, a new Date is created.
     */
    public static Date getEndOfMonth(Date day) {
        return getEndOfMonth(day, Calendar.getInstance());
    }

    public static Date getEndOfMonth(Date day, Calendar cal) {
        if (day == null) day = new Date();
        cal.setTime(day);
        // set time to end of day
        cal.set(Calendar.HOUR_OF_DAY, cal.getMaximum(Calendar.HOUR_OF_DAY));
        cal.set(Calendar.MINUTE, cal.getMaximum(Calendar.MINUTE));
        cal.set(Calendar.SECOND, cal.getMaximum(Calendar.SECOND));
        cal.set(Calendar.MILLISECOND, cal.getMaximum(Calendar.MILLISECOND));
        // jump to the first day of the next month, then back up one day,
        // which lands on the last day of the original month
        cal.set(Calendar.DAY_OF_MONTH, 1);
        cal.add(Calendar.MONTH, 1);
        cal.add(Calendar.DAY_OF_MONTH, -1);
        return cal.getTime();
    }

    /**
     * Returns a Date set to the first possible millisecond of the day, just
     * after midnight (00h 00m 00s). If a null day is passed in, a new Date is
     * created.
     */
    public static Date getStartOfDay(Date day) {
        return getStartOfDay(day, Calendar.getInstance());
    }

    /**
     * Returns a Date set to the first possible millisecond of the day, just
     * after midnight (00h 00m 00s). If a null day is passed in, a new Date is
     * created.
     */
    public static Date getStartOfDay(Date day, Calendar cal) {
        if (day == null) day = new Date();
        cal.setTime(day);
        cal.set(Calendar.HOUR_OF_DAY, cal.getMinimum(Calendar.HOUR_OF_DAY));
        cal.set(Calendar.MINUTE, cal.getMinimum(Calendar.MINUTE));
        cal.set(Calendar.SECOND, cal.getMinimum(Calendar.SECOND));
        cal.set(Calendar.MILLISECOND, cal.getMinimum(Calendar.MILLISECOND));
        return cal.getTime();
    }

    /**
     * Returns a Date set to noon (12h 00m 00s), to the closest possible
     * millisecond of the day. If a null day is passed in, a new Date is created.
     */
    public static Date getNoonOfDay(Date day, Calendar cal) {
        if (day == null) day = new Date();
        cal.setTime(day);
        cal.set(Calendar.HOUR_OF_DAY, 12);
        cal.set(Calendar.MINUTE, cal.getMinimum(Calendar.MINUTE));
        cal.set(Calendar.SECOND, cal.getMinimum(Calendar.SECOND));
        cal.set(Calendar.MILLISECOND, cal.getMinimum(Calendar.MILLISECOND));
        return cal.getTime();
    }

    /**
     * Returns a java.sql.Timestamp equal to the current time.
     **/
    public static java.sql.Timestamp now() {
        return new java.sql.Timestamp(new java.util.Date().getTime());
    }

    /**
     * Returns a string that represents the passed-in date formatted
     * according to the passed-in format. Returns an empty string
     * if the date or the format is null.
     **/
    public static String format(Date aDate, SimpleDateFormat aFormat) {
        if (aDate == null || aFormat == null) { return ""; }
        // SimpleDateFormat is not thread-safe; guard shared formatters
        synchronized (aFormat) {
            return aFormat.format(aDate);
        }
    }

    /**
     * Returns a Date using the passed-in string and format. Returns null if the string
     * is null or empty or if the format is null. The string must match the format.
     **/
    public static Date parse(String aValue, SimpleDateFormat aFormat) throws ParseException {
        if (StringUtils.isEmpty(aValue) || aFormat == null) {
            return null;
        }
        // SimpleDateFormat is not thread-safe; guard shared formatters
        synchronized (aFormat) {
            return aFormat.parse(aValue);
        }
    }

    /**
     * Returns true if endDate is after startDate or if the two dates are
     * equal. Returns false if either value is null.
     **/
    public static boolean isValidDateRange(Date startDate, Date endDate) {
        return isValidDateRange(startDate, endDate, true);
    }

    /**
     * Returns true if endDate is after startDate. Returns false if either
     * value is null. If equalOK, also returns true when the dates are equal.
     **/
    public static boolean isValidDateRange(Date startDate, Date endDate, boolean equalOK) {
        // false if either value is null
        if (startDate == null || endDate == null) { return false; }
        if (equalOK) {
            // true if they are equal
            if (startDate.equals(endDate)) { return true; }
        }
        // true if endDate after startDate
        if (endDate.after(startDate)) { return true; }
        return false;
    }

    // convenience method returns minimal date format
    public static SimpleDateFormat defaultDateFormat() {
        return DateUtil.friendlyDateFormat(true);
    }

    // convenience method returns minimal date format
    public static java.text.SimpleDateFormat minimalDateFormat() {
        return friendlyDateFormat(true);
    }

    // convenience method that returns friendly date format
    // using full month, day, year digits.
    public static SimpleDateFormat fullDateFormat() {
        return friendlyDateFormat(false);
    }

    /**
     * Returns a "friendly" date format.
     * @param minimalFormat Should the date format allow single digits.
     **/
    public static SimpleDateFormat friendlyDateFormat(boolean minimalFormat) {
        if (minimalFormat) {
            return new SimpleDateFormat(formatDefaultDateMinimal);
        }
        return new SimpleDateFormat(formatDefaultDate);
    }

    // returns full timestamp format
    public static SimpleDateFormat defaultTimestampFormat() {
        return new SimpleDateFormat(formatDefaultTimestamp);
    }

    // convenience method returns long friendly timestamp format
    public static SimpleDateFormat friendlyTimestampFormat() {
        return new SimpleDateFormat(formatFriendlyTimestamp);
    }

    // convenience method returns 8-char day stamp format (yyyyMMdd)
    public static SimpleDateFormat get8charDateFormat() {
        return new SimpleDateFormat(format8chars);
    }

    // convenience method returns 6-char month stamp format (yyyyMM)
    public static SimpleDateFormat get6charDateFormat() {
        return new SimpleDateFormat(format6chars);
    }

    // convenience method returns ISO-8601 timestamp format
    public static SimpleDateFormat getIso8601DateFormat() {
        return new SimpleDateFormat(formatIso8601);
    }

    // convenience method returns ISO-8601 day format
    public static SimpleDateFormat getIso8601DayDateFormat() {
        return new SimpleDateFormat(formatIso8601Day);
    }

    // convenience method returns RFC-822 date format
    public static SimpleDateFormat getRfc822DateFormat() {
        // http://www.w3.org/Protocols/rfc822/Overview.html#z28
        // Using Locale.US to fix ROL-725 and ROL-628
        return new SimpleDateFormat(formatRfc822, Locale.US);
    }

    // convenience method
    public static String defaultDate(Date date) {
        return format(date, defaultDateFormat());
    }

    // convenience method using minimal date format
    public static String minimalDate(Date date) {
        return format(date, DateUtil.minimalDateFormat());
    }

    public static String fullDate(Date date) {
        return format(date, DateUtil.fullDateFormat());
    }

    /**
     * Format the date using the "friendly" date format.
     */
    public static String friendlyDate(Date date, boolean minimalFormat) {
        return format(date, friendlyDateFormat(minimalFormat));
    }

    // convenience method
    public static String friendlyDate(Date date) {
        return format(date, friendlyDateFormat(true));
    }

    // convenience method
    public static String defaultTimestamp(Date date) {
        return format(date, defaultTimestampFormat());
    }

    // convenience method returns long friendly formatted timestamp
    public static String friendlyTimestamp(Date date) {
        return format(date, friendlyTimestampFormat());
    }

    // convenience method returns 8 char day stamp YYYYMMDD
    public static String format8chars(Date date) {
        return format(date, get8charDateFormat());
    }

    // convenience method returns 6 char month stamp YYYYMM
    public static String format6chars(Date date) {
        return format(date, get6charDateFormat());
    }

    // convenience method returns ISO-8601 day stamp
    public static String formatIso8601Day(Date date) {
        return format(date, getIso8601DayDateFormat());
    }

    public static String formatRfc822(Date date) {
        return format(date, getRfc822DateFormat());
    }

    /**
     * Formats a date as ISO-8601 with a colon in the zone offset.
     * SimpleDateFormat's "Z" pattern emits "+hhmm", but ISO-8601 requires
     * "+hh:mm", so a colon is inserted two characters before the end.
     */
    public static String formatIso8601(Date date) {
        if (date == null) return "";
        String str = format(date, getIso8601DateFormat());
        StringBuilder sb = new StringBuilder(str.length() + 1);
        sb.append(str, 0, str.length() - 2);
        sb.append(':');
        sb.append(str, str.length() - 2, str.length());
        return sb.toString();
    }

    public static Date parseIso8601(String value) throws Exception {
        return ISO8601DateParser.parse(value);
    }

    /**
     * Parse a weblog URL date as either 6-char (yyyyMM) or 8-char (yyyyMMdd)
     * format. Dates in the future are clamped to "today" (evaluated in the
     * given time zone and locale); any other input yields the current date.
     * NOTE(review): the string itself is parsed with the default time zone,
     * not {@code tz} — preserved as-is from the original implementation.
     */
    public static Date parseWeblogURLDateString(String dateString, TimeZone tz, Locale locale) {
        Date ret = new Date();

        if (dateString != null && StringUtils.isNumeric(dateString)) {
            SimpleDateFormat fmt = null;
            if (dateString.length() == 8) {
                fmt = DateUtil.get8charDateFormat();
            } else if (dateString.length() == 6) {
                fmt = DateUtil.get6charDateFormat();
            }
            if (fmt != null) {
                ret = fmt.parse(dateString, new ParsePosition(0));

                // make sure the requested date is not in the future
                Calendar todayCal = Calendar.getInstance(tz, locale);
                todayCal.setTime(new Date());
                Date today = todayCal.getTime();
                if (ret.after(today)) {
                    ret = today;
                }
            }
        }
        return ret;
    }
}
| |
package percolation;
import edu.princeton.cs.algs4.WeightedQuickUnionUF;
/**
 *
 * @author Oleksandr Kruk
 *
 * This class implements the Percolation model that allows one to manipulate
 * an n-by-n grid and test whether it percolates by opening sites.
 *
 * Two union-find structures are kept: {@code wqu} contains both virtual top
 * and bottom nodes and answers {@link #percolates()}; {@code wquFull} contains
 * only the virtual top and answers {@link #isFull(int, int)}. Without the
 * second structure, once the system percolates every open bottom-row site
 * would be (wrongly) reported as full via the virtual bottom ("backwash").
 */
public class Percolation {

    private static final int OPEN = 1;

    private int[][] gridState;
    // union-find with virtual top AND bottom: used for percolation queries
    private WeightedQuickUnionUF wqu;
    // union-find with virtual top only: used for fullness queries (no backwash)
    private WeightedQuickUnionUF wquFull;
    private int virtualTop;
    private int virtualBottom;
    private int gridSize;

    /**
     * Percolation model constructor that initializes the necessary data
     * structures. Produces a grid of n x n. Also initializes the union-find
     * structures; the percolation structure has two extra nodes representing
     * the virtual top and bottom, the fullness structure only the virtual top.
     *
     * @param n
     *          is the size of the grid
     */
    public Percolation(int n) {
        validatePositive(n);
        gridSize = n;
        virtualTop = n * n;
        virtualBottom = n * n + 1;
        // percolation structure: extra top and bottom nodes
        wqu = new WeightedQuickUnionUF(n * n + 2);
        // fullness structure: extra top node only, to prevent backwash
        wquFull = new WeightedQuickUnionUF(n * n + 1);
        gridState = new int[n][n];
        // link virtual top node to the whole first row
        for (int i = 0; i < n; i++) {
            wqu.union(virtualTop, i);
            wquFull.union(virtualTop, i);
        }
        // link virtual bottom node to the whole last row (percolation only)
        for (int i = n * n - 1; i >= (n * n - n); i--) {
            wqu.union(virtualBottom, i);
        }
    }

    /**
     * Test client for percolation model based on the following 5x5 grid
     * structure:
     *
     * 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 ^ ^
     * topNode bottomNode
     */
    public static void main(String[] args) {
        /* Test index conversion */
        Percolation perc = new Percolation(5);
        System.out.println("Index for 1,1: " + perc.rowColumnToIndex(1, 1));
        System.out.println("Index for 2,3: " + perc.rowColumnToIndex(2, 3));
        System.out.println("Index for 5,5: " + perc.rowColumnToIndex(5, 5));
        /* Test opening sites */
        perc.open(1, 1);
        perc.open(1, 2);
        perc.open(1, 3);
        System.out.println(perc.wqu.connected(0, 2) ? "Success" : "Failure");
        perc.open(2, 1);
        perc.open(3, 1);
        perc.open(4, 1);
        perc.open(5, 1);
        // Print grid state
        for (int i = 0; i < 5; i++) {
            for (int j = 0; j < 5; j++) {
                System.out.print("|" + perc.gridState[i][j] + "|");
            }
            System.out.println();
        }
        // Should percolate
        System.out.println(perc.percolates() ? "Percolates" : "Fail");
        Percolation cornerPercolation = new Percolation(1);
        System.out.println("Percolates: " + cornerPercolation.percolates());
    }

    /**
     * Opens a site on the grid at location i,j where i is the row and j is the
     * column The range is 1 to n for both i and j
     *
     * @param i
     *          row in the grid
     * @param j
     *          column in the grid
     */
    public void open(int i, int j) {
        validateRange(1, gridState.length, i, j);
        int index = rowColumnToIndex(i, j);
        if (!isOpen(i, j)) {
            gridState[i - 1][j - 1] = OPEN;
            // connect to any open orthogonal neighbor
            connectIfOpen(index, i, j + 1, index + 1);        // right
            connectIfOpen(index, i, j - 1, index - 1);        // left
            connectIfOpen(index, i - 1, j, index - gridSize); // top
            connectIfOpen(index, i + 1, j, index + gridSize); // bottom
        }
    }

    /**
     * Unions {@code index} with {@code neighbor} in both union-find structures
     * when the neighbor site at (i, j) lies inside the grid and is open.
     */
    private void connectIfOpen(int index, int i, int j, int neighbor) {
        if (insideGrid(i, j) && isOpen(i, j)) {
            wqu.union(index, neighbor);
            wquFull.union(index, neighbor);
        }
    }

    /**
     * Test if a site is open on the grid at location {@code i}, {@code j} where i
     * is the row and j is the column The range is {@code 1} to {@code n} for both
     * {@code i} and {@code j}
     *
     * @param i
     *          row in the grid
     * @param j
     *          column in the grid
     */
    public boolean isOpen(int i, int j) {
        validateRange(1, gridSize, i, j);
        return gridState[i - 1][j - 1] == OPEN;
    }

    /**
     * Test if a site is full on the grid at location i,j where i is the row and j
     * is the column. The range is 1 to n for both i and j.
     * Uses the top-only union-find so open bottom-row sites are not falsely
     * reported as full through the virtual bottom after percolation.
     *
     * @param i
     *          row in the grid
     * @param j
     *          column in the grid
     */
    public boolean isFull(int i, int j) {
        validateRange(1, gridSize, i, j);
        return isOpen(i, j)
                && wquFull.connected(virtualTop, rowColumnToIndex(i, j));
    }

    /**
     * Method to test if virtual top node and virtual bottom node are connected.
     * In case these are connected it means that the grid has a path between both
     * nodes and percolates
     *
     * @return boolean {@code true} if the grid percolates, {@code false}
     *         otherwise
     */
    public boolean percolates() {
        // for n == 1 top and bottom are pre-linked through the single site,
        // so the site must actually be open
        if (gridSize == 1)
            return isOpen(1, 1) && wqu.connected(virtualTop, virtualBottom);
        return wqu.connected(virtualTop, virtualBottom);
    }

    /**
     * Test if an arbitrary number of integers {@code indexes} are within the
     * specified range of {@code min} and {@code max}
     *
     * @param min
     * @param max
     * @param indexes
     *
     */
    private void validateRange(int min, int max, int... indexes) {
        for (int i : indexes) {
            if (i < min || i > max)
                throw new IndexOutOfBoundsException();
        }
    }

    /**
     * Validates if a list of integers is positive
     *
     * @param numbers
     *          arbitrary list of numbers to be validated
     *
     */
    private void validatePositive(int... numbers) {
        for (int n : numbers) {
            if (n <= 0)
                throw new IllegalArgumentException();
        }
    }

    /**
     *
     * @param i
     *          row in the grid
     * @param j
     *          column in the grid
     * @return {@code true} when {@code i, j} indexes refer to a position inside
     *         the grid, {@code false} otherwise
     */
    private boolean insideGrid(int i, int j) {
        int row = i - 1;
        int col = j - 1;
        return row >= 0 && col >= 0 && row < gridSize && col < gridSize;
    }

    /**
     * Translates two dimensional coordinates into one dimensional
     *
     * @param row
     *          on the grid
     * @param column
     *          on the grid
     * @return index in quick union array representing the {@code i, j} position
     *         on the grid
     */
    private int rowColumnToIndex(int row, int column) {
        int i = row - 1;
        int j = column - 1;
        return i * gridSize + j;
    }
}
| |
package edu.uweo.javaintro.tools;
// <pre>
// Copy this file in its entirety to a file named Vic.java
// Compile it before trying to compile any program that uses it
// This implementation uses ArrayLists for the sequences of CDs
// It also uses an initializer method to create the tableau before any Vics
import java.util.*;
import java.awt.*;
import javax.swing.*;
/**
 * Programmable CD organizer simulator. Each Vic owns a sequence of CD slots
 * and all Vics share one stack of spare CDs; a Swing frame visualizes the
 * state and every action is traced with a short pause.
 */
public class Vic
{
    private static Stack<String> theStack =
            new Stack<>();        // where spare CDs are kept
    private SlotsList itsSequence; // its slots
    private int itsPos;            // 0,1,2,...; 0 is at front
    private int itsID;             // 1 for first Vic, 2 for...

    // for initializing the stack with up to eight CDs.
    static private String[] initCDs =
        new String[]{ "LyleL", "GarthB", "Calexico", "MethenyP",
                      "FloydP", "CoreaC", "DiMeolaA", "ClarkeS"
                    };

    // Number of cds to initialize the stack; -1 means
    // initialize stack randomly.
    static private int numInitCDs = -1;

    /* QUERY METHODS */

    /** Return the current position as a String value. */
    public String getPosition()
    {   return itsID + ", " + itsPos;
    }   //======================

    /** Tell whether there is a slot at its position. */
    public boolean seesSlot()
    {   return itsPos < itsSequence.size();
    }   //======================

    /** Tell whether there is a CD in its current slot. */
    public boolean seesCD()
    {   if (! seesSlot())
            fail ("Can't see a CD where there is no slot!");
        return itsSequence.get (itsPos) != null;
    }   //======================

    /** Return the CD that is in its current slot. */
    public String getCD()
    {   if (! seesSlot())
            fail ("There is no slot to get a CD from!");
        return itsSequence.get (itsPos);
    }   //======================

    /** Tell whether the stack has any CDs available. */
    public static boolean stackHasCD()
    {   return ! theStack.isEmpty();
    }   //======================

    /* ACTION METHODS */

    /** Move forward to the next slot in the sequence. */
    public void moveOn()
    {   if (! seesSlot())
            fail ("Already at the end of the sequence!");
        itsPos++;
        trace ("moveOn to slot " + (itsPos + 1));
    }   //======================

    /** Back up to the previous slot in the sequence. */
    public void backUp()
    {   if (itsPos == 0)
            fail ("Already at the front of the sequence!");
        itsPos--;
        trace ("backUp to slot " + (itsPos + 1));
    }   //======================

    /** Move a CD from the stack to the current slot. */
    public void putCD()
    {   if (! seesCD() && stackHasCD())
            itsSequence.set (itsPos, theStack.pop());
        trace ("putCD at slot " + (itsPos + 1));
    }   //======================

    /** Move a CD from the current slot to the stack. */
    public void takeCD()
    {   if (seesCD())
        {   theStack.push (itsSequence.get (itsPos));
            itsSequence.set (itsPos, null);
        }
        trace ("takeCD at slot " + (itsPos + 1));
    }   //======================

    /** Terminate the program with an appropriate message.
        (Fixed: the original message read " (Vic #)" with the ID outside
        the parentheses.) */
    private void fail (String cause)
    {   JOptionPane.showMessageDialog (null, "STOPPING: " + cause
                + " (Vic #" + itsID + ", position = " + itsPos + ")");
        System.exit (0);
    }   //======================

    /** Two convenience methods */
    public void shiftFromSlotToStack()
    {   takeCD();
    }   //======================

    public void shiftFromStackToSlot()
    {   putCD();
    }   //======================

    /* METHODS THAT USE THE FRAME */

    private static String vicSay = "Programmable CD Organizer "
            + " mfd by Jones & Co.";
    private static final VicFrame theFrame = new VicFrame();
    //////////////////////////////////

    /** Display a message in the frame's banner. */
    public static void say (String message)
    {   vicSay = message;
        theFrame.repaint();
    }   //======================

    /** Print a trace of the Vic's action. */
    private void trace (String message)
    {   System.out.println (message + " for Vic #" + itsID);
        theFrame.repaint();
        pause (500);  // half-a-second between actions
    }   //======================

    /** Pause for the specified number of milliseconds. */
    private static void pause (int milliseconds)
    {   try
        {   Thread.sleep (milliseconds);
        }
        catch (InterruptedException e)
        {   // never happens
        }
    }   //======================

    /* THE INITIALIZER AND CONSTRUCTOR METHODS */

    private static final int MAXSLOTS = 8;
    private static final int MINSLOTS = 3;
    private static final int MAXVICS = 4;
    private static final Random random = new Random();
    private static int theMaxVics = random.nextInt (MAXVICS) + 1;
    private static SlotsList[] theSeq = new SlotsList[theMaxVics];
    private static int theNumVics = 0;
    private static final Vic[] theVics = {null, null, null, null};
    //////////////////////////////////

    /** Initialize individual sequences and stacks. An initializer method
        is used because these have to exist before any Vics are created. */
    static
    {   for (int k = 0; k < theMaxVics; k++)
        {   theSeq[k] = new SlotsList();
            int numSlots = random.nextInt (MAXSLOTS - MINSLOTS + 1)
                    + MINSLOTS;
            for (int i = 0; i < numSlots; i++)
            {   String it = random.nextInt (2) == 0 ? null
                        : "" + (char) (i + 'a') + (k + 1);
                theSeq[k].add (it);
            }
        }
        // start with up to 2 CDs on the stack
        if (random.nextInt (3) > 0)   // 2 times out of 3
        {   theStack.push ("GarthB");
            if (random.nextInt (2) == 0)   // 1 time out of 3
                theStack.push ("LyleL");
        }
    }   //======================

    /** Construct a new Vic. */
    public Vic()
    {   super();
        itsSequence = theNumVics < theMaxVics
                ? theSeq[theNumVics] : new SlotsList();
        itsPos = 0;
        itsID = theNumVics + 1;
        theVics[theNumVics] = this;
        theNumVics++;
        trace ("construction");
    }   //======================

    /** Replace random initialization of rows and stack with user-specified arrangement.
        <p>
        Parameter <tt>args</tt> is an array of up to five strings:
        A string beginning with a pound sign (#) must be followed by
        a valid integer between 0 and 8, inclusive, and indicates
        the number of CDs initially placed on the stack. A number
        greater than 8 will be forced to 8; a number less than 0,
        or an invalid integer will be ignored.
        <p>
        There may be up to four additional strings consisting of
        0 to 8 ones and zeros. The number of strings indicates the
        number of rows to be created; if there are more than four
        strings the extraneous strings will be ignored. Within each
        of the strings there may be 0 to eight ones and zeros. The
        number of characters indicates the number of slots to create.
        A zero indicates that the slot will be empty; a one indicates
        that the slot will be occupied. If a string consists of more
        than 8 characters, the extraneous characters will be ignored;
        a character other than zero or one will be treated as a one.
        <p>
        Examples:
        <blockquote>
        Create one row of four empty slots:
        <pre>    "0000"</pre>
        Create one row of four empty slots,
        and one row of seven slots, alternating between
        filled and empty:
        <pre>    "0000" "0101010"</pre>
        Create two rows of three filled slots each, and
        a stack with an initial size of four:
        <pre>    "111" "111" "#4"</pre>
        </blockquote>
        @author Jack Straub
        @param args  An array of strings that specify the
                     configuration of rows and stack.
    */
    public static void reset( String[] args )
    {
        // Separate the "#n" stack-size directive (if any) from the row specs.
        Vector<String> vec = new Vector<String>();
        for ( int inx = 0 ; inx < args.length ; ++inx )
        {
            if ( args[inx].length() < 2 || args[inx].charAt( 0 ) != '#' )
                vec.add( args[inx] );
            else
            {
                try
                {
                    int num = Integer.parseInt( args[inx].substring( 1 ) );
                    numInitCDs = num;
                }
                catch ( NumberFormatException exc )
                {
                    // if number after # is invalid, just toss the
                    // string into the vector and let life go on as
                    // it used to.
                    vec.add( args[inx] );
                }
            }
        }
        String[] newArgs = new String[vec.size()];
        vec.toArray( newArgs );
        reset1( newArgs );
    }

    /** Rebuild the slot sequences from the row specs and refill the stack. */
    private static void reset1 (String[] args)
    {   if (args.length > 0 && theNumVics == 0)
        {   theMaxVics = Math.min (args.length, MAXVICS);
            theSeq = new SlotsList[theMaxVics];
            for (int k = 0; k < theMaxVics; k++)
            {   theSeq[k] = new SlotsList();
                int longest = Math.min (args[k].length(), MAXSLOTS);
                for (int i = 0; i < longest; i++)
                {   String it = args[k].charAt (i) == '0' ? null
                            : "" + (char)(i + 'a') + (k + 1);
                    theSeq[k].add (it);
                }
            }
        }
        theStack = new Stack<String>();
        int num = numInitCDs > -1 ? numInitCDs: random.nextInt( 3 );
        if ( num > initCDs.length )
            num = initCDs.length;
        for ( int inx = 0 ; inx < num ; ++inx )
            theStack.push( initCDs[inx] );
    }   //======================

    // THE NESTED FRAME CLASS

    /** Swing window that paints the slot rows, the stick figures, and the stack. */
    static class VicFrame extends JFrame
    {
        private static final long serialVersionUID = 0x10L;
        private final int SLOT = 75;   // between CD slots
        private final int EDGE = 10;   // leave free at left side
        private final int WIDTH = (MAXSLOTS + 2) * SLOT + 2 * EDGE;
        private final int DIST = 60;   // between CD sequences
        private final int SAY = 45;    // depth of say's output
        private final int TOPSEQ = SAY + DIST;  // depth of first seq

        public VicFrame()
        {   addWindowListener (new Closer());
            setSize (WIDTH, TOPSEQ + MAXVICS * DIST + 2 * EDGE);
            setBackground (new Color (255, 252, 224)); // a nice cream
            setVisible (true);  // make it visible to user
        }   //======================

        /** Same as for an applet; called by repaint. */
        public void paint (Graphics page)
        {   // PRINT THE vicSay MESSAGE AT THE TOP
            page.setColor (getBackground());
            page.fillRect (EDGE, EDGE, WIDTH - 2 * EDGE,
                    TOPSEQ + MAXVICS * DIST);
            page.setColor (Color.white);
            page.fillRect (20, SAY - 20, WIDTH - 40, 20);
            page.setColor (new Color (0, 96, 0)); // a light green
            page.drawString (vicSay, 25, SAY - 5); // message
            // DRAW UP TO FOUR Vic SEQUENCES AND THE STACK
            for (int k = 0; k < theMaxVics; k++)
                drawSequence (page, k, TOPSEQ + k * DIST);
            page.setColor (Color.red);
            int y = TOPSEQ + MAXVICS * DIST;
            page.drawString ("stack", EDGE, y);
            page.fillRect (EDGE, y - 25, 40, 5);  // dividing line
            for (int k = 0; k < theStack.size(); k++)
                page.drawString (theStack.get (k),
                        EDGE, y - 30 - k * 20);
        }   //======================

        /** Called by VicFrame's paint method. */
        private void drawSequence (Graphics page, int index, int y)
        {   page.setColor (Color.red);
            if (theVics[index] != null)
                drawMacMan (page, theVics[index].itsPos, y - 15);
            page.setColor (Color.blue);
            drawAllCDs (page, y, theSeq[index]);
        }   //======================

        private void drawAllCDs (Graphics page, int y,
                                 SlotsList slots)
        {   int atEnd = slots.size();
            for (int n = 0; n < atEnd; n++)
            {   String it = slots.get (n);
                page.drawString (it == null ? "---" : it,
                        (n + 1) * SLOT + EDGE, y);
            }
            page.drawString ("END", (atEnd + 1) * SLOT + EDGE, y);
        }   //======================

        private void drawMacMan (Graphics page, int pos, int y)
        {   // <x, y> is the lower-left corner of the stick figure
            int x = pos * SLOT + EDGE + 78;
            page.setColor (Color.black);
            page.drawLine (x, y, x + 6, y - 6);           // leg
            page.drawLine (x + 6, y - 6, x + 12, y);      // leg
            page.drawLine (x + 6, y - 6, x + 6, y - 18);  // body
            page.drawLine (x, y - 14, x + 12, y - 14);    // arms
            page.drawOval (x + 1, y - 28, 10, 10);        // head
            page.drawLine (x + 4, y - 25, x + 5, y - 25); // eye
            page.drawLine (x + 7, y - 25, x + 8, y - 25); // eye
            page.drawLine (x + 3, y - 22, x + 9, y - 22); // mouth
        }   //======================
    } // end of VicFrame class

    /** Thin wrapper around an ArrayList that models a row of CD slots. */
    private static class SlotsList
    {
        ArrayList<String> slots = new ArrayList<String>();

        public void add( String str )
        {
            slots.add( str );
        }

        public String get( int inx )
        {
            return slots.get( inx );
        }

        public int size()
        {
            return slots.size();
        }

        public void set( int inx, String str )
        {
            slots.set( inx, str );
        }
    }

    /** Window listener that exits the JVM when the frame is closed. */
    private static class Closer extends java.awt.event.WindowAdapter
    {
        public void windowClosing (java.awt.event.WindowEvent e)
        {   System.exit (0);
        }   //======================
    }
}
// </pre>
| |
// Utils.java
//
// (c) 1999-2001 PAL Development Core Team
//
// This package may be distributed under the
// terms of the Lesser GNU General Public License (LGPL)
package jebl.util;
/**
* Provides some miscellaneous methods.
*
* @author Matthew Goode
* @version $Id: Utils.java 264 2006-03-20 17:59:22Z pepster $
*/
public class Utils {

    /**
     * Tests whether a string occurs within a set.
     *
     * @param set   the set of strings to search
     * @param query the query string
     * @return true if the query string is in the set (as determined by object equality)
     */
    public static boolean isContains(String[] set, String query) {
        for (String string : set) {
            if (query.equals(string)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Clones an array of doubles.
     *
     * @param array the array to copy
     * @return null if input is null, otherwise return complete copy.
     */
    public static double[] getCopy(double[] array) {
        if (array == null) {
            return null;
        }
        double[] copy = new double[array.length];
        System.arraycopy(array, 0, copy, 0, array.length);
        return copy;
    }

    /**
     * Calculates the total of an array.
     *
     * @param array The array to sum up
     * @return the sum of all the elements (0 for an empty array)
     */
    public static double getSum(double[] array) {
        double total = 0;
        for (double a : array) {
            total += a;
        }
        return total;
    }

    /**
     * Calculates the max of an array.
     *
     * @param array The array to check
     * @return the max of all the elements
     */
    public static double getMax(double[] array) {
        return getMax(array, 0, array.length);
    }

    /**
     * Calculates the max of a sub-range of an array.
     *
     * @param array The array to check
     * @param start the first index to check
     * @param end   the index after the last index to check
     * @return the max of the elements in [start, end); Double.NEGATIVE_INFINITY
     *         for an empty range
     */
    public static double getMax(double[] array, int start, int end) {
        double max = Double.NEGATIVE_INFINITY;
        for (int i = start; i < end; i++) {
            // Bug fix: the original read array[i + start], which double-applied
            // the offset (i already begins at start), inspecting the wrong
            // elements and potentially running past the end of the array.
            final double v = array[i];
            if (v > max) {
                max = v;
            }
        }
        return max;
    }

    /**
     * Calculates the min of an array.
     *
     * @param array The array to check
     * @return the min of all the elements; Double.POSITIVE_INFINITY for an
     *         empty array
     */
    public static double getMin(double[] array) {
        double min = Double.POSITIVE_INFINITY;
        for (double v : array) {
            if (v < min) {
                min = v;
            }
        }
        return min;
    }

    /**
     * Calculates the mean value of an array.
     *
     * @param array the values
     * @return the average (NaN for an empty array, since 0/0 is NaN)
     */
    public static double getMean(double[] array) {
        return getSum(array) / array.length;
    }

    /**
     * Clones an array of doubles from index start (inclusive) to index end (exclusive).
     *
     * @param array the source array
     * @param start first index to copy (inclusive)
     * @param end   index after the last index to copy (exclusive)
     * @return null if input is null
     */
    public static double[] getCopy(double[] array, int start, int end) {
        if (array == null) {
            return null;
        }
        double[] copy = new double[end - start];
        System.arraycopy(array, start, copy, 0, copy.length);
        return copy;
    }

    /**
     * Clones an array of doubles from index start (inclusive) to the end.
     *
     * @param array the source array
     * @param start first index to copy (inclusive)
     * @return null if input is null
     */
    public static double[] getCopy(double[] array, int start) {
        return getCopy(array, start, array.length);
    }

    /**
     * Clones an array of bytes.
     *
     * @param array the bytes to copy
     * @return null if input is null, otherwise return complete copy.
     */
    public static byte[] getCopy(byte[] array) {
        if (array == null) {
            return null;
        }
        byte[] copy = new byte[array.length];
        System.arraycopy(array, 0, copy, 0, array.length);
        return copy;
    }

    /**
     * Clones an array of Strings (shallow copy: the String references are shared).
     *
     * @param array the strings to copy
     * @return null if input is null, otherwise return complete copy.
     */
    public static String[] getCopy(String[] array) {
        if (array == null) {
            return null;
        }
        String[] copy = new String[array.length];
        System.arraycopy(array, 0, copy, 0, array.length);
        return copy;
    }

    /**
     * Clones a two-dimensional array of doubles (deep copy; rows may be ragged).
     *
     * @param array the matrix to copy
     * @return null if input is null, otherwise return complete copy.
     */
    public static double[][] getCopy(double[][] array) {
        if (array == null) {
            return null;
        }
        double[][] copy = new double[array.length][];
        for (int i = 0; i < copy.length; i++) {
            copy[i] = new double[array[i].length];
            System.arraycopy(array[i], 0, copy[i], 0, array[i].length);
        }
        return copy;
    }

    /**
     * Clones a matrix of ints (deep copy; rows may be ragged).
     *
     * @param matrix the matrix to clone
     * @return null if input is null, otherwise return complete copy.
     */
    public static int[][] getCopy(int[][] matrix) {
        if (matrix == null) {
            return null;
        }
        int[][] copy = new int[matrix.length][];
        for (int i = 0; i < copy.length; i++) {
            copy[i] = new int[matrix[i].length];
            System.arraycopy(matrix[i], 0, copy[i], 0, matrix[i].length);
        }
        return copy;
    }

    /**
     * Clones a three-dimensional array of doubles (deep copy).
     *
     * @param array the array to copy
     * @return null if input is null, otherwise return complete copy.
     */
    public static double[][][] getCopy(double[][][] array) {
        if (array == null) {
            return null;
        }
        double[][][] copy = new double[array.length][][];
        for (int i = 0; i < copy.length; i++) {
            copy[i] = getCopy(array[i]);
        }
        return copy;
    }

    /**
     * Clones a two-dimensional array of bytes (deep copy; rows may be ragged).
     *
     * @param array the array to copy
     * @return null if input is null, otherwise return complete copy.
     */
    public static byte[][] getCopy(byte[][] array) {
        if (array == null) {
            return null;
        }
        byte[][] copy = new byte[array.length][];
        for (int i = 0; i < copy.length; i++) {
            copy[i] = new byte[array[i].length];
            System.arraycopy(array[i], 0, copy[i], 0, array[i].length);
        }
        return copy;
    }

    /**
     * Clones a two-dimensional array of booleans (deep copy; rows may be ragged).
     *
     * @param array the array to copy
     * @return null if input is null, otherwise return complete copy.
     */
    public static boolean[][] getCopy(boolean[][] array) {
        if (array == null) {
            return null;
        }
        boolean[][] copy = new boolean[array.length][];
        for (int i = 0; i < copy.length; i++) {
            copy[i] = new boolean[array[i].length];
            System.arraycopy(array[i], 0, copy[i], 0, array[i].length);
        }
        return copy;
    }

    /**
     * Clones an array of ints.
     *
     * @param array the array to copy
     * @return null if input is null, otherwise return complete copy.
     */
    public static int[] getCopy(int[] array) {
        if (array == null) {
            return null;
        }
        int[] copy = new int[array.length];
        System.arraycopy(array, 0, copy, 0, array.length);
        return copy;
    }

    /**
     * Clones an array of ints from startingIndex (inclusive) to the end.
     *
     * @param array         the array to copy
     * @param startingIndex first index to copy (inclusive)
     * @return null if input is null, otherwise return copy of the tail.
     */
    public static int[] getCopy(int[] array, int startingIndex) {
        if (array == null) {
            return null;
        }
        int[] copy = new int[array.length - startingIndex];
        System.arraycopy(array, startingIndex, copy, 0, array.length - startingIndex);
        return copy;
    }

    /**
     * Copies all of source into dest - assumes dest rows to be large enough.
     *
     * @param source the matrix to copy from
     * @param dest   the matrix to copy into (mutated in place)
     */
    public static void copy(double[][] source, double[][] dest) {
        for (int i = 0; i < source.length; i++) {
            System.arraycopy(source[i], 0, dest[i], 0, source[i].length);
        }
    }

    /**
     * A simple toString method for an array of doubles.
     * No fancy formating.
     * Puts a space after each value.
     *
     * @param array  the values to render
     * @param number number of elements to process starting from first element
     */
    public static String toString(double[] array, int number) {
        StringBuilder sb = new StringBuilder(array.length * 7);
        for (int i = 0; i < number; i++) {
            sb.append(array[i]);
            sb.append(' ');
        }
        return sb.toString();
    }

    /**
     * A simple toString method for an array of objects.
     * No fancy formating.
     * Puts a space after each value.
     *
     * @param array  the values to render
     * @param number number of elements to process starting from first element
     */
    public static String toString(Object[] array, int number) {
        StringBuilder sb = new StringBuilder(array.length * 7);
        for (int i = 0; i < number; i++) {
            sb.append(array[i]);
            sb.append(' ');
        }
        return sb.toString();
    }

    /**
     * A simple toString method for an array of objects.
     * No fancy formating.
     * Puts a user defined string between each pair of values.
     *
     * @param array   the values to render
     * @param divider the separator placed between consecutive values
     */
    public static String toString(Object[] array, String divider) {
        return toString(array, divider, array.length);
    }

    /**
     * A simple toString method for an array of objects.
     * No fancy formating.
     * Puts a user defined string between each pair of values.
     *
     * @param array   the values to render
     * @param divider the separator placed between consecutive values
     * @param number  number of elements to process starting from first element
     */
    public static String toString(Object[] array, String divider, int number) {
        StringBuilder sb = new StringBuilder(array.length * 7);
        for (int i = 0; i < number; i++) {
            sb.append(array[i]);
            if (i != number - 1) {
                sb.append(divider);
            }
        }
        return sb.toString();
    }

    /**
     * A simple toString method for an array of objects.
     * No fancy formating.
     * Puts a space after each value.
     */
    public static String toString(Object[] array) {
        return toString(array, array.length);
    }

    /**
     * A simple toString method for an array of doubles.
     * No fancy formating.
     * Puts a space after each value.
     */
    public static String toString(double[] array) {
        return toString(array, array.length);
    }

    /**
     * A simple toString method for an array of ints.
     * No fancy formating.
     * Puts a space after each value.
     */
    public static String toString(int[] array) {
        return toString(array, array.length);
    }

    /**
     * A simple toString method for an array of ints.
     * No fancy formating.
     * Puts a space after each value.
     *
     * @param array  the values to render
     * @param number number of elements to process starting from first element
     */
    public static String toString(int[] array, int number) {
        StringBuilder sb = new StringBuilder(array.length * 7);
        for (int i = 0; i < number; i++) {
            sb.append(array[i]);
            sb.append(' ');
        }
        return sb.toString();
    }

    /**
     * A simple toString method for a matrix of doubles.
     * Each row is rendered as "index:values" on its own line.
     */
    public static String toString(double[][] array) {
        // StringBuilder instead of repeated String concatenation to avoid
        // O(n^2) copying for large matrices.
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < array.length; i++) {
            sb.append(i).append(':').append(toString(array[i])).append('\n');
        }
        return sb.toString();
    }

    /**
     * A simple toString method for a matrix of ints.
     * Each row is rendered as "index:values" on its own line.
     */
    public static String toString(int[][] array) {
        // StringBuilder instead of repeated String concatenation to avoid
        // O(n^2) copying for large matrices.
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < array.length; i++) {
            sb.append(i).append(':').append(toString(array[i])).append('\n');
        }
        return sb.toString();
    }

    /**
     * Finds the index of the maximum element ("argmax"). If array is zero
     * length returns -1. Ties are resolved in favour of the earliest index.
     *
     * @param array The array to examine
     * @return the index of the element with the maximum value, or -1 if empty
     */
    public static int getArgmax(int[] array) {
        if (array.length == 0) {
            return -1;
        }
        int maxValue = array[0];
        int maxIndex = 0;
        for (int i = 1; i < array.length; i++) {
            final int v = array[i];
            if (v > maxValue) {
                maxValue = v;
                maxIndex = i;
            }
        }
        return maxIndex;
    }

    /**
     * Finds the index of the maximum element ("argmax") of a double array. If
     * array is zero length returns -1. Ties are resolved in favour of the
     * earliest index.
     *
     * @param array The array to examine
     * @return the index of the element with the maximum value, or -1 if empty
     */
    public static int getArgmax(double[] array) {
        if (array.length == 0) {
            return -1;
        }
        double maxValue = array[0];
        int maxIndex = 0;
        for (int i = 1; i < array.length; i++) {
            final double v = array[i];
            if (v > maxValue) {
                maxValue = v;
                maxIndex = i;
            }
        }
        return maxIndex;
    }
}
| |
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.osgi.impl;
import com.hazelcast.logging.ILogger;
import com.hazelcast.logging.Logger;
import com.hazelcast.nio.ClassLoaderUtil;
import com.hazelcast.nio.IOUtil;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import javax.script.Bindings;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineFactory;
import javax.script.ScriptEngineManager;
import javax.script.SimpleBindings;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
/*
Imported from Apache Felix project.
http://svn.apache.org/repos/asf/felix/trunk/mishell/src/main/java/org/apache/felix/mishell/OSGiScriptEngineManager.java
*/
/**
* This class acts as a delegate for all the available ScriptEngineManagers. Unluckily, the standard did not
* define it as an interface, so we need to extend it to allow polymorphism. However, no calls to super are used.
* It wraps all available ScriptEngineManagers in the OSGi ServicePlatform into a merged ScriptEngineManager.
* <p/>
* Internally, what this class does is creating ScriptEngineManagers for each bundle
* that contains a ScriptEngineFactory and includes a META-INF/services/javax.script.ScriptEngineFactory file.
* It assumes that the file contains a list of {@link ScriptEngineFactory} classes. For each bundle, it creates a
* ScriptEngineManager, then merges them. {@link ScriptEngineFactory} objects are wrapped
* into @link OSGiScriptEngineFactory objects to deal with problems of context class loader:
* Those scripting engines that rely on the ContextClassloader for finding resources need to use this wrapper
* and the @link OSGiScriptFactory. Mainly, jruby does.
* <p/>
* Note that even if no context classloader issues arose, it would still be needed to search manually for the
* factories and either use them directly (losing the mimeType/extension/shortName mechanisms for finding engines
* or manually registering them) or still use this class, which would be smarter. In the latter case,
* it would only be needed to remove the hack that temporarily sets the context classloader to the appropriate,
* bundle-related, class loader.
* <p/>
* Caveats:
* <ul><li>
* All factories are wrapped with an {@link OSGiScriptEngineFactory}. As Engines are not wrapped,
* calls like
* <code>
* ScriptEngineManager osgiManager=new OSGiScriptEngineManager(context);<br>
* ScriptEngine engine=osgiManager.getEngineByName("ruby");
* ScriptEngineFactory factory=engine.getFactory() //this does not return the OSGiFactory wrapper
* factory.getScriptEngine(); //this might fail, as it does not use OSGiScriptEngineFactory wrapper
* </code>
* might result in unexpected errors. Future versions may wrap the ScriptEngine with a OSGiScriptEngine to solve this
* issue, but for the moment it is not needed.
* </li></ul>
*/
public class OSGiScriptEngineManager extends ScriptEngineManager {

    private static final String RHINO_SCRIPT_ENGINE_FACTORY = "com.sun.script.javascript.RhinoScriptEngineFactory";
    private static final String NASHORN_SCRIPT_ENGINE_FACTORY = "jdk.nashorn.api.scripting.NashornScriptEngineFactory";

    private final ILogger logger = Logger.getLogger(getClass());

    /** Global-scope bindings shared across all delegate managers. */
    private Bindings bindings;
    /** One entry per bundle that exposes a ScriptEngineFactory. */
    private List<ScriptEngineManagerInfo> scriptEngineManagerInfoList;
    private BundleContext context;

    public OSGiScriptEngineManager(BundleContext context) {
        this.context = context;
        bindings = new SimpleBindings();
        this.scriptEngineManagerInfoList = findManagers(context);
    }

    /**
     * Pairs a delegate {@link ScriptEngineManager} with the classloader of the
     * bundle that provided its factories, so lookups can temporarily install
     * that classloader as the thread context classloader.
     */
    private static final class ScriptEngineManagerInfo {
        private final ScriptEngineManager scriptEngineManager;
        private final ClassLoader classloader;

        private ScriptEngineManagerInfo(ScriptEngineManager scriptEngineManager, ClassLoader classloader) {
            this.scriptEngineManager = scriptEngineManager;
            this.classloader = classloader;
        }
    }

    /**
     * This method is the only one that is visible and not part of the ScriptEngineManager class.
     * Its purpose is to find new managers that weren't available before, but keeping the globalScope bindings
     * set.
     * If you want to clean the bindings you can either get a fresh instance of OSGiScriptManager or
     * set up a new bindings object via {@link #setBindings(Bindings)}.
     */
    public void reloadManagers() {
        this.scriptEngineManagerInfoList = findManagers(context);
    }

    @Override
    public Object get(String key) {
        return bindings.get(key);
    }

    @Override
    public Bindings getBindings() {
        return bindings;
    }

    /**
     * Follows the same behavior of @link javax.script.ScriptEngineManager#setBindings(Bindings)
     * This means that the same bindings are applied to all the underlying managers.
     *
     * @param bindings the new global-scope bindings
     */
    @Override
    public void setBindings(Bindings bindings) {
        this.bindings = bindings;
        for (ScriptEngineManagerInfo info : scriptEngineManagerInfoList) {
            info.scriptEngineManager.setBindings(bindings);
        }
    }

    @Override
    public ScriptEngine getEngineByExtension(String extension) {
        // Temporarily installs the bundle classloader as the thread context
        // classloader while the delegate manager does its lookup.
        for (ScriptEngineManagerInfo info : scriptEngineManagerInfoList) {
            Thread currentThread = Thread.currentThread();
            ClassLoader old = currentThread.getContextClassLoader();
            currentThread.setContextClassLoader(info.classloader);
            ScriptEngine engine;
            try {
                engine = info.scriptEngineManager.getEngineByExtension(extension);
            } finally {
                // Bug fix: restore the original context classloader even when
                // the delegate throws, so it never leaks to the calling thread.
                currentThread.setContextClassLoader(old);
            }
            if (engine != null) {
                return engine;
            }
        }
        return null;
    }

    @Override
    public ScriptEngine getEngineByMimeType(String mimeType) {
        // Temporarily installs the bundle classloader as the thread context
        // classloader while the delegate manager does its lookup.
        for (ScriptEngineManagerInfo info : scriptEngineManagerInfoList) {
            Thread currentThread = Thread.currentThread();
            ClassLoader old = currentThread.getContextClassLoader();
            currentThread.setContextClassLoader(info.classloader);
            ScriptEngine engine;
            try {
                engine = info.scriptEngineManager.getEngineByMimeType(mimeType);
            } finally {
                // Bug fix: restore the original context classloader even when
                // the delegate throws, so it never leaks to the calling thread.
                currentThread.setContextClassLoader(old);
            }
            if (engine != null) {
                return engine;
            }
        }
        return null;
    }

    @Override
    public ScriptEngine getEngineByName(String shortName) {
        // Temporarily installs the bundle classloader as the thread context
        // classloader while the delegate manager does its lookup.
        for (ScriptEngineManagerInfo info : scriptEngineManagerInfoList) {
            Thread currentThread = Thread.currentThread();
            ClassLoader old = currentThread.getContextClassLoader();
            ClassLoader contextClassLoader = info.classloader;
            currentThread.setContextClassLoader(contextClassLoader);
            ScriptEngine engine;
            try {
                engine = info.scriptEngineManager.getEngineByName(shortName);
            } finally {
                // Bug fix: restore the original context classloader even when
                // the delegate throws, so it never leaks to the calling thread.
                currentThread.setContextClassLoader(old);
            }
            if (engine != null) {
                // Wrap engine and factory so later factory calls keep using the
                // bundle classloader (see class javadoc).
                OSGiScriptEngineFactory factory = new OSGiScriptEngineFactory(engine.getFactory(), contextClassLoader);
                return new OSGiScriptEngine(engine, factory);
            }
        }
        return null;
    }

    @Override
    public List<ScriptEngineFactory> getEngineFactories() {
        List<ScriptEngineFactory> osgiFactories = new ArrayList<ScriptEngineFactory>();
        for (ScriptEngineManagerInfo info : scriptEngineManagerInfoList) {
            for (ScriptEngineFactory factory : info.scriptEngineManager.getEngineFactories()) {
                OSGiScriptEngineFactory scriptEngineFactory = new OSGiScriptEngineFactory(factory, info.classloader);
                osgiFactories.add(scriptEngineFactory);
            }
        }
        return osgiFactories;
    }

    @Override
    public void put(String key, Object value) {
        bindings.put(key, value);
    }

    @Override
    public void registerEngineExtension(String extension, ScriptEngineFactory factory) {
        for (ScriptEngineManagerInfo info : scriptEngineManagerInfoList) {
            info.scriptEngineManager.registerEngineExtension(extension, factory);
        }
    }

    @Override
    public void registerEngineMimeType(String type, ScriptEngineFactory factory) {
        for (ScriptEngineManagerInfo info : scriptEngineManagerInfoList) {
            info.scriptEngineManager.registerEngineMimeType(type, factory);
        }
    }

    @Override
    public void registerEngineName(String name, ScriptEngineFactory factory) {
        for (ScriptEngineManagerInfo info : scriptEngineManagerInfoList) {
            info.scriptEngineManager.registerEngineName(name, factory);
        }
    }

    /**
     * Builds one ScriptEngineManager per factory candidate whose class (and
     * thus classloader) can be loaded. Candidates that fail to load are
     * skipped with a warning rather than aborting the whole scan.
     */
    private List<ScriptEngineManagerInfo> findManagers(BundleContext context) {
        List<ScriptEngineManagerInfo> scriptEngineManagerInfos = new ArrayList<ScriptEngineManagerInfo>();
        try {
            for (String factoryName : findFactoryCandidates(context)) {
                ClassLoader factoryClassLoader = loadScriptEngineFactoryClassLoader(factoryName);
                if (factoryClassLoader == null) {
                    continue;
                }
                ScriptEngineManagerInfo scriptEngineManagerInfo =
                        createScriptEngineManagerInfo(factoryName, factoryClassLoader);
                if (scriptEngineManagerInfo != null) {
                    scriptEngineManagerInfos.add(scriptEngineManagerInfo);
                }
            }
            return scriptEngineManagerInfos;
        } catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }
    }

    /**
     * Resolves the classloader that defines the given factory class, or null
     * if the class is not available in this environment.
     */
    private ClassLoader loadScriptEngineFactoryClassLoader(String factoryName) {
        // We do not really need the class, but we need the classloader
        try {
            return ClassLoaderUtil.tryLoadClass(factoryName).getClassLoader();
        } catch (ClassNotFoundException cnfe) {
            // may fail if script implementation is not in environment
            logger.warning("Found ScriptEngineFactory candidate for "
                    + factoryName + ", but cannot load class! -> " + cnfe);
            if (logger.isFinestEnabled()) {
                logger.finest(cnfe);
            }
            return null;
        }
    }

    /**
     * Creates a ScriptEngineManager over the given classloader and applies the
     * shared global bindings, or returns null if construction fails.
     */
    private ScriptEngineManagerInfo createScriptEngineManagerInfo(String factoryName, ClassLoader factoryLoader) {
        try {
            ScriptEngineManager manager = new ScriptEngineManager(factoryLoader);
            manager.setBindings(bindings);
            return new ScriptEngineManagerInfo(manager, factoryLoader);
        } catch (Exception e) {
            // May fail if script implementation is not in environment
            logger.warning("Found ScriptEngineFactory candidate for " + factoryName
                    + ", but could not load ScriptEngineManager! -> " + e);
            if (logger.isFinestEnabled()) {
                logger.finest(e);
            }
            return null;
        }
    }

    /**
     * Iterates through all bundles to get the available {@link ScriptEngineFactory} classes
     * declared in META-INF/services/javax.script.ScriptEngineFactory entries.
     *
     * @return the names of the available ScriptEngineFactory classes
     * @throws IOException if a service file cannot be read
     */
    private List<String> findFactoryCandidates(BundleContext context) throws IOException {
        Bundle[] bundles = context.getBundles();
        List<String> factoryCandidates = new ArrayList<String>();
        for (Bundle bundle : bundles) {
            if (bundle == null) {
                continue;
            }
            // The system bundle never hosts script engine factories.
            if ("system.bundle".equals(bundle.getSymbolicName())) {
                continue;
            }
            Enumeration urls = bundle.findEntries("META-INF/services", "javax.script.ScriptEngineFactory", false);
            if (urls == null) {
                continue;
            }
            while (urls.hasMoreElements()) {
                URL u = (URL) urls.nextElement();
                BufferedReader reader = null;
                try {
                    reader = new BufferedReader(
                            new InputStreamReader(u.openStream(), "UTF-8"));
                    String line;
                    while ((line = reader.readLine()) != null) {
                        // Standard services-file format: one class per line,
                        // '#' starts a comment.
                        line = line.trim();
                        if (!line.startsWith("#") && line.length() > 0) {
                            factoryCandidates.add(line);
                        }
                    }
                } finally {
                    IOUtil.closeResource(reader);
                }
            }
        }
        // Add java built in JavaScript ScriptEngineFactory's
        addJavaScriptEngine(factoryCandidates);
        return factoryCandidates;
    }

    /**
     * Adds the JDK built-in JavaScript engine into the given list of scripting engine factories.
     *
     * @param factoryCandidates List of scripting engine factories
     */
    private void addJavaScriptEngine(List<String> factoryCandidates) {
        // Add default script engine manager
        factoryCandidates.add(OSGiScriptEngineFactory.class.getName());
        // Rhino is available in java < 8, Nashorn is available in java >= 8
        if (ClassLoaderUtil.isClassDefined(RHINO_SCRIPT_ENGINE_FACTORY)) {
            factoryCandidates.add(RHINO_SCRIPT_ENGINE_FACTORY);
        } else if (ClassLoaderUtil.isClassDefined(NASHORN_SCRIPT_ENGINE_FACTORY)) {
            factoryCandidates.add(NASHORN_SCRIPT_ENGINE_FACTORY);
        } else {
            logger.warning("No built-in JavaScript ScriptEngineFactory found.");
        }
    }

    /**
     * Renders a human-readable list of all engines found by the delegates.
     */
    public String printScriptEngines() {
        StringBuilder msg = new StringBuilder("Available script engines are:\n");
        for (ScriptEngineFactory scriptEngineFactory : getEngineFactories()) {
            msg.append("\t- ").append(scriptEngineFactory.getEngineName()).append('\n');
        }
        return msg.toString();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version
* 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package org.apache.storm.utils;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import org.apache.storm.shade.org.apache.commons.io.FileUtils;
public class VersionedStore {
    private static final String FINISHED_VERSION_SUFFIX = ".version";

    /** Root directory under which version data directories and token files live. */
    private String _root;

    /**
     * Creates a store at the given path.
     *
     * @param path          the root path for the store
     * @param createRootDir option to create the path directory
     * @throws IOException if the root directory cannot be created
     */
    public VersionedStore(String path, boolean createRootDir) throws IOException {
        _root = path;
        if (createRootDir) {
            mkdirs(_root);
        }
    }

    public String getRoot() {
        return _root;
    }

    /** Returns the absolute path of the data directory for the given version. */
    public String versionPath(long version) {
        return new File(_root, "" + version).getAbsolutePath();
    }

    /** Returns the path of the newest successful version, or null if none exist. */
    public String mostRecentVersionPath() throws IOException {
        Long v = mostRecentVersion();
        if (v == null) {
            return null;
        }
        return versionPath(v);
    }

    /** Returns the path of the newest successful version &lt;= maxVersion, or null. */
    public String mostRecentVersionPath(long maxVersion) throws IOException {
        Long v = mostRecentVersion(maxVersion);
        if (v == null) {
            return null;
        }
        return versionPath(v);
    }

    /** Returns the newest successful version number, or null if none exist. */
    public Long mostRecentVersion() throws IOException {
        List<Long> all = getAllVersions();
        if (all.size() == 0) {
            return null;
        }
        // getAllVersions() is sorted most recent first.
        return all.get(0);
    }

    /** Returns the newest successful version number &lt;= maxVersion, or null. */
    public Long mostRecentVersion(long maxVersion) throws IOException {
        List<Long> all = getAllVersions();
        for (Long v : all) {
            if (v <= maxVersion) {
                return v;
            }
        }
        return null;
    }

    /**
     * Allocates a new version path using the current time as the version
     * number, bumping past the most recent version if the clock has not
     * advanced beyond it.
     */
    public String createVersion() throws IOException {
        Long mostRecent = mostRecentVersion();
        long version = Time.currentTimeMillis();
        if (mostRecent != null && version <= mostRecent) {
            version = mostRecent + 1;
        }
        return createVersion(version);
    }

    /**
     * Returns the path for the given version, failing if that version (or its
     * completion token) already exists.
     */
    public String createVersion(long version) throws IOException {
        String ret = versionPath(version);
        if (getAllVersions().contains(version)) {
            throw new RuntimeException("Version already exists or data already exists");
        } else {
            return ret;
        }
    }

    /** Discards an in-progress version by deleting its data and token. */
    public void failVersion(String path) throws IOException {
        deleteVersion(validateAndGetVersion(path));
    }

    /** Deletes the token file and data directory of the given version, if present. */
    public void deleteVersion(long version) throws IOException {
        File versionFile = new File(versionPath(version));
        File tokenFile = new File(tokenPath(version));
        // Delete the token first so a partially deleted version is never
        // considered successful by getAllVersions().
        if (tokenFile.exists()) {
            FileUtils.forceDelete(tokenFile);
        }
        if (versionFile.exists()) {
            FileUtils.forceDelete(versionFile);
        }
    }

    /** Marks the version at the given path as complete by creating its token file. */
    public void succeedVersion(String path) throws IOException {
        long version = validateAndGetVersion(path);
        // should rewrite this to do a file move
        createNewFile(tokenPath(version));
    }

    /** Deletes everything that is not a known successful version. */
    public void cleanup() throws IOException {
        cleanup(-1);
    }

    /**
     * Deletes all versions except the newest versionsToKeep successful ones;
     * a negative versionsToKeep keeps every successful version but still
     * removes stray entries that are not successful versions.
     */
    public void cleanup(int versionsToKeep) throws IOException {
        List<Long> versions = getAllVersions();
        if (versionsToKeep >= 0) {
            versions = versions.subList(0, Math.min(versions.size(), versionsToKeep));
        }
        HashSet<Long> keepers = new HashSet<Long>(versions);
        for (String p : listDir(_root)) {
            Long v = parseVersion(p);
            if (v != null && !keepers.contains(v)) {
                deleteVersion(v);
            }
        }
    }

    /**
     * Returns all successful versions (token file present and data directory
     * exists), sorted from most recent to oldest.
     */
    public List<Long> getAllVersions() throws IOException {
        List<Long> ret = new ArrayList<Long>();
        for (String s : listDir(_root)) {
            if (s.endsWith(FINISHED_VERSION_SUFFIX) && new File(s.substring(0, s.length() - FINISHED_VERSION_SUFFIX.length())).exists()) {
                ret.add(validateAndGetVersion(s));
            }
        }
        Collections.sort(ret);
        Collections.reverse(ret);
        return ret;
    }

    /** Returns the absolute path of the completion-token file for a version. */
    private String tokenPath(long version) {
        return new File(_root, "" + version + FINISHED_VERSION_SUFFIX).getAbsolutePath();
    }

    /** Parses the version from a path, throwing if it is not a valid version. */
    private long validateAndGetVersion(String path) {
        Long v = parseVersion(path);
        if (v == null) {
            throw new RuntimeException(path + " is not a valid version");
        }
        return v;
    }

    /**
     * Parses the version number from a data-directory or token-file path,
     * returning null if the file name is not numeric.
     */
    private Long parseVersion(String path) {
        String name = new File(path).getName();
        if (name.endsWith(FINISHED_VERSION_SUFFIX)) {
            name = name.substring(0, name.length() - FINISHED_VERSION_SUFFIX.length());
        }
        try {
            return Long.parseLong(name);
        } catch (NumberFormatException e) {
            return null;
        }
    }

    /** Creates an empty file, failing loudly if it could not be created. */
    private void createNewFile(String path) throws IOException {
        File file = new File(path);
        // Bug fix: the original ignored the boolean result, silently losing
        // creation failures. createNewFile() returns false only when the file
        // already exists, which we tolerate.
        if (!file.createNewFile() && !file.exists()) {
            throw new IOException("Failed to create file " + path);
        }
    }

    /** Creates a directory (and parents), failing loudly if it could not be created. */
    private void mkdirs(String path) throws IOException {
        File dir = new File(path);
        // Bug fix: the original ignored the boolean result. mkdirs() returns
        // false when the directory already exists, which we tolerate.
        if (!dir.mkdirs() && !dir.isDirectory()) {
            throw new IOException("Failed to create directory " + path);
        }
    }

    /** Lists the absolute paths of the entries in a directory (empty if unreadable). */
    private List<String> listDir(String dir) throws IOException {
        List<String> ret = new ArrayList<String>();
        File[] contents = new File(dir).listFiles();
        if (contents != null) {
            for (File f : contents) {
                ret.add(f.getAbsolutePath());
            }
        }
        return ret;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.recovery;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ha.HAServiceProtocol;
import org.apache.hadoop.ha.HAServiceProtocol.StateChangeRequestInfo;
import org.apache.hadoop.service.Service;
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.HAUtil;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.ClientRMService;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.RMStateVersion;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.impl.pb.RMStateVersionPBImpl;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;
import org.junit.Test;
/**
 * Tests ZKRMStateStore against the ZooKeeper test cluster provided by the
 * base class, plus an RM fencing test using two HA ResourceManagers that
 * share the same ZK-backed state store.
 */
public class TestZKRMStateStore extends RMStateStoreTestBase {

  public static final Log LOG = LogFactory.getLog(TestZKRMStateStore.class);

  /**
   * RMStateStoreHelper implementation that wires a ZKRMStateStore to the
   * test ZooKeeper client and exposes znode paths for verification.
   */
  class TestZKRMStateStoreTester implements RMStateStoreHelper {
    ZooKeeper client;
    TestZKRMStateStoreInternal store;
    String workingZnode;

    /**
     * ZKRMStateStore subclass that reuses the test ZooKeeper client and
     * exposes internal znode paths (version node, per-app node) to the test.
     */
    class TestZKRMStateStoreInternal extends ZKRMStateStore {

      public TestZKRMStateStoreInternal(Configuration conf, String workingZnode)
          throws Exception {
        init(conf);
        start();
        // Sanity check: the store picked up the configured parent path.
        assertTrue(znodeWorkingPath.equals(workingZnode));
      }

      @Override
      public ZooKeeper getNewZooKeeper() throws IOException {
        // Hand back the externally created test client instead of opening a
        // fresh connection.
        return client;
      }

      /** Path of the znode holding the store's persisted version. */
      public String getVersionNode() {
        return znodeWorkingPath + "/" + ROOT_ZNODE_NAME + "/" + VERSION_NODE;
      }

      /** Version the store code itself is at. */
      public RMStateVersion getCurrentVersion() {
        return CURRENT_VERSION_INFO;
      }

      /** Path of the znode for the given application id. */
      public String getAppNode(String appId) {
        return workingZnode + "/" + ROOT_ZNODE_NAME + "/" + RM_APP_ROOT + "/"
            + appId;
      }
    }

    /** Builds a store rooted at /Test against the test ZK ensemble. */
    public RMStateStore getRMStateStore() throws Exception {
      YarnConfiguration conf = new YarnConfiguration();
      workingZnode = "/Test";
      conf.set(YarnConfiguration.RM_ZK_ADDRESS, hostPort);
      conf.set(YarnConfiguration.ZK_RM_STATE_STORE_PARENT_PATH, workingZnode);
      this.client = createClient();
      this.store = new TestZKRMStateStoreInternal(conf, workingZnode);
      return this.store;
    }

    @Override
    public boolean isFinalStateValid() throws Exception {
      // After a full cleanup only the single root child should remain under
      // the working path.
      List<String> nodes = client.getChildren(store.znodeWorkingPath, false);
      return nodes.size() == 1;
    }

    @Override
    public void writeVersion(RMStateVersion version) throws Exception {
      // Overwrite the stored version bytes directly (version -1 = any).
      client.setData(store.getVersionNode(), ((RMStateVersionPBImpl) version)
          .getProto().toByteArray(), -1);
    }

    @Override
    public RMStateVersion getCurrentVersion() throws Exception {
      return store.getCurrentVersion();
    }

    /** True if a znode exists for the given application. */
    public boolean appExists(RMApp app) throws Exception {
      Stat node =
          client.exists(store.getAppNode(app.getApplicationId().toString()),
              false);
      return node !=null;
    }
  }

  // Runs the shared base-class scenarios (app state, delegation tokens,
  // version checking, app deletion) against a real ZooKeeper-backed store.
  @Test (timeout = 60000)
  public void testZKRMStateStoreRealZK() throws Exception {
    TestZKRMStateStoreTester zkTester = new TestZKRMStateStoreTester();
    testRMAppStateStore(zkTester);
    testRMDTSecretManagerStateStore(zkTester);
    testCheckVersion(zkTester);
    testAppDeletion(zkTester);
  }

  /**
   * Builds an HA-enabled RM configuration with recovery through
   * ZKRMStateStore, ephemeral RPC ports, and the given admin port.
   */
  private Configuration createHARMConf(
      String rmIds, String rmId, int adminPort) {
    Configuration conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
    conf.set(YarnConfiguration.RM_HA_IDS, rmIds);
    conf.setBoolean(YarnConfiguration.RECOVERY_ENABLED, true);
    conf.set(YarnConfiguration.RM_STORE, ZKRMStateStore.class.getName());
    conf.set(YarnConfiguration.RM_ZK_ADDRESS, hostPort);
    conf.set(YarnConfiguration.RM_HA_ID, rmId);
    // Port 0 lets each RM service bind an ephemeral port so both RMs can
    // run on the same host.
    for (String rpcAddress : YarnConfiguration.RM_SERVICES_ADDRESS_CONF_KEYS) {
      for (String id : HAUtil.getRMHAIds(conf)) {
        conf.set(HAUtil.addSuffix(rpcAddress, id), "localhost:0");
      }
    }
    conf.set(HAUtil.addSuffix(YarnConfiguration.RM_ADMIN_ADDRESS, rmId),
        "localhost:" + adminPort);
    return conf;
  }

  @SuppressWarnings("unchecked")
  @Test
  public void testFencing() throws Exception {
    StateChangeRequestInfo req = new StateChangeRequestInfo(
        HAServiceProtocol.RequestSource.REQUEST_BY_USER);

    // Start rm1 and transition it to active.
    Configuration conf1 = createHARMConf("rm1,rm2", "rm1", 1234);
    ResourceManager rm1 = new ResourceManager();
    rm1.init(conf1);
    rm1.start();
    rm1.getRMContext().getRMAdminService().transitionToActive(req);
    assertEquals("RM with ZKStore didn't start",
        Service.STATE.STARTED, rm1.getServiceState());
    assertEquals("RM should be Active",
        HAServiceProtocol.HAServiceState.ACTIVE,
        rm1.getRMContext().getRMAdminService().getServiceStatus().getState());

    // Start rm2 and transition it to active as well, which should fence rm1
    // off the shared ZK state store.
    Configuration conf2 = createHARMConf("rm1,rm2", "rm2", 5678);
    ResourceManager rm2 = new ResourceManager();
    rm2.init(conf2);
    rm2.start();
    rm2.getRMContext().getRMAdminService().transitionToActive(req);
    assertEquals("RM with ZKStore didn't start",
        Service.STATE.STARTED, rm2.getServiceState());
    assertEquals("RM should be Active",
        HAServiceProtocol.HAServiceState.ACTIVE,
        rm2.getRMContext().getRMAdminService().getServiceStatus().getState());

    // Submitting an application to RM1 to trigger a state store operation.
    // RM1 should realize that it got fenced and is not the Active RM anymore.
    Map mockMap = mock(Map.class);
    ApplicationSubmissionContext asc =
        ApplicationSubmissionContext.newInstance(
            ApplicationId.newInstance(1000, 1),
            "testApplication", // app Name
            "default", // queue name
            Priority.newInstance(0),
            ContainerLaunchContext.newInstance(mockMap, mockMap,
                new ArrayList<String>(), mockMap, mock(ByteBuffer.class),
                mockMap),
            false, // unmanaged AM
            true, // cancelTokens
            1, // max app attempts
            Resource.newInstance(1024, 1));
    ClientRMService rmService = rm1.getClientRMService();
    rmService.submitApplication(SubmitApplicationRequest.newInstance(asc));

    // Wait up to ~3 seconds (30 x 100ms) for rm1 to leave the ACTIVE state.
    for (int i = 0; i < 30; i++) {
      if (HAServiceProtocol.HAServiceState.ACTIVE ==
          rm1.getRMContext().getRMAdminService().getServiceStatus().getState()) {
        Thread.sleep(100);
      }
    }
    assertEquals("RM should have been fenced",
        HAServiceProtocol.HAServiceState.STANDBY,
        rm1.getRMContext().getRMAdminService().getServiceStatus().getState());
    assertEquals("RM should be Active",
        HAServiceProtocol.HAServiceState.ACTIVE,
        rm2.getRMContext().getRMAdminService().getServiceStatus().getState());
  }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2018 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.www;
import java.io.IOException;
import java.io.PrintWriter;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.google.common.annotations.VisibleForTesting;
import org.owasp.encoder.Encode;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.Job;
import org.pentaho.di.www.cache.CarteStatusCache;
public class RemoveJobServlet extends BaseHttpServlet implements CartePluginInterface {
private static Class<?> PKG = RemoveJobServlet.class; // for i18n purposes, needed by Translator2!!
private static final long serialVersionUID = -2051906998698124039L;
public static final String CONTEXT_PATH = "/kettle/removeJob";
@VisibleForTesting
private CarteStatusCache cache = CarteStatusCache.getInstance();
public RemoveJobServlet() {
}
public RemoveJobServlet( JobMap jobMap ) {
super( jobMap );
}
/**
<div id="mindtouch">
<h1>/kettle/removeJob</h1>
<a name="GET"></a>
<h2>GET</h2>
<p>Remove specified job from Carte server.</p>
<p><b>Example Request:</b><br />
<pre function="syntax.xml">
GET /kettle/removeJob/?name=dummy_job&xml=Y
</pre>
</p>
<h3>Parameters</h3>
<table class="pentaho-table">
<tbody>
<tr>
<th>name</th>
<th>description</th>
<th>type</th>
</tr>
<tr>
<td>name</td>
<td>Name of the job to be removed.</td>
<td>query</td>
</tr>
<tr>
<td>xml</td>
<td>Boolean flag which sets the output format required. Use <code>Y</code> to receive XML response.</td>
<td>boolean, optional</td>
</tr>
<tr>
<td>id</td>
<td>Carte job ID of the job to be removed. This parameter is optional when xml=Y is used.</td>
<td>query, optional</td>
</tr>
</tbody>
</table>
<h3>Response Body</h3>
<table class="pentaho-table">
<tbody>
<tr>
<td align="right">text:</td>
<td>HTML</td>
</tr>
<tr>
<td align="right">media types:</td>
<td>text/xml, text/html</td>
</tr>
</tbody>
</table>
<p>Response XML or HTML containing operation result. When using xml=Y <code>result</code> field indicates whether
operation was successful (<code>OK</code>) or not (<code>ERROR</code>).</p>
<p><b>Example Response:</b></p>
<pre function="syntax.xml">
<?xml version="1.0" encoding="UTF-8"?>
<webresult>
<result>OK</result>
<message/>
<id/>
</webresult>
</pre>
<h3>Status Codes</h3>
<table class="pentaho-table">
<tbody>
<tr>
<th>code</th>
<th>description</th>
</tr>
<tr>
<td>200</td>
<td>Request was processed.</td>
</tr>
<tr>
<td>500</td>
<td>Internal server error occurs during request processing.</td>
</tr>
</tbody>
</table>
</div>
*/
public void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException,
IOException {
if ( isJettyMode() && !request.getContextPath().startsWith( CONTEXT_PATH ) ) {
return;
}
if ( log.isDebug() ) {
logDebug( BaseMessages.getString( PKG, "RemoveJobServlet.Log.RemoveJobRequested" ) );
}
String jobName = request.getParameter( "name" );
String id = request.getParameter( "id" );
boolean useXML = "Y".equalsIgnoreCase( request.getParameter( "xml" ) );
response.setStatus( HttpServletResponse.SC_OK );
if ( useXML ) {
response.setContentType( "text/xml" );
response.setCharacterEncoding( Const.XML_ENCODING );
} else {
response.setContentType( "text/html;charset=UTF-8" );
}
PrintWriter out = response.getWriter();
// ID is optional...
//
Job job;
CarteObjectEntry entry;
if ( Utils.isEmpty( id ) ) {
// get the first transformation that matches...
//
entry = getJobMap().getFirstCarteObjectEntry( jobName );
if ( entry == null ) {
job = null;
} else {
id = entry.getId();
job = getJobMap().getJob( entry );
}
} else {
// Take the ID into account!
//
entry = new CarteObjectEntry( jobName, id );
job = getJobMap().getJob( entry );
}
if ( job != null ) {
cache.remove( job.getLogChannelId() );
KettleLogStore.discardLines( job.getLogChannelId(), true );
getJobMap().removeJob( entry );
if ( useXML ) {
response.setContentType( "text/xml" );
response.setCharacterEncoding( Const.XML_ENCODING );
out.print( XMLHandler.getXMLHeader( Const.XML_ENCODING ) );
out.print( WebResult.OK.getXML() );
} else {
response.setContentType( "text/html;charset=UTF-8" );
out.println( "<HTML>" );
out.println( "<HEAD>" );
out.println( "<TITLE>" + BaseMessages.getString( PKG, "RemoveJobServlet.JobRemoved" ) + "</TITLE>" );
out.println( "<META http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">" );
out.println( "</HEAD>" );
out.println( "<BODY>" );
out.println( "<H3>"
+ Encode.forHtml( BaseMessages
.getString( PKG, "RemoveJobServlet.TheJobWasRemoved", jobName, id ) ) + "</H3>" );
out.print( "<a href=\""
+ convertContextPath( GetStatusServlet.CONTEXT_PATH ) + "\">"
+ BaseMessages.getString( PKG, "TransStatusServlet.BackToStatusPage" ) + "</a><br>" );
out.println( "<p>" );
out.println( "</BODY>" );
out.println( "</HTML>" );
}
} else {
if ( useXML ) {
out.println( new WebResult( WebResult.STRING_ERROR, BaseMessages.getString(
PKG, "RemoveJobServlet.Log.CoundNotFindSpecJob", jobName ) ) );
} else {
out.println( "<H1>"
+ Encode.forHtml( BaseMessages.getString(
PKG, "RemoveJobServlet.JobRemoved.Log.CoundNotFindJob", jobName, id ) ) + "</H1>" );
out.println( "<a href=\""
+ convertContextPath( GetStatusServlet.CONTEXT_PATH ) + "\">"
+ BaseMessages.getString( PKG, "TransStatusServlet.BackToStatusPage" ) + "</a><p>" );
response.setStatus( HttpServletResponse.SC_BAD_REQUEST );
}
}
}
public String toString() {
return "Remove job servlet";
}
public String getService() {
return CONTEXT_PATH + " (" + toString() + ")";
}
public String getContextPath() {
return CONTEXT_PATH;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.tier.sockets.command;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.geode.annotations.Immutable;
import org.apache.geode.cache.DynamicRegionFactory;
import org.apache.geode.cache.InterestResultPolicy;
import org.apache.geode.cache.operations.RegisterInterestOperationContext;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.tier.CachedRegionHelper;
import org.apache.geode.internal.cache.tier.Command;
import org.apache.geode.internal.cache.tier.InterestType;
import org.apache.geode.internal.cache.tier.MessageType;
import org.apache.geode.internal.cache.tier.sockets.BaseCommand;
import org.apache.geode.internal.cache.tier.sockets.ChunkedMessage;
import org.apache.geode.internal.cache.tier.sockets.Message;
import org.apache.geode.internal.cache.tier.sockets.Part;
import org.apache.geode.internal.cache.tier.sockets.ServerConnection;
import org.apache.geode.internal.security.AuthorizeRequest;
import org.apache.geode.internal.security.SecurityService;
import org.apache.geode.security.ResourcePermission.Operation;
import org.apache.geode.security.ResourcePermission.Resource;
/**
* @since GemFire 6.1
*/
public class RegisterInterestList61 extends BaseCommand {

  // Commands are stateless, so a single shared instance serves all connections.
  @Immutable
  private static final RegisterInterestList61 singleton = new RegisterInterestList61();

  /** @return the shared singleton instance of this command. */
  public static Command getCommand() {
    return singleton;
  }

  // Package-private: instances are obtained through getCommand().
  RegisterInterestList61() {}

  /**
   * Processes a register-interest-list request from a 6.1 client: decodes the
   * region name, result policy, durability flag, key list and region data
   * policy from the message parts, authorizes and registers the client's
   * interest, then streams back a chunked response.
   *
   * Message part layout (by index): 0 = region name, 1 = InterestResultPolicy,
   * 2 = durable flag, 3 = number of keys, 4..4+n-1 = the keys, optionally
   * followed by a "send updates as invalidates" flag (clients 6.0.3+); the
   * region data policy is always carried in the last part.
   */
  @Override
  public void cmdExecute(final Message clientMessage, final ServerConnection serverConnection,
      final SecurityService securityService, long start) throws IOException, InterruptedException {
    Part regionNamePart = null, keyPart = null, numberOfKeysPart = null;
    String regionName = null;
    Object key = null;
    InterestResultPolicy policy;
    List keys = null;
    CachedRegionHelper crHelper = serverConnection.getCachedRegionHelper();
    int numberOfKeys = 0, partNumber = 0;
    // Every request gets a chunked reply, even on the error paths below.
    serverConnection.setAsTrue(REQUIRES_RESPONSE);
    serverConnection.setAsTrue(REQUIRES_CHUNKED_RESPONSE);
    ChunkedMessage chunkedResponseMsg = serverConnection.getRegisterInterestResponseMessage();
    // bserverStats.incLong(readDestroyRequestTimeId,
    // DistributionStats.getStatTime() - start);
    // bserverStats.incInt(destroyRequestsId, 1);
    // start = DistributionStats.getStatTime();
    // Retrieve the data from the message parts
    regionNamePart = clientMessage.getPart(0);
    regionName = regionNamePart.getCachedString();
    // Retrieve the InterestResultPolicy
    try {
      policy = (InterestResultPolicy) clientMessage.getPart(1).getObject();
    } catch (Exception e) {
      // Deserialization failure: report to the client and stop processing.
      writeChunkedException(clientMessage, e, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }
    boolean isDurable = false;
    try {
      // Part 2 is a single-byte boolean: 0x01 means a durable registration.
      Part durablePart = clientMessage.getPart(2);
      byte[] durablePartBytes = (byte[]) durablePart.getObject();
      isDurable = durablePartBytes[0] == 0x01;
    } catch (Exception e) {
      writeChunkedException(clientMessage, e, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }
    // region data policy: always carried in the final message part,
    // regardless of how many keys precede it.
    byte[] regionDataPolicyPartBytes;
    try {
      Part regionDataPolicyPart = clientMessage.getPart(clientMessage.getNumberOfParts() - 1);
      regionDataPolicyPartBytes = (byte[]) regionDataPolicyPart.getObject();
    } catch (Exception e) {
      writeChunkedException(clientMessage, e, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }
    // Part 3 carries the key count; the keys themselves start at part 4.
    numberOfKeysPart = clientMessage.getPart(3);
    numberOfKeys = numberOfKeysPart.getInt();
    partNumber = 4;
    keys = new ArrayList();
    for (int i = 0; i < numberOfKeys; i++) {
      keyPart = clientMessage.getPart(partNumber + i);
      try {
        key = keyPart.getStringOrObject();
      } catch (Exception e) {
        writeChunkedException(clientMessage, e, serverConnection);
        serverConnection.setAsTrue(RESPONDED);
        return;
      }
      keys.add(key);
    }
    boolean sendUpdatesAsInvalidates = false;
    // VJR: Check for an extra part for client version 6.0.3 onwards for the
    // time being until refactoring into a new command version.
    if (clientMessage.getNumberOfParts() > (numberOfKeys + partNumber)) {
      try {
        Part notifyPart = clientMessage.getPart(numberOfKeys + partNumber);
        byte[] notifyPartBytes = (byte[]) notifyPart.getObject();
        sendUpdatesAsInvalidates = notifyPartBytes[0] == 0x01;
      } catch (Exception e) {
        writeChunkedException(clientMessage, e, serverConnection);
        serverConnection.setAsTrue(RESPONDED);
        return;
      }
    }
    if (logger.isDebugEnabled()) {
      logger.debug(
          "{}: Received register interest 61 request ({} bytes) from {} for the following {} keys in region {}: {}",
          serverConnection.getName(), clientMessage.getPayloadLength(),
          serverConnection.getSocketString(), numberOfKeys, regionName, keys);
    }
    /*
     * AcceptorImpl acceptor = servConn.getAcceptor();
     *
     * // Check if the Server is running in NotifyBySubscription=true mode. if
     * (!acceptor.getCacheClientNotifier().getNotifyBySubscription()) { // This should have been
     * taken care at the client. String err = LocalizedStrings.
     * RegisterInterest_INTEREST_REGISTRATION_IS_SUPPORTED_ONLY_FOR_SERVERS_WITH_NOTIFYBYSUBSCRIPTION_SET_TO_TRUE
     * ); writeChunkedErrorResponse(msg,
     * MessageType.REGISTER_INTEREST_DATA_ERROR, err, servConn); servConn.setAsTrue(RESPONDED);
     * return; }
     */
    // Process the register interest request: both a region name and at least
    // one key are required; pick the most specific error message.
    if (keys.isEmpty() || regionName == null) {
      String errMessage = null;
      if (keys.isEmpty() && regionName == null) {
        errMessage =
            "The input list of keys is empty and the input region name is null for the register interest request.";
      } else if (keys.isEmpty()) {
        errMessage =
            "The input list of keys for the register interest request is empty.";
      } else if (regionName == null) {
        errMessage =
            "The input region name for the register interest request is null.";
      }
      logger.warn("{}: {}", serverConnection.getName(), errMessage);
      writeChunkedErrorResponse(clientMessage, MessageType.REGISTER_INTEREST_DATA_ERROR, errMessage,
          serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }
    // A missing region is only logged, not treated as an error: registration
    // proceeds so the interest takes effect if the region is created later.
    LocalRegion region = (LocalRegion) serverConnection.getCache().getRegion(regionName);
    if (region == null) {
      logger.info("{}: Region named {} was not found during register interest list request.",
          new Object[] {serverConnection.getName(), regionName});
      // writeChunkedErrorResponse(msg,
      // MessageType.REGISTER_INTEREST_DATA_ERROR, message);
      // responded = true;
    } // else { // region not null
    try {
      securityService.authorize(Resource.DATA, Operation.READ, regionName);
      AuthorizeRequest authzRequest = serverConnection.getAuthzRequest();
      if (authzRequest != null) {
        if (!DynamicRegionFactory.regionIsDynamicRegionList(regionName)) {
          // The authorization callback may rewrite the key list.
          RegisterInterestOperationContext registerContext =
              authzRequest.registerInterestListAuthorize(regionName, keys, policy);
          keys = (List) registerContext.getKey();
        }
      }
      // Register interest
      serverConnection.getAcceptor().getCacheClientNotifier().registerClientInterest(regionName,
          keys, serverConnection.getProxyID(), isDurable, sendUpdatesAsInvalidates, true,
          regionDataPolicyPartBytes[0], true);
    } catch (Exception ex) {
      // If an interrupted exception is thrown , rethrow it
      checkForInterrupt(serverConnection, ex);
      // Otherwise, write an exception message and continue
      writeChunkedException(clientMessage, ex, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }
    // Update the statistics and write the reply
    // bserverStats.incLong(processDestroyTimeId,
    // DistributionStats.getStatTime() - start);
    // start = DistributionStats.getStatTime();
    // Only the primary server proxy sends the actual result data; a secondary
    // acknowledges with an empty RESPONSE_FROM_SECONDARY.
    boolean isPrimary = serverConnection.getAcceptor().getCacheClientNotifier()
        .getClientProxy(serverConnection.getProxyID()).isPrimary();
    if (!isPrimary) {
      chunkedResponseMsg.setMessageType(MessageType.RESPONSE_FROM_SECONDARY);
      chunkedResponseMsg.setTransactionId(clientMessage.getTransactionId());
      chunkedResponseMsg.sendHeader();
      chunkedResponseMsg.setLastChunk(true);
      if (logger.isDebugEnabled()) {
        logger.debug(
            "{}: Sending register interest response chunk from secondary for region: {} for key: {} chunk=<{}>",
            serverConnection.getName(), regionName, key, chunkedResponseMsg);
      }
      chunkedResponseMsg.sendChunk(serverConnection);
    } else { // isPrimary
      // Send header which describes how many chunks will follow
      chunkedResponseMsg.setMessageType(MessageType.RESPONSE_FROM_PRIMARY);
      chunkedResponseMsg.setTransactionId(clientMessage.getTransactionId());
      chunkedResponseMsg.sendHeader();
      // Send chunk response
      try {
        fillAndSendRegisterInterestResponseChunks(region, keys, InterestType.KEY, policy,
            serverConnection);
        serverConnection.setAsTrue(RESPONDED);
      } catch (Exception e) {
        // If an interrupted exception is thrown , rethrow it
        checkForInterrupt(serverConnection, e);
        // otherwise send the exception back to client
        writeChunkedException(clientMessage, e, serverConnection);
        serverConnection.setAsTrue(RESPONDED);
        return;
      }
      if (logger.isDebugEnabled()) {
        // logger.debug(getName() + ": Sent chunk (1 of 1) of register interest
        // response (" + chunkedResponseMsg.getBufferLength() + " bytes) for
        // region " + regionName + " key " + key);
        logger.debug(
            "{}: Sent register interest response for the following {} keys in region {}: {}",
            serverConnection.getName(), numberOfKeys, regionName, keys);
      }
      // bserverStats.incLong(writeDestroyResponseTimeId,
      // DistributionStats.getStatTime() - start);
      // bserverStats.incInt(destroyResponsesId, 1);
    } // isPrimary
    // } // region not null
  }
}
| |
/*
*
* This file was generated by LLRP Code Generator
* see http://llrp-toolkit.cvs.sourceforge.net/llrp-toolkit/
* for more information
* Generated on: Sun Apr 08 14:14:12 EDT 2012;
*
*/
/*
* Copyright 2007 ETH Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
*
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*
*/
package org.llrp.ltk.generated.parameters;
import maximsblog.blogspot.com.llrpexplorer.Logger;
import org.jdom2.Content;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.llrp.ltk.exceptions.InvalidLLRPMessageException;
import org.llrp.ltk.exceptions.MissingParameterException;
import org.llrp.ltk.generated.LLRPConstants;
import org.llrp.ltk.generated.enumerations.C1G2KillResultType;
import org.llrp.ltk.generated.interfaces.AccessCommandOpSpecResult;
import org.llrp.ltk.types.LLRPBitList;
import org.llrp.ltk.types.LLRPMessage;
import org.llrp.ltk.types.SignedShort;
import org.llrp.ltk.types.TLVParameter;
import org.llrp.ltk.types.TVParameter;
import org.llrp.ltk.types.UnsignedShort;
import java.util.LinkedList;
import java.util.List;
/**
*
Contains the result from a kill operation.
See also {@link <a href="http://www.epcglobalinc.org/standards/llrp/llrp_1_0_1-standard-20070813.pdf#page=112&view=fit">LLRP Specification Section 15.2.1.5.5.3</a>}
and {@link <a href="http://www.epcglobalinc.org/standards/llrp/llrp_1_0_1-standard-20070813.pdf#page=160&view=fit">LLRP Specification Section 16.3.1.5.5.3</a>}
*/
/**
*
Contains the result from a kill operation.
See also {@link <a href="http://www.epcglobalinc.org/standards/llrp/llrp_1_0_1-standard-20070813.pdf#page=112&view=fit">LLRP Specification Section 15.2.1.5.5.3</a>}
and {@link <a href="http://www.epcglobalinc.org/standards/llrp/llrp_1_0_1-standard-20070813.pdf#page=160&view=fit">LLRP Specification Section 16.3.1.5.5.3</a>}
.
*/
public class C1G2KillOpSpecResult extends TLVParameter
    implements AccessCommandOpSpecResult {

    /** LLRP parameter type number for C1G2KillOpSpecResult. */
    public static final SignedShort TYPENUM = new SignedShort(351);

    private static final Logger LOGGER = Logger.getLogger(C1G2KillOpSpecResult.class);

    /** Outcome of the kill operation. */
    protected C1G2KillResultType result;

    /** Identifier linking this result back to the originating OpSpec. */
    protected UnsignedShort opSpecID;

    /**
     * empty constructor to create new parameter.
     */
    public C1G2KillOpSpecResult() {
    }

    /**
     * Constructor to create parameter from binary encoded parameter
     * calls decodeBinary to decode parameter.
     * @param list to be decoded
     */
    public C1G2KillOpSpecResult(LLRPBitList list) {
        decodeBinary(list);
    }

    /**
     * Constructor to create parameter from xml encoded parameter
     * calls decodeXML to decode parameter.
     * @param element to be decoded
     */
    public C1G2KillOpSpecResult(Element element)
        throws InvalidLLRPMessageException {
        decodeXML(element);
    }

    /**
     * Binary-encodes the parameter body: the result type followed by the
     * OpSpec ID.
     *
     * @return the encoded bits
     * @throws MissingParameterException if a mandatory field is unset
     */
    public LLRPBitList encodeBinarySpecific() {
        LLRPBitList resultBits = new LLRPBitList();

        if (result == null) {
            LOGGER.warn(" result not set");
            throw new MissingParameterException(
                " result not set for Parameter of Type C1G2KillOpSpecResult");
        }

        resultBits.append(result.encodeBinary());

        if (opSpecID == null) {
            LOGGER.warn(" opSpecID not set");
            throw new MissingParameterException(
                " opSpecID not set for Parameter of Type C1G2KillOpSpecResult");
        }

        resultBits.append(opSpecID.encodeBinary());

        return resultBits;
    }

    /**
     * XML-encodes the parameter as a "Result" and an "OpSpecID" child element.
     *
     * @param name element name to use for this parameter
     * @param ns namespace of the parent element
     * @return the encoded element
     * @throws MissingParameterException if a mandatory field is unset
     */
    public Content encodeXML(String name, Namespace ns) {
        // element in namespace defined by parent element
        Element element = new Element(name, ns);
        // child element are always in default LLRP namespace
        ns = Namespace.getNamespace("llrp", LLRPConstants.LLRPNAMESPACE);

        if (result == null) {
            LOGGER.warn(" result not set");
            throw new MissingParameterException(" result not set");
        } else {
            element.addContent(result.encodeXML("Result", ns));
        }

        if (opSpecID == null) {
            LOGGER.warn(" opSpecID not set");
            throw new MissingParameterException(" opSpecID not set");
        } else {
            element.addContent(opSpecID.encodeXML("OpSpecID", ns));
        }

        //parameters
        return element;
    }

    /**
     * Decodes the parameter body from its binary form: the fixed-width result
     * type followed by the OpSpec ID.
     *
     * @param binary the bits of the parameter body
     */
    protected void decodeBinarySpecific(LLRPBitList binary) {
        int position = 0;
        result = new C1G2KillResultType(binary.subList(position,
                    C1G2KillResultType.length()));
        position += C1G2KillResultType.length();
        opSpecID = new UnsignedShort(binary.subList(position,
                    UnsignedShort.length()));
        position += UnsignedShort.length();
    }

    /**
     * Decodes the parameter from its XML form; rejects any unknown child
     * element.
     *
     * @param element the element to decode
     * @throws InvalidLLRPMessageException if an unexpected child is present
     */
    public void decodeXML(Element element) throws InvalidLLRPMessageException {
        // child element are always in default LLRP namespace
        Namespace ns = Namespace.getNamespace(LLRPConstants.LLRPNAMESPACE);

        Element temp = element.getChild("Result", ns);

        if (temp != null) {
            result = new C1G2KillResultType(temp);
        }

        element.removeChild("Result", ns);
        temp = element.getChild("OpSpecID", ns);

        if (temp != null) {
            opSpecID = new UnsignedShort(temp);
        }

        element.removeChild("OpSpecID", ns);

        // everything expected has been consumed; leftovers are invalid
        if (element.getChildren().size() > 0) {
            String message = "C1G2KillOpSpecResult has unknown element " +
                ((Element) element.getChildren().get(0)).getName();
            throw new InvalidLLRPMessageException(message);
        }
    }

    //setters

    /**
     * set result of type C1G2KillResultType .
     * @param result to be set
     */
    public void setResult(final C1G2KillResultType result) {
        this.result = result;
    }

    /**
     * set opSpecID of type UnsignedShort .
     * @param opSpecID to be set
     */
    public void setOpSpecID(final UnsignedShort opSpecID) {
        this.opSpecID = opSpecID;
    }

    // end setter

    //getters

    /**
     * get result of type C1G2KillResultType.
     * @return C1G2KillResultType
     */
    public C1G2KillResultType getResult() {
        return result;
    }

    /**
     * get opSpecID of type UnsignedShort.
     * @return type UnsignedShort to be set
     */
    public UnsignedShort getOpSpecID() {
        return this.opSpecID;
    }

    // end getters

    //add methods

    // end add

    /**
     * For TLV Parameter length can not be determined at compile time. This method therefore always returns 0.
     * @return Integer always zero
     */
    public static Integer length() {
        return 0;
    }

    /**
     * {@inheritDoc}
     */
    public SignedShort getTypeNum() {
        return TYPENUM;
    }

    /**
     * {@inheritDoc}
     */
    public String getName() {
        return "C1G2KillOpSpecResult";
    }

    /**
     * return string representation. All field values but no parameters are included
     * @return String
     */
    public String toString() {
        // BUG FIX: the generated code declared a local String named "result",
        // shadowing the field; "result += result" then doubled the prefix
        // string and the kill result value was never included in the output.
        String str = "C1G2KillOpSpecResult: ";
        str += ", result: ";
        str += result;
        str += ", opSpecID: ";
        str += opSpecID;
        // drop the first ", " so the output reads "C1G2KillOpSpecResult: result: ..."
        str = str.replaceFirst(", ", "");

        return str;
    }
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.engine;
import java.io.Serializable;
import java.sql.SQLException;
import java.util.List;
import java.util.Set;
import com.gemstone.gemfire.internal.cache.EntryEventImpl;
import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.cache.PartitionedRegion;
import com.gemstone.gemfire.internal.cache.RegionEntry;
import com.gemstone.gemfire.internal.cache.TXStateProxy;
import com.gemstone.gnu.trove.THashMap;
import com.pivotal.gemfirexd.execute.QueryObserver;
import com.pivotal.gemfirexd.internal.engine.access.index.OpenMemIndex;
import com.pivotal.gemfirexd.internal.engine.access.index.GfxdIndexManager;
import com.pivotal.gemfirexd.internal.engine.ddl.resolver.GfxdPartitionResolver;
import com.pivotal.gemfirexd.internal.engine.distributed.ResultHolder;
import com.pivotal.gemfirexd.internal.engine.distributed.GfxdConnectionWrapper;
import com.pivotal.gemfirexd.internal.engine.distributed.message.StatementExecutorMessage;
import com.pivotal.gemfirexd.internal.engine.distributed.metadata.ColocationCriteria;
import com.pivotal.gemfirexd.internal.engine.distributed.metadata.ComparisonQueryInfo;
import com.pivotal.gemfirexd.internal.engine.distributed.metadata.QueryInfo;
import com.pivotal.gemfirexd.internal.engine.distributed.metadata.SelectQueryInfo;
import com.pivotal.gemfirexd.internal.engine.distributed.metadata.SubQueryInfo;
import com.pivotal.gemfirexd.internal.engine.procedure.ProcedureChunkMessage;
import com.pivotal.gemfirexd.internal.engine.procedure.cohort.ProcedureSender;
import com.pivotal.gemfirexd.internal.engine.sql.conn.GfxdHeapThresholdListener;
import com.pivotal.gemfirexd.internal.engine.sql.execute.AbstractGemFireActivation;
import com.pivotal.gemfirexd.internal.engine.sql.execute.AbstractGemFireResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.execute.GemFireDistributedResultSet;
import com.pivotal.gemfirexd.internal.engine.store.CompactCompositeIndexKey;
import com.pivotal.gemfirexd.internal.engine.store.GemFireContainer;
import com.pivotal.gemfirexd.internal.engine.store.RowFormatter;
import com.pivotal.gemfirexd.internal.iapi.error.StandardException;
import com.pivotal.gemfirexd.internal.iapi.sql.Activation;
import com.pivotal.gemfirexd.internal.iapi.sql.ParameterValueSet;
import com.pivotal.gemfirexd.internal.iapi.sql.ResultSet;
import com.pivotal.gemfirexd.internal.iapi.sql.conn.LanguageConnectionContext;
import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.TriggerDescriptor;
import com.pivotal.gemfirexd.internal.iapi.sql.execute.ExecRow;
import com.pivotal.gemfirexd.internal.iapi.store.access.BackingStoreHashtable;
import com.pivotal.gemfirexd.internal.iapi.store.access.ColumnOrdering;
import com.pivotal.gemfirexd.internal.iapi.store.access.ScanController;
import com.pivotal.gemfirexd.internal.iapi.store.access.conglomerate.Conglomerate;
import com.pivotal.gemfirexd.internal.iapi.types.RowLocation;
import com.pivotal.gemfirexd.internal.impl.jdbc.EmbedPreparedStatement;
import com.pivotal.gemfirexd.internal.impl.jdbc.EmbedResultSet;
import com.pivotal.gemfirexd.internal.impl.jdbc.EmbedStatement;
import com.pivotal.gemfirexd.internal.impl.sql.GenericParameterValueSet;
import com.pivotal.gemfirexd.internal.impl.sql.GenericPreparedStatement;
import com.pivotal.gemfirexd.internal.impl.sql.StatementStats;
import com.pivotal.gemfirexd.internal.impl.sql.compile.FromBaseTable;
import com.pivotal.gemfirexd.internal.impl.sql.compile.SelectNode;
import com.pivotal.gemfirexd.internal.impl.sql.compile.StatementNode;
/**
* This interface is used by testing/debugging code to be notified of various
* query events
*
* @author Asif
*/
public interface GemFireXDQueryObserver extends QueryObserver {
/**
* Callback indicating query parsing is happening.
*
* @param query
* Query string being executed
* @param qt
* the {@link StatementNode} for the currently compiling statement
* @param lcc
* the {@link LanguageConnectionContext} for the current statement
*/
public void afterQueryParsing(String query, StatementNode qt,
LanguageConnectionContext lcc);
/**
* Callback invoked after optimization of query tree providing the optimized
* result.
*
* @param query
* Query string being executed
* @param qt
* the {@link StatementNode} for the currently compiling statement
* @param lcc
* the {@link LanguageConnectionContext} for the current statement
*/
public void beforeOptimizedParsedTree(String query, StatementNode qt,
LanguageConnectionContext lcc);
/**
* Callback invoked after optimization of query tree providing the optimized
* result.
*
* @param query
* Query string being executed
* @param qt
* the {@link StatementNode} for the currently compiling statement
* @param lcc
* the {@link LanguageConnectionContext} for the current statement
*/
public void afterOptimizedParsedTree(String query, StatementNode qt,
LanguageConnectionContext lcc);
/**
* Callback invoked after optimization of query tree during statement prepare
* phase.
* @param gps
* GenericPreparedStatement for the query
* @param lcc TODO
* @param qInfo
* SubqueryInfo object created from the optimized parsed tree
*/
public void subQueryInfoObjectFromOptmizedParsedTree(
List<SubQueryInfo> qInfos, GenericPreparedStatement gps, LanguageConnectionContext lcc);
/**
* Callback invoked after optimization of query tree during statement prepare
* phase.
*
* @param qInfo
* QueryInfo object created from the optimized parsed tree
* @param gps
* GenericPreparedStatement for the query
* @param lcc TODO
*/
public void queryInfoObjectFromOptmizedParsedTree(QueryInfo qInfo,
GenericPreparedStatement gps, LanguageConnectionContext lcc);
/**
* Callback invoked after prepared statement is complete initialized during
* statement prepare phase.
*
* @param qInfo
* QueryInfo object created from the optimized parsed tree
* @param gps
* GenericPreparedStatement for the query
* @param lcc TODO
*/
public void queryInfoObjectAfterPreparedStatementCompletion(QueryInfo qInfo,
GenericPreparedStatement gps, LanguageConnectionContext lcc);
/**
* Callback invoked before start of a query execution.
*
* @param stmt
* the {@link EmbedStatement} for the current query
* @param activation
* the {@link Activation} object for current query
*/
public void beforeQueryExecution(EmbedStatement stmt, Activation activation)
throws SQLException;
/**
* Callback invoked before start of a batched query execution.
*
* @param stmt
* the {@link EmbedStatement} for the current query
* @param batchSize
* the number of elements in the batch
*/
public void beforeBatchQueryExecution(EmbedStatement stmt, int batchSize)
throws SQLException;
/**
* Callback invoked after end of a successful batch query execution.
*
* @param stmt
* the {@link EmbedStatement} for the current query
* @param batchSize
* the number of elements in the batch
*/
public void afterBatchQueryExecution(EmbedStatement stmt, int batchSize);
/**
* Callback invoked iff SanityManager.DEBUG is true, before
* activation.execute() call in
* {@link GenericPreparedStatement#execute(Activation, boolean, long, boolean)}
*
* At this point GenericResultSets are about to get created. <BR>
* Primarily used as a query cancellation point.
*
* @param stmt
* the {@link GenericPreparedStatement} used for the query.
* @param lcc TODO
* @throws StandardException
*/
public void beforeQueryExecution(GenericPreparedStatement stmt, LanguageConnectionContext lcc)
throws StandardException;
/**
 * Callback invoked iff SanityManager.DEBUG is true, after
 * activation.execute() call in
 * {@link GenericPreparedStatement#execute(Activation, boolean, long, boolean)}
 * .
 *
 * At this point GenericResultSets have just been created. Callback is skipped
 * on exception. <BR>
 * Primarily used as a query cancellation point.
 *
 * @param stmt
 *          the {@link GenericPreparedStatement} used for the query.
 * @param activation
 *          the activation for the statement
 * @throws StandardException
 */
public void afterQueryExecution(GenericPreparedStatement stmt,
Activation activation) throws StandardException;
/**
* Callback invoked iff SanityManager.DEBUG is true, after
* activation.execute() call in
* {@link GenericPreparedStatement#execute(Activation, boolean, long, boolean)}
* .
*
* At this point GenericResultSets have just been created. Depending on the
* query execution has prepared to operate on the underlying store. Sometimes
* in presence of grouping, ordering, distinct clauses in a query this will
* fetch and replicate the required data from the store.
*
* Callback is skipped on exception. <BR>
* Primarily used as a query cancellation point.
*
* @param stmt
* the {@link GenericPreparedStatement} used for the query.
* @param lcc TODO
* @param resultSet TODO
*/
public void afterResultSetOpen(GenericPreparedStatement stmt, LanguageConnectionContext lcc, ResultSet resultSet);
/**
 * Callback invoked iff SanityManager.DEBUG is true, and cursor movement
 * methods are called e.g. rs.next(). <BR>
 * Primarily used as a query cancellation point.
 *
 * @param rs
 *          the {@link EmbedResultSet} whose cursor is being moved
 * @param newRow
 *          the new {@link ExecRow} after the cursor is moved
 * @param theResults
 *          the {@link ResultSet} the cursor operates on
 */
public void onEmbedResultSetMovePosition(EmbedResultSet rs, ExecRow newRow,
ResultSet theResults);
/**
 * Callback invoked before an {@link AbstractGemFireActivation} is created.
 *
 * @param ac
 *          the {@link AbstractGemFireActivation} being created
 */
public void beforeGemFireActivationCreate(AbstractGemFireActivation ac);
/**
 * Callback invoked after an {@link AbstractGemFireActivation} has been
 * created.
 *
 * @param ac
 *          the {@link AbstractGemFireActivation} that was created
 */
public void afterGemFireActivationCreate(AbstractGemFireActivation ac);
/**
* Callback invoked before a {@link ResultSet} of type
* {@link AbstractGemFireResultSet} is opened.
*
* @param rs
* the {@link AbstractGemFireResultSet} that has been opened
* @param lcc TODO
* @throws StandardException
*/
public void beforeGemFireResultSetOpen(AbstractGemFireResultSet rs, LanguageConnectionContext lcc)
throws StandardException;
/**
* Callback invoked after a {@link ResultSet} of type
* {@link AbstractGemFireResultSet} is successfully opened.
*
* @param rs
* the {@link AbstractGemFireResultSet} that has been opened
* @param lcc TODO
*/
public void afterGemFireResultSetOpen(AbstractGemFireResultSet rs, LanguageConnectionContext lcc);
/**
 * Callback invoked before a batch of pending operations for the given
 * {@link ResultSet} is flushed.
 *
 * @param rs
 *          the {@link ResultSet} whose batch is about to be flushed
 * @param lcc
 *          the {@link LanguageConnectionContext} for the current operation
 * @throws StandardException
 */
public void beforeFlushBatch(ResultSet rs, LanguageConnectionContext lcc)
throws StandardException;
/**
 * Callback invoked after a batch of pending operations for the given
 * {@link ResultSet} has been flushed.
 *
 * @param rs
 *          the {@link ResultSet} whose batch was flushed
 * @param lcc
 *          the {@link LanguageConnectionContext} for the current operation
 * @throws StandardException
 */
public void afterFlushBatch(ResultSet rs, LanguageConnectionContext lcc)
throws StandardException;
/**
* Callback invoked before retrieving ResultSet using GemFireXD's Activation
* class ( GemFireActivation or GemfireDistributedActivation) This callback is
* generated from execute method of Activation class
*
* @param activation
* Instance of type AbstractGemFireActivation
* @see AbstractGemFireActivation
* @see GemfireDistributionActivation
* @see GemFireActivation
*/
public void beforeGemFireResultSetExecuteOnActivation(
AbstractGemFireActivation activation);
/**
* Callback invoked before computation of routing object.
*
* @param activation
* Instance of {@link AbstractGemFireActivation}
*/
public void beforeComputeRoutingObjects(AbstractGemFireActivation activation);
/**
* Callback invoked after computation of routing object.
*
* @param activation
* Instance of {@link AbstractGemFireActivation}
*/
public void afterComputeRoutingObjects(AbstractGemFireActivation activation);
/**
 * Callback invoked before distribution of query to other nodes.
 *
 * @param executorMessage
 *          the message being executed
 * @param streaming
 *          true if results are streamed back from the remote nodes
 */
public <T extends Serializable> void beforeQueryDistribution(
StatementExecutorMessage<T> executorMessage, boolean streaming);
/**
 * Callback invoked after distribution of query to other nodes.
 *
 * @param executorMessage
 *          the message that was executed
 * @param streaming
 *          true if results are streamed back from the remote nodes
 */
public <T extends Serializable> void afterQueryDistribution(
StatementExecutorMessage<T> executorMessage, boolean streaming);
/**
* Callback invoked after retrieving ResultSet using GemFireXD's Activation
* class (GemFireActivation or GemfireDistributedActivation). This callback is
* generated from execute method of Activation class.
*
* @param activation
* Instance of type AbstractGemFireActivation
*
* @see AbstractGemFireActivation
* @see GemfireDistributionActivation
* @see GemFireActivation
*/
public void afterGemFireResultSetExecuteOnActivation(
AbstractGemFireActivation activation);
/**
* Callback invoked before a {@link ResultSet} of type
* {@link AbstractGemFireResultSet} is closed.
*
* @param rs
* the {@link AbstractGemFireResultSet} that has been opened
* @param query
* Query string that will be executed
*/
public void beforeGemFireResultSetClose(AbstractGemFireResultSet rs,
String query);
/**
* Callback invoked after a {@link ResultSet} of type
* {@link AbstractGemFireResultSet} is successfully closed.
*
* @param rs
* the {@link AbstractGemFireResultSet} that has been opened
* @param query
* Query string that has been executed
*/
public void afterGemFireResultSetClose(AbstractGemFireResultSet rs,
String query);
public void beforeEmbedResultSetClose(EmbedResultSet rs,
String query);
/**
 * Callback given after creating GemFireXD ResultSet(s) using GemFireXD's
 * Activation class (GemFireActivation or GemfireDistributedActivation). This
 * callback is generated from the constructor of the GemFire..ResultSet class.
 *
 * @param rs
 *          the {@link ResultSet} instance that was created
 * @see GemFireDistributedResultSet
 * @see GemFireUpdateResultSet
 */
public void createdGemFireXDResultSet(ResultSet rs);
/**
* Callback invoked just before the StatementQueryExecutorFunction executes
* the query on the data store node.
*
* @param wrapper
* Instance of GfxdConnectionWrapper which stores the EmbedConnection
* for a connectionID and wraps the Statement Objects.
* @param stmt
* Instance of {@link EmbedStatement} that will execute the query.
* @param query
* Query string that will be executed
*/
public void beforeQueryExecutionByStatementQueryExecutor(
GfxdConnectionWrapper wrapper, EmbedStatement stmt, String query);
/**
* Callback invoked just after the StatementQueryExecutorFunction successfully
* executes the query on the data store node.
*
* @param wrapper
* Instance of GfxdConnectionWrapper which stores the EmbedConnection
* for a connectionID and wraps the Statement Objects.
* @param stmt
* Instance of {@link EmbedStatement} that executed the query.
* @param query
* Query string that was executed
*/
public void afterQueryExecutionByStatementQueryExecutor(
GfxdConnectionWrapper wrapper, EmbedStatement stmt, String query);
/**
* Callback invoked just before the PrepStatementQueryExecutorFunction
* executes the query on the data store node.
*
* @param wrapper
* Instance of GfxdConnectionWrapper which stores the EmbedConnection
* for a connectionID and wraps the Statement Objects.
* @param pstmt
* Instance of {@link EmbedPreparedStatement} that will be executed.
* @param query
* Query string that was sent to this node. Note that this may be
* null in case the node has already seen the query so to get the
* actual query string use pstmt.getSQLText()
*/
public void beforeQueryExecutionByPrepStatementQueryExecutor(
GfxdConnectionWrapper wrapper, EmbedPreparedStatement pstmt, String query);
/**
* Callback invoked just after the PrepStatementQueryExecutorFunction
* successfully executes the query on the data store node.
*
* @param wrapper
* Instance of GfxdConnectionWrapper which stores the EmbedConnection
* for a connectionID and wraps the Statement Objects.
* @param pstmt
* Instance of {@link EmbedPreparedStatement} that was executed.
* @param query
* Query string that was sent to this node. Note that this may be
* null in case the node has already seen the query so to get the
* actual query string use pstmt.getSQLText()
*/
public void afterQueryExecutionByPrepStatementQueryExecutor(
GfxdConnectionWrapper wrapper, EmbedPreparedStatement pstmt, String query);
/**
* Callback invoked before query execution and serialization in
* {@link ResultHolder}.
*
* @param wrapper
* Instance of GfxdConnectionWrapper which stores the EmbedConnection
* for a connectionID and wraps the Statement Objects.
* @param es
* the {@link EmbedStatement} for the current execution
*/
public void beforeResultHolderExecution(GfxdConnectionWrapper wrapper,
EmbedStatement es);
/**
* Callback invoked before iteration of a single result in
* {@link ResultHolder}.
*
* @param wrapper
* Instance of GfxdConnectionWrapper which stores the EmbedConnection
* for a connectionID and wraps the Statement Objects.
* @param es
* the {@link EmbedStatement} for the current execution
*/
public void beforeResultHolderIteration(GfxdConnectionWrapper wrapper,
EmbedStatement es);
/**
* Callback invoked after iteration of a single result in {@link ResultHolder}
* .
*
* @param wrapper
* Instance of GfxdConnectionWrapper which stores the EmbedConnection
* for a connectionID and wraps the Statement Objects.
* @param es
* the {@link EmbedStatement} for the current execution
*/
public void afterResultHolderIteration(GfxdConnectionWrapper wrapper,
EmbedStatement es);
/**
* Callback invoked before serialization of a single result in
* {@link ResultHolder}.
*
* @param wrapper
* Instance of GfxdConnectionWrapper which stores the EmbedConnection
* for a connectionID and wraps the Statement Objects.
* @param es
* the {@link EmbedStatement} for the current execution
*/
public void beforeResultHolderSerialization(GfxdConnectionWrapper wrapper,
EmbedStatement es);
/**
* Callback invoked after serialization of a single result in
* {@link ResultHolder}.
*
* @param wrapper
* Instance of GfxdConnectionWrapper which stores the EmbedConnection
* for a connectionID and wraps the Statement Objects.
* @param es
* the {@link EmbedStatement} for the current execution
*/
public void afterResultHolderSerialization(GfxdConnectionWrapper wrapper,
EmbedStatement es);
/**
* Callback invoked after successful query execution and serialization in
* {@link ResultHolder}.
*
* @param wrapper
* Instance of GfxdConnectionWrapper which stores the EmbedConnection
* for a connectionID and wraps the Statement Objects.
* @param es
* the {@link EmbedStatement} for the current execution
* @param query
* Query string that was executed
*/
public void afterResultHolderExecution(GfxdConnectionWrapper wrapper,
EmbedStatement es, String query);
/**
 * Callback invoked before deserialization of a row from {@link ResultHolder}.
 *
 * @param rf
 *          the {@link RowFormatter} for the current row
 * @param act
 *          {@link Activation} object for this execution
 */
public void beforeResultSetHolderRowRead(RowFormatter rf, Activation act);
/**
 * Callback invoked after successful deserialization of a row from
 * {@link ResultHolder}.
 *
 * @param rf
 *          the {@link RowFormatter} for the current row
 * @param row
 *          the {@link ExecRow} that was read from stream
 * @param act
 *          {@link Activation} object for this execution
 */
public void afterResultSetHolderRowRead(RowFormatter rf, ExecRow row,
Activation act);
/**
* Invoked before index updates performed by {@link GfxdIndexManager}.
*/
public void beforeIndexUpdatesAtRegionLevel(LocalRegion owner,
EntryEventImpl event, RegionEntry entry);
public void beforeForeignKeyConstraintCheckAtRegionLevel();
public void beforeUniqueConstraintCheckAtRegionLevel();
/**
* Callback invoked before global index lookup of a primary key or unique key.
*
* @param lcc
* the {@link LanguageConnectionContext} of the current operation
* @param indexRegion
* the {@link PartitionedRegion} of the global index region
* @param indexKey
* the global index lookup key
*/
public void beforeGlobalIndexLookup(LanguageConnectionContext lcc,
PartitionedRegion indexRegion, Serializable indexKey);
/**
* Callback invoked after global index lookup of a primary key or unique key.
*
* @param lcc
* the {@link LanguageConnectionContext} of the current operation
* @param indexRegion
* the {@link PartitionedRegion} of the global index region
* @param indexKey
* the global index lookup key
* @param result
* the result of global index lookup for the key
*/
public void afterGlobalIndexLookup(LanguageConnectionContext lcc,
PartitionedRegion indexRegion, Serializable indexKey, Object result);
/**
* Callback invoked when a scan is opened on a table on index.
*
* @param sc
* a {@link ScanController} that has been opened in current
* query/update/delete, or a {@link BackingStoreHashtable} opened for
* hash/distinct scans
* @param conglom
* {@link Conglomerate} for which the scan has been opened
*/
public void scanControllerOpened(Object sc, Conglomerate conglom);
/**
 * Callback invoked before executing connection close by the
 * DistributedConnectionCloseExecutorFunction
 *
 * @param connectionIDs
 *          IDs of the connections being closed
 */
public void beforeConnectionCloseByExecutorFunction(long[] connectionIDs);
/**
 * Callback invoked after successfully executing connection close by the
 * DistributedConnectionCloseExecutorFunction
 *
 * @param connectionIDs
 *          IDs of the connections that were closed
 */
public void afterConnectionCloseByExecutorFunction(long[] connectionIDs);
/**
* Callback invoked when the ORM process of ResultSet starts.
*/
public void beforeORM(Activation activation, AbstractGemFireResultSet rs);
/**
* Callback invoked when the ORM process of ResultSet ends.
*/
public void afterORM(Activation activation, AbstractGemFireResultSet rs);
/**
* Callback invoked during the process of identification of the optimal query
* plan. This hook can be used to override the cost identified by the derby
* query engine in using the HashIndex and thus forcing derby to pick up a
* query plan as per our requirement. In case of Sql Fabric, the HashIndex
* will usually refer to the underlying region of the table where fields are
* stored against the primary key.
*
* @param memIndex
* Instance of type OpenMemIndex which can provide information about
* the index being considered
*
* @param optimzerEvalutatedCost
* double indicating the cost identified by the derby engine. If a
* test does not want to modify the value, it should return this
* value back.
* @return double indicating the cost assosciated with the hash index to be
* used by derby
*/
public double overrideDerbyOptimizerIndexUsageCostForHash1IndexScan(
OpenMemIndex memIndex, double optimzerEvalutatedCost);
/**
* Callback invoked during the process of identification of the optimal query
* plan. This hook can be used to override the cost identified by the derby
* query engine in using the Local Sorted Index and thus forcing derby to pick
* up a query plan as per our requirement
*
* @param memIndex
* Instance of type OpenMemIndex which can provide information about
* the index being considered
*
* @param optimzerEvalutatedCost
* double indicating the cost identified by the derby engine. If a
* test does not want to modify the value, it should return this
* value back.
* @return double indicating the cost assosciated with the local sorted index
* to be used by derby
*/
public double overrideDerbyOptimizerIndexUsageCostForSortedIndexScan(
OpenMemIndex memIndex, double optimzerEvalutatedCost);
/**
* Callback invoked during the process of identification of the optimal query
* plan. This hook can be used to override the cost identified by the derby
* query engine in using the Table Scan and thus forcing derby to pick up a
* query plan as per our requirement
*
* @param gfContainer
* the GemfireContainer object on which table scan is proposed
* @param optimzerEvalutatedCost
* double indicating the cost identified by the derby engine. If a
* test does not want to modify the value, it should return this
* value back.
* @return double indicating the cost assosciated with the table scan to be
* used by derby
*/
public double overrideDerbyOptimizerCostForMemHeapScan(
GemFireContainer gfContainer, double optimzerEvalutatedCost);
/**
* This callback gets invoked when CRITICAL_UP memory event happens and
* SanityManager.DEBUG is true i.e. only for sane builds.
*
* @param listener
* singleton instance of the listener.
*/
public void criticalUpMemoryEvent(GfxdHeapThresholdListener listener);
/**
* This callback gets invoked when CRITICAL_DOWN memory event happens and
* SanityManager.DEBUG is true i.e. only for sane builds.
*
* @param listener
* singleton instance of the listener.
*/
public void criticalDownMemoryEvent(GfxdHeapThresholdListener listener);
public void estimatingMemoryUsage(String stmtText, Object resultSet);
public long estimatedMemoryUsage(String stmtText, long memused);
public void putAllCalledWithMapSize(int size);
public void afterClosingWrapperPreparedStatement(long wrapperPrepStatementID,
long wrapperConnectionID);
/**
* Callback invoked on each call to
* {@link ColocationCriteria#updateColocationCriteria(ComparisonQueryInfo)}
*/
public void updatingColocationCriteria(ComparisonQueryInfo cqi);
/**
* Callback invoked during statement execution.
*
* @param stats
* StatementStats for the current statement being executed.
*/
public void statementStatsBeforeExecutingStatement(StatementStats stats);
/**
* Clear the state (if any) of the query observer. Some implementations may
* choose to dump information prior to resetting.
*
* The observer remains usable after this unlike in {@link #close()}.
*/
public void reset();
public void subqueryNodeProcessedData(SelectQueryInfo qInfo,
GenericPreparedStatement gps, String subquery,
List<Integer> paramPositions);
public void insertMultipleRowsBeingInvoked(int numElements);
/**
* Callback invoked after inserting a key into a local index.
*
* @param key
* index key.
* @param rowLocation
* the region entry/row location.
* @param container
* the index container.
*/
public void keyAndContainerAfterLocalIndexInsert(Object key,
Object rowLocation, GemFireContainer container);
/**
* Callback invoked after deleting a key from a local index.
*
* @param key
* index key.
* @param rowLocation
* the region entry/row location.
* @param container
* the index container.
*/
public void keyAndContainerAfterLocalIndexDelete(Object key,
Object rowLocation, GemFireContainer container);
public void keyAndContainerBeforeLocalIndexDelete(Object key,
Object rowLocation, GemFireContainer container);
public void getAllInvoked(int numKeys);
public void getAllGlobalIndexInvoked(int numKeys);
public void getAllLocalIndexInvoked(int numKeys);
public void getAllLocalIndexExecuted();
public void ncjPullResultSetOpenCoreInvoked();
public void getStatementIDs(long stID, long rootID, int stLevel);
public void ncjPullResultSetVerifyBatchSize(int value);
public void ncjPullResultSetVerifyCacheSize(int value);
public void ncjPullResultSetVerifyVarInList(boolean value);
public void independentSubqueryResultsetFetched(Activation activation,
ResultSet results);
/**
* Invoked when Index Scan is being used and there is a potential of having
* modified data in transaction, in which case instead of committed
* RowLocation, uncommitted data needs to be returned.
*
* @param regionEntry
*/
public void beforeInvokingContainerGetTxRowLocation(RowLocation regionEntry);
/**
* Invoked after {@link GfxdPartitionResolver#getRoutObject} method has been
* invoked.
*/
public void afterGetRoutingObject(Object routingObject);
public long overrideUniqueID(long actualUniqueID, boolean forRegionKey);
/**
* Invoked before send of a DAP ResultSet from execution node.
*
* If this method returns false then the send will be skipped.
*/
public boolean beforeProcedureResultSetSend(ProcedureSender sender,
EmbedResultSet rs);
/**
* Invoked before send of a DAP out parameters from execution node.
*
* If this method returns false then the send will be skipped.
*/
public boolean beforeProcedureOutParamsSend(ProcedureSender sender,
ParameterValueSet pvs);
/**
* Invoked before toData() of {@link ProcedureChunkMessage} is invoked.
*/
public void beforeProcedureChunkMessageSend(ProcedureChunkMessage message);
public void lockingRowForTX(TXStateProxy tx, GemFireContainer container,
RegionEntry entry, boolean writeLock);
public void attachingKeyInfoForUpdate(GemFireContainer container,
RegionEntry entry);
/**
* Overrides decision to avoid merge run.
*
* @return
*/
public boolean avoidMergeRuns();
/**
* This overrides sort buffer size to simulate multiple merge runs.
*
* @param columnOrdering
* @param sortBufferMax
* @return
*/
public int overrideSortBufferSize(ColumnOrdering[] columnOrdering,
int sortBufferMax);
public void callAtOldValueSameAsNewValueCheckInSM2IIOp();
public void onGetNextRowCore(ResultSet resultSet);
public void onGetNextRowCoreOfBulkTableScan(ResultSet resultSet);
public void onGetNextRowCoreOfGfxdSubQueryResultSet(ResultSet resultSet);
public void onDeleteResultSetOpen(ResultSet resultSet);
public void onSortResultSetOpen(ResultSet resultSet);
public void onGroupedAggregateResultSetOpen(ResultSet resultSet);
public void onUpdateResultSetOpen(ResultSet resultSet);
public void onUpdateResultSetDoneUpdate(ResultSet resultSet);
public void onDeleteResultSetOpenAfterRefChecks(ResultSet resultSet);
public void onDeleteResultSetOpenBeforeRefChecks(ResultSet resultSet);
public void setRoutingObjectsBeforeExecution(
Set<Object> routingKeysToExecute);
public void beforeDropGatewayReceiver();
public void beforeDropDiskStore();
/**
* Sets whether authentication of a connection from a member (peer or internal
* connection) was skipped.
*
* @param skipped
*/
public void memberConnectionAuthenticationSkipped(boolean skipped);
/**
* Sets whether authentication of a connection from user was skipped
*
* @param skipped
*/
public void userConnectionAuthenticationSkipped(boolean skipped);
/**
* Checks count(*) queries are converted to Region.size() messages instead of
* full table scan.
*
* @param fbt
*/
public void regionSizeOptimizationTriggered(FromBaseTable fbt,
SelectNode selectNode);
/**
* Checks count(*) queries are converted to Region.size() messages instead of
* full table scan.
* Similar to above #regionSizeOptimizationTriggered() function
* but called at different place
*/
public void regionSizeOptimizationTriggered2(SelectNode selectNode);
/**
* To test bug #47407 ... will invoke this just when it enters insert
* multiple rows when it enters GemFireContainer.insertMultipleRows
*/
public void invokeCacheCloseAtMultipleInsert();
public boolean isCacheClosedForTesting();
/**
* Invoked after global index insert is completed
*/
public void afterGlobalIndexInsert(boolean posDup);
/**
* this map is filled during index recovery so that tests can do verifications
*/
public boolean needIndexRecoveryAccounting();
/**
* setting the above map
*/
public void setIndexRecoveryAccountingMap(THashMap map);
public void beforeQueryReprepare(GenericPreparedStatement gpst,
LanguageConnectionContext lcc) throws StandardException;
/** artificially throw the exception after even successful putAll */
public boolean throwPutAllPartialException();
/**
* after a row a qualified or disqualified in the requalification phase (in
* non-txn case) by SortedMap2IndexScanController
*
* @param success
* true means requalification succeeded, false means it failed and
* null means that it was skipped with success since no change in
* value was detected
*/
public void afterIndexRowRequalification(Boolean success,
CompactCompositeIndexKey ccKey, ExecRow row, Activation activation);
public void beforeRowTrigger(LanguageConnectionContext lcc, ExecRow execRow, ExecRow newRow);
public void afterRowTrigger(TriggerDescriptor trigD, GenericParameterValueSet gpvs);
/**
* Invoked just before GlobalHashIndexDeleteOperation is fired
*/
public void beforeGlobalIndexDelete();
public void beforeDeferredUpdate();
public void beforeDeferredDelete();
public void bucketIdcalculated(int bid);
public void beforeReturningCachedVal(Serializable globalIndexKey,
Object cachedVal);
public void afterPuttingInCached(Serializable globalIndexKey,
Object result);
public void afterSingleRowInsert(Object routingObj);
public void afterQueryPlanGeneration();
public void afterLockingTableDuringImport();
public boolean testIndexRecreate();
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver11;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFActionSetQueueVer11 implements OFActionSetQueue {
private static final Logger logger = LoggerFactory.getLogger(OFActionSetQueueVer11.class);
// version: 1.1
final static byte WIRE_VERSION = 2;
final static int LENGTH = 8;
private final static long DEFAULT_QUEUE_ID = 0x0L;
// OF message fields
private final long queueId;
//
// Immutable default instance
final static OFActionSetQueueVer11 DEFAULT = new OFActionSetQueueVer11(
DEFAULT_QUEUE_ID
);
// package private constructor - used by readers, builders, and factory
OFActionSetQueueVer11(long queueId) {
this.queueId = queueId;
}
// Accessors for OF message fields
@Override
public OFActionType getType() {
return OFActionType.SET_QUEUE;
}
@Override
public long getQueueId() {
return queueId;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_11;
}
public OFActionSetQueue.Builder createBuilder() {
return new BuilderWithParent(this);
}
static class BuilderWithParent implements OFActionSetQueue.Builder {
final OFActionSetQueueVer11 parentMessage;
// OF message fields
private boolean queueIdSet;
private long queueId;
BuilderWithParent(OFActionSetQueueVer11 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public OFActionType getType() {
return OFActionType.SET_QUEUE;
}
@Override
public long getQueueId() {
return queueId;
}
@Override
public OFActionSetQueue.Builder setQueueId(long queueId) {
this.queueId = queueId;
this.queueIdSet = true;
return this;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_11;
}
@Override
public OFActionSetQueue build() {
long queueId = this.queueIdSet ? this.queueId : parentMessage.queueId;
//
return new OFActionSetQueueVer11(
queueId
);
}
}
static class Builder implements OFActionSetQueue.Builder {
// OF message fields
private boolean queueIdSet;
private long queueId;
@Override
public OFActionType getType() {
return OFActionType.SET_QUEUE;
}
@Override
public long getQueueId() {
return queueId;
}
@Override
public OFActionSetQueue.Builder setQueueId(long queueId) {
this.queueId = queueId;
this.queueIdSet = true;
return this;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_11;
}
//
@Override
public OFActionSetQueue build() {
long queueId = this.queueIdSet ? this.queueId : DEFAULT_QUEUE_ID;
return new OFActionSetQueueVer11(
queueId
);
}
}
final static Reader READER = new Reader();
static class Reader implements OFMessageReader<OFActionSetQueue> {
@Override
public OFActionSetQueue readFrom(ByteBuf bb) throws OFParseError {
int start = bb.readerIndex();
// fixed value property type == 21
short type = bb.readShort();
if(type != (short) 0x15)
throw new OFParseError("Wrong type: Expected=OFActionType.SET_QUEUE(21), got="+type);
int length = U16.f(bb.readShort());
if(length != 8)
throw new OFParseError("Wrong length: Expected=8(8), got="+length);
if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
// Buffer does not have all data yet
bb.readerIndex(start);
return null;
}
if(logger.isTraceEnabled())
logger.trace("readFrom - length={}", length);
long queueId = U32.f(bb.readInt());
OFActionSetQueueVer11 actionSetQueueVer11 = new OFActionSetQueueVer11(
queueId
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", actionSetQueueVer11);
return actionSetQueueVer11;
}
}
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFActionSetQueueVer11Funnel FUNNEL = new OFActionSetQueueVer11Funnel();
static class OFActionSetQueueVer11Funnel implements Funnel<OFActionSetQueueVer11> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFActionSetQueueVer11 message, PrimitiveSink sink) {
// fixed value property type = 21
sink.putShort((short) 0x15);
// fixed value property length = 8
sink.putShort((short) 0x8);
sink.putLong(message.queueId);
}
}
public void writeTo(ByteBuf bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
static class Writer implements OFMessageWriter<OFActionSetQueueVer11> {
@Override
public void write(ByteBuf bb, OFActionSetQueueVer11 message) {
// fixed value property type = 21
bb.writeShort((short) 0x15);
// fixed value property length = 8
bb.writeShort((short) 0x8);
bb.writeInt(U32.t(message.queueId));
}
}
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFActionSetQueueVer11(");
b.append("queueId=").append(queueId);
b.append(")");
return b.toString();
}
/**
 * Two actions are equal iff they are the exact same class and carry the
 * same queue id (strict getClass comparison, matching the generated style).
 */
@Override
public boolean equals(Object obj) {
    if (obj == this)
        return true;
    if (obj == null || obj.getClass() != getClass())
        return false;
    OFActionSetQueueVer11 that = (OFActionSetQueueVer11) obj;
    return queueId == that.queueId;
}
/**
 * Hash code over the single wire field, using the conventional 31-based
 * accumulation. The original dropped the running {@code result} term
 * ({@code prime * (int)(...)}), which left the {@code result = 1}
 * initialization as a dead store; restored to {@code prime * result + ...}.
 */
@Override
public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + (int) (queueId ^ (queueId >>> 32));
    return result;
}
}
| |
package org.apache.solr.handler;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.core.RequestParams;
import org.apache.solr.core.TestSolrConfigHandler;
import org.apache.solr.util.RESTfulServerProvider;
import org.apache.solr.util.RestTestHarness;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.util.Arrays.asList;
import static org.apache.solr.handler.TestSolrConfigHandlerCloud.compareValues;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@LuceneTestCase.BadApple(bugUrl = "https://issues.apache.org/jira/browse/SOLR-7362")
public class TestReqParamsAPI extends AbstractFullDistribZkTestBase {
  // Fixed: was mistakenly bound to TestSolrConfigHandlerCloud.class, so log
  // output from this test was attributed to the wrong class.
  static final Logger log = LoggerFactory.getLogger(TestReqParamsAPI.class);

  /** One REST harness per cloud client; populated by {@link #setupHarnesses()}. */
  private List<RestTestHarness> restTestHarnesses = new ArrayList<>();

  /** Wraps every SolrClient in the cluster with a RestTestHarness. */
  private void setupHarnesses() {
    for (final SolrClient client : clients) {
      RestTestHarness harness = new RestTestHarness(new RESTfulServerProvider() {
        @Override
        public String getBaseURL() {
          return ((HttpSolrClient) client).getBaseURL();
        }
      });
      restTestHarnesses.add(harness);
    }
  }

  @Override
  public void distribTearDown() throws Exception {
    super.distribTearDown();
    // Release the HTTP clients held by each harness.
    for (RestTestHarness r : restTestHarnesses) {
      r.close();
    }
  }

  @Test
  public void test() throws Exception {
    setupHarnesses();
    testReqParams();
  }

  /**
   * Exercises the /config/params API end to end: sets, reads, updates and
   * deletes named parameter sets, and verifies they are applied by request
   * handlers both via an explicit {@code useParams} request parameter and via
   * a handler-configured {@code useParams}.
   */
  private void testReqParams() throws Exception {
    DocCollection coll = cloudClient.getZkStateReader().getClusterState().getCollection("collection1");
    // Collect the base URL of every replica so reads can hit a random node.
    List<String> urls = new ArrayList<>();
    for (Slice slice : coll.getSlices()) {
      for (Replica replica : slice.getReplicas())
        urls.add("" + replica.get(ZkStateReader.BASE_URL_PROP) + "/" + replica.get(ZkStateReader.CORE_NAME_PROP));
    }
    RestTestHarness writeHarness = restTestHarnesses.get(random().nextInt(restTestHarnesses.size()));

    // Create paramset 'x' and verify it is visible from a random replica.
    String payload = " {\n" +
        "  'set' : {'x': {" +
        "                    'a':'A val',\n" +
        "                    'b': 'B val'}\n" +
        "             }\n" +
        "  }";
    TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params?wt=json", payload);
    Map result = TestSolrConfigHandler.testForResponseElement(null,
        urls.get(random().nextInt(urls.size())),
        "/config/params?wt=json",
        cloudClient,
        asList("response", "params", "x", "a"),
        "A val",
        10);
    compareValues(result, "B val", asList("response", "params", "x", "b"));

    // Register a /dump handler and verify the overlay reflects it.
    payload = "{\n" +
        "'create-requesthandler' : { 'name' : '/dump', 'class': 'org.apache.solr.handler.DumpRequestHandler' }\n" +
        "}";
    TestSolrConfigHandler.runConfigCommand(writeHarness, "/config?wt=json", payload);
    TestSolrConfigHandler.testForResponseElement(null,
        urls.get(random().nextInt(urls.size())),
        "/config/overlay?wt=json",
        cloudClient,
        asList("overlay", "requestHandler", "/dump", "name"),
        "/dump",
        10);

    // Params from 'x' are applied when requested via useParams=x ...
    result = TestSolrConfigHandler.testForResponseElement(null,
        urls.get(random().nextInt(urls.size())),
        "/dump?wt=json&useParams=x",
        cloudClient,
        asList("params", "a"),
        "A val",
        5);
    compareValues(result, "", asList("params", RequestParams.USEPARAM));

    // ... and explicit request parameters override the paramset value.
    TestSolrConfigHandler.testForResponseElement(null,
        urls.get(random().nextInt(urls.size())),
        "/dump?wt=json&useParams=x&a=fomrequest",
        cloudClient,
        asList("params", "a"),
        "fomrequest",
        5);

    // A handler configured with useParams:'x' picks the paramset up implicitly.
    payload = "{\n" +
        "'create-requesthandler' : { 'name' : '/dump1', 'class': 'org.apache.solr.handler.DumpRequestHandler', 'useParams':'x' }\n" +
        "}";
    TestSolrConfigHandler.runConfigCommand(writeHarness, "/config?wt=json", payload);
    result = TestSolrConfigHandler.testForResponseElement(null,
        urls.get(random().nextInt(urls.size())),
        "/config/overlay?wt=json",
        cloudClient,
        asList("overlay", "requestHandler", "/dump1", "name"),
        "/dump1",
        10);
    result = TestSolrConfigHandler.testForResponseElement(null,
        urls.get(random().nextInt(urls.size())),
        "/dump1?wt=json",
        cloudClient,
        asList("params", "a"),
        "A val",
        5);

    // Create paramset 'y' (possibly through a different node) with scalar,
    // numeric and multi-valued entries.
    writeHarness = restTestHarnesses.get(random().nextInt(restTestHarnesses.size()));
    payload = " {\n" +
        "  'set' : {'y':{\n" +
        "                'c':'CY val',\n" +
        "                'b': 'BY val', " +
        "                'i': 20, " +
        "                'd': ['val 1', 'val 2']}\n" +
        "             }\n" +
        "  }";
    TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params?wt=json", payload);
    result = TestSolrConfigHandler.testForResponseElement(
        null,
        urls.get(random().nextInt(urls.size())),
        "/config/params?wt=json",
        cloudClient,
        asList("response", "params", "y", "c"),
        "CY val",
        10);
    compareValues(result, 20L, asList("response", "params", "y", "i"));
    result = TestSolrConfigHandler.testForResponseElement(null,
        urls.get(random().nextInt(urls.size())),
        "/dump?wt=json&useParams=y",
        cloudClient,
        asList("params", "c"),
        "CY val",
        5);
    compareValues(result, "BY val", asList("params", "b"));
    compareValues(result, null, asList("params", "a"));
    compareValues(result, Arrays.asList("val 1", "val 2"), asList("params", "d"));
    compareValues(result, "20", asList("params", "i"));

    // 'update' merges into the existing paramset.
    payload = " {\n" +
        "  'update' : {'y': {\n" +
        "                'c':'CY val modified',\n" +
        "                'e':'EY val',\n" +
        "                'b': 'BY val'" +
        "}\n" +
        "             }\n" +
        "  }";
    TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params?wt=json", payload);
    result = TestSolrConfigHandler.testForResponseElement(
        null,
        urls.get(random().nextInt(urls.size())),
        "/config/params?wt=json",
        cloudClient,
        asList("response", "params", "y", "c"),
        "CY val modified",
        10);
    compareValues(result, "EY val", asList("response", "params", "y", "e"));

    // 'set' replaces the whole paramset: 'c' must be gone afterwards.
    payload = " {\n" +
        "  'set' : {'y': {\n" +
        "                'p':'P val',\n" +
        "                'q': 'Q val'" +
        "}\n" +
        "             }\n" +
        "  }";
    TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params?wt=json", payload);
    result = TestSolrConfigHandler.testForResponseElement(
        null,
        urls.get(random().nextInt(urls.size())),
        "/config/params?wt=json",
        cloudClient,
        asList("response", "params", "y", "p"),
        "P val",
        10);
    compareValues(result, null, asList("response", "params", "y", "c"));

    // 'delete' removes the paramset entirely.
    payload = " {'delete' : 'y'}";
    TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params?wt=json", payload);
    TestSolrConfigHandler.testForResponseElement(
        null,
        urls.get(random().nextInt(urls.size())),
        "/config/params?wt=json",
        cloudClient,
        asList("response", "params", "y", "p"),
        null,
        10);
  }
}
| |
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.depgraph.provider;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertTrue;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLDecoder;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.threeten.bp.Instant;
import org.threeten.bp.LocalDate;
import com.google.common.base.Throwables;
import com.opengamma.engine.ComputationTargetSpecification;
import com.opengamma.engine.marketdata.spec.MarketData;
import com.opengamma.engine.marketdata.spec.MarketDataSpecification;
import com.opengamma.engine.marketdata.spec.UserMarketDataSpecification;
import com.opengamma.engine.target.ComputationTargetRequirement;
import com.opengamma.engine.target.ComputationTargetType;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.financial.depgraph.rest.DependencyGraphTraceBuilderProperties;
import com.opengamma.id.ExternalId;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.util.test.TestGroup;
/**
 * Unit tests for {@code RemoteDependencyGraphTraceProvider}, verifying that
 * trace-builder properties are correctly encoded into the request URI.
 */
@Test(groups = TestGroup.UNIT)
public class RemoteDependencyGraphTraceProviderTest {

  private RemoteDependencyGraphTraceProvider _provider;
  private final String _baseUrl = "http://host.com/";

  @BeforeMethod
  public void beforeMethod() throws URISyntaxException {
    _provider = new RemoteDependencyGraphTraceProvider(new URI(_baseUrl));
  }

  /** Builds the URI for the given properties and returns it URL-decoded. */
  private String buildAndDecode(final DependencyGraphTraceBuilderProperties props) {
    return decode(_provider.buildUri(props));
  }

  @Test
  public void getTraceDefaults() {
    final String decoded = buildAndDecode(new DependencyGraphTraceBuilderProperties());
    // assert default values are there
    assertTrue(decoded.contains("calculationConfigurationName/Default"));
    assertTrue(decoded.contains("defaultProperties/EMPTY"));
    assertTrue(decoded.contains("resolutionTime/VLATEST.CLATEST"));
  }

  @Test
  public void getTraceCalculationConfigurationName() {
    DependencyGraphTraceBuilderProperties props = new DependencyGraphTraceBuilderProperties();
    props = props.calculationConfigurationName("test");
    assertTrue(buildAndDecode(props).contains("calculationConfigurationName/test"));
  }

  @Test
  public void getTraceDefaultProperties() {
    DependencyGraphTraceBuilderProperties props = new DependencyGraphTraceBuilderProperties();
    final String defaultPropertiesStr1 = "{A=[foo,bar],B=[*]}";
    final String defaultPropertiesStr2 = "{A=[bar,foo],B=[*]}";
    props = props.defaultProperties(ValueProperties.parse(defaultPropertiesStr1));
    final String decoded = buildAndDecode(props);
    // Value ordering inside a property is unspecified, so accept either rendering.
    assertTrue(decoded.contains("defaultProperties/" + defaultPropertiesStr1)
        || decoded.contains("defaultProperties/" + defaultPropertiesStr2));
  }

  @Test
  public void getTraceMarketDataUser() {
    DependencyGraphTraceBuilderProperties props = new DependencyGraphTraceBuilderProperties();
    final String snapshotId = "Foo~1";
    final UserMarketDataSpecification marketData = MarketData.user(UniqueId.parse(snapshotId));
    props = props.addMarketData(marketData);
    assertTrue(buildAndDecode(props).contains("marketDataSnapshot/" + snapshotId));
  }

  @Test
  public void getTraceMarketDataLiveDefault() {
    DependencyGraphTraceBuilderProperties props = new DependencyGraphTraceBuilderProperties();
    final MarketDataSpecification marketData = MarketData.live();
    props = props.addMarketData(marketData);
    assertTrue(buildAndDecode(props).contains("marketDataLiveDefault"));
  }

  @Test
  public void getTraceMarketDataLive() {
    DependencyGraphTraceBuilderProperties props = new DependencyGraphTraceBuilderProperties();
    final MarketDataSpecification marketData = MarketData.live("BB");
    props = props.addMarketData(marketData);
    assertTrue(buildAndDecode(props).contains("marketDataLive/BB"));
  }

  @Test
  public void getTraceMarketDataHistorical() {
    DependencyGraphTraceBuilderProperties props = new DependencyGraphTraceBuilderProperties();
    final LocalDate now = LocalDate.now();
    final MarketDataSpecification marketData = MarketData.historical(now, "timeseries");
    props = props.addMarketData(marketData);
    assertTrue(buildAndDecode(props).contains("marketDataHistorical/" + now.toString() + "/timeseries"));
    assertEquals(now, LocalDate.parse(now.toString()));
  }

  @Test
  public void getTraceResolutionTime() {
    DependencyGraphTraceBuilderProperties props = new DependencyGraphTraceBuilderProperties();
    final String rtStr = "V1970-01-01T00:00:01Z.CLATEST";
    props = props.resolutionTime(VersionCorrection.parse(rtStr));
    assertTrue(buildAndDecode(props).contains("resolutionTime/" + rtStr));
  }

  @Test
  public void getTraceValuationTime() {
    DependencyGraphTraceBuilderProperties props = new DependencyGraphTraceBuilderProperties();
    final String instantStr = "2013-06-24T12:18:01.094Z";
    props = props.valuationTime(Instant.parse(instantStr));
    assertTrue(buildAndDecode(props).contains("valuationTime/" + instantStr));
  }

  @Test
  public void uriValueRequirementByExternalId() throws UnsupportedEncodingException {
    DependencyGraphTraceBuilderProperties props = new DependencyGraphTraceBuilderProperties();
    final String valueName = "test1";
    final String targetType = "POSITION";
    final String idStr = "GOLDMAN~Foo1";
    final ExternalId id = ExternalId.parse(idStr);
    props = props.addRequirement(
        new ValueRequirement(valueName, new ComputationTargetRequirement(ComputationTargetType.POSITION, id)));
    assertTrue(buildAndDecode(props).contains("requirement/" + valueName + "/" + targetType + "/" + idStr));
  }

  @Test
  public void uriValueRequirementByUniqueId() throws UnsupportedEncodingException {
    DependencyGraphTraceBuilderProperties props = new DependencyGraphTraceBuilderProperties();
    final String valueName = "test1";
    final String targetType = "POSITION";
    final String idStr = "GOLDMAN~Foo1";
    final UniqueId id = UniqueId.parse(idStr);
    props = props.addRequirement(
        new ValueRequirement(valueName, new ComputationTargetSpecification(ComputationTargetType.POSITION, id)));
    assertTrue(buildAndDecode(props).contains("value/" + valueName + "/" + targetType + "/" + idStr));
  }

  /** URL-decodes the URI as UTF-8; the encoding failure cannot happen and is rethrown unchecked. */
  private String decode(final URI uriDefaultProperties) {
    try {
      return URLDecoder.decode(uriDefaultProperties.toString(), "UTF-8");
    } catch (final UnsupportedEncodingException ex) {
      throw Throwables.propagate(ex);
    }
  }
}
| |
/*
* ***** BEGIN LICENSE BLOCK *****
* Zimbra Collaboration Suite Server
* Copyright (C) 2010, 2011, 2012 Zimbra, Inc.
*
* The contents of this file are subject to the Zimbra Public License
* Version 1.3 ("License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://www.zimbra.com/license.
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied.
* ***** END LICENSE BLOCK *****
*/
package com.zimbra.cs.account;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import com.google.common.collect.Maps;
import com.zimbra.common.account.Key;
import com.zimbra.common.account.Key.AccountBy;
import com.zimbra.common.account.Key.ShareLocatorBy;
import com.zimbra.common.account.Key.UCServiceBy;
import com.zimbra.common.account.ProvisioningConstants;
import com.zimbra.common.mime.MimeConstants;
import com.zimbra.common.service.ServiceException;
import com.zimbra.cs.account.NamedEntry.Visitor;
import com.zimbra.cs.account.auth.AuthContext;
import com.zimbra.cs.account.auth.AuthContext.Protocol;
import com.zimbra.cs.mime.MimeTypeInfo;
import com.zimbra.cs.mime.MockMimeTypeInfo;
import com.zimbra.cs.mime.handler.MessageRFC822Handler;
import com.zimbra.cs.mime.handler.TextCalendarHandler;
import com.zimbra.cs.mime.handler.TextHtmlHandler;
import com.zimbra.cs.mime.handler.TextPlainHandler;
import com.zimbra.cs.mime.handler.UnknownTypeHandler;
import com.zimbra.cs.redolog.MockRedoLogProvider;
import com.zimbra.soap.admin.type.CacheEntryType;
import com.zimbra.soap.admin.type.DataSourceType;
/**
* Mock implementation of {@link Provisioning} for testing.
*
* @author ysasaki
*/
public final class MockProvisioning extends Provisioning {
    // Fixed account id used when the caller does not supply A_zimbraId.
    public static final String DEFAULT_ACCOUNT_ID = new UUID(0L, 0L).toString();

    // In-memory indexes; accounts are kept in two maps so they can be
    // looked up either by id or by name.
    private final Map<String, Account> id2account = Maps.newHashMap();
    private final Map<String, Account> name2account = Maps.newHashMap();
    private final Map<String, Domain> id2domain = Maps.newHashMap();
    private final Map<String, Cos> id2cos = Maps.newHashMap();
    // MIME type name -> handler configurations (populated by initializeMimeHandlers()).
    private final Map<String, List<MimeTypeInfo>> mimeConfig = Maps.newHashMap();
    private final Config config = new Config(new HashMap<String, Object>(), this);
    private final Map<String, ShareLocator> shareLocators = Maps.newHashMap();
    // The single server this mock knows about.
    private final Server localhost;

    /** Builds the mock with one "localhost" server and the default MIME handlers. */
    public MockProvisioning() {
        Map<String, Object> attrs = new HashMap<String, Object>();
        attrs.put(A_zimbraServiceHostname, "localhost");
        attrs.put(A_zimbraRedoLogProvider, MockRedoLogProvider.class.getName());
        attrs.put(A_zimbraId, UUID.randomUUID().toString());
        attrs.put(A_zimbraMailMode, MailMode.http.toString());
        attrs.put(A_zimbraSmtpPort, "7025");
        localhost = new Server("localhost", "localhost", attrs, Collections.<String, Object>emptyMap(), this);
        initializeMimeHandlers();
    }
/**
 * Creates an in-memory account, defaulting the id, mail host, status and
 * dumpster attributes when absent. Indexing is suppressed by forcing a huge
 * batched-indexing size. Note: the CREATE_ACCOUNT_SUCCEEDED validator runs
 * in a finally block, i.e. even if registering the account throws.
 */
@Override
public Account createAccount(String email, String password, Map<String, Object> attrs) throws ServiceException {
    validate(ProvisioningValidator.CREATE_ACCOUNT, email, null, attrs);
    if (!attrs.containsKey(A_zimbraId)) {
        attrs.put(A_zimbraId, DEFAULT_ACCOUNT_ID);
    }
    if (!attrs.containsKey(A_zimbraMailHost)) {
        attrs.put(A_zimbraMailHost, "localhost");
    }
    if (!attrs.containsKey(A_zimbraAccountStatus)) {
        attrs.put(A_zimbraAccountStatus, ACCOUNT_STATUS_ACTIVE);
    }
    if (!attrs.containsKey(A_zimbraDumpsterEnabled)) {
        attrs.put(A_zimbraDumpsterEnabled, TRUE);
    }
    attrs.put(A_zimbraBatchedIndexingSize, Integer.MAX_VALUE); // suppress indexing
    Account account = new Account(email, email, attrs, null, this);
    try {
        name2account.put(email, account);
        id2account.put(account.getId(), account);
        return account;
    } finally {
        validate(ProvisioningValidator.CREATE_ACCOUNT_SUCCEEDED, email, account);
    }
}
/**
 * Looks an account up by name or id. Any key type other than {@code name}
 * resolves through the id index, matching the original switch's default arm.
 */
@Override
public Account get(AccountBy keyType, String key) {
    if (keyType == AccountBy.name) {
        return name2account.get(key);
    }
    return id2account.get(key);
}
/**
 * Returns the configured handlers for the given MIME type, or a single
 * "unknown type" handler when nothing is registered for it.
 */
@Override
public List<MimeTypeInfo> getMimeTypes(String mime) {
    List<MimeTypeInfo> configured = mimeConfig.get(mime);
    if (configured == null) {
        MockMimeTypeInfo fallback = new MockMimeTypeInfo();
        fallback.setHandlerClass(UnknownTypeHandler.class.getName());
        configured = Collections.<MimeTypeInfo>singletonList(fallback);
    }
    return configured;
}
/** Flattens every registered handler list into a single result list. */
@Override
public List<MimeTypeInfo> getAllMimeTypes() {
    List<MimeTypeInfo> all = new ArrayList<MimeTypeInfo>();
    for (Map.Entry<String, List<MimeTypeInfo>> e : mimeConfig.entrySet()) {
        all.addAll(e.getValue());
    }
    return all;
}
/** Registers a handler for the given MIME type, creating its bucket on first use. */
public void addMimeType(String mime, MimeTypeInfo info) {
    List<MimeTypeInfo> bucket = mimeConfig.get(mime);
    if (bucket == null) {
        bucket = new ArrayList<MimeTypeInfo>();
        mimeConfig.put(mime, bucket);
    }
    bucket.add(info);
}
/**
 * Registers the default MIME handlers (plain text, HTML, calendar, RFC 822).
 * The original repeated the same four-line stanza per type; the registration
 * is factored into a varargs helper so each handler is one line.
 */
private void initializeMimeHandlers() {
    registerMimeHandler(MimeConstants.CT_TEXT_PLAIN, TextPlainHandler.class.getName());
    registerMimeHandler(MimeConstants.CT_TEXT_HTML, TextHtmlHandler.class.getName(), "html", "htm");
    registerMimeHandler(MimeConstants.CT_TEXT_CALENDAR, TextCalendarHandler.class.getName());
    registerMimeHandler(MimeConstants.CT_MESSAGE_RFC822, MessageRFC822Handler.class.getName());
}

/**
 * Builds an indexing-enabled MockMimeTypeInfo for the given type/handler pair
 * (with optional file extensions) and registers it via addMimeType.
 */
private void registerMimeHandler(String mimeType, String handlerClass, String... extensions) {
    MockMimeTypeInfo info = new MockMimeTypeInfo();
    info.setMimeTypes(mimeType);
    info.setHandlerClass(handlerClass);
    info.setIndexingEnabled(true);
    if (extensions.length > 0) {
        info.setFileExtensions(extensions);
    }
    addMimeType(mimeType, info);
}
/** Removes all registered MIME handlers (tests re-register what they need). */
public void clearMimeHandlers() {
    mimeConfig.clear();
}

/** Returns the single mutable in-memory global config created at construction. */
@Override
public Config getConfig() {
    return config;
}
/**
 * Applies attribute changes directly to the entry's backing map: a null
 * value removes the attribute, a List value is flattened to a String array,
 * and {@code checkImmutable} is ignored by this mock.
 */
@Override
public void modifyAttrs(Entry entry, Map<String, ? extends Object> attrs, boolean checkImmutable) {
    Map<String, Object> backing = entry.getAttrs(false);
    for (Map.Entry<String, ? extends Object> attr : attrs.entrySet()) {
        String name = attr.getKey();
        Object value = attr.getValue();
        if (value == null) {
            backing.remove(name);
            continue;
        }
        if (value instanceof List) { // Convert list to string array.
            List<?> list = (List<?>) value;
            String[] strArray = new String[list.size()];
            int i = 0;
            for (Object item : list) {
                strArray[i++] = item.toString();
            }
            value = strArray;
        }
        backing.put(name, value);
    }
}
/** Returns the single in-memory "localhost" server built in the constructor. */
@Override
public Server getLocalServer() {
    return localhost;
}

// ---------------------------------------------------------------------
// The methods below are either trivial or intentionally unimplemented:
// tests that need them must use a fuller Provisioning implementation.
// ---------------------------------------------------------------------

@Override
public void modifyAttrs(Entry e, Map<String, ? extends Object> attrs,
        boolean checkImmutable, boolean allowCallback) {
    throw new UnsupportedOperationException();
}

/** No-op: mock entries are always considered fresh. */
@Override
public void reload(Entry e) {
}

@Override
public boolean inDistributionList(Account acct, String zimbraId) {
    throw new UnsupportedOperationException();
}

@Override
public Set<String> getDistributionLists(Account acct) {
    throw new UnsupportedOperationException();
}

@Override
public Set<String> getDirectDistributionLists(Account acct)
        throws ServiceException {
    throw new UnsupportedOperationException();
}

@Override
public List<DistributionList> getDistributionLists(Account acct,
        boolean directOnly, Map<String, String> via) {
    throw new UnsupportedOperationException();
}

@Override
public List<DistributionList> getDistributionLists(DistributionList list,
        boolean directOnly, Map<String, String> via) {
    throw new UnsupportedOperationException();
}

@Override
public boolean healthCheck() {
    throw new UnsupportedOperationException();
}

@Override
public GlobalGrant getGlobalGrant() {
    throw new UnsupportedOperationException();
}

@Override
public Account restoreAccount(String emailAddress, String password,
        Map<String, Object> attrs, Map<String, Object> origAttrs) {
    throw new UnsupportedOperationException();
}

/** Removes the account from both the id index and the name index. */
@Override
public void deleteAccount(String zimbraId) {
    Account account = id2account.remove(zimbraId);
    if (account != null) {
        name2account.remove(account.getName());
    }
}

@Override
public void renameAccount(String zimbraId, String newName) {
    throw new UnsupportedOperationException();
}

@Override
public List<Account> getAllAdminAccounts() {
    throw new UnsupportedOperationException();
}

@Override
public void setCOS(Account acct, Cos cos) {
    throw new UnsupportedOperationException();
}

@Override
public void modifyAccountStatus(Account acct, String newStatus) {
    throw new UnsupportedOperationException();
}

@Override
public void authAccount(Account acct, String password, Protocol proto) {
    throw new UnsupportedOperationException();
}

@Override
public void authAccount(Account acct, String password, Protocol proto, Map<String, Object> authCtxt) {
    throw new UnsupportedOperationException();
}

@Override
public void preAuthAccount(Account acct, String accountName, String accountBy, long timestamp, long expires,
        String preAuth, Map<String, Object> authCtxt) {
    throw new UnsupportedOperationException();
}

@Override
public void ssoAuthAccount(Account acct, AuthContext.Protocol proto, Map<String, Object> authCtxt) {
    throw new UnsupportedOperationException();
}

@Override
public void changePassword(Account acct, String currentPassword, String newPassword) {
    throw new UnsupportedOperationException();
}

@Override
public SetPasswordResult setPassword(Account acct, String newPassword) {
    throw new UnsupportedOperationException();
}

@Override
public void checkPasswordStrength(Account acct, String password) {
    throw new UnsupportedOperationException();
}

@Override
public void addAlias(Account acct, String alias) {
    throw new UnsupportedOperationException();
}

@Override
public void removeAlias(Account acct, String alias) {
    throw new UnsupportedOperationException();
}
/**
 * Creates an in-memory domain. The name is normalized to lower case, a
 * random id is generated when none is supplied, and the SMTP host defaults
 * to localhost. Throws DOMAIN_EXISTS for a duplicate name.
 */
@Override
public Domain createDomain(String name, Map<String, Object> attrs) throws ServiceException {
    String normalized = name.trim().toLowerCase();
    if (get(Key.DomainBy.name, normalized) != null) {
        throw AccountServiceException.DOMAIN_EXISTS(normalized);
    }
    String id = (String) attrs.get(A_zimbraId);
    if (id == null) {
        id = UUID.randomUUID().toString();
        attrs.put(A_zimbraId, id);
    }
    if (!attrs.containsKey(A_zimbraSmtpHostname)) {
        attrs.put(A_zimbraSmtpHostname, "localhost");
    }
    Domain domain = new Domain(normalized, id, attrs, null, this);
    id2domain.put(id, domain);
    return domain;
}
/**
 * Resolves a domain by id (direct index lookup) or by name (linear scan over
 * the id index). Other key types yield null, as in the original switch.
 */
@Override
public Domain get(Key.DomainBy keyType, String key) {
    if (keyType == Key.DomainBy.id) {
        return id2domain.get(key);
    }
    if (keyType == Key.DomainBy.name) {
        for (Domain domain : id2domain.values()) {
            if (domain.getName().equals(key)) {
                return domain;
            }
        }
    }
    return null;
}
/** Returns a mutable snapshot of all known domains. */
@Override
public List<Domain> getAllDomains() {
    return new ArrayList<Domain>(id2domain.values());
}

/** Drops the domain from the id index; unknown ids are silently ignored. */
@Override
public void deleteDomain(String zimbraId) {
    id2domain.remove(zimbraId);
}
/**
 * Creates an in-memory COS: name is lower-cased, a random id is generated
 * when absent, and a duplicate name raises COS_EXISTS.
 */
@Override
public Cos createCos(String name, Map<String, Object> attrs) throws ServiceException {
    name = name.trim().toLowerCase();
    if (get(Key.CosBy.name, name) != null) {
        throw AccountServiceException.COS_EXISTS(name);
    }
    String id = (String) attrs.get(A_zimbraId);
    if (id == null) {
        attrs.put(A_zimbraId, id = UUID.randomUUID().toString());
    }
    Cos cos = new Cos(name, id, attrs, this);
    id2cos.put(id, cos);
    return cos;
}

@Override
public Cos copyCos(String srcCosId, String destCosName) {
    throw new UnsupportedOperationException();
}

@Override
public void renameCos(String zimbraId, String newName) {
    throw new UnsupportedOperationException();
}

/** Resolves a COS by id (index lookup) or by name (linear scan); otherwise null. */
@Override
public Cos get(Key.CosBy keyType, String key) {
    switch (keyType) {
    case id:
        return id2cos.get(key);
    case name:
        for (Cos cos : id2cos.values()) {
            if (cos.getName().equals(key)) {
                return cos;
            }
        }
        break;
    }
    return null;
}

@Override
public List<Cos> getAllCos() {
    throw new UnsupportedOperationException();
}

@Override
public void deleteCos(String zimbraId) {
    throw new UnsupportedOperationException();
}
@Override
public Server createServer(String name, Map<String, Object> attrs) {
    throw new UnsupportedOperationException();
}

/** Only "localhost" exists; id/name lookups match it or return null. */
@Override
public Server get(Key.ServerBy keyName, String key) {
    switch (keyName) {
    case id:
        return localhost.getId().equals(key) ? localhost : null;
    case name:
        return localhost.getName().equals(key) ? localhost : null;
    default:
        throw new UnsupportedOperationException();
    }
}

/** Returns the single-element server list containing localhost. */
@Override
public List<Server> getAllServers() {
    return Arrays.asList(localhost);
}

@Override
public List<Server> getAllServers(String service) {
    throw new UnsupportedOperationException();
}

@Override
public void deleteServer(String zimbraId) {
    throw new UnsupportedOperationException();
}
// ---------------------------------------------------------------------
// Distribution-list, zimlet and calendar-resource operations: mostly
// unimplemented; listAllZimlets returns an empty list so callers that
// merely iterate do not fail.
// ---------------------------------------------------------------------

@Override
public DistributionList createDistributionList(String listAddress, Map<String, Object> listAttrs) {
    throw new UnsupportedOperationException();
}

@Override
public DistributionList get(Key.DistributionListBy keyType, String key) {
    throw new UnsupportedOperationException();
}

@Override
public void deleteDistributionList(String zimbraId) {
    throw new UnsupportedOperationException();
}

@Override
public void addAlias(DistributionList dl, String alias) {
    throw new UnsupportedOperationException();
}

@Override
public void removeAlias(DistributionList dl, String alias) {
    throw new UnsupportedOperationException();
}

@Override
public void renameDistributionList(String zimbraId, String newName) {
    throw new UnsupportedOperationException();
}

@Override
public Zimlet getZimlet(String name) {
    throw new UnsupportedOperationException();
}

/** No zimlets are registered in the mock. */
@Override
public List<Zimlet> listAllZimlets() {
    return Collections.emptyList();
}

@Override
public Zimlet createZimlet(String name, Map<String, Object> attrs) {
    throw new UnsupportedOperationException();
}

@Override
public void deleteZimlet(String name) {
    throw new UnsupportedOperationException();
}

@Override
public CalendarResource createCalendarResource(String emailAddress, String password, Map<String, Object> attrs) {
    throw new UnsupportedOperationException();
}

@Override
public void deleteCalendarResource(String zimbraId) {
    throw new UnsupportedOperationException();
}

@Override
public void renameCalendarResource(String zimbraId, String newName) {
    throw new UnsupportedOperationException();
}

@Override
public CalendarResource get(Key.CalendarResourceBy keyType, String key) {
    throw new UnsupportedOperationException();
}

@Override
public List<?> getAllAccounts(Domain d) {
    throw new UnsupportedOperationException();
}

@Override
public void getAllAccounts(Domain d, Visitor visitor) {
    throw new UnsupportedOperationException();
}

@Override
public void getAllAccounts(Domain d, Server s, Visitor visitor) {
    throw new UnsupportedOperationException();
}

@Override
public List<?> getAllCalendarResources(Domain d) {
    throw new UnsupportedOperationException();
}

@Override
public void getAllCalendarResources(Domain d, Visitor visitor) {
    throw new UnsupportedOperationException();
}

@Override
public void getAllCalendarResources(Domain d, Server s, Visitor visitor) {
    throw new UnsupportedOperationException();
}

@Override
public List<?> getAllDistributionLists(Domain d) {
    throw new UnsupportedOperationException();
}

@Override
public void addMembers(DistributionList list, String[] members) {
    throw new UnsupportedOperationException();
}

@Override
public void removeMembers(DistributionList list, String[] member) {
    throw new UnsupportedOperationException();
}
/**
 * Builds a synthetic default identity on the fly: it carries the standard
 * default identity name and reuses the account's id as the identity id.
 */
@Override
public Identity getDefaultIdentity(Account account) {
    String accountId = account.getId();
    Map<String, Object> identityAttrs = new HashMap<String, Object>();
    identityAttrs.put(A_zimbraPrefIdentityName, ProvisioningConstants.DEFAULT_IDENTITY_NAME);
    identityAttrs.put(A_zimbraPrefIdentityId, accountId);
    return new Identity(account, ProvisioningConstants.DEFAULT_IDENTITY_NAME, accountId, identityAttrs, this);
}
@Override
public Identity createIdentity(Account account, String identityName, Map<String, Object> attrs) {
throw new UnsupportedOperationException();
}
@Override
public Identity restoreIdentity(Account account, String identityName, Map<String, Object> attrs) {
throw new UnsupportedOperationException();
}
@Override
public void modifyIdentity(Account account, String identityName, Map<String, Object> attrs) {
throw new UnsupportedOperationException();
}
@Override
public void deleteIdentity(Account account, String identityName) {
throw new UnsupportedOperationException();
}
@Override
public List<Identity> getAllIdentities(Account account) {
throw new UnsupportedOperationException();
}
@Override
public Identity get(Account account, Key.IdentityBy keyType, String key) {
throw new UnsupportedOperationException();
}
@Override
public Signature createSignature(Account account, String signatureName, Map<String, Object> attrs) {
throw new UnsupportedOperationException();
}
@Override
public Signature restoreSignature(Account account, String signatureName, Map<String, Object> attrs) {
throw new UnsupportedOperationException();
}
@Override
public void modifySignature(Account account, String signatureId, Map<String, Object> attrs) {
throw new UnsupportedOperationException();
}
@Override
public void deleteSignature(Account account, String signatureId) {
throw new UnsupportedOperationException();
}
@Override
public List<Signature> getAllSignatures(Account account) {
    // Not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
@Override
public Signature get(Account account, Key.SignatureBy keyType, String key) {
    // Signature lookup is not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
@Override
public DataSource createDataSource(Account account, DataSourceType type, String dataSourceName, Map<String, Object> attrs) {
    // Not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
@Override
public DataSource createDataSource(Account account, DataSourceType type, String dataSourceName, Map<String, Object> attrs,
        boolean passwdAlreadyEncrypted) {
    // Not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
@Override
public DataSource restoreDataSource(Account account, DataSourceType type, String dataSourceName, Map<String, Object> attrs) {
    // Not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
@Override
public void modifyDataSource(Account account, String dataSourceId,
        Map<String, Object> attrs) {
    // Not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
@Override
public void deleteDataSource(Account account, String dataSourceId) {
    // Not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
@Override
public List<DataSource> getAllDataSources(Account account) {
    // Don't throw UnsupportedOperationException because Mailbox.updateRssDataSource()
    // calls this method; an empty (immutable) list is a safe no-op answer here.
    return Collections.emptyList();
}
@Override
public DataSource get(Account account, Key.DataSourceBy keyType, String key) {
    // Data-source lookup is not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
@Override
public XMPPComponent createXMPPComponent(String name, Domain domain, Server server, Map<String, Object> attrs) {
    // Not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
@Override
public XMPPComponent get(Key.XMPPComponentBy keyName, String key) {
    // XMPP component lookup is not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
@Override
public List<XMPPComponent> getAllXMPPComponents() {
    // Not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
@Override
public void deleteXMPPComponent(XMPPComponent comp) {
    // Not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
@Override
public void flushCache(CacheEntryType type, CacheEntry[] entries) {
    // Not supported: this mock keeps no cache to flush.
    throw new UnsupportedOperationException();
}
@Override
public ShareLocator get(ShareLocatorBy keyType, String key) throws ServiceException {
    // Lookup is by key only; keyType is not used by this in-memory map.
    return shareLocators.get(key);
}
@Override
public ShareLocator createShareLocator(String id, Map<String, Object> attrs) throws ServiceException {
    // Register a fresh in-memory share locator under its id and hand it back.
    final ShareLocator locator = new ShareLocator(id, attrs, this);
    shareLocators.put(id, locator);
    return locator;
}
@Override
public void deleteShareLocator(String id) throws ServiceException {
    // Deleting an unknown id is a silent no-op (Map.remove semantics).
    shareLocators.remove(id);
}
@Override
public UCService createUCService(String name, Map<String, Object> attrs)
        throws ServiceException {
    // Not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
@Override
public void deleteUCService(String zimbraId) throws ServiceException {
    // Not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
@Override
public UCService get(UCServiceBy keyName, String key) throws ServiceException {
    // UC service lookup is not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
@Override
public List<UCService> getAllUCServices() throws ServiceException {
    // Not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
@Override
public void renameUCService(String zimbraId, String newName) throws ServiceException {
    // Not supported by this mock implementation.
    throw new UnsupportedOperationException();
}
}
| |
/**
* Copyright (c) 2014,
* Charles Prud'homme (TASC, INRIA Rennes, LINA CNRS UMR 6241),
* Jean-Guillaume Fages (COSLING S.A.S.).
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the <organization> nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.chocosolver.memory.structure;
import org.chocosolver.memory.EnvironmentException;
import org.chocosolver.memory.IEnvironment;
import org.chocosolver.memory.IStateInt;
import org.chocosolver.memory.IStateIntVector;
/**
 * A stored (backtrackable) list dedicated to positive integers and three operations :
 * - iteration
 * - removal of an element
 * - check if an element is or not within the list
 * It only requires a stored int to denote the index of the last live element of the list,
 * and proceeds by swapping an element with the last live one to remove it and decrementing
 * that index.
 * IT DOES NOT PRESERVE THE ORDER OF THE LIST
 */
public class IndexedBipartiteSet extends IStateIntVector {

    /**
     * The list of values: live elements occupy slots 0..last, removed elements sit after last.
     */
    protected int[] list;

    /**
     * The position of each element within the list.
     * position[3] = k <=> list[k] = 3
     * we assume that elements range from 0 ... list.length
     * in other words the elements must be indexed.
     */
    protected int[] position;

    /**
     * Backtrackable index of the last live element of the list (-1 when the set is empty).
     */
    protected IStateInt last;

    /**
     * (Re)initializes the structure so that it contains exactly the given values.
     *
     * @param environment backtrackable environment used to create the stored index
     * @param values      the initial content (becomes the internal array, not copied)
     */
    public void buildList(final IEnvironment environment, final int[] values) {
        this.list = values;
        // Size the position array from the largest value so it can be indexed by value.
        int maxElt = 0;
        for (int i = 0; i < values.length; i++) {
            if (values[i] > maxElt) {
                maxElt = values[i];
            }
        }
        this.position = new int[maxElt + 1];
        for (int i = 0; i < values.length; i++) {
            position[values[i]] = i;
        }
        // Every value is initially present: 'last' points at the final slot.
        this.last = environment.makeInt(list.length - 1);
    }

    /**
     * Create a stored bipartite set with a size.
     * Thus the values stored will go from 0 to nbValues - 1.
     *
     * @param environment backtrackable environment
     * @param nbValues capacity
     */
    public IndexedBipartiteSet(final IEnvironment environment, final int nbValues) {
        super(environment);
        final int[] values = new int[nbValues];
        for (int i = 0; i < nbValues; i++) {
            values[i] = i;
        }
        buildList(environment, values);
    }

    /**
     * Create a stored bipartite set holding exactly the given values.
     *
     * @param environment backtrackable environment
     * @param values      initial content (the elements must be indexable, see {@link #position})
     */
    public IndexedBipartiteSet(final IEnvironment environment, final int[] values) {
        super(environment);
        buildList(environment, values);
    }

    /**
     * Increase the number of values watched.
     * BEWARE: be sure you are correctly calling this method.
     * It deletes everything already declared
     *
     * @param gap the gap to reach the expected size
     */
    public final void increaseSize(final int gap) {
        final int l = list.length;
        // Rebuild list/position over the enlarged range 0..l+gap-1.
        final int[] newList = new int[l + gap];
        for (int i = 0; i < l + gap; i++) {
            newList[i] = i;
        }
        int maxElt = 0;
        for (int i = 0; i < newList.length; i++) {
            if (newList[i] > maxElt) {
                maxElt = newList[i];
            }
        }
        final int[] newPosition = new int[maxElt + 1];
        for (int i = 0; i < newList.length; i++) {
            newPosition[newList[i]] = i;
        }
        // record already removed values
        final int end = last.get() + 1;
        final int[] removed = new int[list.length - end];
        System.arraycopy(list, end, removed, 0, list.length - end);
        this.list = newList;
        this.position = newPosition;
        // Re-create the stored index at full size, then replay the previous removals.
        final IEnvironment env = last.getEnvironment();
        this.last = null;
        this.last = env.makeInt(list.length - 1);
        for (int i = 0; i < removed.length; i++) {
            remove(removed[i]);
        }
    }

    /** @return the number of live elements. */
    public final int size() {
        return last.get() + 1;
    }

    /** @return true when no live element remains. */
    public final boolean isEmpty() {
        return last.get() == -1;
    }

    /** Unsupported: this set can only shrink during search. */
    public final void add(final int i) {
        throw new UnsupportedOperationException("adding element is not permitted in this structure (the list is only meant to decrease during search)");
    }

    /** Removes every element (backtrackable, via the stored index). */
    public final void clear() {
        last.set(-1);
    }

    /** Removes the last live element. */
    public final void removeLast() {
        remove(list[last.get()]);
    }

    /**
     * Removes an element by swapping it with the last live one and shrinking the live range.
     * No-op when the element is not present.
     */
    public void remove(final int object) {
        if (contains(object)) {
            final int idxToRem = position[object];
            if (idxToRem == last.get()) {
                last.add(-1);
            } else {
                // Swap 'object' with the current last live element, then shrink the range.
                final int temp = list[last.get()];
                list[last.get()] = object;
                list[idxToRem] = temp;
                position[object] = last.get();
                position[temp] = idxToRem;
                last.add(-1);
            }
        }
    }

    /**
     * Membership test: an element is present iff its recorded position lies in the live range.
     */
    public boolean contains(final int object) {
        // The IStateIntVector interface allows for negatives so need to
        // check for negatives.
        return object >= 0 && object < position.length && position[object] <= last.get();
    }

    /** @return the element stored at the given slot (no bounds check). */
    public final int get(final int index) {
        return list[index];
    }

    @Override
    public final int quickGet(final int index) {
        return get(index);
    }

    /** Unsupported: elements cannot be overwritten in place. */
    public final int set(final int index, final int val) {
        throw new EnvironmentException("setting an element is not permitted on this structure");
    }

    @Override
    public final int quickSet(final int index, final int val) {
        return set(index, val);
    }

    /** @return a readable "[a,b,...]" rendering of the live elements only. */
    public final String pretty() {
        final StringBuilder s = new StringBuilder("[");
        for (int i = 0; i <= last.get(); i++) {
            s.append(list[i]).append(i == (last.get()) ? "" : ",");
        }
        return s.append(']').toString();
    }

    //a is not in the list, returns its index k in the table from
    //the end of the list.
    //It basically means that a was the k-th element to be removed
    public final int findIndexOfInt(final int a) {
        return list.length - position[a];
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.containers.SmartHashSet;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.util.Set;
/**
 * An action intended to be shown as a toolbar button, with local enabled/visible flags,
 * optional custom updaters, and an optional context component that gates availability.
 *
 * @author Konstantin Bulenkov
 */
public abstract class AnActionButton extends AnAction implements ShortcutProvider {
  private boolean myEnabled = true;
  private boolean myVisible = true;
  private ShortcutSet myShortcut;
  private AnAction myAction = null;
  private JComponent myContextComponent;
  private Set<AnActionButtonUpdater> myUpdaters;

  public AnActionButton(String text) {
    super(text);
  }

  public AnActionButton(String text, String description, @Nullable Icon icon) {
    super(text, description, icon);
  }

  @SuppressWarnings("NullableProblems")
  public AnActionButton(String text, Icon icon) {
    this(text, null, icon);
  }

  public AnActionButton() {
  }

  /**
   * Wraps an arbitrary action as a button, copying its template presentation and shortcut.
   * Actions implementing {@link CheckedActionGroup} keep that marker on the wrapper.
   *
   * @param action the action to wrap
   * @return a button delegating to {@code action}
   */
  public static AnActionButton fromAction(final AnAction action) {
    final Presentation presentation = action.getTemplatePresentation();
    final AnActionButtonWrapper button = action instanceof CheckedActionGroup ? new CheckedAnActionButton(presentation, action)
                                                                              : new AnActionButtonWrapper(presentation, action);
    button.setShortcut(action.getShortcutSet());
    return button;
  }

  public boolean isEnabled() {
    return myEnabled;
  }

  public void setEnabled(boolean enabled) {
    myEnabled = enabled;
  }

  public boolean isVisible() {
    return myVisible;
  }

  public void setVisible(boolean visible) {
    myVisible = visible;
  }

  @Override
  public final void update(AnActionEvent e) {
    // Start from the delegate action's own state (if any), then apply the local
    // enabled/visible flags, the context-component check and any custom updaters.
    boolean myActionVisible = true;
    boolean myActionEnabled = true;
    if (myAction != null) {
      myAction.update(e);
      myActionEnabled = e.getPresentation().isEnabled();
      myActionVisible = e.getPresentation().isVisible();
    }
    boolean enabled = isEnabled() && isContextComponentOk() && myActionEnabled;
    if (enabled && myUpdaters != null) {
      // Any single updater may veto the enabled state.
      for (AnActionButtonUpdater updater : myUpdaters) {
        if (!updater.isEnabled(e)) {
          enabled = false;
          break;
        }
      }
    }
    e.getPresentation().setEnabled(enabled);
    e.getPresentation().setVisible(isVisible() && myActionVisible);
    if (enabled) {
      updateButton(e);
    }
  }

  /** Registers an additional enabled-state veto; the set is created lazily. */
  public final void addCustomUpdater(@NotNull AnActionButtonUpdater updater) {
    if (myUpdaters == null) {
      myUpdaters = new SmartHashSet<>();
    }
    myUpdaters.add(updater);
  }

  /**
   * Hook called from {@link #update} when the button is enabled; default implementation
   * keeps the button enabled only while the context component is showing and enabled.
   */
  public void updateButton(AnActionEvent e) {
    final JComponent component = getContextComponent();
    e.getPresentation().setEnabled(component != null && component.isShowing() && component.isEnabled());
  }

  @Override
  public ShortcutSet getShortcut() {
    return myShortcut;
  }

  public void setShortcut(ShortcutSet shortcut) {
    myShortcut = shortcut;
  }

  public void setContextComponent(JComponent contextComponent) {
    myContextComponent = contextComponent;
  }

  public JComponent getContextComponent() {
    return myContextComponent;
  }

  public DataContext getDataContext() {
    return DataManager.getInstance().getDataContext(getContextComponent());
  }

  // A null context component is deliberately treated as "ok" — the button may be
  // used without one; otherwise it must be visible and inside a JLayeredPane.
  private boolean isContextComponentOk() {
    return myContextComponent == null
           || (myContextComponent.isVisible() && UIUtil.getParentOfType(JLayeredPane.class, myContextComponent) != null);
  }

  /**
   * Computes the point just below this button inside its toolbar, suitable for popups.
   *
   * @return the popup point, or {@code null} when the enclosing toolbar (or the button
   *         itself) cannot be located — including when no context component was set.
   */
  public final RelativePoint getPreferredPopupPoint() {
    Container c = myContextComponent;
    // FIX: the context component is optional (see isContextComponentOk), so guard
    // against NPE on c.getParent() below when it was never set.
    if (c == null) {
      return null;
    }
    ActionToolbar toolbar = null;
    while ((c = c.getParent()) != null) {
      if (c instanceof JComponent
          && (toolbar = (ActionToolbar)((JComponent)c).getClientProperty(ActionToolbar.ACTION_TOOLBAR_PROPERTY_KEY)) != null) {
        break;
      }
    }
    if (toolbar instanceof JComponent) {
      // Locate this action's own button inside the toolbar to anchor the popup under it.
      for (Component comp : ((JComponent)toolbar).getComponents()) {
        if (comp instanceof ActionButtonComponent) {
          if (comp instanceof AnActionHolder) {
            if (((AnActionHolder)comp).getAction() == this) {
              return new RelativePoint(comp.getParent(), new Point(comp.getX(), comp.getY() + comp.getHeight()));
            }
          }
        }
      }
    }
    return null;
  }

  /** Wrapper variant that also carries the {@link CheckedActionGroup} marker. */
  public static class CheckedAnActionButton extends AnActionButtonWrapper implements CheckedActionGroup {
    private final AnAction myDelegate;

    public CheckedAnActionButton(Presentation presentation, AnAction action) {
      super(presentation, action);
      myDelegate = action;
    }

    public AnAction getDelegate() {
      return myDelegate;
    }
  }

  /** Adapts a plain {@link AnAction} to the button contract, delegating perform/update. */
  private static class AnActionButtonWrapper extends AnActionButton {
    private final AnAction myAction;

    public AnActionButtonWrapper(Presentation presentation, AnAction action) {
      super(presentation.getText(), presentation.getDescription(), presentation.getIcon());
      myAction = action;
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      myAction.actionPerformed(new AnActionEventWrapper(e, this));
    }

    @Override
    public void updateButton(AnActionEvent e) {
      myAction.update(e);
      final boolean enabled = e.getPresentation().isEnabled();
      final boolean visible = e.getPresentation().isVisible();
      if (enabled && visible) {
        super.updateButton(e);
      }
    }

    @Override
    public boolean isDumbAware() {
      return myAction.isDumbAware();
    }
  }

  /** Event wrapper that remembers the originating button, e.g. to anchor popups. */
  public static class AnActionEventWrapper extends AnActionEvent {
    private final AnActionButton myPeer;

    private AnActionEventWrapper(AnActionEvent e, AnActionButton peer) {
      super(e.getInputEvent(), e.getDataContext(), e.getPlace(), e.getPresentation(), e.getActionManager(), e.getModifiers());
      myPeer = peer;
    }

    public void showPopup(JBPopup popup) {
      popup.show(myPeer.getPreferredPopupPoint());
    }
  }
}
| |
package crazypants.enderio.machine.generator.zombie;
import net.minecraft.block.Block;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraftforge.common.util.ForgeDirection;
import net.minecraftforge.fluids.Fluid;
import net.minecraftforge.fluids.FluidContainerRegistry;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fluids.FluidTankInfo;
import net.minecraftforge.fluids.IFluidHandler;
import crazypants.enderio.EnderIO;
import crazypants.enderio.ModObject;
import crazypants.enderio.config.Config;
import crazypants.enderio.machine.IoMode;
import crazypants.enderio.machine.SlotDefinition;
import crazypants.enderio.machine.generator.AbstractGeneratorEntity;
import crazypants.enderio.network.PacketHandler;
import crazypants.enderio.power.PowerDistributor;
import crazypants.util.BlockCoord;
import crazypants.util.FluidUtil;
/**
 * Tile entity for the zombie generator: consumes "nutrient distillation" fluid from its
 * internal tank and emits RF power to neighbours via a PowerDistributor.
 */
public class TileZombieGenerator extends AbstractGeneratorEntity implements IFluidHandler {

    // Max millibuckets pulled from a neighbouring fluid handler per tick.
    private static int IO_MB_TICK = 250;

    // Internal fuel tank: two buckets of capacity.
    final NutrientTank fuelTank = new NutrientTank(FluidContainerRegistry.BUCKET_VOLUME * 2);

    // Output rate and fuel duration, both sourced from the mod config.
    int outputPerTick = Config.zombieGeneratorRfPerTick;
    int tickPerBucketOfFuel = Config.zombieGeneratorTicksPerBucketFuel;

    // Set when the tank content changed and clients need a sync packet.
    private boolean tanksDirty;
    private boolean active = false;
    private PowerDistributor powerDis;
    // Ticks left on the currently-burning millibucket of fuel.
    // NOTE(review): field name has a typo ("Remaing"); kept as-is, it is private.
    private int ticksRemaingFuel;
    // True while the generator waits for stored energy to drain before restarting.
    private boolean inPause;
    // Last render pass requested via shouldRenderInPass().
    int pass = 0;

    public TileZombieGenerator() {
        super(new SlotDefinition(0, 0, 0));
    }

    @Override
    public String getMachineName() {
        return ModObject.blockZombieGenerator.unlocalisedName;
    }

    // This machine never pushes; only pull/none IO modes make sense for it.
    @Override
    public boolean supportsMode(ForgeDirection faceHit, IoMode mode) {
        return mode != IoMode.PUSH && mode != IoMode.PUSH_PULL;
    }

    /**
     * In addition to item pulling, drains up to IO_MB_TICK mB of acceptable fluid
     * per tick from the neighbour on the given side into the fuel tank.
     */
    @Override
    protected boolean doPull(ForgeDirection dir) {
        boolean res = super.doPull(dir);
        BlockCoord loc = getLocation().getLocation(dir);
        IFluidHandler target = FluidUtil.getFluidHandler(worldObj, loc);
        if(target != null) {
            FluidTankInfo[] infos = target.getTankInfo(dir.getOpposite());
            if(infos != null) {
                for (FluidTankInfo info : infos) {
                    if(info.fluid != null && info.fluid.amount > 0) {
                        if(canFill(dir, info.fluid.getFluid())) {
                            // Simulate the drain/fill pair first, then commit only what fits.
                            FluidStack canPull = info.fluid.copy();
                            canPull.amount = Math.min(IO_MB_TICK, canPull.amount);
                            FluidStack drained = target.drain(dir.getOpposite(), canPull, false);
                            if(drained != null && drained.amount > 0) {
                                int filled = fill(dir, drained, false);
                                if(filled > 0) {
                                    drained = target.drain(dir.getOpposite(), filled, true);
                                    fill(dir, drained, true);
                                    return res;
                                }
                            }
                        }
                    }
                }
            }
        }
        return res;
    }

    @Override
    public int getPowerUsePerTick() {
        return outputPerTick;
    }

    // No item slots: nothing is ever a valid slot item.
    @Override
    protected boolean isMachineItemValidForSlot(int i, ItemStack itemstack) {
        return false;
    }

    @Override
    public boolean isActive() {
        return active;
    }

    @Override
    public float getProgress() {
        return 0.5f;
    }

    @Override
    public void onNeighborBlockChange(Block blockId) {
        super.onNeighborBlockChange(blockId);
        // Invalidate cached power receivers when neighbours change.
        if(powerDis != null) {
            powerDis.neighboursChanged();
        }
    }

    /**
     * Per-tick server logic: generate energy when allowed by redstone, pause when the
     * buffer is full, push energy out, and sync the tank to clients when dirty.
     *
     * @return true when the active flag flipped this tick (forces a block update)
     */
    @Override
    protected boolean processTasks(boolean redstoneCheckPassed) {
        boolean res = false;
        if(!redstoneCheckPassed) {
            if(active) {
                active = false;
                res = true;
            }
            return res;
        } else {
            boolean isActive = generateEnergy();
            if(isActive != active) {
                active = isActive;
                res = true;
            }
            if(getEnergyStored() >= getMaxEnergyStored()) {
                inPause = true;
            }
            transmitEnergy();
        }
        if(tanksDirty) {
            PacketHandler.sendToAllAround(new PacketZombieTank(this), this);
            tanksDirty = false;
        }
        return res;
    }

    /**
     * Burns fuel to add outputPerTick energy. Requires the tank to be at least 70% full
     * to start, and drains 1 mB every tickPerBucketOfFuel/1000 ticks.
     */
    private boolean generateEnergy() {
        //once full, don't start again until we have drained 10 seconds worth of power to prevent
        //flickering on and off constantly when powering a machine that draws less than this produces
        if(inPause && getEnergyStored() >= (getMaxEnergyStored() - (outputPerTick * 200))) {
            return false;
        }
        inPause = false;
        if(fuelTank.getFluidAmount() < fuelTank.getCapacity() * 0.7f) {
            return false;
        }
        ticksRemaingFuel--;
        if(ticksRemaingFuel <= 0) {
            fuelTank.drain(1, true);
            ticksRemaingFuel = tickPerBucketOfFuel/1000;
            tanksDirty = true;
        }
        setEnergyStored(getEnergyStored() + outputPerTick);
        return true;
    }

    /** Pushes up to 2x outputPerTick of stored energy to connected receivers. */
    private boolean transmitEnergy() {
        if(getEnergyStored() <= 0) {
            return false;
        }
        if(powerDis == null) {
            powerDis = new PowerDistributor(new BlockCoord(this));
        }
        int transmitted = powerDis.transmitEnergy(worldObj, Math.min(outputPerTick * 2, getEnergyStored()));
        setEnergyStored(getEnergyStored() - transmitted);
        return transmitted > 0;
    }

    @Override
    public int fill(ForgeDirection from, FluidStack resource, boolean doFill) {
        if(resource == null || resource.getFluid() == null || !canFill(from, resource.getFluid())) {
            return 0;
        }
        int res = fuelTank.fill(resource, doFill);
        if(res > 0 && doFill) {
            // Mark for client sync only on actual (non-simulated) fills.
            tanksDirty = true;
        }
        return res;
    }

    // Fuel can never be extracted once inside the machine.
    @Override
    public FluidStack drain(ForgeDirection from, FluidStack resource, boolean doDrain) {
        return null;
    }

    @Override
    public FluidStack drain(ForgeDirection from, int maxDrain, boolean doDrain) {
        return null;
    }

    // Only nutrient distillation is accepted as fuel.
    @Override
    public boolean canFill(ForgeDirection from, Fluid fluid) {
        return fluid != null && fluid.getID() == EnderIO.fluidNutrientDistillation.getID();
    }

    @Override
    public boolean canDrain(ForgeDirection from, Fluid fluid) {
        return false;
    }

    @Override
    public FluidTankInfo[] getTankInfo(ForgeDirection from) {
        return new FluidTankInfo[] { fuelTank.getInfo() };
    }

    public int getFluidStored(ForgeDirection from) {
        return fuelTank.getFluidAmount();
    }

    @Override
    public void readCustomNBT(NBTTagCompound nbtRoot) {
        super.readCustomNBT(nbtRoot);
        active = nbtRoot.getBoolean("active");
    }

    // Pass 0 renders the machine; pass 1 only when there is fluid to render.
    @Override
    public boolean shouldRenderInPass(int pass) {
        this.pass = pass;
        if(pass == 0) {
            return true;
        }
        if(pass == 1) {
            return fuelTank.getFluidAmount() > 0;
        }
        return false;
    }

    @Override
    public void readCommon(NBTTagCompound nbtRoot) {
        super.readCommon(nbtRoot);
        // Missing or null tank tag means an empty tank.
        if(nbtRoot.hasKey("fuelTank")) {
            NBTTagCompound tankRoot = (NBTTagCompound) nbtRoot.getTag("fuelTank");
            if(tankRoot != null) {
                fuelTank.readFromNBT(tankRoot);
            } else {
                fuelTank.setFluid(null);
            }
        } else {
            fuelTank.setFluid(null);
        }
    }

    @Override
    public void writeCommon(NBTTagCompound nbtRoot) {
        super.writeCommon(nbtRoot);
        // Only persist the tank when it actually holds fluid.
        if(fuelTank.getFluidAmount() > 0) {
            NBTTagCompound tankRoot = new NBTTagCompound();
            fuelTank.writeToNBT(tankRoot);
            nbtRoot.setTag("fuelTank", tankRoot);
        }
    }

    @Override
    public void writeCustomNBT(NBTTagCompound nbtRoot) {
        super.writeCustomNBT(nbtRoot);
        nbtRoot.setBoolean("active", active);
    }
}
| |
/*
* $Id$
*
* SARL is an general-purpose agent programming language.
* More details on http://www.sarl.io
*
* Copyright (C) 2014-2021 the original authors or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.sarl.lang.ui.labeling;
import javax.inject.Singleton;
import com.google.inject.Inject;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.xtend.core.jvmmodel.IXtendJvmAssociations;
import org.eclipse.xtend.ide.labeling.XtendDescriptionLabelProvider;
import org.eclipse.xtext.common.types.JvmDeclaredType;
import org.eclipse.xtext.common.types.JvmOperation;
import org.eclipse.xtext.resource.IEObjectDescription;
import org.eclipse.xtext.xbase.ui.labeling.XbaseImageAdornments;
import org.eclipse.xtext.xtype.XImportDeclaration;
import io.sarl.lang.sarl.SarlAction;
import io.sarl.lang.sarl.SarlAgent;
import io.sarl.lang.sarl.SarlBehavior;
import io.sarl.lang.sarl.SarlBehaviorUnit;
import io.sarl.lang.sarl.SarlCapacity;
import io.sarl.lang.sarl.SarlCapacityUses;
import io.sarl.lang.sarl.SarlConstructor;
import io.sarl.lang.sarl.SarlEvent;
import io.sarl.lang.sarl.SarlField;
import io.sarl.lang.sarl.SarlRequiredCapacity;
import io.sarl.lang.sarl.SarlScript;
import io.sarl.lang.sarl.SarlSkill;
/**
 * Computes the label images shown for IEObjectDescriptions and IResourceDescriptions
 * of SARL elements, delegating the actual icon selection to {@link SARLImages} and
 * decorating it with JVM-model adornments.
 *
 * @author $Author: sgalland$
 * @version $FullVersion$
 * @mavengroupid $GroupId$
 * @mavenartifactid $ArtifactId$
 * @see "http://www.eclipse.org/Xtext/documentation.html#labelProvider"
 */
@Singleton
public class SARLDescriptionLabelProvider extends XtendDescriptionLabelProvider {

    @Inject
    private IXtendJvmAssociations jvmModelAssociations;

    @Inject
    private SARLImages images;

    @Inject
    private XbaseImageAdornments adornments;

    /** Image used for a whole SARL script (file).
     *
     * @param script the SARL script.
     * @return the image descriptor.
     */
    public ImageDescriptor image(SarlScript script) {
        return this.images.forFile();
    }

    /** Image used for an import declaration.
     *
     * @param declaration describes the import declaration.
     * @return the image descriptor.
     */
    public ImageDescriptor image(XImportDeclaration declaration) {
        return this.images.forImport();
    }

    /** Image used for an agent declaration, adorned from its inferred JVM type.
     *
     * @param agent describes the agent.
     * @return the image descriptor.
     */
    public ImageDescriptor image(SarlAgent agent) {
        final JvmDeclaredType inferredType = this.jvmModelAssociations.getInferredType(agent);
        return this.images.forAgent(agent.getVisibility(), this.adornments.get(inferredType));
    }

    /** Image used for a behavior declaration, adorned from its inferred JVM type.
     *
     * @param behavior describes the behavior.
     * @return the image descriptor.
     */
    public ImageDescriptor image(SarlBehavior behavior) {
        final JvmDeclaredType inferredType = this.jvmModelAssociations.getInferredType(behavior);
        return this.images.forBehavior(behavior.getVisibility(), this.adornments.get(inferredType));
    }

    /** Image used for a capacity declaration, adorned from its inferred JVM type.
     *
     * @param capacity describes the capacity.
     * @return the image descriptor.
     */
    public ImageDescriptor image(SarlCapacity capacity) {
        final JvmDeclaredType inferredType = this.jvmModelAssociations.getInferredType(capacity);
        return this.images.forCapacity(capacity.getVisibility(), this.adornments.get(inferredType));
    }

    /** Image used for a skill declaration, adorned from its inferred JVM type.
     *
     * @param skill describes the skill.
     * @return the image descriptor.
     */
    public ImageDescriptor image(SarlSkill skill) {
        final JvmDeclaredType inferredType = this.jvmModelAssociations.getInferredType(skill);
        return this.images.forSkill(skill.getVisibility(), this.adornments.get(inferredType));
    }

    /** Image used for an event declaration, adorned from its inferred JVM type.
     *
     * @param event describes the event.
     * @return the image descriptor.
     */
    public ImageDescriptor image(SarlEvent event) {
        final JvmDeclaredType inferredType = this.jvmModelAssociations.getInferredType(event);
        return this.images.forEvent(event.getVisibility(), this.adornments.get(inferredType));
    }

    /** Image used for an action, adorned from its directly inferred JVM operation.
     *
     * @param action describes the action.
     * @return the image descriptor.
     */
    public ImageDescriptor image(SarlAction action) {
        final JvmOperation operation = this.jvmModelAssociations.getDirectlyInferredOperation(action);
        return this.images.forOperation(action.getVisibility(), this.adornments.get(operation));
    }

    /** Image used for a capacity-use clause.
     *
     * @param uses describes the capacity use.
     * @return the image descriptor.
     */
    public ImageDescriptor image(SarlCapacityUses uses) {
        return this.images.forCapacityUses();
    }

    /** Image used for a required-capacity clause.
     *
     * @param capacity describes the required capacity.
     * @return the image descriptor.
     */
    public ImageDescriptor image(SarlRequiredCapacity capacity) {
        return this.images.forCapacityRequirements();
    }

    /** Image used for a behavior unit (event handler).
     *
     * @param unit describes the behavior unit.
     * @return the image descriptor.
     */
    public ImageDescriptor image(SarlBehaviorUnit unit) {
        return this.images.forBehaviorUnit();
    }

    /** Image used for a field, adorned from its JVM field.
     *
     * @param attribute describes the attribute.
     * @return the image descriptor.
     */
    public ImageDescriptor image(SarlField attribute) {
        return this.images.forField(
                attribute.getVisibility(),
                this.adornments.get(this.jvmModelAssociations.getJvmField(attribute)));
    }

    /** Image used for a constructor; static constructors get a dedicated icon.
     *
     * @param constructor describes the constructor.
     * @return the image descriptor.
     */
    public ImageDescriptor image(SarlConstructor constructor) {
        if (constructor.isStatic()) {
            return this.images.forStaticConstructor();
        }
        return this.images.forConstructor(
                constructor.getVisibility(),
                this.adornments.get(this.jvmModelAssociations.getInferredConstructor(constructor)));
    }

    @Override
    public Object image(IEObjectDescription element) {
        return doGetImage(element.getEObjectOrProxy());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache;
import static org.apache.geode.distributed.ConfigurationProperties.*;
import static org.junit.Assert.*;
import java.io.File;
import java.util.Arrays;
import java.util.Properties;
import org.junit.After;
import org.junit.Before;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runners.MethodSorters;
import org.apache.geode.cache.*;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.distributed.internal.DistributionManager;
import org.apache.geode.internal.ByteArrayDataInput;
import org.apache.geode.internal.InternalStatisticsDisabledException;
import org.apache.geode.internal.Version;
import org.apache.geode.internal.cache.DistributedRegion.DiskPosition;
import org.apache.geode.internal.cache.InitialImageOperation.Entry;
import org.apache.geode.internal.cache.eviction.EvictableEntry;
import org.apache.geode.internal.cache.eviction.EvictionController;
import org.apache.geode.internal.cache.eviction.EvictionCounters;
import org.apache.geode.internal.cache.eviction.EvictionList;
import org.apache.geode.internal.cache.eviction.EvictionNode;
import org.apache.geode.internal.cache.persistence.DiskRecoveryStore;
import org.apache.geode.internal.cache.versions.VersionSource;
import org.apache.geode.internal.cache.versions.VersionStamp;
import org.apache.geode.internal.cache.versions.VersionTag;
import org.apache.geode.test.junit.categories.IntegrationTest;
/**
 * This test verifies that the region is LIFO-enabled by MEMORY, that the related statistics
 * are updated correctly, and that fault-in does not evict another entry (i.e. the policy is
 * not strictly LIFO).
 *
 * @since GemFire 5.7
 */
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
@Category(IntegrationTest.class)
public class LIFOEvictionAlgoMemoryEnabledRegionJUnitTest {
/** The cache instance */
private static Cache cache = null;
/** Stores LIFO Related Statistics */
private static EvictionCounters lifoStats = null;
/** The distributedSystem instance */
private static DistributedSystem distributedSystem = null;
private static String regionName = "LIFOMemoryEvictionEnabledRegion";
private static int maximumMegabytes = 1;
private static int byteArraySize = 20480;
private static long memEntryCountForFirstPutOperation;
private int deltaSize = 20738;
private static EvictionList lifoClockHand = null;
@Before
public void setUp() throws Exception {
    // Create a fresh cache and LIFO-eviction region before every test method.
    initializeVM();
}
@After
public void tearDown() throws Exception {
    // Destroy the region locally and close the cache so each test starts clean.
    assertNotNull(cache);
    Region rgn = cache.getRegion(Region.SEPARATOR + regionName);
    assertNotNull(rgn);
    rgn.localDestroyRegion();
    cache.close();
}
/**
 * Method for initializing the VM and creating the region with LIFO eviction attached.
 */
private static void initializeVM() throws Exception {
    Properties props = new Properties();
    props.setProperty(MCAST_PORT, "0");
    props.setProperty(LOCATORS, "");
    props.setProperty(LOG_LEVEL, "info"); // to keep diskPerf logs smaller
    distributedSystem = DistributedSystem.connect(props);
    cache = CacheFactory.create(distributedSystem);
    assertNotNull(cache);
    DiskStoreFactory dsf = cache.createDiskStoreFactory();
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.LOCAL);
    File dir = new File("testingDirectoryDefault");
    dir.mkdir();
    dir.deleteOnExit();
    File[] dirs = {dir};
    dsf.setDiskDirsAndSizes(dirs, new int[] {Integer.MAX_VALUE});
    dsf.setAutoCompact(false);
    // Force directory sizes to be interpreted in bytes only while creating the disk store.
    DirectoryHolder.SET_DIRECTORY_SIZE_IN_BYTES_FOR_TESTING_PURPOSES = true;
    try {
        factory.setDiskStoreName(dsf.create(regionName).getName());
    } finally {
        DirectoryHolder.SET_DIRECTORY_SIZE_IN_BYTES_FOR_TESTING_PURPOSES = false;
    }
    factory.setDiskSynchronous(true);
    factory.setDataPolicy(DataPolicy.NORMAL);
    /* setting LIFO MEMORY related eviction attributes */
    factory.setEvictionAttributes(EvictionAttributes.createLIFOMemoryAttributes(maximumMegabytes,
        EvictionAction.OVERFLOW_TO_DISK));
    RegionAttributes attr = factory.create();
    ((GemFireCacheImpl) cache).createRegion(regionName, attr);
    // Grab the region's internal eviction list so tests can read LIFO statistics directly.
    lifoClockHand =
        ((VMLRURegionMap) ((LocalRegion) cache.getRegion(Region.SEPARATOR + regionName)).entries)
            .getEvictionList();
    /* storing stats reference */
    lifoStats = lifoClockHand.getStatistics();
}
/**
 * This test does the following :<br>
 * 1)Perform put operation <br>
 * 2)Verify count of faultin entries <br>
 *
 * <p>Fixes: the broad {@code catch (Exception)} + {@code fail("Test failed")}
 * swallowed the real failure cause; exceptions now propagate to JUnit. The
 * deprecated {@code new Long(..)} boxed-equality assert is replaced by
 * {@code assertEquals}.
 */
@Test
public void test000EntryFaultinCount() throws Exception {
  assertNotNull(cache);
  LocalRegion rgn = (LocalRegion) cache.getRegion(Region.SEPARATOR + regionName);
  assertNotNull(rgn);
  DiskRegionStats diskRegionStats = rgn.getDiskRegion().getStats();
  // the LIFO counter must start from a clean slate
  assertEquals("Entry count not 0 ", 0L, lifoStats.getCounter());
  // put 60 entries into the region; with a 1 MB limit and ~20 KB values the
  // later puts overflow entries to disk
  for (long i = 0L; i < 60L; i++) {
    rgn.put("key" + i, newDummyObject(i));
  }
  assertEquals(
      "LRU eviction entry count and entries overflown to disk count from diskstats is not equal ",
      lifoStats.getEvictions(), diskRegionStats.getNumOverflowOnDisk());
  assertNull("Entry value in VM is not null", rgn.getValueInVM("key59"));
  // used to get number of entries required to reach the limit of memory assigned
  memEntryCountForFirstPutOperation = diskRegionStats.getNumEntriesInVM();
  // fault the last entry back in and verify the in-VM / on-disk split
  rgn.get("key59");
  assertEquals("Not equal to number of entries present in VM : ", 51L,
      diskRegionStats.getNumEntriesInVM());
  assertEquals("Not equal to number of entries present on disk : ", 9L,
      diskRegionStats.getNumOverflowOnDisk());
}
/**
 * This test does the following :<br>
 * 1)Verify region is LIFO Enabled <br>
 * 2)Perform put operation <br>
 * 3)perform get operation <br>
 * 4)Verify value retrieved <br>
 * 5)Verify count (entries present in memory) after put operations <br>
 * 6)Verify count (entries present in memory) after get (performs faultin) operation <br>
 * 7)Verify count (entries present in memory) after remove operation <br>
 *
 * <p>Fixes: deprecated {@code new Long(..)} constructors replaced with
 * {@code Long.valueOf}; the broad catch-and-{@code fail("Test failed")}
 * removed so real failures surface with their stack trace.
 */
@Test
public void test001LIFOStatsUpdation() throws Exception {
  assertNotNull(cache);
  LocalRegion rgn = (LocalRegion) cache.getRegion(Region.SEPARATOR + regionName);
  assertNotNull(rgn);
  // check that the region is LIFO enabled
  assertTrue("Eviction Algorithm is not LIFO",
      ((EvictionAttributesImpl) rgn.getAttributes().getEvictionAttributes()).isLIFO());
  // put 60 entries into the region
  for (long i = 0L; i < 60L; i++) {
    rgn.put(Long.valueOf(i), newDummyObject(i));
  }
  // verifies evicted entry values are null in memory
  assertTrue("In memory ", rgn.entries.getEntry(Long.valueOf(51)).isValueNull());
  assertTrue("In memory ", rgn.entries.getEntry(Long.valueOf(52)).isValueNull());
  assertTrue("In memory ", rgn.entries.getEntry(Long.valueOf(53)).isValueNull());
  // fault some entries back in
  rgn.get(Long.valueOf(46));
  rgn.get(Long.valueOf(51));
  rgn.get(Long.valueOf(56));
  // additional puts push the newest (LIFO) entries back out to disk
  rgn.put(Long.valueOf(60), newDummyObject(60));
  rgn.put(Long.valueOf(61), newDummyObject(61));
  assertNull("Entry value in VM is not null", rgn.getValueInVM(Long.valueOf(58)));
}
/**
 * This test does the following :<br>
 * 1)Perform put operation <br>
 * 2)Verify entry evicted is LIFO Entry and is not present in vm<br>
 *
 * <p>Depends on {@code memEntryCountForFirstPutOperation} recorded by
 * test000 (tests run in name order via {@code @FixMethodOrder}).
 * Fixes: deprecated {@code new Long(..)} usage, boxed-equality assert,
 * exception-swallowing catch, and removed commented-out dead code.
 */
@Test
public void test002LIFOEntryEviction() throws Exception {
  assertNotNull(cache);
  LocalRegion rgn = (LocalRegion) cache.getRegion(Region.SEPARATOR + regionName);
  assertNotNull(rgn);
  assertEquals("Region is not properly cleared ", 0, rgn.size());
  assertEquals("Entry count not 0 ", 0L, lifoStats.getCounter());
  // put sixty entries into the region
  for (long i = 0L; i < 60L; i++) {
    rgn.put(Long.valueOf(i), newDummyObject(i));
    if (i < memEntryCountForFirstPutOperation) {
      // entries below the memory limit stay in the VM
      assertNotNull("Entry is not in VM ", rgn.getValueInVM(Long.valueOf(i)));
    } else {
      // later (LIFO) entries are evicted straight to disk
      assertTrue("Entry is not null ", rgn.entries.getEntry(Long.valueOf(i)).isValueNull());
    }
  }
}
/**
 * This test does the following :<br>
 * 1)Perform put operation <br>
 * 2)Verify count of evicted entries <br>
 *
 * <p>Fixes: {@code new Long(..)} boxed-equality asserts replaced with
 * {@code assertEquals} (which also reports expected/actual on failure), and
 * the exception-swallowing catch removed.
 */
@Test
public void test003EntryEvictionCount() throws Exception {
  assertNotNull(cache);
  Region rgn = cache.getRegion(Region.SEPARATOR + regionName);
  assertNotNull(rgn);
  assertEquals("Entry count not 0 ", 0L, lifoStats.getCounter());
  // put 60 entries into the region
  for (long i = 0L; i < 60L; i++) {
    rgn.put(Long.valueOf(i), newDummyObject(i));
  }
  assertEquals("1)Total eviction count is not correct ", 10L, lifoStats.getEvictions());
  // one more put evicts exactly one more entry; the subsequent get must not evict
  rgn.put(Long.valueOf(60), newDummyObject(60));
  rgn.get(Long.valueOf(55));
  assertEquals("2)Total eviction count is not correct ", 11L, lifoStats.getEvictions());
}
/**
 * Basic checks to validate that the LIFO queue implementation works as
 * expected: entries come back newest-first, an empty queue yields null, and
 * already-evicted entries are skipped and removed.
 *
 * <p>Fixes: {@code assertTrue(x == n)} comparisons replaced with
 * {@code assertEquals}/{@code assertNull}, and the
 * {@code catch (Exception) { fail(ex.getMessage()); }} anti-pattern removed
 * so failures keep their stack trace.
 */
@Test
public void testLIFOQueue() throws Exception {
  assertNotNull(cache);
  Region rgn = cache.getRegion(Region.SEPARATOR + regionName);
  assertNotNull(rgn);
  // insert data
  lifoClockHand.appendEntry(new TestLRUNode(1));
  lifoClockHand.appendEntry(new TestLRUNode(2));
  lifoClockHand.appendEntry(new TestLRUNode(3));
  assertEquals("LIFO queue size", 3, lifoClockHand.size());
  // make sure data is removed in LIFO fashion
  TestLRUNode tailValue = (TestLRUNode) lifoClockHand.getEvictableEntry();
  assertEquals("LIFO value", 3, tailValue.value);
  assertEquals("LIFO queue size", 2, lifoClockHand.size());
  tailValue = (TestLRUNode) lifoClockHand.getEvictableEntry();
  assertEquals("LIFO value", 2, tailValue.value);
  assertEquals("LIFO queue size", 1, lifoClockHand.size());
  tailValue = (TestLRUNode) lifoClockHand.getEvictableEntry();
  assertEquals("LIFO value", 1, tailValue.value);
  assertEquals("LIFO queue size", 0, lifoClockHand.size());
  tailValue = (TestLRUNode) lifoClockHand.getEvictableEntry();
  assertNull("Expected null from an empty LIFO queue", tailValue);
  assertEquals("LIFO queue size", 0, lifoClockHand.size());
  // check that entries not available or already evicted are skipped and removed
  TestLRUNode testlrunode = new TestLRUNode(1);
  lifoClockHand.appendEntry(testlrunode);
  testlrunode = new TestLRUNode(2);
  testlrunode.setEvicted();
  lifoClockHand.appendEntry(testlrunode);
  testlrunode = new TestLRUNode(3);
  testlrunode.setEvicted();
  lifoClockHand.appendEntry(testlrunode);
  tailValue = (TestLRUNode) lifoClockHand.getEvictableEntry();
  assertEquals("LIFO value", 1, tailValue.value);
  assertEquals("LIFO queue size", 0, lifoClockHand.size());
  tailValue = (TestLRUNode) lifoClockHand.getEvictableEntry();
  assertNull("Expected null from an empty LIFO queue", tailValue);
  assertEquals("LIFO queue size", 0, lifoClockHand.size());
  // TODO : need tests for data still part of transaction
}
/**
 * Builds a throw-away region value of exactly {@code byteArraySize} bytes,
 * each byte set to the low 8 bits of {@code i}.
 *
 * @param i seed used as the fill byte
 * @return a freshly allocated byte array
 */
private Object newDummyObject(long i) {
  final byte[] payload = new byte[byteArraySize];
  Arrays.fill(payload, 0, payload.length, (byte) i);
  return payload;
}
// test class for validating LIFO queue
/**
 * Minimal {@link EvictableEntry} stub used only to drive the eviction list
 * in {@code testLIFOQueue}. Only the doubly-linked node pointers, the
 * evicted/recentlyUsed flags and the int {@code value} payload carry state;
 * every other interface method is a no-op returning null/false/0.
 */
static class TestLRUNode implements EvictableEntry {
  // doubly-linked eviction-list pointers
  EvictionNode next = null;
  EvictionNode prev = null;
  // eviction bookkeeping flags exercised by the queue tests
  boolean evicted = false;
  boolean recentlyUsed = false;
  // payload the assertions use to identify this node
  int value = 0;
  public TestLRUNode(int value) {
    this.value = value;
  }
  // ---- RegionEntry value/state accessors: stubbed, never consulted by the queue tests ----
  @Override
  public Token getValueAsToken() {
    return null;
  }
  @Override
  public void setValueWithTombstoneCheck(final Object value, final EntryEvent event)
      throws RegionClearedException {
  }
  @Override
  public Object getTransformedValue() {
    return null;
  }
  @Override
  public Object getValueInVM(final RegionEntryContext context) {
    return null;
  }
  @Override
  public Object getValueOnDisk(final InternalRegion region) throws EntryNotFoundException {
    return null;
  }
  @Override
  public Object getValueOnDiskOrBuffer(final InternalRegion region)
      throws EntryNotFoundException {
    return null;
  }
  @Override
  public boolean initialImagePut(final InternalRegion region, final long lastModified,
      final Object newValue, final boolean wasRecovered, final boolean acceptedVersionTag)
      throws RegionClearedException {
    return false;
  }
  @Override
  public boolean initialImageInit(final InternalRegion region, final long lastModified,
      final Object newValue, final boolean create, final boolean wasRecovered,
      final boolean acceptedVersionTag) throws RegionClearedException {
    return false;
  }
  @Override
  public boolean destroy(final InternalRegion region, final EntryEventImpl event,
      final boolean inTokenMode, final boolean cacheWrite, final Object expectedOldValue,
      final boolean forceDestroy, final boolean removeRecoveredEntry) throws CacheWriterException,
      EntryNotFoundException, TimeoutException, RegionClearedException {
    return false;
  }
  @Override
  public boolean getValueWasResultOfSearch() {
    return false;
  }
  @Override
  public void setValueResultOfSearch(final boolean value) {
  }
  @Override
  public Object getSerializedValueOnDisk(final InternalRegion region) {
    return null;
  }
  @Override
  public Object getValueInVMOrDiskWithoutFaultIn(final InternalRegion region) {
    return null;
  }
  @Override
  public Object getValueOffHeapOrDiskWithoutFaultIn(final InternalRegion region) {
    return null;
  }
  @Override
  public boolean isUpdateInProgress() {
    return false;
  }
  @Override
  public void setUpdateInProgress(final boolean underUpdate) {
  }
  @Override
  public boolean isCacheListenerInvocationInProgress() {
    return false;
  }
  @Override
  public void setCacheListenerInvocationInProgress(final boolean isListenerInvoked) {
  }
  @Override
  public boolean isValueNull() {
    return false;
  }
  @Override
  public boolean isInvalid() {
    return false;
  }
  @Override
  public boolean isDestroyed() {
    return false;
  }
  @Override
  public boolean isDestroyedOrRemoved() {
    return false;
  }
  @Override
  public boolean isDestroyedOrRemovedButNotTombstone() {
    return false;
  }
  @Override
  public boolean isInvalidOrRemoved() {
    return false;
  }
  @Override
  public void setValueToNull() {
  }
  @Override
  public void returnToPool() {
  }
  // ---- eviction-list linkage: the only real behavior in this stub ----
  @Override
  public void setNext(EvictionNode next) {
    this.next = next;
  }
  @Override
  public void setPrevious(EvictionNode previous) {
    this.prev = previous;
  }
  @Override
  public EvictionNode next() {
    return next;
  }
  @Override
  public EvictionNode previous() {
    return prev;
  }
  @Override
  public int updateEntrySize(EvictionController ccHelper) {
    return 0;
  }
  @Override
  public int updateEntrySize(EvictionController ccHelper, Object value) {
    return 0;
  }
  @Override
  public int getEntrySize() {
    return 0;
  }
  // recently-used flag, set/cleared by the eviction list during scans
  @Override
  public boolean isRecentlyUsed() {
    return recentlyUsed;
  }
  @Override
  public void setRecentlyUsed(final RegionEntryContext context) {
    recentlyUsed = true;
    context.incRecentlyUsed();
  }
  @Override
  public long getLastModified() {
    return 0;
  }
  @Override
  public boolean hasStats() {
    return false;
  }
  @Override
  public long getLastAccessed() throws InternalStatisticsDisabledException {
    return 0;
  }
  @Override
  public long getHitCount() throws InternalStatisticsDisabledException {
    return 0;
  }
  @Override
  public long getMissCount() throws InternalStatisticsDisabledException {
    return 0;
  }
  @Override
  public void updateStatsForPut(final long lastModifiedTime, final long lastAccessedTime) {
  }
  @Override
  public VersionStamp getVersionStamp() {
    return null;
  }
  @Override
  public VersionTag generateVersionTag(final VersionSource member, final boolean withDelta,
      final InternalRegion region, final EntryEventImpl event) {
    return null;
  }
  @Override
  public boolean dispatchListenerEvents(final EntryEventImpl event) throws InterruptedException {
    return false;
  }
  @Override
  public void updateStatsForGet(final boolean hit, final long time) {
  }
  @Override
  public void txDidDestroy(final long currentTime) {
  }
  @Override
  public void resetCounts() throws InternalStatisticsDisabledException {
  }
  @Override
  public void makeTombstone(final InternalRegion region, final VersionTag version)
      throws RegionClearedException {
  }
  @Override
  public void removePhase1(final InternalRegion region, final boolean clear)
      throws RegionClearedException {
  }
  @Override
  public void removePhase2() {
  }
  @Override
  public boolean isRemoved() {
    return false;
  }
  @Override
  public boolean isRemovedPhase2() {
    return false;
  }
  @Override
  public boolean isTombstone() {
    return false;
  }
  @Override
  public boolean fillInValue(final InternalRegion region, final Entry entry,
      final ByteArrayDataInput in, final DistributionManager distributionManager,
      final Version version) {
    return false;
  }
  @Override
  public boolean isOverflowedToDisk(final InternalRegion region,
      final DiskPosition diskPosition) {
    return false;
  }
  @Override
  public Object getKey() {
    return null;
  }
  @Override
  public Object getValue(final RegionEntryContext context) {
    return null;
  }
  @Override
  public Object getValueRetain(final RegionEntryContext context) {
    return null;
  }
  @Override
  public void setValue(final RegionEntryContext context, final Object value)
      throws RegionClearedException {
  }
  @Override
  public void setValue(final RegionEntryContext context, final Object value,
      final EntryEventImpl event) throws RegionClearedException {
  }
  @Override
  public Object getValueRetain(final RegionEntryContext context, final boolean decompress) {
    return null;
  }
  @Override
  public Object getValue() {
    return null;
  }
  @Override
  public void unsetRecentlyUsed() {
    recentlyUsed = false;
  }
  // evicted flag, toggled by the tests to simulate already-evicted nodes
  @Override
  public void setEvicted() {
    evicted = true;
  }
  @Override
  public void unsetEvicted() {
    evicted = false;
  }
  @Override
  public boolean isEvicted() {
    return evicted;
  }
  @Override
  public boolean isInUseByTransaction() {
    return false;
  }
  @Override
  public void incRefCount() {
  }
  @Override
  public void decRefCount(final EvictionList lruList, final InternalRegion region) {
  }
  @Override
  public void resetRefCount(final EvictionList lruList) {
  }
  @Override
  public Object prepareValueForCache(final RegionEntryContext context, final Object value,
      final boolean isEntryUpdate) {
    return null;
  }
  @Override
  public Object prepareValueForCache(final RegionEntryContext context, final Object value,
      final EntryEventImpl event, final boolean isEntryUpdate) {
    return null;
  }
  @Override
  public Object getKeyForSizing() {
    return null;
  }
  @Override
  public void setDelayedDiskId(final DiskRecoveryStore diskRecoveryStore) {
  }
}
}
| |
package com.aviary.android.feather.widget;
import it.sephiroth.android.library.imagezoom.ImageViewTouch;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BlurMaskFilter;
import android.graphics.BlurMaskFilter.Blur;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.util.Log;
import android.view.GestureDetector;
import android.view.GestureDetector.OnGestureListener;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.ScaleGestureDetector.OnScaleGestureListener;
import com.aviary.android.feather.library.services.DragControllerService.DragSource;
import com.aviary.android.feather.library.services.drag.DragView;
import com.aviary.android.feather.library.services.drag.DropTarget;
import com.aviary.android.feather.widget.DrawableHighlightView.Mode;
public class ImageViewDrawableOverlay extends ImageViewTouch implements DropTarget {
/**
 * The listener interface for receiving onLayout events. Implementations are
 * registered via {@code setOnLayoutListener} and invoked each time this view
 * is laid out.
 */
public interface OnLayoutListener {

  /**
   * Called after this view has been laid out.
   *
   * @param changed true when the size or position changed
   * @param left the new left position, relative to the parent
   * @param top the new top position, relative to the parent
   * @param right the new right position, relative to the parent
   * @param bottom the new bottom position, relative to the parent
   */
  void onLayoutChanged( boolean changed, int left, int top, int right, int bottom );
}
/**
 * The listener interface for receiving overlay drawable events (focus
 * change, press, move, click). Implementations are registered via
 * {@code setOnDrawableEventListener}.
 */
public static interface OnDrawableEventListener {

  /**
   * Called when the selected overlay changes.
   *
   * @param newFocus the newly selected overlay, may be null
   * @param oldFocus the previously selected overlay, may be null
   */
  void onFocusChange( DrawableHighlightView newFocus, DrawableHighlightView oldFocus );

  /**
   * Called when a down gesture lands on one of an overlay's active areas.
   *
   * @param view the overlay that was pressed
   */
  void onDown( DrawableHighlightView view );

  /**
   * Called while an overlay is being dragged, resized or rotated.
   *
   * @param view the overlay being moved
   */
  void onMove( DrawableHighlightView view );

  /**
   * Called on a single tap inside the selected overlay's move area.
   *
   * @param view the overlay that was clicked
   */
  void onClick( DrawableHighlightView view );
};
/** The m motion edge. */
private int mMotionEdge = DrawableHighlightView.GROW_NONE;
/** The m overlay views. */
private List<DrawableHighlightView> mOverlayViews = new ArrayList<DrawableHighlightView>();
/** The m overlay view. */
private DrawableHighlightView mOverlayView;
/** The m layout listener. */
private OnLayoutListener mLayoutListener;
/** The m drawable listener. */
private OnDrawableEventListener mDrawableListener;
/** The m force single selection. */
private boolean mForceSingleSelection = true;
private DropTargetListener mDropTargetListener;
private Paint mDropPaint;
private Rect mTempRect = new Rect();
private boolean mScaleWithContent = false;
/**
 * Instantiates a new image view drawable overlay.
 *
 * @param context the context the view runs in
 * @param attrs the XML attribute set
 */
public ImageViewDrawableOverlay( Context context, AttributeSet attrs ) {
  super( context, attrs );
}
/**
 * Completes base-class initialization: long-press is disabled so it cannot
 * interfere with overlay dragging.
 *
 * NOTE(review): the slop is stored as {@code 20 * 20} — presumably a
 * squared-distance threshold; confirm against where mTouchSlop is consumed.
 */
@Override
protected void init() {
  super.init();
  mTouchSlop = 20 * 20;
  mGestureDetector.setIsLongpressEnabled( false );
}
/**
 * Controls how overlay content is scaled/moved when zooming/panning the base
 * image.
 *
 * @param value if true the overlay content scales together with the image;
 *        if false overlays keep their on-screen size (crop rects are
 *        compensated in postScale/postTranslate)
 */
public void setScaleWithContent( boolean value ) {
  mScaleWithContent = value;
}
/**
 * Returns whether overlay content scales together with the base image.
 *
 * @return the current scale-with-content flag
 */
public boolean getScaleWithContent() {
  return mScaleWithContent;
}
/**
 * If true, when the user taps outside the drawable overlay and there is only
 * one active overlay, the selection is not changed.
 *
 * @param value the new force-single-selection flag
 */
public void setForceSingleSelection( boolean value ) {
  mForceSingleSelection = value;
}
/**
 * Registers the listener notified of drag-and-drop events on this view.
 *
 * @param listener the drop-target listener, or null to clear it
 */
public void setDropTargetListener( DropTargetListener listener ) {
  mDropTargetListener = listener;
}
/**
 * Supplies the gesture listener used by the base class; overridden so taps,
 * scrolls and double-taps are routed through {@link CropGestureListener}.
 */
@Override
protected OnGestureListener getGestureListener() {
  return new CropGestureListener();
}
/**
 * Supplies the pinch-zoom listener used by the base class; overridden to
 * route scale gestures through {@code CropScaleListener}.
 */
@Override
protected OnScaleGestureListener getScaleListener() {
  return new CropScaleListener();
}
/**
 * Sets the on layout listener.
 *
 * @param listener the new on layout listener, or null to clear it
 */
public void setOnLayoutListener( OnLayoutListener listener ) {
  mLayoutListener = listener;
}
/**
 * Sets the on drawable event listener.
 *
 * @param listener the new on drawable event listener, or null to clear it
 */
public void setOnDrawableEventListener( OnDrawableEventListener listener ) {
  mDrawableListener = listener;
}
/**
 * Sets a new image bitmap, first disposing of every overlay so stale
 * highlight views never outlive the image they were anchored to.
 */
@Override
public void setImageBitmap( final Bitmap bitmap, final boolean reset, Matrix matrix ) {
  clearOverlays();
  super.setImageBitmap( bitmap, reset, matrix );
}
/**
 * Propagates the layout change to the registered listener and, when the
 * geometry actually changed and an image is set, re-syncs every overlay's
 * matrix with the current image matrix.
 */
@Override
protected void onLayout( boolean changed, int left, int top, int right, int bottom ) {
  super.onLayout( changed, left, top, right, bottom );
  if ( mLayoutListener != null ) {
    mLayoutListener.onLayoutChanged( changed, left, top, right, bottom );
  }
  if ( changed && getDrawable() != null ) {
    for ( DrawableHighlightView overlay : mOverlayViews ) {
      overlay.getMatrix().set( getImageMatrix() );
      overlay.invalidate();
    }
  }
}
/**
 * Applies a pan to the base image and keeps every overlay in sync. When
 * overlays do not scale with the content, their crop rectangles are shifted
 * by the pan converted into image coordinates so they keep their on-screen
 * position.
 */
@Override
protected void postTranslate( float deltaX, float deltaY ) {
  super.postTranslate( deltaX, deltaY );
  for ( DrawableHighlightView overlay : mOverlayViews ) {
    if ( getScale() != 1 ) {
      final float[] matrixValues = new float[9];
      getImageMatrix().getValues( matrixValues );
      final float scale = matrixValues[Matrix.MSCALE_X];
      if ( !mScaleWithContent ) {
        overlay.getCropRectF().offset( -deltaX / scale, -deltaY / scale );
      }
    }
    overlay.getMatrix().set( getImageMatrix() );
    overlay.invalidate();
  }
}
/**
 * Applies a zoom around (centerX, centerY) and keeps every overlay in sync
 * with the new image matrix. When overlays do not scale with the content,
 * each crop rectangle is compensated so the drawable keeps its on-screen
 * size and position while the image underneath zooms.
 */
@Override
protected void postScale( float scale, float centerX, float centerY ) {
  if ( mOverlayViews.size() > 0 ) {
    Iterator<DrawableHighlightView> iterator = mOverlayViews.iterator();
    // snapshot the matrix before zooming, to measure how each overlay's
    // display rect moves/resizes
    Matrix oldMatrix = new Matrix( getImageViewMatrix() );
    super.postScale( scale, centerX, centerY );
    while ( iterator.hasNext() ) {
      DrawableHighlightView view = iterator.next();
      if ( !mScaleWithContent ) {
        RectF cropRect = view.getCropRectF();
        // display rects before (rect1) and after (rect2) the zoom
        RectF rect1 = view.getDisplayRect( oldMatrix, view.getCropRectF() );
        RectF rect2 = view.getDisplayRect( getImageViewMatrix(), view.getCropRectF() );
        float[] mvalues = new float[9];
        getImageViewMatrix().getValues( mvalues );
        final float currentScale = mvalues[Matrix.MSCALE_X];
        // shift and resize the crop rect in image coordinates so the
        // overlay's screen-space rect stays where it was before the zoom
        cropRect.offset( ( rect1.left - rect2.left ) / currentScale, ( rect1.top - rect2.top ) / currentScale );
        cropRect.right += -( rect2.width() - rect1.width() ) / currentScale;
        cropRect.bottom += -( rect2.height() - rect1.height() ) / currentScale;
        view.getMatrix().set( getImageMatrix() );
        view.getCropRectF().set( cropRect );
      } else {
        view.getMatrix().set( getImageMatrix() );
      }
      view.invalidate();
    }
  } else {
    super.postScale( scale, centerX, centerY );
  }
}
/**
 * Pans the image just enough to keep a dragged overlay inside the view
 * bounds: dragging right/down pulls in the left/top edge, dragging
 * left/up pulls in the right/bottom edge.
 *
 * @param hv the overlay being dragged
 * @param deltaX the horizontal drag delta (sign selects which edge to check)
 * @param deltaY the vertical drag delta (sign selects which edge to check)
 */
private void ensureVisible( DrawableHighlightView hv, float deltaX, float deltaY ) {
  final RectF drawRect = hv.getDrawRect();
  int panX = 0;
  int panY = 0;
  if ( deltaX > 0 ) {
    panX = (int) Math.max( 0, getLeft() - drawRect.left );
  } else if ( deltaX < 0 ) {
    panX = (int) Math.min( 0, getRight() - drawRect.right );
  }
  if ( deltaY > 0 ) {
    panY = (int) Math.max( 0, getTop() - drawRect.top );
  } else if ( deltaY < 0 ) {
    panY = (int) Math.min( 0, getBottom() - drawRect.bottom );
  }
  if ( panX != 0 || panY != 0 ) {
    panBy( panX, panY );
  }
}
/**
 * Draws the base image, then each overlay in list order, then the
 * drop-target highlight rectangle when one is active.
 */
@Override
public void onDraw( Canvas canvas ) {
  super.onDraw( canvas );
  for ( int i = 0; i < mOverlayViews.size(); i++ ) {
    // isolate each overlay's matrix changes from the canvas
    canvas.save( Canvas.MATRIX_SAVE_FLAG );
    mOverlayViews.get( i ).draw( canvas );
    canvas.restore();
  }
  // mDropPaint is presumably set while a drag hovers over this view
  // (DropTarget implementation) — cleared back to null afterwards
  if ( null != mDropPaint ) {
    getDrawingRect( mTempRect );
    canvas.drawRect( mTempRect, mDropPaint );
  }
}
/**
 * Removes and disposes every overlay, clears the current selection and
 * resets the active motion edge.
 */
public void clearOverlays() {
  setSelectedHighlightView( null );
  for ( DrawableHighlightView overlay : mOverlayViews ) {
    overlay.dispose();
  }
  mOverlayViews.clear();
  mOverlayView = null;
  mMotionEdge = DrawableHighlightView.GROW_NONE;
}
/**
 * Adds an overlay, rejecting duplicates. The first overlay added becomes
 * the selected one; the view is scheduled for redraw.
 *
 * @param hv the overlay to add
 * @return true when added, false when an equal overlay is already present
 */
public boolean addHighlightView( DrawableHighlightView hv ) {
  for ( DrawableHighlightView existing : mOverlayViews ) {
    if ( existing.equals( hv ) ) {
      return false;
    }
  }
  mOverlayViews.add( hv );
  postInvalidate();
  if ( mOverlayViews.size() == 1 ) {
    setSelectedHighlightView( hv );
  }
  return true;
}
/**
 * Gets the number of overlays currently attached to this view.
 *
 * @return the highlight count
 */
public int getHighlightCount() {
  return mOverlayViews.size();
}
/**
 * Gets the overlay at the given position.
 *
 * @param index the position in the overlay list (0-based)
 * @return the highlight view at that position
 */
public DrawableHighlightView getHighlightViewAt( int index ) {
  return mOverlayViews.get( index );
}
/**
 * Removes and disposes the given overlay, clearing the selection first when
 * it was the selected one. (Method name spelling kept for API compatibility.)
 *
 * @param view the overlay to remove
 * @return true when an equal overlay was found and removed
 */
public boolean removeHightlightView( DrawableHighlightView view ) {
  for ( int index = 0; index < mOverlayViews.size(); index++ ) {
    if ( !mOverlayViews.get( index ).equals( view ) ) {
      continue;
    }
    final DrawableHighlightView removed = mOverlayViews.remove( index );
    if ( removed.equals( mOverlayView ) ) {
      setSelectedHighlightView( null );
    }
    removed.dispose();
    return true;
  }
  return false;
}
/**
 * After a zoom animation finishes, re-arms the selected overlay for a move
 * so interaction can continue seamlessly at the new zoom level.
 */
@Override
protected void onZoomAnimationCompleted( float scale ) {
  Log.i( LOG_TAG, "onZoomAnimationCompleted: " + scale );
  super.onZoomAnimationCompleted( scale );
  if ( mOverlayView != null ) {
    mOverlayView.setMode( Mode.Move );
    mMotionEdge = DrawableHighlightView.MOVE;
  }
}
/**
 * Return the current selected highlight view.
 *
 * @return the selected highlight view, or null when nothing is selected
 */
public DrawableHighlightView getSelectedHighlightView() {
  return mOverlayView;
}
/**
 * Dispatches touch events to the scale detector first and, when no pinch is
 * in progress, to the gesture detector. On ACTION_UP the active overlay
 * leaves its mode and an image zoomed below 1x springs back.
 *
 * @return always true — this view consumes every touch event
 */
@Override
public boolean onTouchEvent( MotionEvent event ) {
  int action = event.getAction() & MotionEvent.ACTION_MASK;
  mScaleDetector.onTouchEvent( event );
  if ( !mScaleDetector.isInProgress() ) mGestureDetector.onTouchEvent( event );
  switch ( action ) {
    case MotionEvent.ACTION_UP:
      if ( mOverlayView != null ) {
        mOverlayView.setMode( DrawableHighlightView.Mode.None );
      }
      mMotionEdge = DrawableHighlightView.GROW_NONE;
      // spring back when the user pinched below the minimum zoom
      if ( getScale() < 1f ) {
        zoomTo( 1f, 50 );
      }
      break;
  }
  return true;
}
/**
 * Hook computing the target zoom level for a double tap; currently defers
 * entirely to the base implementation.
 */
@Override
protected float onDoubleTapPost( float scale, float maxZoom ) {
  return super.onDoubleTapPost( scale, maxZoom );
}
/**
 * Zooms toward the tap location: asks {@link #onDoubleTapPost(float, float)}
 * for the next zoom step, clamps it to [1, maxZoom] and animates to it.
 *
 * @param e the double-tap event; its position becomes the zoom focal point
 * @return always true — the tap is consumed
 */
private boolean onDoubleTap( MotionEvent e ) {
  float scale = getScale();
  float targetScale = scale;
  targetScale = ImageViewDrawableOverlay.this.onDoubleTapPost( scale, getMaxZoom() );
  targetScale = Math.min( getMaxZoom(), Math.max( targetScale, 1 ) );
  mCurrentScaleFactor = targetScale;
  zoomTo( targetScale, e.getX(), e.getY(), DEFAULT_ANIMATION_DURATION );
  invalidate();
  return true;
}
/**
 * Finds the overlay hit by the given touch position. Every overlay is
 * tested; when several overlap, the last one in the list (topmost in draw
 * order) wins.
 *
 * @param e the touch event to hit-test
 * @return the hit overlay, or null when the touch lands on none
 */
private DrawableHighlightView checkSelection( MotionEvent e ) {
  DrawableHighlightView hit = null;
  for ( DrawableHighlightView candidate : mOverlayViews ) {
    if ( candidate.getHit( e.getX(), e.getY() ) != DrawableHighlightView.GROW_NONE ) {
      hit = candidate;
    }
  }
  return hit;
}
/**
 * Forwards a confirmed single tap to every currently selected overlay.
 *
 * NOTE(review): {@code selection} is never assigned, so this method always
 * returns {@code null}; the caller (onSingleTapConfirmed) ignores the return
 * value. The real work is the onSingleTapConfirmed callback on each
 * selected view.
 *
 * @param e the tap event
 * @return always {@code null} as currently written
 */
private DrawableHighlightView checkUpSelection( MotionEvent e ) {
  Iterator<DrawableHighlightView> iterator = mOverlayViews.iterator();
  DrawableHighlightView selection = null;
  while ( iterator.hasNext() ) {
    DrawableHighlightView view = iterator.next();
    if ( view.getSelected() ) {
      view.onSingleTapConfirmed( e.getX(), e.getY() );
    }
  }
  return selection;
}
/**
 * Makes {@code newView} the selected overlay: deselects the previous one
 * (when different), marks the new one selected, and notifies the drawable
 * listener of the focus change — the listener fires even when the selection
 * is unchanged.
 *
 * @param newView the overlay to select, or null to clear the selection
 */
public void setSelectedHighlightView( DrawableHighlightView newView ) {
  final DrawableHighlightView oldView = mOverlayView;
  if ( mOverlayView != null && !mOverlayView.equals( newView ) ) {
    mOverlayView.setSelected( false );
  }
  if ( newView != null ) {
    newView.setSelected( true );
  }
  mOverlayView = newView;
  if ( mDrawableListener != null ) {
    mDrawableListener.onFocusChange( newView, oldView );
  }
}
/**
* The listener interface for receiving cropGesture events. The class that is interested in processing a cropGesture event
* implements this interface, and the object created with that class is registered with a component using the component's
* <code>addCropGestureListener<code> method. When
* the cropGesture event occurs, that object's appropriate
* method is invoked.
*
* @see CropGestureEvent
*/
class CropGestureListener extends GestureDetector.SimpleOnGestureListener {
boolean mScrollStarted;
float mLastMotionX, mLastMotionY;
/**
 * Handles the initial touch: resolves which overlay (if any) is selected,
 * optionally zooms in when a content-scaled overlay is below its minimum
 * usable size, and arms the overlay mode (move/rotate/grow) for the gesture
 * that follows.
 */
@Override
public boolean onDown( MotionEvent e ) {
  mScrollStarted = false;
  mLastMotionX = e.getX();
  mLastMotionY = e.getY();
  // find the overlay (if any) under the finger
  DrawableHighlightView newSelection = checkSelection( e );
  DrawableHighlightView realNewSelection = newSelection;
  // with a single overlay and single-selection forced, keep it selected
  // even when the tap lands outside of it
  if ( newSelection == null && mOverlayViews.size() == 1 && mForceSingleSelection ) {
    newSelection = mOverlayViews.get( 0 );
  }
  setSelectedHighlightView( newSelection );
  if ( realNewSelection != null && mScaleWithContent ) {
    RectF displayRect = realNewSelection.getDisplayRect( realNewSelection.getMatrix(), realNewSelection.getCropRectF() );
    // NOTE(review): validateSize() appears to return true when the size IS
    // valid — the local name "invalidSize" is misleading; confirm upstream.
    boolean invalidSize = realNewSelection.getContent().validateSize( displayRect );
    if ( !invalidSize ) {
      float minW = realNewSelection.getContent().getMinWidth();
      float minH = realNewSelection.getContent().getMinHeight();
      // 1.1f: zoom slightly past the minimum so the overlay is comfortably usable
      float minSize = Math.min( minW, minH ) * 1.1f;
      float minRectSize = Math.min( displayRect.width(), displayRect.height() );
      float diff = minSize / minRectSize;
      Log.d( LOG_TAG, "drawable too small!!!" );
      Log.d( LOG_TAG, "min.size: " + minW + "x" + minH );
      Log.d( LOG_TAG, "cur.size: " + displayRect.width() + "x" + displayRect.height() );
      zoomTo( getScale() * diff, displayRect.centerX(), displayRect.centerY(), DEFAULT_ANIMATION_DURATION * 1.5f );
      return true;
    }
  }
  // arm the overlay for the upcoming gesture based on which handle was hit
  if ( mOverlayView != null ) {
    int edge = mOverlayView.getHit( e.getX(), e.getY() );
    if ( edge != DrawableHighlightView.GROW_NONE ) {
      mMotionEdge = edge;
      mOverlayView
          .setMode( ( edge == DrawableHighlightView.MOVE ) ? DrawableHighlightView.Mode.Move
              : ( edge == DrawableHighlightView.ROTATE ? DrawableHighlightView.Mode.Rotate
                  : DrawableHighlightView.Mode.Grow ) );
      if ( mDrawableListener != null ) {
        mDrawableListener.onDown( mOverlayView );
      }
    }
  }
  return super.onDown( e );
}
/**
 * Forwards confirmed single taps to the selected overlays via
 * checkUpSelection (its return value is unused).
 */
@Override
public boolean onSingleTapConfirmed( MotionEvent e ) {
  checkUpSelection( e );
  return super.onSingleTapConfirmed( e );
}
/*
* (non-Javadoc)
*
* @see android.view.GestureDetector.SimpleOnGestureListener#onSingleTapUp(android.view.MotionEvent)
*/
@Override
public boolean onSingleTapUp( MotionEvent e ) {
    // No active overlay: nothing to hit-test, use the default behaviour.
    if ( mOverlayView == null ) {
        return super.onSingleTapUp( e );
    }

    final int hitEdge = mOverlayView.getHit( e.getX(), e.getY() );
    final boolean tappedMoveArea = ( hitEdge & DrawableHighlightView.MOVE ) == DrawableHighlightView.MOVE;

    if ( tappedMoveArea ) {
        // Tap landed on the movable body of the overlay: report it as a click.
        if ( mDrawableListener != null ) {
            mDrawableListener.onClick( mOverlayView );
        }
        return true;
    }

    // Tap missed the movable area: reset the interaction mode and, unless this
    // is the only overlay, drop the selection entirely.
    mOverlayView.setMode( Mode.None );
    if ( mOverlayViews.size() != 1 ) {
        setSelectedHighlightView( null );
    }
    return super.onSingleTapUp( e );
}
/*
* (non-Javadoc)
*
* @see android.view.GestureDetector.SimpleOnGestureListener#onDoubleTap(android.view.MotionEvent)
*/
@Override
public boolean onDoubleTap( MotionEvent e ) {
    // Delegate to the outer view's double-tap zoom, but only while enabled;
    // short-circuit avoids invoking the delegate when disabled.
    return mDoubleTapEnabled && ImageViewDrawableOverlay.this.onDoubleTap( e );
}
/*
* (non-Javadoc)
*
* @see android.view.GestureDetector.SimpleOnGestureListener#onScroll(android.view.MotionEvent, android.view.MotionEvent,
* float, float)
*/
/**
 * Routes scroll gestures either to the active overlay (resize/rotate/move) or
 * to the image itself (panning). Maintains its own last-motion coordinates to
 * bypass the framework's touch-slop lag on the first scroll event.
 */
@Override
public boolean onScroll( MotionEvent e1, MotionEvent e2, float distanceX, float distanceY ) {
    if ( !mScrollEnabled ) return false;
    // e1 may be null on some devices (framework quirk); also ignore multi-touch
    // and scrolls that arrive while a pinch-zoom is in progress.
    if ( e1 == null || e2 == null ) return false;
    if ( e1.getPointerCount() > 1 || e2.getPointerCount() > 1 ) return false;
    if ( mScaleDetector.isInProgress() ) return false;
    // remove the touch slop lag ( see bug @1084 )
    float x = e2.getX();
    float y = e2.getY();
    if( !mScrollStarted ){
        // First event of this scroll: zero the delta so the content does not
        // jump by the accumulated touch slop.
        distanceX = 0;
        distanceY = 0;
        mScrollStarted = true;
    } else {
        // Recompute the delta from our own tracked position instead of trusting
        // the framework-supplied distances.
        distanceX = mLastMotionX - x;
        distanceY = mLastMotionY - y;
    }
    mLastMotionX = x;
    mLastMotionY = y;
    if ( mOverlayView != null && mMotionEdge != DrawableHighlightView.GROW_NONE ) {
        // An overlay handle was grabbed in onDown: forward the motion to it.
        mOverlayView.onMouseMove( mMotionEdge, e2, -distanceX, -distanceY );
        if ( mDrawableListener != null ) {
            mDrawableListener.onMove( mOverlayView );
        }
        if ( mMotionEdge == DrawableHighlightView.MOVE ) {
            if ( !mScaleWithContent ) {
                // Keep a screen-anchored overlay on screen while it is dragged.
                ensureVisible( mOverlayView, distanceX, distanceY );
            }
        }
        return true;
    } else {
        // No overlay interaction: pan the image content.
        scrollBy( -distanceX, -distanceY );
        invalidate();
        return true;
    }
}
/*
* (non-Javadoc)
*
* @see android.view.GestureDetector.SimpleOnGestureListener#onFling(android.view.MotionEvent, android.view.MotionEvent,
* float, float)
*/
/**
 * Flings the image content when the gesture is fast enough (>800 px/s on
 * either axis) and no overlay is being manipulated.
 */
@Override
public boolean onFling( MotionEvent e1, MotionEvent e2, float velocityX, float velocityY ) {
    if ( !mScrollEnabled ) return false;
    // FIX: guard against null events before dereferencing them. The Android
    // framework is known to deliver a null down-event (e1) to onFling on some
    // devices; this mirrors the guard already present in onScroll().
    if ( e1 == null || e2 == null ) return false;
    if ( e1.getPointerCount() > 1 || e2.getPointerCount() > 1 ) return false;
    if ( mScaleDetector.isInProgress() ) return false;
    // Don't fling the image while an overlay is being moved/resized/rotated.
    if ( mOverlayView != null && mOverlayView.getMode() != Mode.None ) return false;
    float diffX = e2.getX() - e1.getX();
    float diffY = e2.getY() - e1.getY();
    if ( Math.abs( velocityX ) > 800 || Math.abs( velocityY ) > 800 ) {
        // Animate a pan of half the gesture distance over 300ms.
        scrollBy( diffX / 2, diffY / 2, 300 );
        invalidate();
    }
    return super.onFling( e1, e2, velocityX, velocityY );
}
}
/**
 * The listener interface for receiving cropScale events. A class interested in processing a cropScale event implements
 * this interface, and an object of that class is registered with a component using the component's
 * <code>addCropScaleListener</code> method. When the cropScale event occurs, that object's appropriate method is invoked.
 *
 * @see CropScaleEvent
 */
class CropScaleListener extends ScaleListener {

    /** Begins a pinch gesture, but only while scaling is enabled. */
    @Override
    public boolean onScaleBegin( ScaleGestureDetector detector ) {
        return mScaleEnabled && super.onScaleBegin( detector );
    }

    /** Finishes a pinch gesture, but only while scaling is enabled. */
    @Override
    public void onScaleEnd( ScaleGestureDetector detector ) {
        if ( mScaleEnabled ) {
            super.onScaleEnd( detector );
        }
    }

    /** Applies one incremental scale step, but only while scaling is enabled. */
    @Override
    public boolean onScale( ScaleGestureDetector detector ) {
        return mScaleEnabled && super.onScale( detector );
    }
}
@Override
public void onDrop( DragSource source, int x, int y, int xOffset, int yOffset, DragView dragView, Object dragInfo ) {
    // Without a registered drop-target listener the drop is silently ignored.
    if ( mDropTargetListener == null ) {
        return;
    }
    mDropTargetListener.onDrop( source, x, y, xOffset, yOffset, dragView, dragInfo );
}
@Override
public void onDragEnter( DragSource source, int x, int y, int xOffset, int yOffset, DragView dragView, Object dragInfo ) {
    // Build the highlight paint used to outline this view while a drag hovers
    // over it; cleared again in onDragExit().
    final Paint highlight = new Paint();
    highlight.setStyle( Paint.Style.STROKE );
    highlight.setStrokeWidth( 2 );
    highlight.setColor( 0xff33b5e5 );
    highlight.setMaskFilter( new BlurMaskFilter( 4.0f, Blur.NORMAL ) );
    mDropPaint = highlight;
    invalidate();
}
// Intentionally empty: no per-position hover feedback beyond the static
// highlight painted while the drag is inside the view.
@Override
public void onDragOver( DragSource source, int x, int y, int xOffset, int yOffset, DragView dragView, Object dragInfo ) {}
@Override
public void onDragExit( DragSource source, int x, int y, int xOffset, int yOffset, DragView dragView, Object dragInfo ) {
    // The drag left this view: drop the highlight paint and repaint without it.
    mDropPaint = null;
    invalidate();
}
@Override
public boolean acceptDrop( DragSource source, int x, int y, int xOffset, int yOffset, DragView dragView, Object dragInfo ) {
    // No listener means no drops are accepted; otherwise the listener decides.
    return mDropTargetListener != null
            && mDropTargetListener.acceptDrop( source, x, y, xOffset, yOffset, dragView, dragInfo );
}
@Override
public Rect estimateDropLocation( DragSource source, int x, int y, int xOffset, int yOffset, DragView dragView, Object dragInfo,
Rect recycle ) {
    // No drop-location preview is provided; returning null disables the estimate.
    return null;
}
}
| |
package crazypants.enderio.conduits.conduit;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import crazypants.enderio.base.EnderIO;
import crazypants.enderio.base.Log;
import crazypants.enderio.base.conduit.ConduitUtil;
import crazypants.enderio.base.conduit.ConduitUtil.UnloadedBlockException;
import crazypants.enderio.base.conduit.IConduit;
import crazypants.enderio.base.conduit.IConduitBundle;
import crazypants.enderio.base.conduit.IConduitNetwork;
import crazypants.enderio.base.conduit.IServerConduit;
import crazypants.enderio.base.diagnostics.ConduitNeighborUpdateTracker;
import crazypants.enderio.base.handler.ServerTickHandler;
import net.minecraft.block.Block;
import net.minecraft.block.state.IBlockState;
import net.minecraft.init.Blocks;
import net.minecraft.profiler.Profiler;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import net.minecraftforge.event.ForgeEventFactory;
import net.minecraftforge.fml.common.gameevent.TickEvent.ServerTickEvent;
//I=base type, I is the base class of the implementations accepted by the network
/**
 * Server-side base implementation of a conduit network: the set of connected conduits of one type
 * that act as a single logical unit. Subclasses supply the type-specific behavior.
 * <p>
 * T is the base conduit type accepted by the network, I is the implementation class the network
 * actually stores (a subtype of T). Runtime {@code Class} tokens are kept because generics are
 * erased and membership is decided with reflection checks.
 * <p>
 * NOTE(review): not thread-safe; appears to assume single-threaded server-tick access — confirm.
 */
public abstract class AbstractConduitNetwork<T extends IServerConduit, I extends T> implements IConduitNetwork<T, I> {

  // Live list of member conduits. getConduits() hands out this very instance, so callers share it.
  private final @Nonnull List<I> conduits = new ArrayList<I>();

  private long lastConduitListCheck = -1L; // server tick of the last time a full check on the conduit list was run. Used to limit the full check to once per
  // tick.

  // Runtime type tokens (see class comment).
  protected final @Nonnull Class<I> implClass;
  protected final @Nonnull Class<T> baseConduitClass;

  /**
   * @param implClass runtime class of the stored implementation type I
   * @param baseConduitClass runtime class of the accepted base conduit type T
   */
  protected AbstractConduitNetwork(@Nonnull Class<I> implClass, @Nonnull Class<T> baseConduitClass) {
    this.implClass = implClass;
    this.baseConduitClass = baseConduitClass;
  }

  /**
   * Initializes this network at the given bundle: destroys every neighboring network first, then
   * flood-fills outward from the bundle claiming all reachable conduits. Server-side only.
   *
   * @throws UnloadedBlockException if a block needed for the fill is not loaded
   */
  @Override
  public void init(@Nonnull IConduitBundle tile, Collection<I> connections, @Nonnull World world) throws UnloadedBlockException {

    if (world.isRemote) {
      // Networks exist only on the server; a client-side call is a programming error.
      throw new UnsupportedOperationException();
    }

    // Destroy all existing networks around this block
    for (I con : connections) {
      IConduitNetwork<?, ?> network = con.getNetwork();
      if (network != null) {
        network.destroyNetwork();
      }
    }
    setNetwork(world, tile);
  }

  @Override
  public final @Nonnull Class<T> getBaseConduitType() {
    return baseConduitClass;
  }

  /**
   * Flood fill starting at the given bundle: every reachable conduit of the right implementation
   * type is detached from its previous network (destroying that network) and claimed by this one.
   *
   * @throws UnloadedBlockException if neighbor lookup hits an unloaded block
   */
  @Override
  public void setNetwork(@Nonnull World world, @Nonnull IConduitBundle tile) throws UnloadedBlockException {

    List<T> candidates = new LinkedList<>();
    candidates.add(tile.getConduit(getBaseConduitType()));

    while (!candidates.isEmpty()) {
      T conduit = candidates.remove(0);
      if (conduit == null || !implClass.isAssignableFrom(conduit.getClass())) {
        // No conduit of this type in the bundle, or wrong implementation class — not ours.
        continue;
      }
      IConduitNetwork<?, ?> network = conduit.getNetwork();
      if (network == this) {
        // Already claimed; this also stops the flood fill from revisiting nodes.
        continue;
      } else if (network != null) {
        network.destroyNetwork();
      }
      if (conduit.setNetwork(this)) {
        addConduit(implClass.cast(conduit));
        // Enqueue all neighbors connected by conduits of our base type.
        candidates.addAll(ConduitUtil.getConnectedConduits(world, conduit.getBundle().getEntity().getPos(), getBaseConduitType()));
      }
    }
  }

  /**
   * Returns true if this was already called during the current server tick; also records the
   * current tick as seen. Used to throttle the full conduit-list check to once per tick.
   */
  private boolean isSameTick() {
    long temp = EnderIO.proxy.getServerTickCount();
    if (lastConduitListCheck != temp) {
      lastConduitListCheck = temp;
      return false;
    }
    return true;
  }

  /**
   * Adds a conduit to the network, validating it first. At most once per tick the whole member
   * list is re-validated and stale/duplicate entries are evicted. Invalid new conduits are
   * rejected with a logged message and a stack trace.
   */
  @Override
  public void addConduit(@Nonnull I newConduit) {
    if (conduits.isEmpty()) {
      // First member: start receiving server tick events (removed again in destroyNetwork()).
      ServerTickHandler.addListener(this);
    }
    boolean doFullCheck = !isSameTick();
    BlockPos newPos = null;
    boolean error = false;

    // Step 1: Is the new conduit attached to a TE that is valid?
    final IConduitBundle newBundle = newConduit.getBundle();
    final TileEntity newte = newBundle.getEntity();
    if (!newte.hasWorld()) {
      Log.info("Tried to add invalid (no world) conduit to network: ", newConduit);
      error = true;
    }
    if (newte.isInvalid()) {
      Log.info("Tried to add invalid (invalidated) conduit to network: ", newConduit);
      error = true;
    }
    newPos = newte.getPos();
    final World newworld = newte.getWorld();
    if (!newworld.isBlockLoaded(newPos)) {
      Log.info("Tried to add invalid (unloaded) conduit to network: ", newConduit);
      error = true;
    }
    if (newworld.getTileEntity(newte.getPos()) != newte) {
      Log.info("Tried to add invalid (world disagrees) conduit to network: ", newConduit);
      error = true;
    }
    if (error) {
      new Exception("trace for message above").printStackTrace();
      return;
    }

    // Step 2: Check for duplicates and other errors (short variant)
    if (!doFullCheck) {
      for (I oldConduit : conduits) {
        if (newConduit == oldConduit) {
          // real dupe, ignore it
          return;
        }
        if (oldConduit.getBundle().getEntity().getPos().equals(newPos)) {
          // Something fishy is happening, we need to do the full check
          doFullCheck = true;
          break;
        }
      }
      if (!doFullCheck) {
        conduits.add(newConduit);
        return;
      }
    }

    // Step 2: Check for duplicates and other errors (full variant).
    // The member list is rebuilt from scratch; only conduits that pass every check are kept.
    List<I> old = new ArrayList<I>(conduits);
    conduits.clear();
    boolean newConduitIsBad = false;
    for (I oldConduit : old) {
      // Step 2.1: Fast skip if we have a real dupe
      if (newConduit == oldConduit) {
        continue;
      }
      // Step 2.2: Check if the old conduit's TE is valid
      final IConduitBundle oldBundle = oldConduit.getBundle();
      final TileEntity oldTe = oldBundle.getEntity();
      if (oldTe.isInvalid() || !oldTe.hasWorld()) {
        oldConduit.clearNetwork();
        continue; // bad conduit, skip it
      }
      // Step 2.2b: Check if the target position is loaded
      final World oldWorld = oldBundle.getBundleworld();
      final BlockPos oldPos = oldTe.getPos();
      if (!oldWorld.isBlockLoaded(oldPos)) {
        Log.info("Removed unloaded but valid conduit from network: " + oldConduit);
        oldConduit.clearNetwork();
        continue; // bad conduit, skip it
      }
      // Step 2.3: Check if the old conduit's TE matches what its world has
      if (oldWorld.getTileEntity(oldPos) != oldTe) {
        oldConduit.clearNetwork();
        continue; // bad conduit, skip it
      }
      // Step 2.4: Check if the new conduit is for the same position as the old. This should not happen, as the new conduit should have been gotten from the
      // world and the old conduit already was checked against the world...
      if (newPos.equals(oldPos)) {
        Log.info("Tried to add invalid conduit to network! Old conduit: ", oldConduit, "/", oldBundle, " New conduit: ", newConduit, "/", oldBundle,
            " World says: ", oldWorld.getTileEntity(newPos));
        newConduitIsBad = true;
      }
      // Step 2.5: Old conduit is good and can stay
      conduits.add(oldConduit);
    }
    // Step 3: Add the new conduit
    if (!newConduitIsBad) {
      conduits.add(newConduit);
    }
  }

  /**
   * Detaches every member conduit from this network, empties the member list and stops listening
   * to server ticks.
   */
  @Override
  public void destroyNetwork() {
    for (I con : conduits) {
      con.clearNetwork();
    }
    conduits.clear();
    ServerTickHandler.removeListener(this);
  }

  /**
   * @return the live (mutable) list of member conduits — callers share this instance.
   */
  @Override
  @Nonnull
  public List<I> getConduits() {
    return conduits;
  }

  // Iteration order for neighbor notification: West, East, Down, Up, North, South.
  private static final EnumFacing[] WEDUNS = new EnumFacing[] { EnumFacing.WEST, EnumFacing.EAST, EnumFacing.DOWN, EnumFacing.UP, EnumFacing.NORTH,
      EnumFacing.SOUTH };

  /**
   * Fires block updates for every member conduit that has external connections, notifying each
   * neighboring non-conduit, non-air block at most once. Timing of each world call is recorded in
   * a ConduitNeighborUpdateTracker (created lazily, discarded if unused).
   */
  @Override
  public void sendBlockUpdatesForEntireNetwork() {
    ConduitNeighborUpdateTracker tracker = null;
    // Positions already notified in this pass — prevents double notification.
    Set<BlockPos> notified = new HashSet<BlockPos>();
    for (I con : conduits) {
      TileEntity te = con.getBundle().getEntity();
      if (con.hasExternalConnections()) {
        final BlockPos pos = te.getPos();
        final Block blockType = te.getBlockType();
        final World world = te.getWorld();
        if (world.isBlockLoaded(pos)) {
          IBlockState bs = world.getBlockState(pos);
          if (tracker == null) {
            tracker = new ConduitNeighborUpdateTracker("Conduit network " + this.getClass() + " was interrupted while notifying neighbors of changes");
          }
          tracker.start("World.notifyBlockUpdate() at " + pos);
          world.notifyBlockUpdate(pos, bs, bs, 3);
          tracker.stop();

          // the following is a fancy version of world.notifyNeighborsOfStateChange(pos, blockType);

          // don't notify other conduits and don't notify the same block twice
          EnumSet<EnumFacing> sidesToNotify = EnumSet.noneOf(EnumFacing.class);
          for (EnumFacing side : WEDUNS) {
            if (side != null) {
              final BlockPos offset = pos.offset(side);
              if (con.containsExternalConnection(side) && !notified.contains(offset) && world.isBlockLoaded(offset)) {
                IBlockState blockState = world.getBlockState(offset);
                // Skip other conduit blocks (same block type) and air — neither needs a neighbor update.
                if (blockState.getBlock() != blockType && blockState.getBlock() != Blocks.AIR) {
                  sidesToNotify.add(side);
                  notified.add(offset);
                }
              }
            }
          }

          if (!sidesToNotify.isEmpty()) {
            tracker.start("ForgeEventFactory.onNeighborNotify() at " + pos);
            // TODO Set the 4th parameter to only update the redstone state when the conduit network has a redstone conduit network in it
            boolean canceled = ForgeEventFactory.onNeighborNotify(world, pos, bs, sidesToNotify, false).isCanceled();
            tracker.stop();
            if (!canceled) {
              for (EnumFacing side : sidesToNotify) {
                if (side != null) {
                  final BlockPos offset = pos.offset(side);
                  tracker.start("World.notifyNeighborsOfStateChange() from " + pos + " to " + offset + " (" + world.getBlockState(offset) + ")");
                  world.neighborChanged(offset, blockType, pos);
                  tracker.stop();
                }
              }
            }
          }
        }
      }
    }
    if (tracker != null) {
      tracker.discard();
    }
  }

  /**
   * Debug representation: identity hash plus the location of every member conduit.
   */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    for (IConduit con : conduits) {
      sb.append(con.getBundle().getLocation());
      sb.append(", ");
    }
    return "AbstractConduitNetwork@" + Integer.toHexString(hashCode()) + " [conduits=" + sb.toString() + "]";
  }

  // Server-tick callbacks: no-ops here; subclasses override to do per-tick work.
  @Override
  public void tickStart(ServerTickEvent event, @Nullable Profiler profiler) {
  }

  @Override
  public void tickEnd(ServerTickEvent event, @Nullable Profiler profiler) {
  }

}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.